Skip to content

Commit 5ea1a9c

Browse files
committed
feat: support literals for top-level ARRAY types
1 parent 6445a92 commit 5ea1a9c

File tree

2 files changed

+8
-50
lines changed

2 files changed

+8
-50
lines changed

spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala

Lines changed: 7 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -146,52 +146,6 @@ object QueryPlanSerde extends Logging with CometExprShim {
146146
false
147147
}
148148

149-
// def convertArrayToProtoLiteral(array: Seq[Any], arrayType: ArrayType): Literal = {
150-
// val elementType = arrayType.elementType
151-
// val listLiteralBuilder = ListLiteral.newBuilder()
152-
//
153-
// elementType match {
154-
// case BooleanType =>
155-
// listLiteralBuilder.addAllBooleanValues(array.map(_.asInstanceOf[Boolean]).asJava)
156-
//
157-
// case ByteType =>
158-
// listLiteralBuilder.addAllByteValues(array.map(_.asInstanceOf[Byte].toInt).asJava)
159-
//
160-
// case ShortType =>
161-
// listLiteralBuilder.addAllShortValues(array.map(_.asInstanceOf[Short].toInt).asJava)
162-
//
163-
// case IntegerType =>
164-
// listLiteralBuilder.addAllIntValues(array.map(_.asInstanceOf[Int]).asJava)
165-
//
166-
// case LongType =>
167-
// listLiteralBuilder.addAllLongValues(array.map(_.asInstanceOf[Long]).asJava)
168-
//
169-
// case FloatType =>
170-
// listLiteralBuilder.addAllFloatValues(array.map(_.asInstanceOf[Float]).asJava)
171-
//
172-
// case DoubleType =>
173-
// listLiteralBuilder.addAllDoubleValues(array.map(_.asInstanceOf[Double]).asJava)
174-
//
175-
// case StringType =>
176-
// listLiteralBuilder.addAllStringValues(array.map(_.asInstanceOf[String]).asJava)
177-
//
178-
// case BinaryType =>
179-
// listLiteralBuilder.addAllBytesValues
180-
// (array.map(x => com.google.protobuf
181-
// .ByteString.copyFrom(x.asInstanceOf[Array[Byte]])).asJava)
182-
//
183-
// case nested: ArrayType =>
184-
// val nestedListLiterals = array.map {
185-
// case null => ListLiteral.newBuilder().build() // or handle nulls appropriately
186-
// case seq: Seq[_] => convertArrayToProtoLiteral(seq, nested).getListVal
187-
// }
188-
// listLiteralBuilder.addAllListValues(nestedListLiterals.asJava)
189-
//
190-
// case _ =>
191-
// throw new UnsupportedOperationException(s"Unsupported element type: $elementType")
192-
// }
193-
// }
194-
195149
/**
196150
* Serializes Spark datatype to protobuf. Note that, a datatype can be serialized by this method
197151
* doesn't mean it is supported by Comet native execution, i.e., `supportedDataType` may return
@@ -889,7 +843,13 @@ object QueryPlanSerde extends Logging with CometExprShim {
889843
binding,
890844
(builder, binaryExpr) => builder.setLtEq(binaryExpr))
891845

892-
case Literal(value, dataType) if supportedDataType(dataType, allowComplex = true) =>
846+
case Literal(value, dataType)
847+
if supportedDataType(
848+
dataType,
849+
allowComplex = value == null || Seq(
850+
CometConf.SCAN_NATIVE_ICEBERG_COMPAT,
851+
CometConf.SCAN_NATIVE_DATAFUSION).contains(
852+
CometConf.COMET_NATIVE_SCAN_IMPL.get())) =>
893853
val exprBuilder = ExprOuterClass.Literal.newBuilder()
894854

895855
if (value == null) {

spark/src/test/scala/org/apache/comet/CometArrayExpressionSuite.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -219,9 +219,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
219219
}
220220

221221
test("array_contains") {
222-
withSQLConf(
223-
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
224-
CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "true") {
222+
withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
225223
withTempDir { dir =>
226224
val path = new Path(dir.toURI.toString, "test.parquet")
227225
makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = false, n = 10000)

0 commit comments

Comments (0)