Skip to content

Commit d370010

Browse files
committed
feat: support literal for ARRAY top level
1 parent 625f49c commit d370010

File tree

2 files changed

+8
-50
lines changed

2 files changed

+8
-50
lines changed

spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala

Lines changed: 7 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -121,52 +121,6 @@ object QueryPlanSerde extends Logging with CometExprShim {
121121
false
122122
}
123123

124-
// def convertArrayToProtoLiteral(array: Seq[Any], arrayType: ArrayType): Literal = {
125-
// val elementType = arrayType.elementType
126-
// val listLiteralBuilder = ListLiteral.newBuilder()
127-
//
128-
// elementType match {
129-
// case BooleanType =>
130-
// listLiteralBuilder.addAllBooleanValues(array.map(_.asInstanceOf[Boolean]).asJava)
131-
//
132-
// case ByteType =>
133-
// listLiteralBuilder.addAllByteValues(array.map(_.asInstanceOf[Byte].toInt).asJava)
134-
//
135-
// case ShortType =>
136-
// listLiteralBuilder.addAllShortValues(array.map(_.asInstanceOf[Short].toInt).asJava)
137-
//
138-
// case IntegerType =>
139-
// listLiteralBuilder.addAllIntValues(array.map(_.asInstanceOf[Int]).asJava)
140-
//
141-
// case LongType =>
142-
// listLiteralBuilder.addAllLongValues(array.map(_.asInstanceOf[Long]).asJava)
143-
//
144-
// case FloatType =>
145-
// listLiteralBuilder.addAllFloatValues(array.map(_.asInstanceOf[Float]).asJava)
146-
//
147-
// case DoubleType =>
148-
// listLiteralBuilder.addAllDoubleValues(array.map(_.asInstanceOf[Double]).asJava)
149-
//
150-
// case StringType =>
151-
// listLiteralBuilder.addAllStringValues(array.map(_.asInstanceOf[String]).asJava)
152-
//
153-
// case BinaryType =>
154-
// listLiteralBuilder.addAllBytesValues
155-
// (array.map(x => com.google.protobuf
156-
// .ByteString.copyFrom(x.asInstanceOf[Array[Byte]])).asJava)
157-
//
158-
// case nested: ArrayType =>
159-
// val nestedListLiterals = array.map {
160-
// case null => ListLiteral.newBuilder().build() // or handle nulls appropriately
161-
// case seq: Seq[_] => convertArrayToProtoLiteral(seq, nested).getListVal
162-
// }
163-
// listLiteralBuilder.addAllListValues(nestedListLiterals.asJava)
164-
//
165-
// case _ =>
166-
// throw new UnsupportedOperationException(s"Unsupported element type: $elementType")
167-
// }
168-
// }
169-
170124
/**
171125
* Serializes Spark datatype to protobuf. Note that a datatype being serializable by this method
172126
* doesn't mean it is supported by Comet native execution, i.e., `supportedDataType` may return
@@ -864,7 +818,13 @@ object QueryPlanSerde extends Logging with CometExprShim {
864818
binding,
865819
(builder, binaryExpr) => builder.setLtEq(binaryExpr))
866820

867-
case Literal(value, dataType) if supportedDataType(dataType, allowComplex = true) =>
821+
case Literal(value, dataType)
822+
if supportedDataType(
823+
dataType,
824+
allowComplex = value == null || Seq(
825+
CometConf.SCAN_NATIVE_ICEBERG_COMPAT,
826+
CometConf.SCAN_NATIVE_DATAFUSION).contains(
827+
CometConf.COMET_NATIVE_SCAN_IMPL.get())) =>
868828
val exprBuilder = ExprOuterClass.Literal.newBuilder()
869829

870830
if (value == null) {

spark/src/test/scala/org/apache/comet/CometArrayExpressionSuite.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -219,9 +219,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
219219
}
220220

221221
test("array_contains") {
222-
withSQLConf(
223-
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
224-
CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "true") {
222+
withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
225223
withTempDir { dir =>
226224
val path = new Path(dir.toURI.toString, "test.parquet")
227225
makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = false, n = 10000)

0 commit comments

Comments (0)