Skip to content

Commit 19e334c

Browse files
committed
feat: support literals for top-level ARRAY types
1 parent 1a0c24a commit 19e334c

File tree

2 files changed

+8
-50
lines changed

2 files changed

+8
-50
lines changed

spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala

Lines changed: 7 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -123,52 +123,6 @@ object QueryPlanSerde extends Logging with CometExprShim {
123123
false
124124
}
125125

126-
// def convertArrayToProtoLiteral(array: Seq[Any], arrayType: ArrayType): Literal = {
127-
// val elementType = arrayType.elementType
128-
// val listLiteralBuilder = ListLiteral.newBuilder()
129-
//
130-
// elementType match {
131-
// case BooleanType =>
132-
// listLiteralBuilder.addAllBooleanValues(array.map(_.asInstanceOf[Boolean]).asJava)
133-
//
134-
// case ByteType =>
135-
// listLiteralBuilder.addAllByteValues(array.map(_.asInstanceOf[Byte].toInt).asJava)
136-
//
137-
// case ShortType =>
138-
// listLiteralBuilder.addAllShortValues(array.map(_.asInstanceOf[Short].toInt).asJava)
139-
//
140-
// case IntegerType =>
141-
// listLiteralBuilder.addAllIntValues(array.map(_.asInstanceOf[Int]).asJava)
142-
//
143-
// case LongType =>
144-
// listLiteralBuilder.addAllLongValues(array.map(_.asInstanceOf[Long]).asJava)
145-
//
146-
// case FloatType =>
147-
// listLiteralBuilder.addAllFloatValues(array.map(_.asInstanceOf[Float]).asJava)
148-
//
149-
// case DoubleType =>
150-
// listLiteralBuilder.addAllDoubleValues(array.map(_.asInstanceOf[Double]).asJava)
151-
//
152-
// case StringType =>
153-
// listLiteralBuilder.addAllStringValues(array.map(_.asInstanceOf[String]).asJava)
154-
//
155-
// case BinaryType =>
156-
// listLiteralBuilder.addAllBytesValues
157-
// (array.map(x => com.google.protobuf
158-
// .ByteString.copyFrom(x.asInstanceOf[Array[Byte]])).asJava)
159-
//
160-
// case nested: ArrayType =>
161-
// val nestedListLiterals = array.map {
162-
// case null => ListLiteral.newBuilder().build() // or handle nulls appropriately
163-
// case seq: Seq[_] => convertArrayToProtoLiteral(seq, nested).getListVal
164-
// }
165-
// listLiteralBuilder.addAllListValues(nestedListLiterals.asJava)
166-
//
167-
// case _ =>
168-
// throw new UnsupportedOperationException(s"Unsupported element type: $elementType")
169-
// }
170-
// }
171-
172126
/**
173127
* Serializes Spark datatype to protobuf. Note that, a datatype can be serialized by this method
174128
* doesn't mean it is supported by Comet native execution, i.e., `supportedDataType` may return
@@ -866,7 +820,13 @@ object QueryPlanSerde extends Logging with CometExprShim {
866820
binding,
867821
(builder, binaryExpr) => builder.setLtEq(binaryExpr))
868822

869-
case Literal(value, dataType) if supportedDataType(dataType, allowComplex = true) =>
823+
case Literal(value, dataType)
824+
if supportedDataType(
825+
dataType,
826+
allowComplex = value == null || Seq(
827+
CometConf.SCAN_NATIVE_ICEBERG_COMPAT,
828+
CometConf.SCAN_NATIVE_DATAFUSION).contains(
829+
CometConf.COMET_NATIVE_SCAN_IMPL.get())) =>
870830
val exprBuilder = ExprOuterClass.Literal.newBuilder()
871831

872832
if (value == null) {

spark/src/test/scala/org/apache/comet/CometArrayExpressionSuite.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -219,9 +219,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
219219
}
220220

221221
test("array_contains") {
222-
withSQLConf(
223-
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
224-
CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "true") {
222+
withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
225223
withTempDir { dir =>
226224
val path = new Path(dir.toURI.toString, "test.parquet")
227225
makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = false, n = 10000)

0 commit comments

Comments (0)