Skip to content

Commit 3b15ee8

Browse files
committed
[SPARK-52670][SQL] Make HiveResult work with UserDefinedType#stringifyValue
### What changes were proposed in this pull request?
Make HiveResult work with UserDefinedType#stringifyValue.

### Why are the changes needed?
If the `toString` of the underlying class of a UDT is not well-defined, the JDBC/Thrift side might not be able to get a meaningful value.

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
New tests.

### Was this patch authored or co-authored using generative AI tooling?
No.

Closes #51358 from yaooqinn/SPARK-52670.

Authored-by: Kent Yao <yao@apache.org>
Signed-off-by: Kent Yao <yao@apache.org>
1 parent 2b3eae0 commit 3b15ee8

File tree

2 files changed

+18
-4
lines changed

2 files changed

+18
-4
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -149,6 +149,6 @@ object HiveResult extends SQLConfHelper {
149149
startField,
150150
endField)
151151
case (v: VariantVal, VariantType) => v.toString
152-
case (other, _: UserDefinedType[_]) => other.toString
152+
case (other, u: UserDefinedType[_]) => u.stringifyValue(other)
153153
}
154154
}

sql/core/src/test/scala/org/apache/spark/sql/execution/HiveResultSuite.scala

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,15 @@
1717

1818
package org.apache.spark.sql.execution
1919

20-
import java.time.{Duration, Period}
20+
import java.time.{Duration, Period, Year}
2121

22+
import org.apache.spark.sql.YearUDT
2223
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
2324
import org.apache.spark.sql.connector.catalog.InMemoryTableCatalog
2425
import org.apache.spark.sql.execution.HiveResult._
2526
import org.apache.spark.sql.internal.SQLConf
2627
import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT, SharedSparkSession}
27-
import org.apache.spark.sql.types.{YearMonthIntervalType => YM}
28-
import org.apache.spark.sql.types.YearMonthIntervalType
28+
import org.apache.spark.sql.types.{YearMonthIntervalType, YearMonthIntervalType => YM}
2929

3030

3131
class HiveResultSuite extends SharedSparkSession {
@@ -172,4 +172,18 @@ class HiveResultSuite extends SharedSparkSession {
172172
val plan2 = df.selectExpr("array(i)").queryExecution.executedPlan
173173
assert(hiveResultString(plan2) === Seq("[5 00:00:00.010000000]"))
174174
}
175+
176+
test("SPARK-52670: Use stringifyValue to get UDT string representation") {
177+
val year = Year.of(18)
178+
val tpe = new YearUDT()
179+
assert(toHiveString((year, tpe),
180+
nested = false, getTimeFormatters, getBinaryFormatter) === "18")
181+
val tpe2 = new YearUDT() {
182+
override def stringifyValue(obj: Any): String = {
183+
f"${obj.asInstanceOf[Year].getValue}%04d"
184+
}
185+
}
186+
assert(toHiveString((year, tpe2),
187+
nested = false, getTimeFormatters, getBinaryFormatter) === "0018")
188+
}
175189
}

0 commit comments

Comments (0)