
Commit c7c1021

pan3793 authored and LuciferYang committed
[SPARK-52300][SQL][TEST] Fix invalid AnalysisConfOverrideSuite
### What changes were proposed in this pull request?

Test `AnalysisConfOverrideSuite` is invalid, see inline comments for details.

### Why are the changes needed?

Fix invalid tests.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

```
$ build/sbt "sql/testOnly *AnalysisConfOverrideSuite"
...
[info] AnalysisConfOverrideSuite:
[info] - simple plan (34 milliseconds)
[info] - CTE (43 milliseconds)
[info] - Subquery (46 milliseconds)
[info] - View (2 seconds, 395 milliseconds)
[info] - user defined SQL functions (596 milliseconds)
[info] - user defined SQL functions - test conf disabled (449 milliseconds)
[info] Run completed in 6 seconds, 817 milliseconds.
[info] Total number of tests run: 6
[info] Suites: completed 1, aborted 0
[info] Tests: succeeded 6, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
```

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #51612 from pan3793/SPARK-52300-followup.

Authored-by: Cheng Pan <chengpan@apache.org>
Signed-off-by: yangjie01 <yangjie01@baidu.com>
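For context, one plausible reading of the `f` → `f(key, value)` change in the diff below is that the helper received its test body as a `(String, String) => Unit` function but only referenced `f` instead of calling it, so the body was silently discarded and never ran. The sketch below illustrates that pitfall; the helper name and signature are assumptions for illustration, not the suite's actual code.

```scala
// Illustrative sketch only: the helper name and signature are assumptions,
// not the actual AnalysisConfOverrideSuite code.
object BareFunctionReferencePitfall {

  // Assume the test body is supplied as a function of the overridden (key, value) pair.
  private def runWithOverride(f: (String, String) => Unit): Unit = {
    val key = "spark.sql.catalog.x.y"
    val value = "true"
    // Before the fix the body was referenced without arguments:
    //   f            // evaluates the function value and discards it; nothing runs
    // After the fix the body is actually invoked with the overridden conf pair:
    f(key, value)
  }

  def main(args: Array[String]): Unit = {
    runWithOverride { (key, value) =>
      println(s"test body executed with $key=$value")
    }
  }
}
```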
1 parent e31ea9f · commit c7c1021

1 file changed: 20 additions, 26 deletions


sql/core/src/test/scala/org/apache/spark/sql/analysis/AnalysisConfOverrideSuite.scala

Lines changed: 20 additions & 26 deletions
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.analysis
 
 import org.apache.spark.SparkConf
+import org.apache.spark.SparkNoSuchElementException
 import org.apache.spark.sql.SparkSessionExtensions
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule
@@ -27,7 +28,7 @@ class AnalysisConfOverrideSuite extends SharedSparkSession {
 
   override protected def sparkConf: SparkConf = {
     super.sparkConf
-      .set("spark.sql.extensions", "com.databricks.sql.ConfOverrideValidationExtensions")
+      .set("spark.sql.extensions", classOf[ConfOverrideValidationExtensions].getName)
   }
 
   override def beforeAll(): Unit = {
@@ -47,7 +48,7 @@ class AnalysisConfOverrideSuite extends SharedSparkSession {
     val key = "spark.sql.catalog.x.y"
     val value = "true"
     withSQLConf(key -> value) {
-      f
+      f(key, value)
     }
   }
 }
@@ -107,18 +108,15 @@ class AnalysisConfOverrideSuite extends SharedSparkSession {
           |""".stripMargin
       )
       spark.sql(
-        """CREATE OR REPLACE FUNCTION f3(in bigint) RETURNS (out bigint)
+        """CREATE OR REPLACE FUNCTION f3(in bigint) RETURNS bigint
           |RETURN in + 1
           |""".stripMargin
       )
-      ValidateConfOverrideRule.withConfValidationEnabled(key, value) {
-        spark.sql("SELECT * FROM f1()")
-      }
-      ValidateConfOverrideRule.withConfValidationEnabled(key, value) {
-        spark.sql("SELECT * FROM f2()")
-      }
-      ValidateConfOverrideRule.withConfValidationEnabled(key, value) {
-        spark.sql("SELECT f3(1)")
+
+      ("SELECT * FROM f1()" :: "SELECT * FROM f2()" :: "SELECT f3(1)" :: Nil).foreach { query =>
+        ValidateConfOverrideRule.withConfValidationEnabled(key, value) {
+          spark.sql(query)
+        }
       }
     }
   }
@@ -142,24 +140,20 @@ class AnalysisConfOverrideSuite extends SharedSparkSession {
           |""".stripMargin
       )
       spark.sql(
-        """CREATE OR REPLACE FUNCTION f3(in bigint) RETURNS (out bigint)
+        """CREATE OR REPLACE FUNCTION f3(in bigint) RETURNS bigint
          |RETURN in + 1
          |""".stripMargin
      )
-      intercept[AssertionError] {
-        ValidateConfOverrideRule.withConfValidationEnabled(key, value) {
-          spark.sql("SELECT * FROM f1()")
-        }
-      }
-      intercept[AssertionError] {
-        ValidateConfOverrideRule.withConfValidationEnabled(key, value) {
-          spark.sql("SELECT * FROM f2()")
-        }
-      }
-      intercept[AssertionError] {
-        ValidateConfOverrideRule.withConfValidationEnabled(key, value) {
-          spark.sql("SELECT f3(1)")
-        }
+
+      ("SELECT * FROM f1()" :: "SELECT * FROM f2()" :: "SELECT f3(1)" :: Nil).foreach { query =>
+        checkError(
+          exception = intercept[SparkNoSuchElementException] {
+            ValidateConfOverrideRule.withConfValidationEnabled(key, value) {
+              spark.sql(query)
+            }
+          },
+          condition = "SQL_CONF_NOT_FOUND",
+          parameters = Map("sqlConf" -> "\"spark.sql.catalog.x.y\""))
       }
     }
   }
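As background on the `SQL_CONF_NOT_FOUND` condition asserted above: reading a session conf key that was never set raises `SparkNoSuchElementException` carrying that error condition. Below is a minimal sketch of that behavior outside the test harness; the local-master session setup is illustrative, not part of the suite.

```scala
// Minimal sketch: how SQL_CONF_NOT_FOUND typically surfaces when an unset
// SQL conf key is read. The local session setup is illustrative only.
import org.apache.spark.SparkNoSuchElementException
import org.apache.spark.sql.SparkSession

object SqlConfNotFoundSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("sql-conf-not-found-sketch")
      .getOrCreate()
    try {
      // "spark.sql.catalog.x.y" was never set in this session, so the read fails.
      spark.conf.get("spark.sql.catalog.x.y")
    } catch {
      case e: SparkNoSuchElementException =>
        // The exception message names the SQL_CONF_NOT_FOUND condition and the
        // missing key, mirroring the checkError expectation in the diff above.
        println(e.getMessage)
    } finally {
      spark.stop()
    }
  }
}
```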
