2 changes: 1 addition & 1 deletion .github/workflows/master.yml
@@ -77,7 +77,7 @@ jobs:
comment: 'verify-on-spark-3.4-binary'
- java: 17
spark: '3.5'
-          spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-4.0.0-preview1 -Dspark.archive.name=spark-4.0.0-preview1-bin-hadoop3.tgz'
+          spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://dist.apache.org/repos/dist/dev/spark/v4.0.0-preview2-rc1-bin -Dspark.archive.name=spark-4.0.0-preview2-bin-hadoop3.tgz'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-4.0-binary'
env:
@@ -63,7 +63,8 @@ trait DropNamespaceSuiteBase extends DDLCommandTestUtils {
sql(s"DROP NAMESPACE $catalogName.unknown")
}.getMessage
assert(message.contains(s"'unknown' not found") ||
message.contains(s"The schema `unknown` cannot be found"))
message.contains(s"The schema `unknown` cannot be found") ||
message.contains("SCHEMA_NOT_FOUND"))
}

test("drop non-empty namespace with a non-cascading mode") {
@@ -263,7 +263,7 @@ object SparkSQLEngine extends Logging {
// "Cannot mutate ReadOnlySQLConf" exception when task calling HiveResult.getBinaryFormatter.
// Here we follow the HiveResult.getBinaryFormatter behavior to set it to UTF8 if configuration
// is absent to reserve the legacy behavior for compatibility.
-    _sparkConf.setIfMissing("spark.sql.binaryOutputStyle", "UTF8")
+    _sparkConf.setIfMissing("spark.sql.binaryOutputStyle", "UTF-8")
_sparkConf.setIfMissing("spark.master", "local")
_sparkConf.set(
"spark.redaction.regex",
@@ -102,7 +102,7 @@ object SparkCatalogUtils extends Logging {
private def getGlobalTempViewManager(
spark: SparkSession,
schemaPattern: String): Seq[String] = {
-    val database = spark.sharedState.globalTempViewManager.database
+    val database = spark.conf.get("spark.sql.globalTempDatabase")
(Author comment: see SPARK-48559)

Option(database).filter(_.matches(schemaPattern)).toSeq
}
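Review note: reading the database name from the session conf avoids reaching into sharedState.globalTempViewManager (see SPARK-48559). A minimal sketch of the conf-based lookup under a local session; the pattern and app name are illustrative:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("global-temp-db-demo")
  .master("local[1]")
  .getOrCreate()

// "global_temp" is Spark's default global temp database name.
val database = spark.conf.get("spark.sql.globalTempDatabase")

// Same filtering as getGlobalTempViewManager above, with a sample pattern.
val matched = Option(database).filter(_.matches("global.*")).toSeq
assert(matched == Seq("global_temp"))

spark.stop()
```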

@@ -514,7 +514,6 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
assert(status.getStatusCode === TStatusCode.ERROR_STATUS)
if (SPARK_ENGINE_RUNTIME_VERSION >= "3.4") {
assert(errorMessage.contains("[SCHEMA_NOT_FOUND]"))
assert(errorMessage.contains(s"The schema `$dbName` cannot be found."))
} else {
assert(errorMessage.contains(s"Database '$dbName' not found"))
}
56 changes: 36 additions & 20 deletions pom.xml
@@ -458,15 +458,6 @@
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<exclusions>
<!-- Use log4j2 -->
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<!-- SPARK-40511 upgrade SLF4J2, which is not compatible w/ SLF4J1 -->
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
@@ -485,6 +476,13 @@
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<exclusions>
<!-- SPARK-40511 upgrade SLF4J2, which is not compatible w/ SLF4J1 -->
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j2-impl</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
@@ -499,15 +497,6 @@
<version>${spark.version}</version>
<type>test-jar</type>
<exclusions>
<!-- Use log4j2 -->
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<!-- SPARK-40511 upgrade SLF4J2, which is not compatible w/ SLF4J1 -->
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
@@ -528,6 +517,13 @@
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${spark.version}</version>
<type>test-jar</type>
<exclusions>
<!-- SPARK-40511 upgrade SLF4J2, which is not compatible w/ SLF4J1 -->
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j2-impl</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
@@ -1215,6 +1211,12 @@
<groupId>org.apache.paimon</groupId>
<artifactId>${paimon.artifact}</artifactId>
<version>${paimon.version}</version>
<exclusions>
<exclusion>
<groupId>*</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</dependencyManagement>
@@ -1251,6 +1253,18 @@
<name>Maven Repository</name>
<url>https://repo.maven.apache.org/maven2</url>
</repository>

<repository>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>staging</id>
<name>Staging Repo</name>
<url>https://repository.apache.org/content/repositories/orgapachespark-1468/</url>
</repository>
</repositories>

<pluginRepositories>
@@ -2010,9 +2024,10 @@
<module>extensions/spark/kyuubi-spark-connector-hive</module>
</modules>
<properties>
<spark.version>4.0.0-preview1</spark.version>
<spark.version>4.0.0-preview2</spark.version>
<spark.binary.version>4.0</spark.binary.version>
<antlr4.version>4.13.1</antlr4.version>
<!-- TODO: update once Delta support Spark 4.0.0-preview2 -->
<delta.version>4.0.0rc1</delta.version>
<delta.artifact>delta-spark_${scala.binary.version}</delta.artifact>
<!-- TODO: update once Hudi support Spark 4.0 -->
@@ -2021,7 +2036,8 @@
<iceberg.artifact>iceberg-spark-runtime-3.5_${scala.binary.version}</iceberg.artifact>
<!-- TODO: update once Paimon support Spark 4.0 -->
<paimon.artifact>paimon-spark-3.5</paimon.artifact>
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
<spark.archive.mirror>https://dist.apache.org/repos/dist/dev/spark/v4.0.0-preview2-rc1-bin</spark.archive.mirror>
<spark.archive.name>spark-${spark.version}-bin-hadoop3.tgz</spark.archive.name>
</properties>
</profile>