2 changes: 1 addition & 1 deletion .github/workflows/master.yml
@@ -77,7 +77,7 @@ jobs:
comment: 'verify-on-spark-3.4-binary'
- java: 17
spark: '3.5'
-spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-4.0.0-preview1 -Dspark.archive.name=spark-4.0.0-preview1-bin-hadoop3.tgz'
+spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://dist.apache.org/repos/dist/dev/spark/v4.0.0-preview2-rc1-bin -Dspark.archive.name=spark-4.0.0-preview2-bin-hadoop3.tgz'
exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
comment: 'verify-on-spark-4.0-binary'
env:
@@ -102,7 +102,7 @@ object SparkCatalogUtils extends Logging {
private def getGlobalTempViewManager(
spark: SparkSession,
schemaPattern: String): Seq[String] = {
-val database = spark.sharedState.globalTempViewManager.database
+val database = spark.conf.get("spark.sql.globalTempDatabase")
Member Author: see SPARK-48559

Option(database).filter(_.matches(schemaPattern)).toSeq
}
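
A minimal standalone sketch of the helper after this change, assuming only a plain `SparkSession` named `spark` and a regex `schemaPattern` as in the signature above: the global temp database name is now read from the `spark.sql.globalTempDatabase` conf entry instead of going through `sharedState.globalTempViewManager` (see SPARK-48559).

```scala
import org.apache.spark.sql.SparkSession

// Sketch of getGlobalTempViewManager after the change above: read the global
// temp database name from the SQL conf and keep it only if it matches the
// requested schema pattern.
def getGlobalTempViewManager(spark: SparkSession, schemaPattern: String): Seq[String] = {
  val database = spark.conf.get("spark.sql.globalTempDatabase")
  Option(database).filter(_.matches(schemaPattern)).toSeq
}
```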

21 changes: 20 additions & 1 deletion pom.xml
@@ -1215,6 +1215,12 @@
<groupId>org.apache.paimon</groupId>
<artifactId>${paimon.artifact}</artifactId>
<version>${paimon.version}</version>
+<exclusions>
+<exclusion>
+<groupId>*</groupId>
+<artifactId>*</artifactId>
+</exclusion>
+</exclusions>
</dependency>
</dependencies>
</dependencyManagement>
@@ -1251,6 +1257,18 @@
<name>Maven Repository</name>
<url>https://repo.maven.apache.org/maven2</url>
</repository>

+<repository>
+<releases>
+<enabled>true</enabled>
+</releases>
+<snapshots>
+<enabled>false</enabled>
+</snapshots>
+<id>staging</id>
+<name>Staging Repo</name>
+<url>https://repository.apache.org/content/repositories/orgapachespark-1468/</url>
+</repository>
</repositories>

<pluginRepositories>
@@ -2010,7 +2028,7 @@
<module>extensions/spark/kyuubi-spark-connector-hive</module>
</modules>
<properties>
-<spark.version>4.0.0-preview1</spark.version>
+<spark.version>4.0.0-preview2</spark.version>
<spark.binary.version>4.0</spark.binary.version>
<antlr4.version>4.13.1</antlr4.version>
<delta.version>4.0.0rc1</delta.version>
@@ -2022,6 +2040,7 @@
<!-- TODO: update once Paimon support Spark 4.0 -->
<paimon.artifact>paimon-spark-3.5</paimon.artifact>
<maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
+<spark.archive.mirror>https://dist.apache.org/repos/dist/dev/spark/v4.0.0-preview2-rc1-bin</spark.archive.mirror>
<spark.archive.name>spark-${spark.version}-bin-hadoop3.tgz</spark.archive.name>
</properties>
</profile>
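
For reference, the `spark.archive.mirror` property added above pairs with the existing `spark.archive.name` to locate the Spark 4.0.0-preview2 RC1 binary used for verification. A minimal sketch, assuming the two values are simply joined with a slash to form the download URL:

```scala
// Sketch: how spark.archive.mirror and spark.archive.name compose for the
// spark-4.0 profile defined above.
val sparkVersion  = "4.0.0-preview2"
val archiveMirror = "https://dist.apache.org/repos/dist/dev/spark/v4.0.0-preview2-rc1-bin"
val archiveName   = s"spark-$sparkVersion-bin-hadoop3.tgz" // spark-4.0.0-preview2-bin-hadoop3.tgz
val archiveUrl    = s"$archiveMirror/$archiveName"
// https://dist.apache.org/repos/dist/dev/spark/v4.0.0-preview2-rc1-bin/spark-4.0.0-preview2-bin-hadoop3.tgz
```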