@@ -889,52 +889,31 @@ object SparkConnectClient {
889
889
object SparkDeclarativePipelines {
  import BuildCommons.protoVersion

  // Build settings for the Spark Declarative Pipelines module. The module compiles
  // protobuf definitions with the sbt-protoc plugin instead of relying on the
  // imported Maven build's resolution.
  lazy val settings = Seq(
    // Setting version for the protobuf compiler. This has to be propagated to every sub-project
    // even if the project is not using it.
    PB.protocVersion := BuildCommons.protoVersion,
    // For some reason the resolution from the imported Maven build does not work for some
    // of these dependencies that we need to shade later on.
    libraryDependencies ++= {
      Seq(
        "com.google.protobuf" % "protobuf-java" % protoVersion % "protobuf"
      )
    },
    // Generate Java sources from the .proto files into the managed-sources directory.
    (Compile / PB.targets) := Seq(
      PB.gens.java -> (Compile / sourceManaged).value
    )
  ) ++ {
    // Allow overriding the protoc binary (e.g. on platforms where the downloaded
    // one does not run) via -Dspark.protoc.executable.path=/path/to/protoc.
    val sparkProtocExecPath = sys.props.get("spark.protoc.executable.path")
    if (sparkProtocExecPath.isDefined) {
      Seq(
        PB.protocExecutable := file(sparkProtocExecPath.get)
      )
    } else {
      Seq.empty
    }
  }
}
939
918
940
919
object SparkProtobuf {
0 commit comments