Skip to content

Commit cf4209d

Browse files
authored
Merge pull request #12 from nubank/sync-1.25.0
Sync 1.25.0
2 parents 78946e4 + c6e2a21 commit cf4209d

File tree

323 files changed

+33989
-22010
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

323 files changed

+33989
-22010
lines changed

.circleci/config.yml

Lines changed: 11 additions & 20 deletions
Original file line number | Diff line number | Diff line change
@@ -6,6 +6,7 @@ setup: true
66
# the path of an updated fileset
77
orbs:
88
continuation: circleci/continuation@1.0.0
9+
github-cli: circleci/github-cli@2.4.0
910

1011
# optional parameter when triggering to
1112
# only run a particular type of integration
@@ -51,6 +52,7 @@ jobs:
5152
- image: cimg/python:3.8
5253
steps:
5354
- checkout
55+
- github-cli/setup
5456
- run:
5557
name: Install yq
5658
command: |
@@ -150,26 +152,14 @@ jobs:
150152
name: Remove approval steps if not pull from forks.
151153
command: |
152154
pip install pyyaml==6.0.1
153-
python -c "import yaml
154-
d = yaml.safe_load(open('complete_config.yml'))
155-
for workflow_name, workflow_definition in d['workflows'].items():
156-
jobs = workflow_definition.get('jobs') if isinstance(workflow_definition, dict) else None
157-
if not jobs: continue
158-
159-
# find all approvals
160-
approvals = list(filter(lambda x: isinstance(x, dict) and list(x.values())[0].get('type') == 'approval', jobs))
161-
for approval in approvals:
162-
approval_name = next(iter(approval))
163-
approval_upstreams = approval[approval_name].get('requires')
164-
approval_downstream = list(filter(lambda x: isinstance(x, dict) and approval_name in list(x.values())[0].get('requires', ''), jobs))
165-
# replace approval with its upstream jobs
166-
for job in approval_downstream:
167-
requires = next(iter(job.values()))['requires']
168-
requires.remove(approval_name)
169-
requires.extend(approval_upstreams)
170-
jobs.remove(approval)
171-
with open('complete_config.yml', 'w') as f:
172-
f.write(yaml.dump(d, sort_keys=False))"
155+
python dev/filter_approvals.py
156+
- run: |
157+
export IS_FULL_TESTS=$(gh pr view --json labels | jq 'any(.labels[]; .name == "full-tests")')
158+
echo $IS_FULL_TESTS
159+
if [ -z "$IS_FULL_TESTS" ] || [ "$IS_FULL_TESTS" == "0" ]; then
160+
pip install pyyaml==6.0.1
161+
python dev/filter_matrix.py
162+
fi
173163
- when:
174164
condition:
175165
or:
@@ -194,6 +184,7 @@ workflows:
194184
schedule_workflow:
195185
jobs:
196186
- determine_changed_modules:
187+
context: pr
197188
filters:
198189
tags:
199190
only: /^[0-9]+(\.[0-9]+){2}(-rc\.[0-9]+)?$/

.circleci/continue_config.yml

Lines changed: 59 additions & 60 deletions
Original file line number | Diff line number | Diff line change
@@ -75,7 +75,32 @@ commands:
7575
echo "Setting default Java to ${JAVA_BIN}"
7676
sudo update-alternatives --set java ${JAVA_BIN}
7777
sudo update-alternatives --set javac ${JAVAC_BIN}
78-
78+
set_java_spark_scala_version:
79+
parameters:
80+
env-variant:
81+
type: string
82+
description: "Set Java, Spark and Scala versions"
83+
steps:
84+
- run: |
85+
set -eux
86+
JAVA=$(echo << parameters.env-variant >> | cut -d '-' -f 1 | cut -d ':' -f 2)
87+
SPARK=$(echo << parameters.env-variant >> | cut -d '-' -f 2 | cut -d ':' -f 2)
88+
SCALA=$(echo << parameters.env-variant >> | cut -d '-' -f 3 | cut -d ':' -f 2)
89+
echo spark=$SPARK java=$JAVA scala=$SCALA
90+
JAVA8_HOME='/usr/lib/jvm/java-8-openjdk-amd64'
91+
JAVA17_HOME='/usr/lib/jvm/java-17-openjdk-amd64'
92+
JAVA_BIN=$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/java" || echo "$JAVA8_HOME/jre/bin/java")
93+
JAVAC_BIN=$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/javac" || echo "$JAVA8_HOME/bin/javac")
94+
95+
echo 'export JAVA17_HOME="/usr/lib/jvm/java-17-openjdk-amd64"' >> "$BASH_ENV"
96+
echo "export SPARK=\"${SPARK}\"" >> "$BASH_ENV"
97+
echo "export JAVA=\"${JAVA}\"" >> "$BASH_ENV"
98+
echo "export JAVA_BIN=\"${JAVA_BIN}\"" >> "$BASH_ENV"
99+
echo "export JAVAC_BIN=\"${JAVAC_BIN}\"" >> "$BASH_ENV"
100+
echo "export SCALA=\"${SCALA}\"" >> "$BASH_ENV"
101+
echo "Setting default Java to ${JAVA_BIN}"
102+
sudo update-alternatives --set java ${JAVA_BIN}
103+
sudo update-alternatives --set javac ${JAVAC_BIN}
79104
store_submodule_tests:
80105
parameters:
81106
submodule:
@@ -462,7 +487,7 @@ jobs:
462487
- store_test_results:
463488
path: build/test-results/test
464489
- store_test_results:
465-
path: transports-dataplex/build/test-results/test
490+
path: transports-gcplineage/build/test-results/test
466491
- store_test_results:
467492
path: transports-gcs/build/test-results/test
468493
- store_test_results:
@@ -471,7 +496,7 @@ jobs:
471496
path: build/reports/tests/test
472497
destination: test-report
473498
- store_artifacts:
474-
path: transports-dataplex/build/reports/tests/test
499+
path: transports-gcplineage/build/reports/tests/test
475500
destination: test-report
476501
- store_artifacts:
477502
path: transports-gcs/build/reports/tests/test
@@ -486,7 +511,7 @@ jobs:
486511
path: build/libs
487512
destination: libs
488513
- store_artifacts:
489-
path: transports-dataplex/build/libs
514+
path: transports-gcplineage/build/libs
490515
destination: libs
491516
- store_artifacts:
492517
path: transports-gcs/build/libs
@@ -517,8 +542,8 @@ jobs:
517542
path: ./build/libs
518543
destination: java-client-artifacts
519544
- store_artifacts:
520-
path: ./transports-dataplex/build/libs
521-
destination: transports-dataplex-artifacts
545+
path: ./transports-gcplineage/build/libs
546+
destination: transports-gcplineage-artifacts
522547
- store_artifacts:
523548
path: ./transports-gcs/build/libs
524549
destination: transports-gcs-artifacts
@@ -532,8 +557,8 @@ jobs:
532557

533558
release-integration-spark:
534559
working_directory: ~/openlineage/integration/spark
535-
docker:
536-
- image: cimg/openjdk:17.0
560+
machine:
561+
image: ubuntu-2404:current
537562
steps:
538563
- *checkout_project_root
539564
- run:
@@ -544,15 +569,16 @@ jobs:
544569
- v1-release-client-java-{{ checksum "/tmp/checksum.txt" }}
545570
- attach_workspace:
546571
at: ~/
572+
- set_java_version
547573
- run: |
548574
# Get, then decode the GPG private key used to sign *.jar
549575
export ORG_GRADLE_PROJECT_signingKey=$(echo $GPG_SIGNING_KEY | base64 -d)
550576
export RELEASE_PASSWORD=$(echo $OSSRH_TOKEN_PASSWORD)
551577
export RELEASE_USERNAME=$(echo $OSSRH_TOKEN_USERNAME)
552578
553579
# Publish *.jar
554-
./gradlew --no-daemon --console=plain clean publishToSonatype closeAndReleaseSonatypeStagingRepository --info -Pscala.binary.version=2.12 -Pjava.compile.home=/usr/local/jdk-17.0.11
555-
./gradlew --no-daemon --console=plain clean publishToSonatype closeAndReleaseSonatypeStagingRepository --info -Pscala.binary.version=2.13 -Pjava.compile.home=/usr/local/jdk-17.0.11
580+
./gradlew --no-daemon --console=plain clean publishToSonatype closeAndReleaseSonatypeStagingRepository --info -Pscala.binary.version=2.12 -Pjava.compile.home=${JAVA17_HOME}
581+
./gradlew --no-daemon --console=plain clean publishToSonatype closeAndReleaseSonatypeStagingRepository --info -Pscala.binary.version=2.13 -Pjava.compile.home=${JAVA17_HOME}
556582
- store_artifacts:
557583
path: ./build/libs
558584
destination: spark-client-artifacts
@@ -620,24 +646,8 @@ jobs:
620646
command: ./../../.circleci/checksum.sh /tmp/checksum.txt $CIRCLE_BRANCH
621647
- attach_workspace:
622648
at: ~/
623-
- run:
624-
name: Spark & Java version Variable
625-
command: |
626-
JAVA=$(echo << parameters.env-variant >> | cut -d '-' -f 1 | cut -d ':' -f 2)
627-
SPARK=$(echo << parameters.env-variant >> | cut -d '-' -f 2 | cut -d ':' -f 2)
628-
SCALA=$(echo << parameters.env-variant >> | cut -d '-' -f 3 | cut -d ':' -f 2)
629-
echo spark=$SPARK java=$JAVA scala=$SCALA
630-
JAVA8_HOME='/usr/lib/jvm/java-8-openjdk-amd64'
631-
JAVA17_HOME='/usr/lib/jvm/java-17-openjdk-amd64'
632-
633-
echo 'export JAVA17_HOME=/usr/lib/jvm/java-17-openjdk-amd64' >> "$BASH_ENV"
634-
echo 'export SPARK='${SPARK} >> "$BASH_ENV"
635-
echo 'export JAVA_BIN='$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/java" || echo "$JAVA8_HOME/jre/bin/java") >> "$BASH_ENV"
636-
echo 'export JAVAC_BIN='$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/javac" || echo "$JAVA8_HOME/bin/javac") >> "$BASH_ENV"
637-
echo 'export SCALA='${SCALA} >> "$BASH_ENV"
638-
echo "${JAVA}"
639-
echo "${JAVA_BIN}"
640-
echo "${JAVAC_BIN}"
649+
- set_java_spark_scala_version:
650+
env-variant: << parameters.env-variant >>
641651
- restore_cache:
642652
keys:
643653
- v1-integration-spark-{{ checksum "/tmp/checksum.txt" }}
@@ -689,33 +699,15 @@ jobs:
689699
- run:
690700
name: Generate cache key
691701
command: ./../../.circleci/checksum.sh /tmp/checksum.txt $CIRCLE_BRANCH
692-
- run:
693-
name: Spark & Java version Variable
694-
command: |
695-
JAVA=$(echo << parameters.env-variant >> | cut -d '-' -f 1 | cut -d ':' -f 2)
696-
SPARK=$(echo << parameters.env-variant >> | cut -d '-' -f 2 | cut -d ':' -f 2)
697-
SCALA=$(echo << parameters.env-variant >> | cut -d '-' -f 3 | cut -d ':' -f 2)
698-
echo spark=$SPARK java=$JAVA scala=$SCALA
699-
JAVA8_HOME='/usr/lib/jvm/java-8-openjdk-amd64'
700-
JAVA17_HOME='/usr/lib/jvm/java-17-openjdk-amd64'
701-
702-
echo 'export JAVA17_HOME=/usr/lib/jvm/java-17-openjdk-amd64' >> "$BASH_ENV"
703-
echo 'export SPARK_VERSION_VAR='${SPARK} >> "$BASH_ENV"
704-
echo 'export SCALA='${SCALA} >> "$BASH_ENV"
705-
echo 'export JAVA_BIN='$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/java" || echo "$JAVA8_HOME/jre/bin/java") >> "$BASH_ENV"
706-
echo 'export JAVAC_BIN='$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/javac" || echo "$JAVA8_HOME/bin/javac") >> "$BASH_ENV"
707-
echo $JAVA_BIN
702+
- set_java_spark_scala_version:
703+
env-variant: << parameters.env-variant >>
708704
- run: mkdir -p app/build/gcloud && echo $GCLOUD_SERVICE_KEY > app/build/gcloud/gcloud-service-key.json && chmod 644 app/build/gcloud/gcloud-service-key.json
709705
- restore_cache:
710706
keys:
711707
- v1-integration-spark-{{ checksum "/tmp/checksum.txt" }}
712708
- attach_workspace:
713709
at: ~/
714-
- run: |
715-
echo "Setting default Java to ${JAVA_BIN}"
716-
sudo update-alternatives --set java ${JAVA_BIN}
717-
sudo update-alternatives --set javac ${JAVAC_BIN}
718-
- run: ./gradlew --no-daemon --console=plain integrationTest -x test -Pspark.version=${SPARK_VERSION_VAR} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME}
710+
- run: ./gradlew --no-daemon --console=plain integrationTest -x test -Pspark.version=${SPARK} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME}
719711
- run: ./gradlew --no-daemon --console=plain jacocoTestReport -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME}
720712
- store_test_results:
721713
path: app/build/test-results/integrationTest
@@ -846,7 +838,7 @@ jobs:
846838

847839
integration-test-databricks-integration-spark:
848840
parameters:
849-
spark-version:
841+
env-variant:
850842
type: string
851843
working_directory: ~/openlineage/integration/spark
852844
machine:
@@ -871,20 +863,23 @@ jobs:
871863
- v1-integration-spark-{{ checksum "/tmp/checksum.txt" }}
872864
- attach_workspace:
873865
at: ~/
874-
- set_java_version
875-
- run: |
876-
sudo update-alternatives --set java ${JAVA_BIN}
877-
sudo update-alternatives --set javac ${JAVAC_BIN}
878-
- run: ./gradlew --console=plain shadowJar -x test -Pjava.compile.home=${JAVA17_HOME}
879-
- run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=<< parameters.spark-version >> -PdatabricksHost=$DATABRICKS_HOST -PdatabricksToken=$DATABRICKS_TOKEN -Pjava.compile.home=${JAVA17_HOME}
866+
- set_java_spark_scala_version:
867+
env-variant: << parameters.env-variant >>
868+
- run: mkdir -p app/build/logs
869+
- run: mkdir -p app/build/events
870+
- run: ./gradlew --console=plain clean shadowJar -x test -Pjava.compile.home=${JAVA17_HOME}
871+
- run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=${SPARK} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} -Dopenlineage.tests.databricks.workspace.host=$DATABRICKS_HOST -Dopenlineage.tests.databricks.workspace.token=$DATABRICKS_TOKEN
880872
- store_test_results:
881873
path: app/build/test-results/databricksIntegrationTest
882874
- store_artifacts:
883875
path: app/build/reports/tests/databricksIntegrationTest
884876
destination: test-report
885877
- store_artifacts:
886-
path: app/build/cluster-log4j.log
887-
destination: cluster-log4j.log
878+
path: app/build/logs
879+
destination: cluster-logs
880+
- store_artifacts:
881+
path: app/build/events
882+
destination: events
888883
- save_cache:
889884
key: v1-databricks-integration-spark-{{ checksum "/tmp/checksum.txt" }}
890885
paths:
@@ -992,8 +987,12 @@ jobs:
992987
at: ~/
993988
- set_java_version
994989
- run: chmod -R 777 data/iceberg/db
995-
- run: ./gradlew --console=plain examples:stateful:build -Pflink.version=<< parameters.flink-version >>
996-
- run: ./gradlew --no-daemon --console=plain integrationTest --i -Pflink.version=<< parameters.flink-version >>
990+
- run: |
991+
# Get, then decode the GPG private key used to sign *.jar
992+
export ORG_GRADLE_PROJECT_signingKey=$(echo $GPG_SIGNING_KEY | base64 -d)
993+
export RELEASE_PASSWORD=$(echo $OSSRH_TOKEN_PASSWORD)
994+
export RELEASE_USERNAME=$(echo $OSSRH_TOKEN_USERNAME)
995+
./gradlew --no-daemon --console=plain integrationTest --i -Pflink.version=<< parameters.flink-version >>
997996
- run:
998997
when: on_fail
999998
command: cat app/build/test-results/integrationTest/TEST-*.xml

.circleci/workflows/openlineage-flink.yml

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -23,6 +23,7 @@ workflows:
2323
filters:
2424
tags:
2525
only: /^[0-9]+(\.[0-9]+){2}(-rc\.[0-9]+)?$/
26+
context: << pipeline.parameters.build-context >>
2627
matrix:
2728
parameters:
2829
flink-version: [ '1.15.4', '1.16.2', '1.17.1', '1.18.1', '1.19.0' ]

.circleci/workflows/openlineage-spark.yml

Lines changed: 26 additions & 23 deletions
Original file line number | Diff line number | Diff line change
@@ -23,17 +23,17 @@ workflows:
2323
parameters:
2424
env-variant: [
2525
'java:8-spark:2.4.8-scala:2.12',
26-
'java:8-spark:3.2.4-scala:2.12',
27-
'java:8-spark:3.2.4-scala:2.13',
28-
'java:8-spark:3.3.4-scala:2.12',
29-
'java:8-spark:3.3.4-scala:2.13',
30-
'java:17-spark:3.3.4-scala:2.12',
31-
'java:17-spark:3.3.4-scala:2.13',
32-
'java:8-spark:3.4.3-scala:2.12',
33-
'java:8-spark:3.4.3-scala:2.13',
34-
'java:8-spark:3.5.2-scala:2.12',
35-
'java:8-spark:3.5.2-scala:2.13',
36-
'java:17-spark:3.5.2-scala:2.12',
26+
'java:8-spark:3.2.4-scala:2.12-full-tests',
27+
'java:8-spark:3.2.4-scala:2.13-full-tests',
28+
'java:8-spark:3.3.4-scala:2.12-full-tests',
29+
'java:8-spark:3.3.4-scala:2.13-full-tests',
30+
'java:17-spark:3.3.4-scala:2.12-full-tests',
31+
'java:17-spark:3.3.4-scala:2.13-full-tests',
32+
'java:8-spark:3.4.3-scala:2.12-full-tests',
33+
'java:8-spark:3.4.3-scala:2.13-full-tests',
34+
'java:8-spark:3.5.2-scala:2.12-full-tests',
35+
'java:8-spark:3.5.2-scala:2.13-full-tests',
36+
'java:17-spark:3.5.2-scala:2.12-full-tests',
3737
'java:17-spark:3.5.2-scala:2.13',
3838
'java:17-spark:4.0.0-scala:2.13'
3939
]
@@ -92,7 +92,10 @@ workflows:
9292
context: integration-tests
9393
matrix:
9494
parameters:
95-
spark-version: [ '3.4.2', '3.5.2' ]
95+
env-variant: [
96+
'java:8-spark:3.4.1-scala:2.12-full-tests',
97+
'java:17-spark:3.5.0-scala:2.12-full-tests'
98+
]
9699
requires:
97100
- approval-integration-spark
98101
post-steps:
@@ -112,17 +115,17 @@ workflows:
112115
parameters:
113116
env-variant: [
114117
'java:8-spark:2.4.8-scala:2.12',
115-
'java:8-spark:3.2.4-scala:2.12',
116-
'java:8-spark:3.2.4-scala:2.13',
117-
'java:8-spark:3.3.4-scala:2.12',
118-
'java:8-spark:3.3.4-scala:2.13',
119-
'java:17-spark:3.3.4-scala:2.12',
120-
'java:17-spark:3.3.4-scala:2.13',
121-
'java:8-spark:3.4.3-scala:2.12',
122-
'java:8-spark:3.4.3-scala:2.13',
123-
'java:8-spark:3.5.2-scala:2.12',
124-
'java:8-spark:3.5.2-scala:2.13',
125-
'java:17-spark:3.5.2-scala:2.12',
118+
'java:8-spark:3.2.4-scala:2.12-full-tests',
119+
'java:8-spark:3.2.4-scala:2.13-full-tests',
120+
'java:8-spark:3.3.4-scala:2.12-full-tests',
121+
'java:8-spark:3.3.4-scala:2.13-full-tests',
122+
'java:17-spark:3.3.4-scala:2.12-full-tests',
123+
'java:17-spark:3.3.4-scala:2.13-full-tests',
124+
'java:8-spark:3.4.3-scala:2.12-full-tests',
125+
'java:8-spark:3.4.3-scala:2.13-full-tests',
126+
'java:8-spark:3.5.2-scala:2.12-full-tests',
127+
'java:8-spark:3.5.2-scala:2.13-full-tests',
128+
'java:17-spark:3.5.2-scala:2.12-full-tests',
126129
'java:17-spark:3.5.2-scala:2.13',
127130
'java:17-spark:4.0.0-scala:2.13'
128131
]

0 commit comments

Comments
 (0)