From cfcffa410fb4a227e38050450ceddb733440323c Mon Sep 17 00:00:00 2001
From: Warren <5959690+wrn14897@users.noreply.github.com>
Date: Mon, 7 Jul 2025 21:20:33 -0700
Subject: [PATCH 1/3] ci: use DOWNSTREAM_TOKEN for notify_helm_charts step

---
 .github/workflows/release.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 5e780dd99..19ce1728a 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -111,7 +111,7 @@ jobs:
         env:
           TAG: ${{ env.IMAGE_VERSION }}${{ env.IMAGE_VERSION_SUB_TAG }}
         with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
+          github-token: ${{ secrets.DOWNSTREAM_TOKEN }}
           script: |
             const { TAG } = process.env;
             const result = await github.rest.actions.createWorkflowDispatch({

From 4787a86113fafc0ef5480e93f35304fa4d9bc147 Mon Sep 17 00:00:00 2001
From: Warren <5959690+wrn14897@users.noreply.github.com>
Date: Mon, 7 Jul 2025 21:32:19 -0700
Subject: [PATCH 2/3] fix: skip pushing image if tag exists

---
 Makefile | 236 +++++++++++++++++++++++++++++++++----------------------
 1 file changed, 142 insertions(+), 94 deletions(-)

diff --git a/Makefile b/Makefile
index be1c50ca0..aac070976 100644
--- a/Makefile
+++ b/Makefile
@@ -157,118 +157,166 @@ build-all-in-one-nightly:
 
 .PHONY: release-otel-collector
 release-otel-collector:
-	docker buildx build --platform ${BUILD_PLATFORMS} ./docker/otel-collector \
-		-t ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} \
-		-t ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION} \
-		-t ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_LATEST_TAG} \
-		--target prod \
-		--push \
-		--cache-from=type=gha \
-		--cache-to=type=gha,mode=max
+	@TAG_EXISTS=$$(docker manifest inspect ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} > /dev/null 2>&1 && echo "true" || echo "false"); \
+	if [ "$$TAG_EXISTS" = "true" ]; then \
+		echo "Tag ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} already exists. Skipping push."; \
+	else \
+		echo "Tag ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} does not exist. Building and pushing..."; \
+		docker buildx build --platform ${BUILD_PLATFORMS} ./docker/otel-collector \
+			-t ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} \
+			-t ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION} \
+			-t ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_LATEST_TAG} \
+			--target prod \
+			--push \
+			--cache-from=type=gha \
+			--cache-to=type=gha,mode=max; \
+	fi
 
 .PHONY: release-local
 release-local:
-	docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
-		--build-context clickhouse=./docker/clickhouse \
-		--build-context otel-collector=./docker/otel-collector \
-		--build-context hyperdx=./docker/hyperdx \
-		--build-context api=./packages/api \
-		--build-context app=./packages/app \
-		--build-arg CODE_VERSION=${CODE_VERSION} \
-		--platform ${BUILD_PLATFORMS} \
-		-t ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} \
-		-t ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION} \
-		-t ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_LATEST_TAG} \
-		--target all-in-one-noauth \
-		--push \
-		--cache-from=type=gha \
-		--cache-to=type=gha,mode=max
+	@TAG_EXISTS=$$(docker manifest inspect ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} > /dev/null 2>&1 && echo "true" || echo "false"); \
+	if [ "$$TAG_EXISTS" = "true" ]; then \
+		echo "Tag ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} already exists. Skipping push."; \
+	else \
+		echo "Tag ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} does not exist. Building and pushing..."; \
+		docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
+			--build-context clickhouse=./docker/clickhouse \
+			--build-context otel-collector=./docker/otel-collector \
+			--build-context hyperdx=./docker/hyperdx \
+			--build-context api=./packages/api \
+			--build-context app=./packages/app \
+			--build-arg CODE_VERSION=${CODE_VERSION} \
+			--platform ${BUILD_PLATFORMS} \
+			-t ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} \
+			-t ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION} \
+			-t ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_LATEST_TAG} \
+			--target all-in-one-noauth \
+			--push \
+			--cache-from=type=gha \
+			--cache-to=type=gha,mode=max; \
+	fi
 
 .PHONY: release-all-in-one
 release-all-in-one:
-	docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
-		--build-context clickhouse=./docker/clickhouse \
-		--build-context otel-collector=./docker/otel-collector \
-		--build-context hyperdx=./docker/hyperdx \
-		--build-context api=./packages/api \
-		--build-context app=./packages/app \
-		--build-arg CODE_VERSION=${CODE_VERSION} \
-		--platform ${BUILD_PLATFORMS} \
-		-t ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} \
-		-t ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION} \
-		-t ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_LATEST_TAG} \
-		--target all-in-one-auth \
-		--push \
-		--cache-from=type=gha \
-		--cache-to=type=gha,mode=max
+	@TAG_EXISTS=$$(docker manifest inspect ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} > /dev/null 2>&1 && echo "true" || echo "false"); \
+	if [ "$$TAG_EXISTS" = "true" ]; then \
+		echo "Tag ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} already exists. Skipping push."; \
+	else \
+		echo "Tag ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} does not exist. Building and pushing..."; \
+		docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
+			--build-context clickhouse=./docker/clickhouse \
+			--build-context otel-collector=./docker/otel-collector \
+			--build-context hyperdx=./docker/hyperdx \
+			--build-context api=./packages/api \
+			--build-context app=./packages/app \
+			--build-arg CODE_VERSION=${CODE_VERSION} \
+			--platform ${BUILD_PLATFORMS} \
+			-t ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} \
+			-t ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION} \
+			-t ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_LATEST_TAG} \
+			--target all-in-one-auth \
+			--push \
+			--cache-from=type=gha \
+			--cache-to=type=gha,mode=max; \
+	fi
 
 .PHONY: release-app
 release-app:
-	docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
-		--build-context hyperdx=./docker/hyperdx \
-		--build-context api=./packages/api \
-		--build-context app=./packages/app \
-		--build-arg CODE_VERSION=${CODE_VERSION} \
-		--platform ${BUILD_PLATFORMS} \
-		-t ${IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} \
-		-t ${IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION} \
-		-t ${IMAGE_NAME_DOCKERHUB}:${IMAGE_LATEST_TAG} \
-		--target prod \
-		--push \
-		--cache-from=type=gha \
-		--cache-to=type=gha,mode=max
+	@TAG_EXISTS=$$(docker manifest inspect ${IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} > /dev/null 2>&1 && echo "true" || echo "false"); \
+	if [ "$$TAG_EXISTS" = "true" ]; then \
+		echo "Tag ${IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} already exists. Skipping push."; \
+	else \
+		echo "Tag ${IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} does not exist. Building and pushing..."; \
+		docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
+			--build-context hyperdx=./docker/hyperdx \
+			--build-context api=./packages/api \
+			--build-context app=./packages/app \
+			--build-arg CODE_VERSION=${CODE_VERSION} \
+			--platform ${BUILD_PLATFORMS} \
+			-t ${IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION}${IMAGE_VERSION_SUB_TAG} \
+			-t ${IMAGE_NAME_DOCKERHUB}:${IMAGE_VERSION} \
+			-t ${IMAGE_NAME_DOCKERHUB}:${IMAGE_LATEST_TAG} \
+			--target prod \
+			--push \
+			--cache-from=type=gha \
+			--cache-to=type=gha,mode=max; \
+	fi
 
 .PHONY: release-otel-collector-nightly
 release-otel-collector-nightly:
-	docker buildx build --platform ${BUILD_PLATFORMS} ./docker/otel-collector \
-		-t ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} \
-		--target prod \
-		--push \
-		--cache-from=type=gha \
-		--cache-to=type=gha,mode=max
+	@TAG_EXISTS=$$(docker manifest inspect ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} > /dev/null 2>&1 && echo "true" || echo "false"); \
+	if [ "$$TAG_EXISTS" = "true" ]; then \
+		echo "Tag ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} already exists. Skipping push."; \
+	else \
+		echo "Tag ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} does not exist. Building and pushing..."; \
+		docker buildx build --platform ${BUILD_PLATFORMS} ./docker/otel-collector \
+			-t ${OTEL_COLLECTOR_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} \
+			--target prod \
+			--push \
+			--cache-from=type=gha \
+			--cache-to=type=gha,mode=max; \
+	fi
 
 .PHONY: release-app-nightly
 release-app-nightly:
-	docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
-		--build-context hyperdx=./docker/hyperdx \
-		--build-context api=./packages/api \
-		--build-context app=./packages/app \
-		--build-arg CODE_VERSION=${IMAGE_NIGHTLY_TAG} \
-		--platform ${BUILD_PLATFORMS} \
-		-t ${IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} \
-		--target prod \
-		--push \
-		--cache-from=type=gha \
-		--cache-to=type=gha,mode=max
+	@TAG_EXISTS=$$(docker manifest inspect ${IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} > /dev/null 2>&1 && echo "true" || echo "false"); \
+	if [ "$$TAG_EXISTS" = "true" ]; then \
+		echo "Tag ${IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} already exists. Skipping push."; \
+	else \
+		echo "Tag ${IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} does not exist. Building and pushing..."; \
+		docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
+			--build-context hyperdx=./docker/hyperdx \
+			--build-context api=./packages/api \
+			--build-context app=./packages/app \
+			--build-arg CODE_VERSION=${IMAGE_NIGHTLY_TAG} \
+			--platform ${BUILD_PLATFORMS} \
+			-t ${IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} \
+			--target prod \
+			--push \
+			--cache-from=type=gha \
+			--cache-to=type=gha,mode=max; \
+	fi
 
 .PHONY: release-local-nightly
 release-local-nightly:
-	docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
-		--build-context clickhouse=./docker/clickhouse \
-		--build-context otel-collector=./docker/otel-collector \
-		--build-context hyperdx=./docker/hyperdx \
-		--build-context api=./packages/api \
-		--build-context app=./packages/app \
-		--build-arg CODE_VERSION=${IMAGE_NIGHTLY_TAG} \
-		--platform ${BUILD_PLATFORMS} \
-		-t ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} \
-		--target all-in-one-noauth \
-		--push \
-		--cache-from=type=gha \
-		--cache-to=type=gha,mode=max
+	@TAG_EXISTS=$$(docker manifest inspect ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} > /dev/null 2>&1 && echo "true" || echo "false"); \
+	if [ "$$TAG_EXISTS" = "true" ]; then \
+		echo "Tag ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} already exists. Skipping push."; \
+	else \
+		echo "Tag ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} does not exist. Building and pushing..."; \
+		docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
+			--build-context clickhouse=./docker/clickhouse \
+			--build-context otel-collector=./docker/otel-collector \
+			--build-context hyperdx=./docker/hyperdx \
+			--build-context api=./packages/api \
+			--build-context app=./packages/app \
+			--build-arg CODE_VERSION=${IMAGE_NIGHTLY_TAG} \
+			--platform ${BUILD_PLATFORMS} \
+			-t ${LOCAL_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} \
+			--target all-in-one-noauth \
+			--push \
+			--cache-from=type=gha \
+			--cache-to=type=gha,mode=max; \
+	fi
 
 .PHONY: release-all-in-one-nightly
 release-all-in-one-nightly:
-	docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
-		--build-context clickhouse=./docker/clickhouse \
-		--build-context otel-collector=./docker/otel-collector \
-		--build-context hyperdx=./docker/hyperdx \
-		--build-context api=./packages/api \
-		--build-context app=./packages/app \
-		--build-arg CODE_VERSION=${IMAGE_NIGHTLY_TAG} \
-		--platform ${BUILD_PLATFORMS} \
-		-t ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} \
-		--target all-in-one-auth \
-		--push \
-		--cache-from=type=gha \
-		--cache-to=type=gha,mode=max
+	@TAG_EXISTS=$$(docker manifest inspect ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} > /dev/null 2>&1 && echo "true" || echo "false"); \
+	if [ "$$TAG_EXISTS" = "true" ]; then \
+		echo "Tag ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} already exists. Skipping push."; \
+	else \
+		echo "Tag ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} does not exist. Building and pushing..."; \
+		docker buildx build --squash . -f ./docker/hyperdx/Dockerfile \
+			--build-context clickhouse=./docker/clickhouse \
+			--build-context otel-collector=./docker/otel-collector \
+			--build-context hyperdx=./docker/hyperdx \
+			--build-context api=./packages/api \
+			--build-context app=./packages/app \
+			--build-arg CODE_VERSION=${IMAGE_NIGHTLY_TAG} \
+			--platform ${BUILD_PLATFORMS} \
+			-t ${ALL_IN_ONE_IMAGE_NAME_DOCKERHUB}:${IMAGE_NIGHTLY_TAG} \
+			--target all-in-one-auth \
+			--push \
+			--cache-from=type=gha \
+			--cache-to=type=gha,mode=max; \
+	fi

From 66d6e57984795d36c00cc800bf9ae534f6ea5294 Mon Sep 17 00:00:00 2001
From: Warren <5959690+wrn14897@users.noreply.github.com>
Date: Mon, 7 Jul 2025 22:39:30 -0700
Subject: [PATCH 3/3] ci: only notify downstream if app image is pushed

---
 .github/workflows/release.yml | 64 +++++++++++++++++++++++++++++++----
 1 file changed, 58 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 19ce1728a..fa5de67dd 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -90,16 +90,67 @@ jobs:
       - name: Load Environment Variables from .env
         uses: xom9ikk/dotenv@v2
       - name: Publish Images
+        id: publish
         if: needs.check_changesets.outputs.changeset_outputs_hasChangesets == 'false'
-        run: make ${{ matrix.release }}
-  notify_helm_charts:
-    name: Notify Helm-Charts Downstream
+        run: |
+          OUTPUT=$(make ${{ matrix.release }} 2>&1)
+          echo "$OUTPUT"
+
+          # Store the output in a file for the specific release target
+          echo "$OUTPUT" > /tmp/${{ matrix.release }}-output.txt
+
+          # Upload the output as an artifact if this is release-app
+          if [ "${{ matrix.release }}" = "release-app" ]; then
+            if echo "$OUTPUT" | grep -q "already exists. Skipping push."; then
+              echo "RELEASE_APP_PUSHED=false" > /tmp/release-app-status.txt
+            else
+              echo "RELEASE_APP_PUSHED=true" > /tmp/release-app-status.txt
+            fi
+          fi
+      - name: Upload release-app status
+        if: matrix.release == 'release-app'
+        uses: actions/upload-artifact@v4
+        with:
+          name: release-app-status
+          path: /tmp/release-app-status.txt
+  check_release_app_pushed:
+    name: Check if release-app pushed
     needs: [check_changesets, release]
     runs-on: ubuntu-24.04
+    outputs:
+      app_was_pushed: ${{ steps.check.outputs.pushed }}
     if:
       needs.check_changesets.outputs.changeset_outputs_hasChangesets == 'false'
     steps:
+      - name: Download release-app status
+        uses: actions/download-artifact@v4
+        with:
+          name: release-app-status
+          path: /tmp
+      - name: Check if release-app was pushed
+        id: check
+        run: |
+          if [ -f /tmp/release-app-status.txt ]; then
+            STATUS=$(cat /tmp/release-app-status.txt)
+            echo "Release app status: $STATUS"
+            if [ "$STATUS" = "RELEASE_APP_PUSHED=true" ]; then
+              echo "pushed=true" >> $GITHUB_OUTPUT
+            else
+              echo "pushed=false" >> $GITHUB_OUTPUT
+            fi
+          else
+            echo "No release-app status file found, assuming not pushed"
+            echo "pushed=false" >> $GITHUB_OUTPUT
+          fi
+  notify_helm_charts:
+    name: Notify Helm-Charts Downstream
+    needs: [check_changesets, release, check_release_app_pushed]
+    runs-on: ubuntu-24.04
+    if: |
+      needs.check_changesets.outputs.changeset_outputs_hasChangesets == 'false' &&
+      needs.check_release_app_pushed.outputs.app_was_pushed == 'true'
+    steps:
       - name: Checkout
         uses: actions/checkout@v4
       - name: Load Environment Variables from .env
         uses: xom9ikk/dotenv@v2
@@ -125,10 +176,11 @@ jobs:
           });
   notify_ch:
     name: Notify CH Downstream
-    needs: [check_changesets, release]
+    needs: [check_changesets, release, check_release_app_pushed]
     runs-on: ubuntu-24.04
-    if:
-      needs.check_changesets.outputs.changeset_outputs_hasChangesets == 'false'
+    if: |
+      needs.check_changesets.outputs.changeset_outputs_hasChangesets == 'false' &&
+      needs.check_release_app_pushed.outputs.app_was_pushed == 'true'
     steps:
       - name: Checkout
         uses: actions/checkout@v4