33 | 33 | required: false
34 | 34 | description: "branch for k8s manifests to run the tests on"
35 | 35 | type: string
36 | | - default: "v3.0.5" |
| 36 | + default: "v3.1.0" |
37 | 37 | scripted-inputs-os-list:
38 | 38 | required: false
39 | 39 | description: "list of OS used for scripted input tests"

@@ -989,6 +989,203 @@ jobs:

989 | 989 | swagger_name=swagger_$(basename "$BUILD_NAME" .spl)
990 | 990 | aws s3 sync "${{ steps.download-openapi.outputs.download-path }}/tmp/restapi_client/" "s3://${{ needs.setup-workflow.outputs.s3_bucket_k8s }}/ta-apps/$swagger_name/" --exclude "*" --include "README.md" --include "*swagger_client*" --only-show-errors
991 | 991 |
| 992 | + run-btool-check: |
| 993 | + if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && needs.setup-workflow.outputs.execute-knowledge-labeled == 'true' }} |
| 994 | + needs: |
| 995 | + - build |
| 996 | + - test-inventory |
| 997 | + - setup |
| 998 | + - meta |
| 999 | + - setup-workflow |
| 1000 | + runs-on: ubuntu-latest |
| 1001 | + strategy: |
| 1002 | + fail-fast: false |
| 1003 | + matrix: |
| 1004 | + splunk: ${{ fromJson(needs.meta.outputs.matrix_latestSplunk) }} |
| 1005 | + sc4s: ${{ fromJson(needs.meta.outputs.matrix_supportedSC4S) }} |
| 1006 | + container: |
| 1007 | + image: ghcr.io/splunk/workflow-engine-base:4.1.0 |
| 1008 | + env: |
| 1009 | + ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} |
| 1010 | + ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} |
| 1011 | + ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} |
| 1012 | + ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} |
| 1013 | + ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} |
| 1014 | + SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} |
| 1015 | + TEST_TYPE: "btool" |
| 1016 | + TEST_ARGS: "" |
| 1017 | + permissions: |
| 1018 | + actions: read |
| 1019 | + deployments: read |
| 1020 | + contents: read |
| 1021 | + packages: read |
| 1022 | + statuses: read |
| 1023 | + checks: write |
| 1024 | + steps: |
| 1025 | + - uses: actions/checkout@v4 |
| 1026 | + with: |
| 1027 | + submodules: recursive |
| 1028 | + - name: capture start time |
| 1029 | + id: capture-start-time |
| 1030 | + run: | |
| 1031 | + echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" |
| 1032 | + - name: Configure AWS credentials |
| 1033 | + uses: aws-actions/configure-aws-credentials@v4 |
| 1034 | + with: |
| 1035 | + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} |
| 1036 | + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} |
| 1037 | + aws-region: ${{ secrets.AWS_DEFAULT_REGION }} |
| 1038 | + - name: Read secrets from AWS Secrets Manager into environment variables |
| 1039 | + id: get-argo-token |
| 1040 | + run: | |
| 1041 | + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') |
| 1042 | + echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" |
| 1043 | + - name: create job name |
| 1044 | + id: create-job-name |
| 1045 | + shell: bash |
| 1046 | + run: | |
| 1047 | + RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) |
| 1048 | + JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} |
| 1049 | + JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} |
| 1050 | + JOB_NAME=${JOB_NAME//[_.]/-} |
| 1051 | + JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') |
| 1052 | + echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" |
| 1053 | + - name: run-btool-check |
| 1054 | + id: run-btool-check |
| 1055 | + timeout-minutes: 10 |
| 1056 | + env: |
| 1057 | + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} |
| 1058 | + uses: splunk/wfe-test-runner-action@v5.0 |
| 1059 | + with: |
| 1060 | + splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} |
| 1061 | + test-type: ${{ env.TEST_TYPE }} |
| 1062 | + test-args: "" |
| 1063 | + job-name: ${{ steps.create-job-name.outputs.job-name }} |
| 1064 | + labels: ${{ needs.setup.outputs.labels }} |
| 1065 | + workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} |
| 1066 | + workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} |
| 1067 | + addon-url: ${{ needs.setup.outputs.addon-upload-path }} |
| 1068 | + addon-name: ${{ needs.setup.outputs.addon-name }} |
| 1069 | + sc4s-version: ${{ matrix.sc4s.version }} |
| 1070 | + sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }} |
| 1071 | + k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} |
| 1072 | + - name: Read secrets from AWS Secrets Manager again into environment variables in case of credential rotation |
| 1073 | + id: update-argo-token |
| 1074 | + if: ${{ !cancelled() }} |
| 1075 | + run: | |
| 1076 | + ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') |
| 1077 | + echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" |
| 1078 | + - name: calculate timeout |
| 1079 | + id: calculate-timeout |
| 1080 | + run: | |
| 1081 | + start_time=${{ steps.capture-start-time.outputs.start_time }} |
| 1082 | + current_time=$(date +%s) |
| 1083 | + remaining_time_minutes=$(( 10-((current_time-start_time)/60) )) |
| 1084 | + echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" |
| 1085 | + - name: Check if pod was deleted |
| 1086 | + id: is-pod-deleted |
| 1087 | + timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} |
| 1088 | + if: ${{ !cancelled() }} |
| 1089 | + shell: bash |
| 1090 | + env: |
| 1091 | + ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} |
| 1092 | + run: | |
| 1093 | + set -o xtrace |
| 1094 | + if argo watch ${{ steps.run-btool-check.outputs.workflow-name }} -n workflows | grep "pod deleted"; then |
| 1095 | + echo "retry-workflow=true" >> "$GITHUB_OUTPUT" |
| 1096 | + fi |
| 1097 | + - name: Cancel workflow |
| 1098 | + env: |
| 1099 | + ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} |
| 1100 | + if: ${{ cancelled() }} |
| 1101 | + run: | |
| 1102 | + cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-btool-check.outputs.workflow-name }}) |
| 1103 | + cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) |
| 1104 | + cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) |
| 1105 | + if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-btool-check.outputs.workflow-name }} stopped"; then |
| 1106 | + echo "Workflow ${{ steps.run-btool-check.outputs.workflow-name }} stopped" |
| 1107 | + else |
| 1108 | + echo "Workflow ${{ steps.run-btool-check.outputs.workflow-name }} didn't stop" |
| 1109 | + exit 1 |
| 1110 | + fi |
| 1111 | + - name: Retrying workflow |
| 1112 | + id: retry-wf |
| 1113 | + shell: bash |
| 1114 | + env: |
| 1115 | + ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} |
| 1116 | + if: ${{ !cancelled() }} |
| 1117 | + run: | |
| 1118 | + set -o xtrace |
| 1119 | + set +e |
| 1120 | + if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] |
| 1121 | + then |
| 1122 | + WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-btool-check.outputs.workflow-name }}" | jq -r .metadata.name) |
| 1123 | + echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" |
| 1124 | + argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." |
| 1125 | + else |
| 1126 | + echo "No retry required" |
| 1127 | + argo wait "${{ steps.run-btool-check.outputs.workflow-name }}" -n workflows |
| 1128 | + argo watch "${{ steps.run-btool-check.outputs.workflow-name }}" -n workflows | grep "btool-check" |
| 1129 | + fi |
| 1130 | + - name: check workflow status |
| 1131 | + id: check-workflow-status |
| 1132 | + env: |
| 1133 | + ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} |
| 1134 | + shell: bash |
| 1135 | + if: ${{ !cancelled() }} |
| 1136 | + run: | |
| 1137 | + set +e |
| 1138 | + # shellcheck disable=SC2157 |
| 1139 | + if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then |
| 1140 | + WORKFLOW_NAME=${{ steps.run-btool-check.outputs.workflow-name }} |
| 1141 | + else |
| 1142 | + WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" |
| 1143 | + fi |
| 1144 | + ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') |
| 1145 | + while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] |
| 1146 | + do |
| 1147 | + echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." |
| 1148 | + argo wait "${WORKFLOW_NAME}" -n workflows || true |
| 1149 | + ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') |
| 1150 | + done |
| 1151 | + echo "Status of workflow:" "$ARGO_STATUS" |
| 1152 | + echo "workflow-status=$ARGO_STATUS" >> "$GITHUB_OUTPUT" |
| 1153 | + if [ "$ARGO_STATUS" == "Succeeded" ]; then |
| 1154 | + exit 0 |
| 1155 | + else |
| 1156 | + exit 1 |
| 1157 | + fi |
| 1158 | + - name: pull artifacts from s3 bucket |
| 1159 | + if: ${{ !cancelled() && steps.check-workflow-status.outputs.workflow-status != 'Succeeded' }} |
| 1160 | + run: | |
| 1161 | + echo "pulling artifacts" |
| 1162 | + aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ |
| 1163 | + tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} |
| 1164 | + - name: pull logs from s3 bucket |
| 1165 | + if: ${{ !cancelled() && steps.check-workflow-status.outputs.workflow-status != 'Succeeded' }} |
| 1166 | + run: | |
| 1167 | + # shellcheck disable=SC2157 |
| 1168 | + if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then |
| 1169 | + WORKFLOW_NAME=${{ steps.run-btool-check.outputs.workflow-name }} |
| 1170 | + else |
| 1171 | + WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" |
| 1172 | + fi |
| 1173 | + echo "pulling logs" |
| 1174 | + mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs |
| 1175 | + aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/workflows/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive |
| 1176 | + - uses: actions/upload-artifact@v4 |
| 1177 | + if: ${{ !cancelled() && steps.check-workflow-status.outputs.workflow-status != 'Succeeded' }} |
| 1178 | + with: |
| 1179 | + name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts |
| 1180 | + path: | |
| 1181 | + ${{ needs.setup.outputs.directory-path }}/test-results |
| 1182 | + - uses: actions/upload-artifact@v4 |
| 1183 | + if: ${{ !cancelled() && steps.check-workflow-status.outputs.workflow-status != 'Succeeded' }} |
| 1184 | + with: |
| 1185 | + name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs |
| 1186 | + path: | |
| 1187 | + ${{ needs.setup.outputs.directory-path }}/argo-logs |
| 1188 | +
992 | 1189 | run-knowledge-tests:
993 | 1190 | if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && needs.setup-workflow.outputs.execute-knowledge-labeled == 'true' }}
994 | 1191 | needs:
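
The create-job-name step in the new job has to produce a valid Argo/Kubernetes resource name: it appends a four-character random suffix, substitutes the TEST-TYPE placeholder, maps underscores and dots to hyphens, and lowercases the result. A minimal standalone sketch of the same transformation; the base job name here is hypothetical:

#!/usr/bin/env bash
# Sketch of the normalization in create-job-name; the base name is made up.
TEST_TYPE="btool"
JOB_NAME="Splunk_TA_example-TEST-TYPE"
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)  # four random lowercase letters
JOB_NAME="${JOB_NAME}-${RANDOM_STRING}"
JOB_NAME=${JOB_NAME//TEST-TYPE/${TEST_TYPE}}  # fill in the test type
JOB_NAME=${JOB_NAME//[_.]/-}                  # '_' and '.' are invalid in k8s resource names
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
echo "$JOB_NAME"  # e.g. splunk-ta-example-btool-abcd
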
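The capture-start-time and calculate-timeout pair keeps the pod-deleted watch inside the same 10-minute budget that timeout-minutes places on run-btool-check: whatever the test step consumed is subtracted, and the remainder becomes the watch step's timeout. The arithmetic, restated as a sketch:

# Minutes left from a 10-minute budget, as computed in calculate-timeout.
start_time=$(date +%s)    # recorded by capture-start-time before the test runs
# ... run-btool-check executes here ...
current_time=$(date +%s)
remaining_time_minutes=$(( 10 - (current_time - start_time) / 60 ))
echo "remaining_time_minutes=$remaining_time_minutes"
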
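The is-pod-deleted, retry-wf, and check-workflow-status steps together implement a single-retry pattern for evicted pods: watch for Argo's "pod deleted" message, resubmit the workflow once if it appears, then poll the (possibly new) workflow until it leaves Running/Pending and gate the job on Succeeded. Condensed into one sketch, with a hypothetical workflow name and each argo invocation mirroring the steps above:

#!/usr/bin/env bash
# Single-retry-then-wait pattern from the run-btool-check job.
WORKFLOW="btool-check-abcd"   # hypothetical; supplied by the test-runner action in the workflow
NS="workflows"

# Resubmit once if the pod was deleted out from under the run.
if argo watch "$WORKFLOW" -n "$NS" | grep -q "pod deleted"; then
  WORKFLOW=$(argo resubmit -o json -n "$NS" "$WORKFLOW" | jq -r .metadata.name)
fi

# Poll until the workflow reaches a terminal phase, then gate on success.
ARGO_STATUS=$(argo get "$WORKFLOW" -n "$NS" -o json | jq -r '.status.phase')
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]; do
  argo wait "$WORKFLOW" -n "$NS" || true
  ARGO_STATUS=$(argo get "$WORKFLOW" -n "$NS" -o json | jq -r '.status.phase')
done
[ "$ARGO_STATUS" == "Succeeded" ] || exit 1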