
Commit 911806f

fix: remove run-scripted-input-tests-canary job (#273)
The `run-scripted-input-tests-canary` job is essentially a subset of `run-scripted-input-tests-full-matrix`, which does not take much time to run in CI, so there is no need to keep a separate job in the reusable workflow.
1 parent 58b5db8 commit 911806f
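For context, the removed canary job ran the same scripted-input test type as the full matrix, just gated to the develop branch. A minimal sketch of the job it duplicates is shown below; the actual `run-scripted-input-tests-full-matrix` definition is not part of this diff, so the details here are assumptions inferred from the removed canary job:

```yaml
# Illustrative sketch only -- the real run-scripted-input-tests-full-matrix job is
# not shown in this diff; its shape is assumed from the removed canary job.
run-scripted-input-tests-full-matrix:
  needs: [ build, test-inventory, setup, meta, setup-workflow ]
  runs-on: ubuntu-latest
  strategy:
    fail-fast: false
    matrix:
      # Same axes the removed canary job iterated over, which is why dropping
      # the canary loses no test coverage.
      splunk: ${{ fromJson(needs.meta.outputs.matrix_combinedSplunkversion) }}
      os: [ "ubuntu:22.04", "centos:7", "redhat:8.5" ]
  steps:
    - uses: actions/checkout@v3
      with:
        submodules: recursive
    # ... same Argo-based test-runner steps as in the removed job below ...
```

The canary's only distinguishing feature was its `if:` gate restricting it to develop, visible in the deleted job definition in the diff.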

File tree

1 file changed (+0, −244 lines)


.github/workflows/reusable-build-test-release.yml

Lines changed: 0 additions & 244 deletions
@@ -2152,250 +2152,6 @@ jobs:
         path: |
           ${{ needs.setup.outputs.directory-path }}/diag*
 
-  run-scripted-input-tests-canary:
-    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-scripted_inputs-labeled == 'true') }}
-    needs:
-      - build
-      - test-inventory
-      - setup
-      - meta
-      - setup-workflow
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        splunk: ${{ fromJson(needs.meta.outputs.matrix_combinedSplunkversion) }}
-        os: [ "ubuntu:22.04", "centos:7","redhat:8.5" ]
-    container:
-      image: ghcr.io/splunk/workflow-engine-base:2.0.12
-    env:
-      ARGO_SERVER: ${{ needs.setup.outputs.argo-server }}
-      ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }}
-      ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }}
-      ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }}
-      ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }}
-      SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
-      TEST_TYPE: "scripted_inputs"
-    permissions:
-      actions: read
-      deployments: read
-      contents: read
-      packages: read
-      statuses: read
-      checks: write
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          submodules: recursive
-      - name: configure git # This step configures git to omit "dubious git ownership error" in later test-reporter stage
-        id: configure-git
-        run: |
-          git --version
-          git_path="$(pwd)"
-          echo "$git_path"
-          git config --global --add safe.directory "$git_path"
-      - name: capture start time
-        id: capture-start-time
-        run: |
-          echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT"
-      - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v3
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
-      - name: Read secrets from AWS Secrets Manager into environment variables
-        id: get-argo-token
-        run: |
-          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString')
-          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
-      - name: create job name
-        id: create-job-name
-        shell: bash
-        run: |
-          RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)
-          JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING}
-          JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}}
-          JOB_NAME=${JOB_NAME//[_.]/-}
-          JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
-          echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT"
-      - name: Splunk instance details
-        id: splunk-instance-details
-        if: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs == 'Yes' }}
-        shell: bash
-        run: |
-          BOLD="\033[1m"
-          NORMAL="\033[0m"
-          echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts"
-          echo -e "Splunk username is${BOLD} admin${NORMAL}"
-          echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD"
-      - name: get os name and version
-        id: os-name-version
-        shell: bash
-        run: |
-          OS_NAME_VERSION=${{ matrix.os }}
-          OS_NAME_VERSION=("${OS_NAME_VERSION//:/ }")
-          OS_NAME=${OS_NAME_VERSION[0]}
-          OS_VERSION=${OS_NAME_VERSION[1]}
-          {
-            echo "os-name=$OS_NAME"
-            echo "os-version=$OS_VERSION"
-          } >> "$GITHUB_OUTPUT"
-      - name: run-tests
-        id: run-tests
-        timeout-minutes: 340
-        continue-on-error: true
-        env:
-          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
-        uses: splunk/wfe-test-runner-action@v1.6
-        with:
-          splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
-          test-type: ${{ env.TEST_TYPE }}
-          test-args: "--hostname=spl --os-name=${{ steps.os-name-version.outputs.os-name }} --os-version=${{ steps.os-name-version.outputs.os-version }} -m script_input"
-          job-name: ${{ steps.create-job-name.outputs.job-name }}
-          labels: ${{ needs.setup.outputs.labels }}
-          workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }}
-          workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }}
-          delay-destroy: "No"
-          addon-url: ${{ needs.setup.outputs.addon-upload-path }}
-          addon-name: ${{ needs.setup.outputs.addon-name }}
-          vendor-version: ${{ matrix.vendor-version.image }}
-          sc4s-version: "No"
-          os-name: ${{ steps.os-name-version.outputs.os-name }}
-          os-version: ${{ steps.os-name-version.outputs.os-version }}
-          k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }}
-      - name: calculate timeout
-        id: calculate-timeout
-        run: |
-          start_time=${{ steps.capture-start-time.outputs.start_time }}
-          current_time=$(date +%s)
-          remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
-          echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
-      - name: Check if pod was deleted
-        id: is-pod-deleted
-        timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }}
-        if: ${{ !cancelled() }}
-        shell: bash
-        env:
-          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
-        run: |
-          set -o xtrace
-          if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then
-            echo "retry-workflow=true" >> "$GITHUB_OUTPUT"
-          fi
-      - name: Cancel workflow
-        env:
-          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
-        if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }}
-        run: |
-          cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }})
-          cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' )
-          cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows)
-          if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then
-            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"
-          else
-            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop"
-            exit 1
-          fi
-      - name: Retrying workflow
-        id: retry-wf
-        shell: bash
-        env:
-          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
-        if: ${{ !cancelled() }}
-        run: |
-          set -o xtrace
-          set +e
-          if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]]
-          then
-            WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name)
-            echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT"
-            argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..."
-          else
-            echo "No retry required"
-            argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows
-            argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon"
-          fi
-      - name: check if workflow completed
-        env:
-          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
-        if: ${{ !cancelled() }}
-        shell: bash
-        run: |
-          set +e
-          # shellcheck disable=SC2157
-          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
-            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
-          else
-            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
-          fi
-          ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
-          echo "Status of workflow:" "$ARGO_STATUS"
-          while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]
-          do
-            echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete."
-            argo wait "${WORKFLOW_NAME}" -n workflows || true
-            ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
-          done
-      - name: pull artifacts from s3 bucket
-        if: ${{ !cancelled() }}
-        run: |
-          echo "pulling artifacts"
-          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
-          tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }}
-      - name: pull logs from s3 bucket
-        if: ${{ !cancelled() }}
-        run: |
-          # shellcheck disable=SC2157
-          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
-            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
-          else
-            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
-          fi
-          echo "pulling logs"
-          mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs
-          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive
-      - uses: actions/upload-artifact@v3
-        if: ${{ !cancelled() }}
-        with:
-          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts
-          path: |
-            ${{ needs.setup.outputs.directory-path }}/test-results
-      - uses: actions/upload-artifact@v3
-        if: ${{ !cancelled() }}
-        with:
-          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs
-          path: |
-            ${{ needs.setup.outputs.directory-path }}/argo-logs
-      - name: Test Report
-        id: test_report
-        uses: dorny/test-reporter@v1.7.0
-        if: ${{ !cancelled() && !contains(matrix.splunk.version, 'unreleased-python3_9') }}
-        with:
-          name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report
-          path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml"
-          reporter: java-junit
-      - name: Test Report Python 3.9
-        continue-on-error: true
-        id: test_report_python_3_9
-        uses: dorny/test-reporter@v1.7.0
-        if: ${{ !cancelled() && contains(matrix.splunk.version, 'unreleased-python3_9') }}
-        with:
-          name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report
-          path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml"
-          reporter: java-junit
-      - name: pull diag from s3 bucket
-        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
-        run: |
-          echo "pulling diag"
-          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
-      - uses: actions/upload-artifact@v3
-        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
-        with:
-          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests diag
-          path: |
-            ${{ needs.setup.outputs.directory-path }}/diag*
-
   pre-publish:
     if: ${{ !cancelled() }}
     # The following line will rename 'pre-publish' to 'pre-publish-not_main_pr' when PR is created towards main branch
