From e1cb1ee29ee72f47bb99648ff4b7f3d09e5d9e4a Mon Sep 17 00:00:00 2001 From: Evgeny Zakharchenko Date: Thu, 17 Jul 2025 10:27:38 +0200 Subject: [PATCH 1/7] Create Github workflow on commit --- .github/workflows/maven.yml | 126 ++++++++++++++++++++++++++++++++++++ 1 file changed, 126 insertions(+) create mode 100644 .github/workflows/maven.yml diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml new file mode 100644 index 000000000..ec7a4400f --- /dev/null +++ b/.github/workflows/maven.yml @@ -0,0 +1,126 @@ +name: Run Tests + +on: + push: + branches: [ "main", "scenario/*", "eval/*", "feature/*" ] + pull_request: + branches: [ "main", "scenario/*", "eval/*", "feature/*" ] + +jobs: + build: + runs-on: ubuntu-latest + permissions: + issues: write # so we can create & edit comments + contents: read + + steps: + # ──────────── 1. checkout ──────────── + - uses: actions/checkout@v4 + + # ──────────── 2. post placeholder comment (EARLY) ──────────── + - name: Create placeholder issue comment + id: create_comment + uses: actions/github-script@v7 + env: + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + result-encoding: string + script: | + const issuePat = /#(\d+)/g; + let issueNum = null, m; + + // • PR context + if (context.payload.pull_request) { + const whole = `${context.payload.pull_request.title}\n${context.payload.pull_request.body}`; + if ((m = issuePat.exec(whole)) !== null) issueNum = +m[1]; + } + + // • Push context + if (!issueNum && context.payload.commits) { + for (const c of context.payload.commits) { + if ((m = issuePat.exec(c.message)) !== null) { issueNum = +m[1]; break; } + } + } + + if (!issueNum) { core.info('No #issue reference found.'); return; } + + const body = `⏳ **[${process.env.GITHUB_WORKFLOW}](${process.env.RUN_URL})** has **started**…`; + const { data: comment } = await github.rest.issues.createComment({ + owner: 
context.repo.owner, + repo: context.repo.repo, + issue_number: issueNum, + body + }); + core.setOutput('comment_id', comment.id.toString()); + + # ──────────── 3. Java / Maven setup ──────────── + - name: Set up Java + uses: actions/setup-java@v4 + with: + java-version: 24 + distribution: temurin + cache: maven + + # ──────────── 4. compile ──────────── + - run: mvn -B compile --file pom.xml + + # ──────────── 5. extract FAIL_TO_PASS / PASS_TO_PASS ──────────── + - name: Extract test names + id: extract_tests + uses: actions/github-script@v7 + with: + result-encoding: string + script: | + const grab = (txt, re) => [...txt.matchAll(re)].flatMap(m => m[1].split(/[ ,]+/)); + const uniq = a => [...new Set(a.filter(Boolean))]; + + let blocks = []; + if (context.eventName === 'pull_request') { + blocks = [`${context.payload.pull_request.title}\n${context.payload.pull_request.body}`]; + } else if (context.eventName === 'push') { + blocks = context.payload.commits.map(c => c.message); + } + + const fail = blocks.flatMap(b => grab(b, /FAIL_TO_PASS:\s*([^\n]+)/gi)); + const pass = blocks.flatMap(b => grab(b, /PASS_TO_PASS:\s*([^\n]+)/gi)); + const tests = uniq([...fail, ...pass]).join(','); + core.setOutput('tests', tests); + + # ──────────── 6. run tests ──────────── + - name: Run selected tests + if: ${{ steps.extract_tests.outputs.tests }} + run: mvn -B -Dtest="${{ steps.extract_tests.outputs.tests }}" test + + - name: Run all tests + if: ${{ steps.extract_tests.outputs.tests == '' }} + run: mvn -B test --file pom.xml + + # ──────────── 7. 
update the same comment (FINAL) ──────────── + - name: Update issue comment with final status + if: always() + uses: actions/github-script@v7 + env: + COMMENT_ID: ${{ steps.create_comment.outputs.comment_id }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + JOB_STATUS: ${{ job.status }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + if (!process.env.COMMENT_ID) { + core.info('No comment to update.'); return; + } + const statusEmoji = { + success: '✅', + failure: '❌', + cancelled: '🟡' + }[process.env.JOB_STATUS] || '🟡'; + + const body = `${statusEmoji} **[${process.env.GITHUB_WORKFLOW}](${process.env.RUN_URL})** finished with status **${process.env.JOB_STATUS.toUpperCase()}**.`; + + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID), + body + }); From 63b9e17282f091e9765ac5e8c11ef2cc76a00ab1 Mon Sep 17 00:00:00 2001 From: Evgenii Zakharchenko Date: Fri, 18 Jul 2025 12:31:40 +0200 Subject: [PATCH 2/7] skip jacoco code coverage for tests run on CI --- .github/workflows/maven.yml | 6 +++--- pom.xml | 5 +++++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index ec7a4400f..24aeeef62 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -63,7 +63,7 @@ jobs: cache: maven # ──────────── 4. compile ──────────── - - run: mvn -B compile --file pom.xml + - run: mvn -B -Djacoco.skip=true compile --file pom.xml # ──────────── 5. extract FAIL_TO_PASS / PASS_TO_PASS ──────────── - name: Extract test names @@ -90,11 +90,11 @@ jobs: # ──────────── 6. 
run tests ──────────── - name: Run selected tests if: ${{ steps.extract_tests.outputs.tests }} - run: mvn -B -Dtest="${{ steps.extract_tests.outputs.tests }}" test + run: mvn -B -Dtest="${{ steps.extract_tests.outputs.tests }}" -Djacoco.skip=true test - name: Run all tests if: ${{ steps.extract_tests.outputs.tests == '' }} - run: mvn -B test --file pom.xml + run: mvn -B -Djacoco.skip=true test --file pom.xml # ──────────── 7. update the same comment (FINAL) ──────────── - name: Update issue comment with final status diff --git a/pom.xml b/pom.xml index 8fcfc2320..b4bbd5e3e 100644 --- a/pom.xml +++ b/pom.xml @@ -49,6 +49,8 @@ spring-petclinic-rest https://sonarcloud.io + false + @@ -236,6 +238,9 @@ org.jacoco jacoco-maven-plugin + + ${jacoco.skip} + report From 7d47f4b328fd12bfbe4754921b24e62ea0a7dd96 Mon Sep 17 00:00:00 2001 From: Evgenii Zakharchenko Date: Fri, 18 Jul 2025 12:49:14 +0200 Subject: [PATCH 3/7] tests run on CI --- .github/workflows/maven.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index 24aeeef62..89b23ea9f 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -2,9 +2,9 @@ name: Run Tests on: push: - branches: [ "main", "scenario/*", "eval/*", "feature/*" ] + branches: [ "main", "master", "scenario/*", "eval/*", "feature/*" ] pull_request: - branches: [ "main", "scenario/*", "eval/*", "feature/*" ] + branches: [ "main", "master", "scenario/*", "eval/*", "feature/*" ] jobs: build: From 23548cf0a198e51a699a0f97366482290a43a7f8 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 26 Jul 2025 18:44:06 +0000 Subject: [PATCH 4/7] Add workflow: maven.yml --- .github/workflows/maven.yml | 500 ++++++++++++++++++++++++++++++++---- 1 file changed, 450 insertions(+), 50 deletions(-) diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index 89b23ea9f..1c5ff5a72 100644 --- a/.github/workflows/maven.yml +++ 
b/.github/workflows/maven.yml @@ -2,27 +2,370 @@ name: Run Tests on: push: - branches: [ "main", "master", "scenario/*", "eval/*", "feature/*" ] + branches: [ "main", "scenario/*", "eval/*", "feature/*" ] pull_request: - branches: [ "main", "master", "scenario/*", "eval/*", "feature/*" ] + branches: [ "main", "scenario/*", "eval/*", "feature/*" ] + issue_comment: + types: [created] jobs: - build: + # ──────────── 1. collect and process tests ──────────── + collect-process-tests: runs-on: ubuntu-latest permissions: - issues: write # so we can create & edit comments contents: read + outputs: + fail_to_pass: ${{ steps.combine.outputs.fail_to_pass }} + pass_to_pass: ${{ steps.combine.outputs.pass_to_pass }} + tests: ${{ steps.combine.outputs.tests }} + comment_id: ${{ steps.combine.outputs.comment_id }} + if: ${{ github.event_name != 'issue_comment' || contains(github.event.comment.body, 'FAIL_TO_PASS') || contains(github.event.comment.body, 'PASS_TO_PASS') }} + steps: + - uses: actions/checkout@v4 + + # ─── 1.1 collect issue numbers based on event type ─── + - name: Collect issue numbers based on event type + id: collect_issues + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ github.event.pull_request.number }} + REPO: ${{ github.repository }} + run: | + # Initialize issue numbers variable + ISSUE_NUMBERS="" + + # Handle different event types + if [[ "${{ github.event_name }}" == "pull_request" ]]; then + echo "Collecting issue numbers from commits in PR #$PR_NUMBER" + + # Get all commits in the PR + COMMITS=$(gh api repos/$REPO/pulls/$PR_NUMBER/commits --jq '.[].sha') + + # Initialize an empty array for issue numbers + ISSUE_NUMBERS_ARRAY=() + + # For each commit, extract linked issue numbers + for COMMIT in $COMMITS; do + echo "Processing commit $COMMIT" + + # Get commit message + COMMIT_MSG=$(gh api repos/$REPO/commits/$COMMIT --jq '.commit.message') + + # Extract issue numbers using regex (e.g., #123, fixes #456, etc.) 
+ ISSUES=$(echo "$COMMIT_MSG" | grep -o '#[0-9]\+' | sed 's/#//') + + if [ -n "$ISSUES" ]; then + echo "Found issues in commit $COMMIT: $ISSUES" + # Add to our array + for ISSUE in $ISSUES; do + ISSUE_NUMBERS_ARRAY+=("$ISSUE") + done + fi + done + + # Remove duplicates and create JSON array + UNIQUE_ISSUES=$(echo "${ISSUE_NUMBERS_ARRAY[@]}" | tr ' ' '\n' | sort -u) + + if [ -z "$UNIQUE_ISSUES" ]; then + echo "No issue numbers found in commit messages, using PR number as fallback" + ISSUE_NUMBERS="[\"${{ github.event.pull_request.number }}\"]" + else + # Convert to JSON array + ISSUE_NUMBERS=$(echo "$UNIQUE_ISSUES" | jq -R . | jq -s .) + fi + elif [[ "${{ github.event_name }}" == "push" ]]; then + echo "Extracting issue numbers from commit message" + + # Get commit message + COMMIT_MSG="${{ github.event.head_commit.message }}" + + # Extract issue numbers using regex (e.g., #123, fixes #456, etc.) + ISSUES=$(echo "$COMMIT_MSG" | grep -o '#[0-9]\+' | sed 's/#//') + + if [ -n "$ISSUES" ]; then + echo "Found issues in commit message: $ISSUES" + + # Initialize an empty array for issue numbers + ISSUE_NUMBERS_ARRAY=() + + # Add to our array + for ISSUE in $ISSUES; do + ISSUE_NUMBERS_ARRAY+=("$ISSUE") + done + + # Remove duplicates and create JSON array + UNIQUE_ISSUES=$(echo "${ISSUE_NUMBERS_ARRAY[@]}" | tr ' ' '\n' | sort -u) + + # Convert to JSON array + ISSUE_NUMBERS=$(echo "$UNIQUE_ISSUES" | jq -R . | jq -s .) + else + echo "No issue numbers found in commit message, using empty array as fallback" + ISSUE_NUMBERS="[\"\"]" + fi + elif [[ "${{ github.event_name }}" == "issue_comment" ]]; then + echo "Using issue number from comment event" + ISSUE_NUMBERS="[\"${{ github.event.issue.number }}\"]" + else + echo "Using fallback issue number from inputs" + ISSUE_NUMBERS="[\"\"]" + fi + + echo "Found issue numbers: $ISSUE_NUMBERS" + # Escape the JSON string for GitHub Actions output + ESCAPED_ISSUE_NUMBERS=$(echo "$ISSUE_NUMBERS" | jq -c .) 
+ echo "issue_numbers=$ESCAPED_ISSUE_NUMBERS" >> $GITHUB_OUTPUT + + # ─── 1.2 extract test names from issues ─── + - name: Extract test names for issues + id: extract_tests + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBERS: ${{ steps.collect_issues.outputs.issue_numbers }} + REPO: ${{ github.repository }} + run: | + # Initialize arrays for test results + FAIL_TO_PASS=() + PASS_TO_PASS=() + TESTS=() + COMMENT_ID="" + + # Process each issue number + for ISSUE_NUMBER in $(echo $ISSUE_NUMBERS | jq -r '.[]'); do + if [[ -z "$ISSUE_NUMBER" || "$ISSUE_NUMBER" == "null" ]]; then + continue + fi + + echo "Processing issue #$ISSUE_NUMBER" + + # Function to extract FAIL_TO_PASS and PASS_TO_PASS from text + extract_test_fields() { + local text="$1" + local fail_to_pass="" + local pass_to_pass="" + + if [[ -n "$text" ]]; then + # Find FAIL_TO_PASS pattern + if [[ "$text" =~ FAIL_TO_PASS:[[:space:]]*([^$'\n']+) ]]; then + fail_to_pass="${BASH_REMATCH[1]}" + fi + + # Find PASS_TO_PASS pattern + if [[ "$text" =~ PASS_TO_PASS:[[:space:]]*([^$'\n']+) ]]; then + pass_to_pass="${BASH_REMATCH[1]}" + fi + fi + + echo "$fail_to_pass|$pass_to_pass" + } + + # First check issue comments + echo "Checking issue comments for test fields..." 
+ COMMENTS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/comments --jq '.[] | {id: .id, body: .body, created_at: .created_at}') + + if [[ -n "$COMMENTS" ]]; then + # Process all comments to find the latest one with FAIL_TO_PASS or PASS_TO_PASS + LATEST_COMMENT_WITH_VALUES="" + LATEST_COMMENT_ID="" + LATEST_FAIL_TO_PASS="" + LATEST_PASS_TO_PASS="" + + while IFS= read -r COMMENT; do + COMMENT_BODY=$(echo "$COMMENT" | jq -r '.body') + CURRENT_COMMENT_ID=$(echo "$COMMENT" | jq -r '.id') + + RESULT=$(extract_test_fields "$COMMENT_BODY") + IFS='|' read -r COMMENT_FAIL COMMENT_PASS <<< "$RESULT" + + if [[ -n "$COMMENT_FAIL" || -n "$COMMENT_PASS" ]]; then + LATEST_COMMENT_WITH_VALUES="$COMMENT" + LATEST_COMMENT_ID="$CURRENT_COMMENT_ID" + + if [[ -n "$COMMENT_FAIL" ]]; then + LATEST_FAIL_TO_PASS="$COMMENT_FAIL" + echo "Found FAIL_TO_PASS in issue comment $CURRENT_COMMENT_ID: $COMMENT_FAIL" + fi + + if [[ -n "$COMMENT_PASS" ]]; then + LATEST_PASS_TO_PASS="$COMMENT_PASS" + echo "Found PASS_TO_PASS in issue comment $CURRENT_COMMENT_ID: $COMMENT_PASS" + fi + fi + done <<< "$COMMENTS" + + # Use values from the latest comment + if [[ -n "$LATEST_COMMENT_WITH_VALUES" ]]; then + COMMENT_ID="$LATEST_COMMENT_ID" + + if [[ -n "$LATEST_FAIL_TO_PASS" ]]; then + FAIL_TO_PASS=("$LATEST_FAIL_TO_PASS") + echo "Using FAIL_TO_PASS from latest comment $COMMENT_ID: $LATEST_FAIL_TO_PASS" + fi + + if [[ -n "$LATEST_PASS_TO_PASS" ]]; then + PASS_TO_PASS=("$LATEST_PASS_TO_PASS") + echo "Using PASS_TO_PASS from latest comment $COMMENT_ID: $LATEST_PASS_TO_PASS" + fi + fi + fi + + # If not found in comments, check commit messages + if [[ ${#FAIL_TO_PASS[@]} -eq 0 && ${#PASS_TO_PASS[@]} -eq 0 ]]; then + echo "Checking commit messages for test fields..." 
+ + # Get linked commit IDs + COMMIT_IDS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "referenced" and .commit_id != null) | .commit_id') + + if [[ -z "$COMMIT_IDS" ]]; then + echo "No directly linked commits found, checking PRs..." + + # Try to get commits from PRs + PR_NUMBERS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null) | .source.issue.number') + + if [[ -n "$PR_NUMBERS" ]]; then + for PR in $PR_NUMBERS; do + echo "Fetching commits from PR #$PR..." + PR_COMMITS=$(gh api repos/$REPO/pulls/$PR/commits --jq '.[].sha') + + if [[ -n "$PR_COMMITS" ]]; then + COMMIT_IDS="$COMMIT_IDS"$'\n'"$PR_COMMITS" + fi + done + fi + fi + + # Process commit messages to find the latest one with FAIL_TO_PASS or PASS_TO_PASS + if [[ -n "$COMMIT_IDS" ]]; then + # Variables to track the latest commit with values + LATEST_COMMIT_ID="" + LATEST_COMMIT_DATE="" + LATEST_COMMIT_FAIL="" + LATEST_COMMIT_PASS="" + + while IFS= read -r COMMIT_ID; do + if [[ -z "$COMMIT_ID" ]]; then + continue + fi + + echo "Fetching message for commit: $COMMIT_ID" + COMMIT_DATA=$(gh api repos/$REPO/commits/$COMMIT_ID --jq '{message: .commit.message, date: .commit.author.date}') + COMMIT_MSG=$(echo "$COMMIT_DATA" | jq -r '.message') + COMMIT_DATE=$(echo "$COMMIT_DATA" | jq -r '.date') + + if [[ -n "$COMMIT_MSG" ]]; then + RESULT=$(extract_test_fields "$COMMIT_MSG") + IFS='|' read -r COMMIT_FAIL COMMIT_PASS <<< "$RESULT" + + if [[ -n "$COMMIT_FAIL" || -n "$COMMIT_PASS" ]]; then + # Check if this commit is newer than our current latest + if [[ -z "$LATEST_COMMIT_DATE" || "$COMMIT_DATE" > "$LATEST_COMMIT_DATE" ]]; then + LATEST_COMMIT_ID="$COMMIT_ID" + LATEST_COMMIT_DATE="$COMMIT_DATE" + LATEST_COMMIT_FAIL="$COMMIT_FAIL" + LATEST_COMMIT_PASS="$COMMIT_PASS" + + if [[ -n "$COMMIT_FAIL" ]]; then + echo "Found FAIL_TO_PASS in commit $COMMIT_ID: $COMMIT_FAIL" + fi + + if [[ -n "$COMMIT_PASS" ]]; 
then + echo "Found PASS_TO_PASS in commit $COMMIT_ID: $COMMIT_PASS" + fi + fi + fi + fi + done <<< "$COMMIT_IDS" + + # Use values from the latest commit + if [[ -n "$LATEST_COMMIT_ID" ]]; then + if [[ -n "$LATEST_COMMIT_FAIL" ]]; then + FAIL_TO_PASS=("$LATEST_COMMIT_FAIL") + echo "Using FAIL_TO_PASS from latest commit $LATEST_COMMIT_ID: $LATEST_COMMIT_FAIL" + fi + + if [[ -n "$LATEST_COMMIT_PASS" ]]; then + PASS_TO_PASS=("$LATEST_COMMIT_PASS") + echo "Using PASS_TO_PASS from latest commit $LATEST_COMMIT_ID: $LATEST_COMMIT_PASS" + fi + fi + fi + fi + done + + # Convert arrays to comma-separated strings + FAIL_TO_PASS_STR=$(IFS=,; echo "${FAIL_TO_PASS[*]}") + PASS_TO_PASS_STR=$(IFS=,; echo "${PASS_TO_PASS[*]}") + + # Convert to JSON arrays if not empty + if [[ -n "$FAIL_TO_PASS_STR" ]]; then + FAIL_TO_PASS_JSON=$(echo "$FAIL_TO_PASS_STR" | jq -R -c 'split(",") | map(select(length > 0))') + else + FAIL_TO_PASS_JSON="[]" + fi + + if [[ -n "$PASS_TO_PASS_STR" ]]; then + PASS_TO_PASS_JSON=$(echo "$PASS_TO_PASS_STR" | jq -R -c 'split(",") | map(select(length > 0))') + else + PASS_TO_PASS_JSON="[]" + fi + + # Combine tests + if [[ -n "$FAIL_TO_PASS_STR" || -n "$PASS_TO_PASS_STR" ]]; then + TESTS_STR="$FAIL_TO_PASS_STR,$PASS_TO_PASS_STR" + TESTS_STR=$(echo "$TESTS_STR" | sed 's/^,//;s/,$//') + fi + + # Output results + echo "fail_to_pass=$FAIL_TO_PASS_JSON" >> $GITHUB_OUTPUT + echo "pass_to_pass=$PASS_TO_PASS_JSON" >> $GITHUB_OUTPUT + echo "tests=$TESTS_STR" >> $GITHUB_OUTPUT + echo "comment_id=$COMMENT_ID" >> $GITHUB_OUTPUT + # ─── 1.3 combine test results ─── + - name: Combine test results + id: combine + shell: bash + run: | + # Just pass through the outputs from extract_tests + echo "fail_to_pass=${{ steps.extract_tests.outputs.fail_to_pass }}" >> $GITHUB_OUTPUT + echo "pass_to_pass=${{ steps.extract_tests.outputs.pass_to_pass }}" >> $GITHUB_OUTPUT + echo "tests=${{ steps.extract_tests.outputs.tests }}" >> $GITHUB_OUTPUT + echo "comment_id=${{ 
steps.extract_tests.outputs.comment_id }}" >> $GITHUB_OUTPUT + + # ─── 1.4 check if FAIL_TO_PASS or PASS_TO_PASS found ─── + - name: Check if FAIL_TO_PASS or PASS_TO_PASS found + if: ${{ github.event_name == 'pull_request' && steps.combine.outputs.fail_to_pass == '[]' && steps.combine.outputs.pass_to_pass == '[]' }} + shell: bash + run: | + echo "::error::FAIL_TO_PASS or PASS_TO_PASS not found in commit messages or issue comments, please add FAIL_TO_PASS or PASS_TO_PASS to issue comment" + exit 1 + + # ──────────── 2. Run tests and handle comments ──────────── + run-tests-and-comments: + needs: collect-process-tests + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + if: ${{ always() && (github.event_name != 'pull_request' || needs.collect-process-tests.outputs.fail_to_pass != '[]' || needs.collect-process-tests.outputs.pass_to_pass != '[]') }} + outputs: + comment_id: ${{ steps.create_comment.outputs.comment_id }} + status: ${{ job.status }} steps: - # ──────────── 1. checkout ──────────── - uses: actions/checkout@v4 - # ──────────── 2. 
post placeholder comment (EARLY) ──────────── + # Step 1: Create placeholder comment - name: Create placeholder issue comment id: create_comment + if: ${{ github.event_name == 'push' || github.event_name == 'issue_comment' }} uses: actions/github-script@v7 env: RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_NAME: ${{ github.workflow }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + COMMENT_ID: ${{ needs.collect-process-tests.outputs.comment_id }} with: github-token: ${{ secrets.GITHUB_TOKEN }} result-encoding: string @@ -43,67 +386,105 @@ jobs: } } + // • Issue comment context + if (!issueNum && context.payload.issue) { + issueNum = context.payload.issue.number; + } + if (!issueNum) { core.info('No #issue reference found.'); return; } - const body = `⏳ **[${process.env.GITHUB_WORKFLOW}](${process.env.RUN_URL})** has **started**…`; + let bodyContent = ''; + + if (!process.env.COMMENT_ID){ + if (process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { + // Parse JSON array and convert to comma-separated string + core.info('FAIL_TO_PASS: '+process.env.FAIL_TO_PASS); + const failToPassArray = JSON.parse(process.env.FAIL_TO_PASS); + const failToPassString = failToPassArray.join(', '); + bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; + } + + if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { + // Parse JSON array and convert to comma-separated string + const passToPassArray = JSON.parse(process.env.PASS_TO_PASS); + const passToPassString = passToPassArray.join(', '); + bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; + } + } + + bodyContent += `\n⏳ **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** has **started**…`; + + // If we have an existing comment ID, update it instead of creating a new one + if (false && process.env.COMMENT_ID) { + try { + // Get existing comment 
body + const { data: existingComment } = await github.rest.issues.getComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID) + }); + + // Append new content to existing body + const updatedBody = existingComment.body + '\n' + bodyContent; + + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID), + body: updatedBody + }); + core.setOutput('comment_id', process.env.COMMENT_ID); + return; + } catch (error) { + core.warning(`Failed to update comment ${process.env.COMMENT_ID}: ${error.message}`); + // Fall through to create a new comment + } + } + + // Create a new comment const { data: comment } = await github.rest.issues.createComment({ owner: context.repo.owner, - repo: context.repo.repo, + repo: context.repo.repo, issue_number: issueNum, - body + body: bodyContent }); core.setOutput('comment_id', comment.id.toString()); - # ──────────── 3. Java / Maven setup ──────────── + # Step 2: Setup Java and Maven - name: Set up Java uses: actions/setup-java@v4 with: - java-version: 24 - distribution: temurin - cache: maven - - # ──────────── 4. compile ──────────── - - run: mvn -B -Djacoco.skip=true compile --file pom.xml + java-version: '24' + distribution: 'temurin' + cache: 'maven' - # ──────────── 5. 
extract FAIL_TO_PASS / PASS_TO_PASS ──────────── - - name: Extract test names - id: extract_tests - uses: actions/github-script@v7 - with: - result-encoding: string - script: | - const grab = (txt, re) => [...txt.matchAll(re)].flatMap(m => m[1].split(/[ ,]+/)); - const uniq = a => [...new Set(a.filter(Boolean))]; - - let blocks = []; - if (context.eventName === 'pull_request') { - blocks = [`${context.payload.pull_request.title}\n${context.payload.pull_request.body}`]; - } else if (context.eventName === 'push') { - blocks = context.payload.commits.map(c => c.message); - } - - const fail = blocks.flatMap(b => grab(b, /FAIL_TO_PASS:\s*([^\n]+)/gi)); - const pass = blocks.flatMap(b => grab(b, /PASS_TO_PASS:\s*([^\n]+)/gi)); - const tests = uniq([...fail, ...pass]).join(','); - core.setOutput('tests', tests); + # Step 3: Compile project + - name: Compile project + shell: bash + run: mvn -B compile --file pom.xml - # ──────────── 6. run tests ──────────── + # Step 4: Run tests - name: Run selected tests - if: ${{ steps.extract_tests.outputs.tests }} - run: mvn -B -Dtest="${{ steps.extract_tests.outputs.tests }}" -Djacoco.skip=true test + if: ${{ needs.collect-process-tests.outputs.tests != '' }} + shell: bash + run: mvn -B -Dtest="${{ needs.collect-process-tests.outputs.tests }}" test - name: Run all tests - if: ${{ steps.extract_tests.outputs.tests == '' }} - run: mvn -B test --file -Djacoco.skip=true pom.xml + if: ${{ needs.collect-process-tests.outputs.tests == '' }} + shell: bash + run: mvn -B test --file pom.xml - # ──────────── 7. 
update the same comment (FINAL) ──────────── + # Step 5: Update comment with final status - name: Update issue comment with final status - if: always() + if: ${{ always() && (github.event_name == 'push' || github.event_name == 'issue_comment') }} uses: actions/github-script@v7 env: COMMENT_ID: ${{ steps.create_comment.outputs.comment_id }} - RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_NAME: ${{ github.workflow }} JOB_STATUS: ${{ job.status }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | @@ -116,11 +497,30 @@ jobs: cancelled: '🟡' }[process.env.JOB_STATUS] || '🟡'; - const body = `${statusEmoji} **[${process.env.GITHUB_WORKFLOW}](${process.env.RUN_URL})** finished with status **${process.env.JOB_STATUS.toUpperCase()}**.`; - + let bodyContent = ''; + if (!process.env.COMMENT_ID){ + if (process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { + // Parse JSON array and convert to comma-separated string + const quoted = process.env.FAIL_TO_PASS.replace(/(\w+)/g, '"$1"'); + const failToPassArray = JSON.parse(quoted); + const failToPassString = failToPassArray.join(', '); + bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; + } + + if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { + // Parse JSON array and convert to comma-separated string + const quoted = process.env.PASS_TO_PASS.replace(/(\w+)/g, '"$1"'); + const passToPassArray = JSON.parse(quoted); + const passToPassString = passToPassArray.join(', '); + bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; + } + } + + bodyContent += `\n${statusEmoji} **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** finished with status **${process.env.JOB_STATUS.toUpperCase()}**.`; + await 
github.rest.issues.updateComment({ owner: context.repo.owner, - repo: context.repo.repo, + repo: context.repo.repo, comment_id: Number(process.env.COMMENT_ID), - body - }); + body: bodyContent + }); \ No newline at end of file From 615ca1cade48432862c4c673cc64d6118a0e5051 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sat, 26 Jul 2025 18:47:14 +0000 Subject: [PATCH 5/7] Add workflow: pr-label-management.yml --- .github/workflows/pr-label-management.yml | 174 ++++++++++++++++++++++ 1 file changed, 174 insertions(+) create mode 100644 .github/workflows/pr-label-management.yml diff --git a/.github/workflows/pr-label-management.yml b/.github/workflows/pr-label-management.yml new file mode 100644 index 000000000..24773ffb8 --- /dev/null +++ b/.github/workflows/pr-label-management.yml @@ -0,0 +1,174 @@ +name: PR Label Management + +on: + pull_request: + types: [opened, reopened, synchronize] + pull_request_review: + types: [submitted] + +jobs: + manage-labels: + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: write + issues: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Add Review label on PR creation + if: github.event_name == 'pull_request' + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + // Add Review label to the PR + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + labels: ['Review'] + }); + + console.log(`Added Review label to PR #${context.payload.pull_request.number}`); + + // Extract related issue numbers from PR title and body + const prText = `${context.payload.pull_request.title} ${context.payload.pull_request.body || ''}`; + const issuePattern = /#(\d+)/g; + const relatedIssues = new Set(); + let match; + + while ((match = issuePattern.exec(prText)) !== null) { + relatedIssues.add(match[1]); + } + + // Also check commit messages for issue 
references + const commits = await github.rest.pulls.listCommits({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number + }); + + for (const commit of commits.data) { + const commitMessage = commit.commit.message; + while ((match = issuePattern.exec(commitMessage)) !== null) { + relatedIssues.add(match[1]); + } + } + + // Add Review label to all related issues + for (const issueNumber of relatedIssues) { + try { + // Check if issue exists + await github.rest.issues.get({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber) + }); + + // Add Review label to the issue + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber), + labels: ['Review'] + }); + + console.log(`Added Review label to related issue #${issueNumber}`); + } catch (error) { + console.log(`Error processing issue #${issueNumber}: ${error.message}`); + } + } + + - name: Handle PR approval + if: github.event_name == 'pull_request_review' && github.event.review.state == 'approved' + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const prNumber = context.payload.pull_request.number; + + // Remove Review label and add Verified label to the PR + try { + // First try to remove the Review label + await github.rest.issues.removeLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + name: 'Review' + }); + console.log(`Removed Review label from PR #${prNumber}`); + } catch (error) { + console.log(`Note: Review label might not exist on PR #${prNumber}: ${error.message}`); + } + + // Add Verified label to the PR + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + labels: ['Verified'] + }); + console.log(`Added Verified label to PR #${prNumber}`); + + // Extract related issue numbers from PR 
title and body + const prText = `${context.payload.pull_request.title} ${context.payload.pull_request.body || ''}`; + const issuePattern = /#(\d+)/g; + const relatedIssues = new Set(); + let match; + + while ((match = issuePattern.exec(prText)) !== null) { + relatedIssues.add(match[1]); + } + + // Also check commit messages for issue references + const commits = await github.rest.pulls.listCommits({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: prNumber + }); + + for (const commit of commits.data) { + const commitMessage = commit.commit.message; + while ((match = issuePattern.exec(commitMessage)) !== null) { + relatedIssues.add(match[1]); + } + } + + // Update labels on all related issues + for (const issueNumber of relatedIssues) { + try { + // Check if issue exists + await github.rest.issues.get({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber) + }); + + // Try to remove Review label from the issue + try { + await github.rest.issues.removeLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber), + name: 'Review' + }); + console.log(`Removed Review label from related issue #${issueNumber}`); + } catch (error) { + console.log(`Note: Review label might not exist on issue #${issueNumber}: ${error.message}`); + } + + // Add Verified label to the issue + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber), + labels: ['Verified'] + }); + console.log(`Added Verified label to related issue #${issueNumber}`); + } catch (error) { + console.log(`Error processing issue #${issueNumber}: ${error.message}`); + } + } \ No newline at end of file From 9c55911628bae3cf337485a3cbda54fdabc9bea2 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Wed, 13 Aug 2025 08:18:01 +0000 Subject: [PATCH 6/7] Add files (2): maven.yml,verify_java_dataset_instance.sh --- .github/workflows/maven.yml | 239 
+++- .../workflows/verify_java_dataset_instance.sh | 1015 +++++++++++++++++ 2 files changed, 1213 insertions(+), 41 deletions(-) create mode 100755 .github/workflows/verify_java_dataset_instance.sh diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index 1c5ff5a72..10fe822b8 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -19,6 +19,8 @@ jobs: pass_to_pass: ${{ steps.combine.outputs.pass_to_pass }} tests: ${{ steps.combine.outputs.tests }} comment_id: ${{ steps.combine.outputs.comment_id }} + test_args: ${{ steps.combine.outputs.test_args }} + java_version: ${{ steps.combine.outputs.java_version }} if: ${{ github.event_name != 'issue_comment' || contains(github.event.comment.body, 'FAIL_TO_PASS') || contains(github.event.comment.body, 'PASS_TO_PASS') }} steps: - uses: actions/checkout@v4 @@ -322,7 +324,80 @@ jobs: echo "tests=$TESTS_STR" >> $GITHUB_OUTPUT echo "comment_id=$COMMENT_ID" >> $GITHUB_OUTPUT - # ─── 1.3 combine test results ─── + # ─── 1.3 extract metadata (optional) ─── + - name: Extract metadata fields + id: extract_metadata + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBERS: ${{ steps.collect_issues.outputs.issue_numbers }} + REPO: ${{ github.repository }} + run: | + set -e + TEST_ARGS="" + JAVA_VERSION="" + + parse_metadata() { + local text="$1" + local json + if [[ -n "$text" && "$text" =~ METADATA:[[:space:]]*(\{.*\}) ]]; then + json="${BASH_REMATCH[1]}" + # Normalize quotes for jq if needed + TA=$(echo "$json" | jq -r '."test_args" // empty' 2>/dev/null || true) + JV=$(echo "$json" | jq -r '."java-version" // empty' 2>/dev/null || true) + if [[ -n "$TA" ]]; then TEST_ARGS="$TA"; fi + if [[ -n "$JV" ]]; then JAVA_VERSION="$JV"; fi + fi + } + + for ISSUE_NUMBER in $(echo $ISSUE_NUMBERS | jq -r '.[]'); do + if [[ -z "$ISSUE_NUMBER" || "$ISSUE_NUMBER" == "null" ]]; then + continue + fi + + # Check issue comments (latest first) + COMMENTS=$(gh api 
repos/$REPO/issues/$ISSUE_NUMBER/comments --jq '.[] | {id: .id, body: .body, created_at: .created_at}' || true) + if [[ -n "$COMMENTS" ]]; then + LATEST_COMMENT_WITH_VALUES="" + while IFS= read -r COMMENT; do + COMMENT_BODY=$(echo "$COMMENT" | jq -r '.body') + if [[ -n "$COMMENT_BODY" ]]; then + parse_metadata "$COMMENT_BODY" + fi + done <<< "$COMMENTS" + fi + + if [[ -z "$TEST_ARGS" && -z "$JAVA_VERSION" ]]; then + # Scan linked commits + COMMIT_IDS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "referenced" and .commit_id != null) | .commit_id' || true) + if [[ -z "$COMMIT_IDS" ]]; then + PR_NUMBERS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null) | .source.issue.number' || true) + if [[ -n "$PR_NUMBERS" ]]; then + for PR in $PR_NUMBERS; do + PR_COMMITS=$(gh api repos/$REPO/pulls/$PR/commits --jq '.[].sha' || true) + [[ -n "$PR_COMMITS" ]] && COMMIT_IDS="$COMMIT_IDS"$'\n'"$PR_COMMITS" + done + fi + fi + if [[ -n "$COMMIT_IDS" ]]; then + while IFS= read -r COMMIT_ID; do + [[ -z "$COMMIT_ID" ]] && continue + COMMIT_MSG=$(gh api repos/$REPO/commits/$COMMIT_ID --jq '.commit.message' || true) + parse_metadata "$COMMIT_MSG" + done <<< "$COMMIT_IDS" + fi + fi + + # Break after first issue with metadata found + if [[ -n "$TEST_ARGS" || -n "$JAVA_VERSION" ]]; then + break + fi + done + + echo "test_args=$TEST_ARGS" >> $GITHUB_OUTPUT + echo "java_version=$JAVA_VERSION" >> $GITHUB_OUTPUT + + # ─── 1.4 combine test results ─── - name: Combine test results id: combine shell: bash @@ -332,6 +407,8 @@ jobs: echo "pass_to_pass=${{ steps.extract_tests.outputs.pass_to_pass }}" >> $GITHUB_OUTPUT echo "tests=${{ steps.extract_tests.outputs.tests }}" >> $GITHUB_OUTPUT echo "comment_id=${{ steps.extract_tests.outputs.comment_id }}" >> $GITHUB_OUTPUT + echo "test_args=${{ steps.extract_metadata.outputs.test_args }}" >> $GITHUB_OUTPUT + echo "java_version=${{ 
steps.extract_metadata.outputs.java_version }}" >> $GITHUB_OUTPUT # ─── 1.4 check if FAIL_TO_PASS or PASS_TO_PASS found ─── - name: Check if FAIL_TO_PASS or PASS_TO_PASS found @@ -450,31 +527,122 @@ jobs: }); core.setOutput('comment_id', comment.id.toString()); - # Step 2: Setup Java and Maven - - name: Set up Java - uses: actions/setup-java@v4 - with: - java-version: '24' - distribution: 'temurin' - cache: 'maven' - - # Step 3: Compile project - - name: Compile project + # Step 2: Prepare parameters for dataset verification + - name: Prepare dataset verification parameters + id: prepare_params shell: bash - run: mvn -B compile --file pom.xml + env: + EVENT_NAME: ${{ github.event_name }} + REPO: ${{ github.repository }} + CT_TEST_ARGS: ${{ needs.collect-process-tests.outputs.test_args }} + CT_JAVA_VERSION: ${{ needs.collect-process-tests.outputs.java_version }} + run: | + set -e + # Determine base and head SHAs + if [[ "$EVENT_NAME" == "pull_request" ]]; then + BASE_SHA="${{ github.event.pull_request.base.sha }}" + HEAD_SHA="${{ github.event.pull_request.head.sha }}" + ISSUE_NUMBER="${{ github.event.pull_request.number }}" + elif [[ "$EVENT_NAME" == "push" ]]; then + BASE_SHA="${{ github.event.before }}" + HEAD_SHA="${{ github.sha }}" + ISSUE_NUMBER="$(echo "${{ github.event.head_commit.message }}" | grep -oE '#[0-9]+' | head -n1 | tr -d '#')" + elif [[ "$EVENT_NAME" == "issue_comment" ]]; then + BASE_SHA="$(git rev-parse HEAD~1 || echo "")" + HEAD_SHA="${{ github.sha }}" + ISSUE_NUMBER="${{ github.event.issue.number }}" + else + BASE_SHA="$(git rev-parse HEAD~1 || echo "")" + HEAD_SHA="${{ github.sha }}" + ISSUE_NUMBER="" + fi + echo "Base: $BASE_SHA" + echo "Head: $HEAD_SHA" - # Step 4: Run tests - - name: Run selected tests - if: ${{ needs.collect-process-tests.outputs.tests != '' }} - shell: bash - run: mvn -B -Dtest="${{ needs.collect-process-tests.outputs.tests }}" test + # Ensure we have history + git fetch --prune --unshallow || true + git fetch --all 
--tags || true
+
+          # Generate patches
+          PATCH_CONTENT="$(git diff $BASE_SHA $HEAD_SHA -- . ':(exclude)src/test*' || true)"
+          TEST_PATCH_CONTENT="$(git diff $BASE_SHA $HEAD_SHA -- 'src/test*' || true)"
+
+          # Derived parameters
+          TEST_ARGS="${CT_TEST_ARGS}"
+          JAVA_VERSION="${CT_JAVA_VERSION}"
+          # Normalize nulls and set defaults
+          if [[ "$TEST_ARGS" == "null" ]]; then TEST_ARGS=""; fi
+          if [[ -z "$JAVA_VERSION" || "$JAVA_VERSION" == "null" ]]; then JAVA_VERSION="24"; fi
+          OWNER="${{ github.repository_owner }}"
+          REPO_NAME="${REPO#*/}"
+          if [[ -n "$ISSUE_NUMBER" ]]; then
+            INSTANCE_ID="${OWNER}__${REPO_NAME}__${ISSUE_NUMBER}"
+          else
+            INSTANCE_ID=""
+          fi
+
+          # Export as outputs (with multiline values)
+          {
+            echo "base_sha=$BASE_SHA"
+            echo "head_sha=$HEAD_SHA"
+            echo "instance_id=$INSTANCE_ID"
+          } >> "$GITHUB_OUTPUT"

-      - name: Run all tests
-        if: ${{ needs.collect-process-tests.outputs.tests == '' }}
+          echo "PATCH<<PATCH_EOF" >> $GITHUB_OUTPUT
+          echo "$PATCH_CONTENT" >> $GITHUB_OUTPUT
+          echo "PATCH_EOF" >> $GITHUB_OUTPUT
+          echo "TEST_PATCH<<TEST_PATCH_EOF" >> $GITHUB_OUTPUT
+          echo "$TEST_PATCH_CONTENT" >> $GITHUB_OUTPUT
+          echo "TEST_PATCH_EOF" >> $GITHUB_OUTPUT
+
+          echo "test_args=$TEST_ARGS" >> $GITHUB_OUTPUT
+          echo "java_version=$JAVA_VERSION" >> $GITHUB_OUTPUT
+
+      # Step 3: Run dataset verifier script
+      - name: Run dataset verifier
+        id: run_verifier
         shell: bash
-        run: mvn -B test --file pom.xml
+        env:
+          REPO: ${{ github.repository }}
+          FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }}
+          PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }}
+          PATCH: ${{ steps.prepare_params.outputs.PATCH }}
+          TEST_PATCH: ${{ steps.prepare_params.outputs.TEST_PATCH }}
+          COMMIT: ${{ steps.prepare_params.outputs.base_sha }}
+          TEST_ARGS: ${{ steps.prepare_params.outputs.test_args }}
+          JAVA_VERSION: ${{ steps.prepare_params.outputs.java_version }}
+          INSTANCE_ID: ${{ steps.prepare_params.outputs.instance_id }}
+        run: |
+          set -e
+          chmod +x 
infrastructure/shared/.github/workflows/verify_java_dataset_instance.sh + OUTPUT_FILE="$(mktemp)" + infrastructure/shared/.github/workflows/verify_java_dataset_instance.sh \ + "$REPO" \ + "$COMMIT" \ + "$PATCH" \ + "$TEST_PATCH" \ + "$FAIL_TO_PASS" \ + "$PASS_TO_PASS" \ + "$TEST_ARGS" \ + "true" \ + "$JAVA_VERSION" \ + "$INSTANCE_ID" \ + false \ + true | tee "$OUTPUT_FILE" + VERDICT="$(tail -n1 "$OUTPUT_FILE")" + echo "verdict=$VERDICT" >> $GITHUB_OUTPUT + if [[ "$VERDICT" == "✅" ]]; then + echo "result=success" >> $GITHUB_OUTPUT + echo "emoji=✅" >> $GITHUB_OUTPUT + echo "reason=All checks passed" >> $GITHUB_OUTPUT + else + echo "result=failure" >> $GITHUB_OUTPUT + echo "emoji=❌" >> $GITHUB_OUTPUT + CLEAN_REASON="${VERDICT#❌ }" + echo "reason=$CLEAN_REASON" >> $GITHUB_OUTPUT + fi - # Step 5: Update comment with final status + # Step 4: Update comment with final status from verifier - name: Update issue comment with final status if: ${{ always() && (github.event_name == 'push' || github.event_name == 'issue_comment') }} uses: actions/github-script@v7 @@ -482,42 +650,31 @@ jobs: COMMENT_ID: ${{ steps.create_comment.outputs.comment_id }} RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} WORKFLOW_NAME: ${{ github.workflow }} - JOB_STATUS: ${{ job.status }} + RESULT: ${{ steps.run_verifier.outputs.result }} + EMOJI: ${{ steps.run_verifier.outputs.emoji }} + REASON: ${{ steps.run_verifier.outputs.reason }} FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | - if (!process.env.COMMENT_ID) { - core.info('No comment to update.'); return; - } - const statusEmoji = { - success: '✅', - failure: '❌', - cancelled: '🟡' - }[process.env.JOB_STATUS] || '🟡'; - + if (!process.env.COMMENT_ID) { core.info('No comment to update.'); return; } let bodyContent = ''; if (!process.env.COMMENT_ID){ if 
(process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { - // Parse JSON array and convert to comma-separated string - const quoted = process.env.FAIL_TO_PASS.replace(/(\w+)/g, '"$1"'); - const failToPassArray = JSON.parse(quoted); + const failToPassArray = JSON.parse(process.env.FAIL_TO_PASS); const failToPassString = failToPassArray.join(', '); bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; } - if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { - // Parse JSON array and convert to comma-separated string - const quoted = process.env.PASS_TO_PASS.replace(/(\w+)/g, '"$1"'); - const passToPassArray = JSON.parse(quoted); + const passToPassArray = JSON.parse(process.env.PASS_TO_PASS); const passToPassString = passToPassArray.join(', '); bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; } } - - bodyContent += `\n${statusEmoji} **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** finished with status **${process.env.JOB_STATUS.toUpperCase()}**.`; - + const emoji = process.env.EMOJI || '🟡'; + const reason = process.env.REASON ? `: ${process.env.REASON}` : ''; + bodyContent += `\n${emoji} **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** finished${reason}`; await github.rest.issues.updateComment({ owner: context.repo.owner, repo: context.repo.repo, diff --git a/.github/workflows/verify_java_dataset_instance.sh b/.github/workflows/verify_java_dataset_instance.sh new file mode 100755 index 000000000..fd1fe8ba3 --- /dev/null +++ b/.github/workflows/verify_java_dataset_instance.sh @@ -0,0 +1,1015 @@ +#!/bin/bash + +# This script handles test dataset instance processing for SWE benchmarks +# It accepts parameters for repository, commit, patches, test information, etc. 
+ +set -o pipefail + +# Parse input parameters +REPO="$1" +COMMIT="$2" +PATCH="$3" +TEST_PATCH="$4" +FAIL_TO_PASS="$5" +PASS_TO_PASS="$6" +TEST_ARGS="$7" +IS_MAVEN=$(echo "$8" | tr '[:upper:]' '[:lower:]') +JAVA_VERSION="$9" +INSTANCE_ID="${10}" + +# Validate required parameters +if [[ -z "$REPO" || "$REPO" == "null" ]]; then + echo "❌ Required parameter 'repo' is missing" + exit 1 +fi + +# Default Java version if not specified +if [[ -z "$JAVA_VERSION" || "$JAVA_VERSION" == "null" ]]; then + JAVA_VERSION="24" + echo "ℹ️ Java version not specified, using default: $JAVA_VERSION" +fi + +# Convert is_maven to lowercase +IS_MAVEN=$(echo "$IS_MAVEN" | tr '[:upper:]' '[:lower:]') + +# Use repository name for Docker image if instance ID is not provided +if [[ -z "$INSTANCE_ID" || "$INSTANCE_ID" == "null" ]]; then + INSTANCE_ID="auto-$(basename "$REPO" | tr '[:upper:]' '[:lower:]')-$(date +%s)" + echo "ℹ️ Auto-generated instance ID: $INSTANCE_ID" +fi + +REPO_URL="git@github.com:$REPO" + +# Function to determine container naming strategy +determine_container_name() { + local name_by_repo="$1" + local instance_id="$2" + local repo="$3" + + if [ "$name_by_repo" = true ]; then + # Use repository name (replace slashes with dashes, convert to lowercase) + local repo_safe=$(echo "$repo" | tr '/' '-' | tr '[:upper:]' '[:lower:]') + echo "swe-benchmark-$repo_safe" + else + # Use instance ID (default, convert to lowercase) + echo "swe-benchmark-$(echo "$instance_id" | tr '[:upper:]' '[:lower:]')" + fi +} + +# Function to check Docker environment +check_docker_environment() { + echo "🐳 Checking Docker environment..." + if ! command -v docker &> /dev/null; then + echo "❌ Docker is not installed. Please install Docker and try again." + echo " Visit: https://docs.docker.com/get-docker/" + exit 1 + fi + + if ! docker info &> /dev/null; then + echo "❌ Docker daemon is not running or not accessible." + echo " Please start Docker Desktop or Docker daemon and try again." 
+ echo " On macOS: Start Docker Desktop application" + echo " On Linux: sudo systemctl start docker" + exit 1 + fi + + echo "✅ Docker environment is ready" +} + +# Function to create Dockerfile +create_dockerfile() { + local java_version="$1" + + cat > Dockerfile << EOF +FROM eclipse-temurin:${java_version}-jdk + +# Install Git, jq, and other utilities (Docker CLI will be available via socket mount) +RUN apt-get update && \\ + apt-get install -y \\ + git \\ + jq \\ + patch \\ + openssh-client \\ + wget \\ + unzip \\ + ca-certificates \\ + curl && \\ + rm -rf /var/lib/apt/lists/* + +# Install Docker CLI only (no daemon needed) +RUN curl -fsSL https://download.docker.com/linux/static/stable/x86_64/docker-24.0.7.tgz | \\ + tar xzf - --strip 1 -C /usr/local/bin docker/docker + +# Set up SSH for git clone (if needed) +RUN mkdir -p /root/.ssh && ssh-keyscan github.com >> /root/.ssh/known_hosts + +# Set working directory +WORKDIR /workspace + +# Default command +CMD ["/bin/bash"] +EOF +} + +# Function to create setup script +create_setup_script() { + SETUP_SCRIPT="setup_project.sh" + cat > "$SETUP_SCRIPT" << 'EOF' +#!/bin/bash + +set -e + +REPO_URL="$1" +COMMIT="$2" +IS_MAVEN="$3" + +# Source common helper functions +source /workspace/common_functions.sh + +echo "📋 Setting up project" +echo "📦 Repository: $REPO_URL" +echo "🏷️ Commit: $COMMIT" + +# Clone repository +REPO_NAME=$(basename "$REPO_URL" .git) +echo "📥 Cloning repository..." +if ! git clone "$REPO_URL" "$REPO_NAME"; then + echo "❌ Failed to clone repository. Trying HTTPS..." + HTTPS_URL=$(echo "$REPO_URL" | sed 's|git@github.com:|https://github.com/|') + git clone "$HTTPS_URL" "$REPO_NAME" +fi + +cd "$REPO_NAME" + +# Checkout specific commit +echo "🔁 Checking out commit $COMMIT..." +git checkout "$COMMIT" + +# Make gradlew executable if it exists +if [ -f "./gradlew" ]; then + chmod +x ./gradlew + + # Check if gradle-wrapper.jar exists, if not generate it + if [ ! 
-f "./gradle/wrapper/gradle-wrapper.jar" ]; then + echo "🔧 Gradle wrapper JAR missing, initializing wrapper..." + # Always use system Gradle first to generate wrapper + if command -v gradle &> /dev/null; then + echo "🔧 Using system Gradle to initialize wrapper..." + gradle wrapper --no-daemon || { + echo "❌ Failed: Failed to initialize Gradle wrapper using system Gradle" + exit 1 + } + else + # Install Gradle temporarily to generate wrapper + echo "🔧 Installing Gradle to initialize wrapper..." + wget -O gradle.zip https://services.gradle.org/distributions/gradle-9.0.0-bin.zip || { + echo "❌ Failed: Failed to download Gradle" + exit 1 + } + unzip -q gradle.zip || { + echo "❌ Failed: Failed to unzip Gradle" + exit 1 + } + chmod +x gradle-9.0.0/bin/gradle + ./gradle-9.0.0/bin/gradle wrapper --no-daemon || { + echo "❌ Failed: Failed to initialize Gradle wrapper using downloaded Gradle" + exit 1 + } + rm -rf gradle.zip gradle-9.0.0 + fi + + # Verify wrapper was created successfully + if [ ! -f "./gradle/wrapper/gradle-wrapper.jar" ]; then + echo "❌ Failed: Failed to create Gradle wrapper JAR" + exit 1 + fi + + echo "✅ Gradle wrapper initialized" + fi +fi + +# Make mvn executable if it exists +if [ -f "./mvnw" ]; then + chmod +x ./mvnw +fi + +# Compile project and download dependencies +echo "🏗️ Compiling project and downloading dependencies..." +if [[ "$IS_MAVEN" == "true" ]]; then + # Try Maven wrapper first, then fallback to system Maven + MAVEN_CMD="" + if [ -f "./mvnw" ]; then + echo "🔧 Using Maven wrapper (./mvnw)" + MAVEN_CMD="./mvnw" + elif command -v mvn &> /dev/null; then + echo "🔧 Using system Maven" + MAVEN_CMD="mvn" + else + echo "🔧 Maven not found, installing..." 
+ install_build_tools + MAVEN_CMD="mvn" + fi + + echo "🔧 Running Maven compile: $MAVEN_CMD compile test-compile" + $MAVEN_CMD compile test-compile 2>&1 | tee compile_output.log +else + # Try Gradle wrapper first, then fallback to system Gradle + GRADLE_CMD="" + if [ -f "./gradlew" ]; then + echo "🔧 Using Gradle wrapper (./gradlew)" + GRADLE_CMD="./gradlew" + elif command -v gradle &> /dev/null; then + echo "🔧 Using system Gradle" + GRADLE_CMD="gradle" + else + echo "🔧 Gradle not found, installing..." + install_build_tools + GRADLE_CMD="gradle" + fi + + echo "🔧 Running Gradle compile: $GRADLE_CMD compileJava compileTestJava" + $GRADLE_CMD compileJava compileTestJava 2>&1 | tee compile_output.log +fi + +echo "✅ Project setup and compilation completed" +EOF + chmod +x "$SETUP_SCRIPT" +} + +# Function to create test script +create_test_script() { + local patch="$1" + local test_patch="$2" + local instance_id="$3" + local fail_to_pass="$4" + local pass_to_pass="$5" + local test_args="$6" + local is_maven="$7" + local commit="$8" + local repo_url="$9" + + # Write parameters to a separate file to avoid quote issues + PARAMS_FILE="test_params.env" + cat > "$PARAMS_FILE" << EOF +PATCH=$(printf '%q' "$patch") +TEST_PATCH=$(printf '%q' "$test_patch") +INSTANCE_ID=$(printf '%q' "$instance_id") +FAIL_TO_PASS=$(printf '%q' "$fail_to_pass") +PASS_TO_PASS=$(printf '%q' "$pass_to_pass") +TEST_ARGS=$(printf '%q' "$test_args") +IS_MAVEN=$(printf '%q' "$is_maven") +COMMIT=$(printf '%q' "$commit") +REPO_URL=$(printf '%q' "$repo_url") +EOF + + TEST_SCRIPT="run_tests.sh" + cat > "$TEST_SCRIPT" << 'EOF' +#!/bin/bash +set -o pipefail + +# Resolve module (Maven/Gradle) for a given fully-qualified test name (optionally with method). +# Prints the module directory relative to repo root (e.g., "service/order") and returns 0 on success. 
+find_module_for_test() { + local fqn="$1" + + # Normalize: strip method suffix (#method or (..)) and any "module::" prefix + local fqn_no_method="${fqn%%[#(]*}" + fqn_no_method="${fqn_no_method#*::}" + fqn_no_method="$(echo "$fqn_no_method" | xargs)" + + if [[ -z "$fqn_no_method" ]]; then + return 1 + fi + + local class_name="${fqn_no_method##*.}" + local package_name="${fqn_no_method%.*}" + local pkg_path="${package_name//./\/}" + + # roots to search for tests + local roots=( + "src/test/java" "src/test/kotlin" "src/test/groovy" + "src/integrationTest/java" "src/integrationTest/kotlin" "src/integrationTest/groovy" + "src/it/java" "src/it/kotlin" "src/it/groovy" + ) + local exts=("java" "kt" "groovy") + + local matches=() + for root in "${roots[@]}"; do + for ext in "${exts[@]}"; do + local suffix="$root" + if [[ -n "$pkg_path" ]]; then + suffix="$suffix/$pkg_path/$class_name.$ext" + else + suffix="$suffix/$class_name.$ext" + fi + # find files under any module + while IFS= read -r f; do + matches+=("$f") + done < <(find . 
-type f -path "*/$suffix" 2>/dev/null) + done + done + + if [[ ${#matches[@]} -eq 0 ]]; then + return 1 + fi + + local best_mod="" best_score=999999 + for f in "${matches[@]}"; do + local mod="${f%/src/*}" + mod="${mod#./}" + + # score: prefer modules that look like Maven/Gradle projects & shallower paths + local score=0 + [[ -f "$mod/pom.xml" ]] && score=$((score-3)) + [[ -f "$mod/build.gradle" || -f "$mod/build.gradle.kts" ]] && score=$((score-3)) + + # bias by build tool if known + if [[ "$IS_MAVEN" == "true" && -f "$mod/pom.xml" ]]; then + score=$((score-2)) + fi + if [[ "$IS_MAVEN" == "false" && ( -f "$mod/build.gradle" || -f "$mod/build.gradle.kts" ) ]]; then + score=$((score-2)) + fi + + # shallower is better + local depth="${mod//[^\/]/}" + score=$((score + ${#depth})) + + if (( score < best_score )) || [[ -z "$best_mod" ]]; then + best_mod="$mod" + best_score=$score + fi + done + + if [[ -n "$best_mod" ]]; then + echo "$best_mod" + return 0 + fi + return 1 +} + +# Note: Not using 'set -e' to allow continuation even if patches fail + +# Load parameters from environment file +source /workspace/test_params.env + +# Source common helper functions +source /workspace/common_functions.sh + +echo "📋 Running tests for instance: $INSTANCE_ID" + +# Navigate to the already cloned and compiled project (robust detection) +project_dir="" +# Prefer a directory with a .git folder +for dir in /workspace/*; do + if [ -d "$dir" ] && [ -d "$dir/.git" ]; then + project_dir="$dir" + break + fi +done +# Fallback to first directory under /workspace +if [ -z "$project_dir" ]; then + for dir in /workspace/*; do + if [ -d "$dir" ]; then + project_dir="$dir" + break + fi + done +fi +# If still not found, attempt to clone using REPO_URL +if [ -z "$project_dir" ]; then + if [[ -n "$REPO_URL" && "$REPO_URL" != "null" ]]; then + repo_name=$(basename "$REPO_URL" .git) + echo "📥 Cloning repository into container: $REPO_URL" + if ! 
git clone "$REPO_URL" "/workspace/$repo_name"; then + echo "❌ SSH clone failed, trying HTTPS..." + https_url=$(echo "$REPO_URL" | sed 's|git@github.com:|https://github.com/|') + git clone "$https_url" "/workspace/$repo_name" + fi + project_dir="/workspace/$repo_name" + fi +fi +# Final check +if [ -z "$project_dir" ] || [ ! -d "$project_dir" ]; then + echo "❌ No project directory found in prepared container" + exit 1 +fi + +cd "$project_dir" +REPO_NAME=$(basename "$project_dir") +echo "📁 Working in project directory: $REPO_NAME" + +# Get the current commit hash from environment or git +COMMIT_HASH=$(git rev-parse HEAD) +echo "🔄 Current commit: $COMMIT_HASH" + +# Reset to clean state before applying patches +echo "🧹 Resetting to clean state..." +git reset --hard HEAD +git clean -fd + +# Verify we're in a clean state +if [ -n "$(git status --porcelain)" ]; then + echo "⚠️ Warning: Repository not completely clean after reset" + git status --short +fi + +# Checkout specific commit +echo "🔁 Checking out commit $COMMIT..." +git checkout "$COMMIT" +git reset --hard HEAD +git clean -fd + +# Define run_test_class early so it can be used before later redefinition +run_test_class() { + local test_name="$1" + local test_type="$2" + + echo "Running $test_type test: $test_class" + + # Check if we have test_args and if it's not "null" + local test_args_param="" + if [[ -n "$TEST_ARGS" && "$TEST_ARGS" != "null" ]]; then + test_args_param="$TEST_ARGS" + echo "📋 Using test args: $test_args_param" + fi + + # Split "module::fqn" if provided + local module_name="" + if [[ "$test_name" == *"::"* ]]; then + module_name="${test_name%%::*}" + test_name="${test_name#*::}" + fi + + # If module not given, try to auto-detect via package path + if [[ -z "$module_name" ]]; then + if module_name="$(find_module_for_test "$test_name")"; then + echo "🧭 Auto-detected module for test '$test_name' -> '$module_name'" + else + echo "⚠️ Could not auto-detect module for test '$test_name'. 
Falling back to root." + fi + fi + + if [[ "$IS_MAVEN" == "true" ]]; then + # Try Maven wrapper first, then fallback to system Maven + MAVEN_CMD="" + if [ -f "./mvnw" ]; then + echo "🔧 Using Maven wrapper (./mvnw)" + MAVEN_CMD="./mvnw" + elif command -v mvn &> /dev/null; then + echo "🔧 Using system Maven" + MAVEN_CMD="mvn" + else + echo "🔧 Maven not found, installing..." + install_build_tools + MAVEN_CMD="mvn" + fi + + # Run spotless and spring check + echo "🔧 Running Maven format commands:" + $MAVEN_CMD spring-javaformat:apply | tee test_output.log + $MAVEN_CMD spotless:apply | tee test_output.log + + # Maven test execution with test_args + if [[ -n "$module_name" && "$module_name" != "." ]]; then + echo "🔧 Running Maven command: $MAVEN_CMD test $test_args_param -pl $module_name -Dtest=\"$test_name\"" + $MAVEN_CMD test $test_args_param -pl "$module_name" -Dtest="$test_name" -Dsurefire.failIfNoSpecifiedTests=true 2>&1 | tee test_output.log + else + echo "🔧 Running Maven command: $MAVEN_CMD test $test_args_param -Dtest=\"$test_name\"" + $MAVEN_CMD test $test_args_param -Dtest="$test_name" -Dsurefire.failIfNoSpecifiedTests=true 2>&1 | tee test_output.log + fi + exit_code=$? + + # Check if the test wasn't found + if grep -q "No tests matching pattern" test_output.log || grep -q "No tests were executed" test_output.log; then + echo "❌ $test_type test NOT FOUND: $test_class" + return 2 + elif [ $exit_code -eq 0 ]; then + echo "✅ $test_type test PASSED: $test_class" + return 0 + else + echo "❌ $test_type test FAILED: $test_class" + return 1 + fi + else + # Try Gradle wrapper first, then fallback to system Gradle + GRADLE_CMD="" + if [ -f "./gradlew" ]; then + echo "🔧 Using Gradle wrapper (./gradlew)" + GRADLE_CMD="./gradlew" + elif command -v gradle &> /dev/null; then + echo "🔧 Using system Gradle" + GRADLE_CMD="gradle" + else + echo "🔧 Gradle not found, installing..." 
+ install_build_tools + GRADLE_CMD="gradle" + fi + + # Run spotless and spring check + echo "🔧 Running Gradle format commands:" + $GRADLE_CMD format | tee compile_output.log + $GRADLE_CMD spotlessApply | tee compile_output.log + + # Gradle test execution with test_args + local gradle_task="test" + # Gradle: derive :a:b:c:test task for the module when known + local gradle_task="test" + if [[ -n "$module_name" && "$module_name" != "." ]]; then + gradle_task=":${module_name//\//:}:test" + fi + echo "🔧 Running Gradle command: $GRADLE_CMD $gradle_task $test_args_param --tests \"$test_name\"" + $GRADLE_CMD $gradle_task $test_args_param --tests "$test_name" 2>&1 | tee test_output.log + exit_code=$? + + # Check if the test wasn't found + if grep -q "No tests found for given includes" test_output.log || grep -q "No tests found matching" test_output.log; then + echo "❌ $test_type test FAILED (NOT FOUND): $test_class" + return 2 + elif [ $exit_code -eq 0 ]; then + echo "✅ $test_type test PASSED: $test_class" + return 0 + else + echo "❌ $test_type test FAILED: $test_class" + return 1 + fi + fi +} + +# Apply test patch with error handling first +echo "🧪 Applying test patch..." +if [ "$TEST_PATCH" != "null" ] && [ -n "$TEST_PATCH" ]; then + # Try dry run first to validate patch + if echo "$TEST_PATCH" | patch -p1 --dry-run > /dev/null 2>&1; then + echo "$TEST_PATCH" | patch -p1 + echo "✅ Test patch applied successfully" + else + echo "⚠️ Test patch dry run failed, trying with force..." + if echo "$TEST_PATCH" | patch -p1 --force --reject-file=test.rej; then + echo "✅ Test patch applied with force" + if [ -f "test.rej" ]; then + echo "⚠️ Failed: Some parts rejected - see test.rej" + cat test.rej + exit 1 + fi + else + echo "❌ Failed: Test patch failed completely" + exit 1 + fi + fi +else + echo "ℹ️ No test patch to apply" +fi + +# AFTER APPLYING TEST PATCH: Run FAIL_TO_PASS again and gate +echo "👉 Running FAIL_TO_PASS tests after applying test patch (without golden patch)..." 
+if [[ "$FAIL_TO_PASS" != "[]" && "$FAIL_TO_PASS" != "null" ]]; then + TP_FAIL_TESTS=$(echo "$FAIL_TO_PASS" | jq -r '.[]' 2>/dev/null || echo "$FAIL_TO_PASS" | tr -d '[]"' | tr ',' '\n') + tp_fail_to_pass_count=0 + tp_fail_to_pass_success=0 + tp_fail_to_pass_passed_list="" + + for test in $TP_FAIL_TESTS; do + if [[ -n "$test" && "$test" != "null" ]]; then + clean_test=$(echo "$test" | sed 's/^src://') + ((tp_fail_to_pass_count++)) + run_test_class "$clean_test" "FAIL_TO_PASS" + result=$? + if [ $result -eq 0 ]; then + ((tp_fail_to_pass_success++)) + if [[ -z "$tp_fail_to_pass_passed_list" ]]; then + tp_fail_to_pass_passed_list="$clean_test" + else + tp_fail_to_pass_passed_list="$tp_fail_to_pass_passed_list, $clean_test" + fi + elif [ $result -eq 2 ]; then + echo "⚠️ WARNING: FAIL_TO_PASS test '$clean_test' could not be found or executed" + fi + fi + done + + echo "📊 FAIL_TO_PASS summary with test patch: $tp_fail_to_pass_success of $tp_fail_to_pass_count tests passed" + if [ ${tp_fail_to_pass_success:-0} -gt 0 ]; then + echo "❌ Failed: FAIL_TO_PASS passed with test patch and without golden patch: $tp_fail_to_pass_passed_list" + exit 1 + fi +else + echo "No FAIL_TO_PASS tests to run after test patch" +fi + +# Now apply source (golden) patch with error handling +echo "🩹 Applying source patch..." +if [ "$PATCH" != "null" ] && [ -n "$PATCH" ]; then + # Show patch content for debugging + echo "📄 Source patch content (first 10 lines):" + echo "$PATCH" | head -10 + echo "..." + + # Try dry run first to validate patch + echo "🔍 Running patch dry run..." + if echo "$PATCH" | patch -p1 --dry-run > patch_dry_run.log 2>&1; then + echo "$PATCH" | patch -p1 + echo "✅ Source patch applied successfully" + else + echo "⚠️ Source patch dry run failed, analyzing..." 
+    echo "📋 Dry run output:"
+    cat patch_dry_run.log
+
+    # Check if target files exist
+    TARGET_FILES=$(echo "$PATCH" | grep "^+++" | sed 's/^+++ [ab]\///' | head -5)
+    echo "🔍 Checking target files:"
+    for file in $TARGET_FILES; do
+      if [ -f "$file" ]; then
+        echo "✅ Found: $file"
+        echo "📄 Current content around line context:"
+        # Show some context from the file
+        head -50 "$file" | tail -20
+      else
+        echo "❌ Failed: Missing: $file"
+        exit 1
+      fi
+    done
+
+    echo "⚠️ Trying patch with force and different options..."
+    # Try with different patch options
+    if echo "$PATCH" | patch -p1 --force --reject-file=source.rej --no-backup-if-mismatch; then
+      echo "✅ Source patch applied with force"
+      if [ -f "source.rej" ]; then
+        cat source.rej
+        echo "⚠️ Failed: Some parts rejected - see source.rej:"
+        exit 1
+      fi
+    else
+      echo "❌ Failed: Source patch failed completely"
+      exit 1
+    fi
+  fi
+else
+  echo "ℹ️ No source patch to apply"
+fi
+
+# Show what files were modified
+echo "📝 Modified files after patches:"
+git status --short
+
+# Add any new files created by patches to git so they get cleaned up on next run
+echo "📋 Adding new files to git for proper cleanup on next run..."
+git add -A +if [ -n "$(git status --porcelain)" ]; then + echo "✅ Added patch-created files to git index:" + git status --short +else + echo "ℹ️ No new files to add to git index" +fi + +echo "FAIL_TO_PASS tests: $FAIL_TO_PASS" +echo "PASS_TO_PASS tests: $PASS_TO_PASS" + +# Parse and run FAIL_TO_PASS tests +if [[ "$FAIL_TO_PASS" != "[]" && "$FAIL_TO_PASS" != "null" ]]; then + FAIL_TESTS=$(echo "$FAIL_TO_PASS" | jq -r '.[]' 2>/dev/null || echo "$FAIL_TO_PASS" | tr -d '[]"' | tr ',' '\n') + fail_to_pass_count=0 + fail_to_pass_success=0 + base_fail_to_pass_failed_list="" + + for test in $FAIL_TESTS; do + if [[ -n "$test" && "$test" != "null" ]]; then + # Remove "src:" prefix if present + clean_test=$(echo "$test" | sed 's/^src://') + ((fail_to_pass_count++)) + + run_test_class "$clean_test" "FAIL_TO_PASS" + result=$? + + if [ $result -eq 0 ]; then + ((fail_to_pass_success++)) + else + if [[ -z "$base_fail_to_pass_failed_list" ]]; then + base_fail_to_pass_failed_list="$clean_test" + else + base_fail_to_pass_failed_list="$base_fail_to_pass_failed_list, $clean_test" + fi + fi + fi + done + + echo "📊 FAIL_TO_PASS summary: $fail_to_pass_success of $fail_to_pass_count tests passed" + if [ ${fail_to_pass_count:-0} -gt 0 ] && [ ${fail_to_pass_success:-0} -lt ${fail_to_pass_count:-0} ]; then + echo "❌ Failed: FAIL_TO_PASS tests must all pass. 
Failed tests: $base_fail_to_pass_failed_list" + exit 1 + fi +else + echo "No FAIL_TO_PASS tests to run" +fi + +# Parse and run PASS_TO_PASS tests +if [[ "$PASS_TO_PASS" != "[]" && "$PASS_TO_PASS" != "null" ]]; then + PASS_TESTS=$(echo "$PASS_TO_PASS" | jq -r '.[]' 2>/dev/null || echo "$PASS_TO_PASS" | tr -d '[]"' | tr ',' '\n') + pass_to_pass_count=0 + pass_to_pass_success=0 + base_pass_to_pass_failed_list="" + + for test in $PASS_TESTS; do + if [[ -n "$test" && "$test" != "null" ]]; then + # Remove "src:" prefix if present + clean_test=$(echo "$test" | sed 's/^src://') + ((pass_to_pass_count++)) + + run_test_class "$clean_test" "PASS_TO_PASS" + result=$? + + if [ $result -eq 0 ]; then + ((pass_to_pass_success++)) + else + if [[ -z "$base_pass_to_pass_failed_list" ]]; then + base_pass_to_pass_failed_list="$clean_test" + else + base_pass_to_pass_failed_list="$base_pass_to_pass_failed_list, $clean_test" + fi + fi + fi + done + + echo "📊 PASS_TO_PASS summary: $pass_to_pass_success of $pass_to_pass_count tests passed" + if [ ${pass_to_pass_count:-0} -gt 0 ] && [ ${pass_to_pass_success:-0} -lt ${pass_to_pass_count:-0} ]; then + echo "❌ Failed: PASS_TO_PASS tests must all pass. Failed tests: $base_pass_to_pass_failed_list" + exit 1 + fi +else + echo "No PASS_TO_PASS tests to run" +fi + +echo "🏁 Test execution completed for instance: $INSTANCE_ID" +EOF + chmod +x "$TEST_SCRIPT" +} + +# Function to create common functions file +create_common_functions_file() { + COMMON_FUNCTIONS="common_functions.sh" + # Remove any existing directory with this name before creating the file + if [ -d "$COMMON_FUNCTIONS" ]; then + rm -rf "$COMMON_FUNCTIONS" + fi + cat > "$COMMON_FUNCTIONS" << 'EOF' +#!/bin/bash +# Common helper functions for SWE benchmark scripts + +# Install build tools if needed based on IS_MAVEN env variable +install_build_tools() { + if [[ "$IS_MAVEN" == "true" ]]; then + if ! command -v mvn &> /dev/null; then + echo "🔧 Installing Maven..." 
+ apt-get update && apt-get install -y maven + fi + else + if ! command -v gradle &> /dev/null; then + echo "🔧 Installing Gradle..." + wget -O gradle.zip https://services.gradle.org/distributions/gradle-9.0.0-bin.zip + unzip -q gradle.zip + mv gradle-9.0.0 /opt/gradle + ln -s /opt/gradle/bin/gradle /usr/local/bin/gradle + rm gradle.zip + fi + fi +} +EOF + chmod +x "$COMMON_FUNCTIONS" +} + +# Function to build and run container for project setup +build_and_run_setup_container() { + local docker_image_name="$1" + local repo_url="$2" + local commit="$3" + local is_maven="$4" + local instance_id="$5" + + # Check if prepared container already exists + if docker image inspect "$docker_image_name-base" > /dev/null 2>&1; then + echo "✅ Prepared container already exists: $docker_image_name-base" + echo "🚀 Skipping container preparation..." + return 0 + fi + + echo "🐳 Prepared container not found, creating new one..." + + # Build base Docker image + echo "🐳 Building base Docker image: $docker_image_name-base..." + docker build -t "$docker_image_name-base" . + + # Create setup script + create_setup_script + create_common_functions_file + + # Create prepared container with project and dependencies + echo "🚀 Setting up project in container..." 
+ # Use unique setup container name per instance to avoid name conflicts + SETUP_CONTAINER_NAME="${docker_image_name}-setup-$(echo "$instance_id" | tr '[:upper:]' '[:lower:]')" + # Remove any stale container with the same name (from previous runs) + docker rm -f "$SETUP_CONTAINER_NAME" 2>/dev/null || true + + docker run -d \ + -v "$(pwd)/$SETUP_SCRIPT:/workspace/setup_project.sh" \ + -v "$(pwd)/$COMMON_FUNCTIONS:/workspace/common_functions.sh" \ + -v /var/run/docker.sock:/var/run/docker.sock \ + --privileged \ + --network bridge \ + -e TESTCONTAINERS_RYUK_DISABLED=true \ + -e TESTCONTAINERS_CHECKS_DISABLE=true \ + -e DOCKER_HOST=unix:///var/run/docker.sock \ + -e TESTCONTAINERS_HOST_OVERRIDE=host.docker.internal \ + --name "$SETUP_CONTAINER_NAME" \ + "$docker_image_name-base" \ + bash -c "/workspace/setup_project.sh '$repo_url' '$commit' '$is_maven' && sleep infinity" + + # Wait for setup to complete and show logs + echo "📋 Waiting for project setup to complete..." + docker logs -f "$SETUP_CONTAINER_NAME" & + LOGS_PID=$! + + # Wait for the setup script to finish (it will exit, leaving only sleep infinity) + while docker exec "$SETUP_CONTAINER_NAME" pgrep -f "setup_project.sh" > /dev/null 2>&1; do + sleep 2 + done + + # Kill the logs process + kill $LOGS_PID 2>/dev/null || true + + # Check if container is still running and if setup was successful + if ! 
docker ps -q -f "name=$SETUP_CONTAINER_NAME" | grep -q .; then + docker rm -f "$SETUP_CONTAINER_NAME" 2>/dev/null || true + echo "🗑️ Removing prepared container: $docker_image_name-base" + docker rmi "$docker_image_name-base" 2>/dev/null || true + return 1 + fi + + # Check container exit code to determine if setup was successful + SETUP_EXIT_CODE=$(docker inspect "$SETUP_CONTAINER_NAME" --format='{{.State.ExitCode}}') + if [ "$SETUP_EXIT_CODE" != "0" ] && [ "$SETUP_EXIT_CODE" != "null" ]; then + docker rm -f "$SETUP_CONTAINER_NAME" 2>/dev/null || true + echo "🗑️ Removing prepared container: $docker_image_name-base" + docker rmi "$docker_image_name-base" 2>/dev/null || true + echo "❌ Failed: Container preparation failed with exit code: $SETUP_EXIT_CODE" + return 1 + fi + + # Commit the container with project and dependencies + echo "💾 Creating prepared container image: $docker_image_name-base..." + docker commit "$SETUP_CONTAINER_NAME" "$docker_image_name-base" + docker rm -f "$SETUP_CONTAINER_NAME" 2>/dev/null || true + + return 0 +} + +# Function to run tests in container +run_tests_in_container() { + local docker_image_name="$1" + local patch="$2" + local test_patch="$3" + local instance_id="$4" + local fail_to_pass="$5" + local pass_to_pass="$6" + local test_args="$7" + local is_maven="$8" + local commit="$9" + local repo_url="${10}" + + # Create test script + create_test_script "$patch" "$test_patch" "$instance_id" "$fail_to_pass" "$pass_to_pass" "$test_args" "$is_maven" "$commit" "$repo_url" + create_common_functions_file + + # Run Docker container and execute tests + echo "🚀 Running Docker container..." 
+ # Create temporary file to store full output + TEMP_OUTPUT_FILE=$(mktemp) + + # Execute docker run and display output in real-time while also saving to a file + set +e + docker run --rm \ + -v "$(pwd)/$TEST_SCRIPT:/workspace/run_tests.sh" \ + -v "$(pwd)/$PARAMS_FILE:/workspace/test_params.env" \ + -v "$(pwd)/$COMMON_FUNCTIONS:/workspace/common_functions.sh" \ + -v /var/run/docker.sock:/var/run/docker.sock \ + --privileged \ + --network bridge \ + -e TESTCONTAINERS_RYUK_DISABLED=true \ + -e TESTCONTAINERS_CHECKS_DISABLE=true \ + -e DOCKER_HOST=unix:///var/run/docker.sock \ + -e TESTCONTAINERS_HOST_OVERRIDE=host.docker.internal \ + "$docker_image_name-base" \ + bash -c "/workspace/run_tests.sh" 2>&1 | tee "$TEMP_OUTPUT_FILE" + RUN_EXIT_CODE=${PIPESTATUS[0]} + + # Get the last line of output for error reporting + LAST_LINE=$(tail -n 1 "$TEMP_OUTPUT_FILE") + rm -f "$TEMP_OUTPUT_FILE" + + # Cleanup + rm -f "$TEST_SCRIPT" "$PARAMS_FILE" + + set -e + + return $RUN_EXIT_CODE +} + +# Function to cleanup resources +cleanup_resources() { + local docker_image_name="$1" + local cleanup_containers="$2" + + echo "🧹 Cleaning up..." 
+ rm -f Dockerfile + + if [ -n "$SETUP_SCRIPT" ]; then + rm -f "$SETUP_SCRIPT" + fi + + if [ -n "$COMMON_FUNCTIONS" ]; then + rm -f "$COMMON_FUNCTIONS" + fi + + # Remove prepared image with project snapshot (always remove main prepared image tag) + docker rmi "$docker_image_name" 2>/dev/null || true + + # Handle base image cleanup based on cleanup_containers flag + if [ "$cleanup_containers" = true ]; then + echo "🗑️ Removing prepared container: $docker_image_name-base" + docker rmi "$docker_image_name-base" 2>/dev/null || true + else + echo "💾 Prepared container preserved: $docker_image_name-base" + echo " Use --cleanup flag to remove containers after execution" + echo " Use 'docker rmi $docker_image_name-base' to remove manually" + fi +} + +# Main execution flow +main() { + local name_by_repo="$1" + local cleanup_containers="$2" + + # Display basic information + echo "📋 Instance: $INSTANCE_ID" + echo "📦 Repository: $REPO_URL" + echo "🏷️ Commit: $COMMIT" + echo "🧹 Cleanup containers: $cleanup_containers" + + # Determine container name + DOCKER_IMAGE_NAME=$(determine_container_name "$name_by_repo" "$INSTANCE_ID" "$REPO") + if [ "$name_by_repo" = true ]; then + echo "📋 Container name: $DOCKER_IMAGE_NAME (by repository)" + else + echo "📋 Container name: $DOCKER_IMAGE_NAME (by instance ID)" + fi + + # Check Docker environment + check_docker_environment + + # Create Dockerfile + create_dockerfile "$JAVA_VERSION" + + # Build and run setup container + build_and_run_setup_container "$DOCKER_IMAGE_NAME" "$REPO_URL" "$COMMIT" "$IS_MAVEN" "$INSTANCE_ID" + if [ $? -ne 0 ]; then + echo "❌ Failed: Setup container preparation failed" + exit 1 + fi + + # Run tests in container + run_tests_in_container "$DOCKER_IMAGE_NAME" "$PATCH" "$TEST_PATCH" "$INSTANCE_ID" "$FAIL_TO_PASS" "$PASS_TO_PASS" "$TEST_ARGS" "$IS_MAVEN" "$COMMIT" "$REPO_URL" + RUN_EXIT_CODE=$? 
+ + # Cleanup resources + cleanup_resources "$DOCKER_IMAGE_NAME" "$cleanup_containers" + + # Final result message must contain execution result + if [ $RUN_EXIT_CODE -eq 0 ]; then + echo "✅" + exit 0 + else + # Strip leading cross mark from reason to match required final message format + REASON_NO_ICON="${LAST_LINE#*❌ Failed: }" + echo "❌ Failed: $REASON_NO_ICON" + exit 0 + fi +} + +# Execute script with the provided parameters +if [ $# -ge 10 ]; then + # Default values for optional parameters + NAME_BY_REPO=false + CLEANUP_CONTAINERS=false + + # Parse additional optional parameters if provided + if [ $# -ge 11 ]; then + NAME_BY_REPO="${11}" + fi + + if [ $# -ge 12 ]; then + CLEANUP_CONTAINERS="${12}" + fi + + main "$NAME_BY_REPO" "$CLEANUP_CONTAINERS" +else + echo "❌ Failed: Usage: $0 REPO COMMIT PATCH TEST_PATCH FAIL_TO_PASS PASS_TO_PASS TEST_ARGS IS_MAVEN JAVA_VERSION INSTANCE_ID [NAME_BY_REPO] [CLEANUP_CONTAINERS]" +fi From 4cb1d422a560166891edcccdbc8479e44bf9b47e Mon Sep 17 00:00:00 2001 From: Sergei A Volkov Date: Tue, 30 Sep 2025 14:26:15 +0200 Subject: [PATCH 7/7] auto 7 --- pom.xml | 22 ++ .../samples/petclinic/cache/HybridCache.java | 130 +++++++ .../petclinic/cache/HybridCacheManager.java | 58 +++ .../petclinic/config/CacheConfiguration.java | 95 +++++ .../petclinic/config/CacheProperties.java | 46 +++ .../petclinic/service/ClinicServiceImpl.java | 32 ++ src/main/resources/application.properties | 3 + .../cache/HybridCacheIntegrationTest.java | 358 ++++++++++++++++++ .../AbstractClinicServiceTests.java | 21 +- src/test/resources/application.properties | 3 + 10 files changed, 766 insertions(+), 2 deletions(-) create mode 100644 src/main/java/org/springframework/samples/petclinic/cache/HybridCache.java create mode 100644 src/main/java/org/springframework/samples/petclinic/cache/HybridCacheManager.java create mode 100644 src/main/java/org/springframework/samples/petclinic/config/CacheConfiguration.java create mode 100644 
src/main/java/org/springframework/samples/petclinic/config/CacheProperties.java create mode 100644 src/test/java/org/springframework/samples/petclinic/cache/HybridCacheIntegrationTest.java diff --git a/pom.xml b/pom.xml index b4bbd5e3e..856d69570 100644 --- a/pom.xml +++ b/pom.xml @@ -182,6 +182,28 @@ jaxb-api ${jaxb-api.version} + + + + com.github.ben-manes.caffeine + caffeine + + + org.springframework.boot + spring-boot-starter-data-redis + + + + + org.testcontainers + testcontainers + test + + + org.testcontainers + junit-jupiter + test + diff --git a/src/main/java/org/springframework/samples/petclinic/cache/HybridCache.java b/src/main/java/org/springframework/samples/petclinic/cache/HybridCache.java new file mode 100644 index 000000000..cc0be1198 --- /dev/null +++ b/src/main/java/org/springframework/samples/petclinic/cache/HybridCache.java @@ -0,0 +1,130 @@ +/* + * Copyright 2002-2017 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.samples.petclinic.cache; + +import org.springframework.cache.Cache; + +import java.util.concurrent.Callable; + +/** + * Hybrid cache implementation combining L1 (local) and L2 (distributed) caches. + * Implements read-through and write-through patterns. 
+ */ +public class HybridCache implements Cache { + + private final String name; + private final Cache l1Cache; + private final Cache l2Cache; + + public HybridCache(String name, Cache l1Cache, Cache l2Cache) { + this.name = name; + this.l1Cache = l1Cache; + this.l2Cache = l2Cache; + } + + @Override + public String getName() { + return name; + } + + @Override + public Object getNativeCache() { + return this; + } + + @Override + public ValueWrapper get(Object key) { + // Try L1 first + ValueWrapper value = l1Cache.get(key); + if (value != null) { + return value; + } + + // Fallback to L2 + value = l2Cache.get(key); + if (value != null) { + // Populate L1 with value from L2 + l1Cache.put(key, value.get()); + } + return value; + } + + @Override + public T get(Object key, Class type) { + // Try L1 first + T value = l1Cache.get(key, type); + if (value != null) { + return value; + } + + // Fallback to L2 + value = l2Cache.get(key, type); + if (value != null) { + // Populate L1 with value from L2 + l1Cache.put(key, value); + } + return value; + } + + @Override + public T get(Object key, Callable valueLoader) { + // Try L1 first + T value = l1Cache.get(key, (Class) null); + if (value != null) { + return value; + } + + // Try L2 + value = l2Cache.get(key, (Class) null); + if (value != null) { + // Populate L1 + l1Cache.put(key, value); + return value; + } + + // Load value if not in either cache + try { + value = valueLoader.call(); + if (value != null) { + put(key, value); + } + return value; + } catch (Exception e) { + throw new ValueRetrievalException(key, valueLoader, e); + } + } + + @Override + public void put(Object key, Object value) { + // Write-through: write to both caches + l1Cache.put(key, value); + l2Cache.put(key, value); + } + + @Override + public void evict(Object key) { + // Evict from both caches + l1Cache.evict(key); + l2Cache.evict(key); + } + + @Override + public void clear() { + // Clear both caches + l1Cache.clear(); + l2Cache.clear(); + } +} \ No 
newline at end of file diff --git a/src/main/java/org/springframework/samples/petclinic/cache/HybridCacheManager.java b/src/main/java/org/springframework/samples/petclinic/cache/HybridCacheManager.java new file mode 100644 index 000000000..8709d3211 --- /dev/null +++ b/src/main/java/org/springframework/samples/petclinic/cache/HybridCacheManager.java @@ -0,0 +1,58 @@ +/* + * Copyright 2002-2017 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.samples.petclinic.cache; + +import org.springframework.cache.Cache; +import org.springframework.cache.CacheManager; + +import java.util.Collection; +import java.util.Collections; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +/** + * Hybrid cache manager that combines L1 (local) and L2 (distributed) cache managers. 
+ */ +public class HybridCacheManager implements CacheManager { + + private final CacheManager l1CacheManager; + private final CacheManager l2CacheManager; + private final ConcurrentMap cacheMap = new ConcurrentHashMap<>(); + + public HybridCacheManager(CacheManager l1CacheManager, CacheManager l2CacheManager) { + this.l1CacheManager = l1CacheManager; + this.l2CacheManager = l2CacheManager; + } + + @Override + public Cache getCache(String name) { + return cacheMap.computeIfAbsent(name, cacheName -> { + Cache l1Cache = l1CacheManager.getCache(cacheName); + Cache l2Cache = l2CacheManager.getCache(cacheName); + + if (l1Cache == null || l2Cache == null) { + return null; + } + + return new HybridCache(cacheName, l1Cache, l2Cache); + }); + } + + @Override + public Collection getCacheNames() { + return Collections.unmodifiableSet(cacheMap.keySet()); + } +} \ No newline at end of file diff --git a/src/main/java/org/springframework/samples/petclinic/config/CacheConfiguration.java b/src/main/java/org/springframework/samples/petclinic/config/CacheConfiguration.java new file mode 100644 index 000000000..24ee8479b --- /dev/null +++ b/src/main/java/org/springframework/samples/petclinic/config/CacheConfiguration.java @@ -0,0 +1,95 @@ +/* + * Copyright 2002-2017 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.springframework.samples.petclinic.config;

import com.github.benmanes.caffeine.cache.Caffeine;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.caffeine.CaffeineCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.data.redis.cache.RedisCacheConfiguration;
import org.springframework.data.redis.cache.RedisCacheManager;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.serializer.GenericJackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.RedisSerializationContext;
import org.springframework.data.redis.serializer.StringRedisSerializer;
import org.springframework.samples.petclinic.cache.HybridCacheManager;

import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Cache wiring for the PetClinic service layer.
 *
 * <p>When {@code petclinic.cache.hybrid.enabled=true}, the primary manager is
 * a {@link HybridCacheManager} combining Caffeine (L1) with Redis (L2);
 * otherwise (the default) only the local Caffeine manager is used.
 * (FIX: removed an unused field-injected CacheProperties dependency and the
 * unused ConditionalOnBean import.)
 */
@Configuration
@EnableCaching
public class CacheConfiguration {

    /** All cache names used by the @Cacheable/@CacheEvict annotations in the service layer. */
    private static final List<String> CACHE_NAMES = Arrays.asList(
        "vets", "vetById",
        "owners", "ownerById", "ownersByLastName",
        "pets", "petById",
        "petTypes", "petTypeById",
        "specialties", "specialtyById", "specialtiesByNameIn",
        "visits", "visitById", "visitsByPetId"
    );

    /** L1: in-process Caffeine cache — small, short TTL, with statistics. */
    @Bean(name = "l1CacheManager")
    public CacheManager l1CacheManager() {
        CaffeineCacheManager cacheManager = new CaffeineCacheManager();
        cacheManager.setCaffeine(Caffeine.newBuilder()
            .maximumSize(1000)
            .expireAfterWrite(10, TimeUnit.MINUTES)
            .recordStats());
        cacheManager.setCacheNames(CACHE_NAMES);
        return cacheManager;
    }

    /** L2: Redis-backed cache (JSON values, 30 min TTL); only created in hybrid mode. */
    @Bean(name = "l2CacheManager")
    @ConditionalOnProperty(name = "petclinic.cache.hybrid.enabled", havingValue = "true")
    public CacheManager l2CacheManager(RedisConnectionFactory redisConnectionFactory) {
        RedisCacheConfiguration cacheConfig = RedisCacheConfiguration.defaultCacheConfig()
            .entryTtl(Duration.ofMinutes(30))
            .serializeKeysWith(RedisSerializationContext.SerializationPair.fromSerializer(new StringRedisSerializer()))
            .serializeValuesWith(RedisSerializationContext.SerializationPair.fromSerializer(new GenericJackson2JsonRedisSerializer()));

        return RedisCacheManager.builder(redisConnectionFactory)
            .cacheDefaults(cacheConfig)
            .transactionAware()
            .build();
    }

    /** Primary manager in hybrid mode; parameters resolve to the beans above by name. */
    @Bean
    @Primary
    @ConditionalOnProperty(name = "petclinic.cache.hybrid.enabled", havingValue = "true")
    public CacheManager hybridCacheManager(CacheManager l1CacheManager, CacheManager l2CacheManager) {
        return new HybridCacheManager(l1CacheManager, l2CacheManager);
    }

    /** Primary manager when hybrid mode is off (the default, matchIfMissing = true). */
    @Bean
    @Primary
    @ConditionalOnProperty(name = "petclinic.cache.hybrid.enabled", havingValue = "false", matchIfMissing = true)
    public CacheManager simpleCacheManager(CacheManager l1CacheManager) {
        return l1CacheManager;
    }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.samples.petclinic.config; + +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.stereotype.Component; + +@Component +@ConfigurationProperties(prefix = "petclinic.cache") +public class CacheProperties { + + private Hybrid hybrid = new Hybrid(); + + public Hybrid getHybrid() { + return hybrid; + } + + public void setHybrid(Hybrid hybrid) { + this.hybrid = hybrid; + } + + public static class Hybrid { + private boolean enabled = false; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + } +} \ No newline at end of file diff --git a/src/main/java/org/springframework/samples/petclinic/service/ClinicServiceImpl.java b/src/main/java/org/springframework/samples/petclinic/service/ClinicServiceImpl.java index 2e82cb15e..336aec614 100644 --- a/src/main/java/org/springframework/samples/petclinic/service/ClinicServiceImpl.java +++ b/src/main/java/org/springframework/samples/petclinic/service/ClinicServiceImpl.java @@ -16,6 +16,9 @@ package org.springframework.samples.petclinic.service; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.cache.annotation.CachePut; +import org.springframework.cache.annotation.Cacheable; import org.springframework.dao.DataAccessException; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.orm.ObjectRetrievalFailureException; @@ 
-64,138 +67,161 @@ public ClinicServiceImpl( @Override @Transactional(readOnly = true) + @Cacheable("pets") public Collection findAllPets() throws DataAccessException { return petRepository.findAll(); } @Override @Transactional + @CacheEvict(value = {"pets", "petById"}, allEntries = true) public void deletePet(Pet pet) throws DataAccessException { petRepository.delete(pet); } @Override @Transactional(readOnly = true) + @Cacheable(value = "visitById", key = "#visitId", unless = "#result == null") public Visit findVisitById(int visitId) throws DataAccessException { return findEntityById(() -> visitRepository.findById(visitId)); } @Override @Transactional(readOnly = true) + @Cacheable("visits") public Collection findAllVisits() throws DataAccessException { return visitRepository.findAll(); } @Override @Transactional + @CacheEvict(value = {"visits", "visitById", "visitsByPetId"}, allEntries = true) public void deleteVisit(Visit visit) throws DataAccessException { visitRepository.delete(visit); } @Override @Transactional(readOnly = true) + @Cacheable(value = "vetById", key = "#id", unless = "#result == null") public Vet findVetById(int id) throws DataAccessException { return findEntityById(() -> vetRepository.findById(id)); } @Override @Transactional(readOnly = true) + @Cacheable("vets") public Collection findAllVets() throws DataAccessException { return vetRepository.findAll(); } @Override @Transactional + @CacheEvict(value = {"vets", "vetById"}, allEntries = true) public void saveVet(Vet vet) throws DataAccessException { vetRepository.save(vet); } @Override @Transactional + @CacheEvict(value = {"vets", "vetById"}, allEntries = true) public void deleteVet(Vet vet) throws DataAccessException { vetRepository.delete(vet); } @Override @Transactional(readOnly = true) + @Cacheable("owners") public Collection findAllOwners() throws DataAccessException { return ownerRepository.findAll(); } @Override @Transactional + @CacheEvict(value = {"owners", "ownerById", 
"ownersByLastName"}, allEntries = true) public void deleteOwner(Owner owner) throws DataAccessException { ownerRepository.delete(owner); } @Override @Transactional(readOnly = true) + @Cacheable(value = "petTypeById", key = "#petTypeId", unless = "#result == null") public PetType findPetTypeById(int petTypeId) { return findEntityById(() -> petTypeRepository.findById(petTypeId)); } @Override @Transactional(readOnly = true) + @Cacheable("petTypes") public Collection findAllPetTypes() throws DataAccessException { return petTypeRepository.findAll(); } @Override @Transactional + @CacheEvict(value = {"petTypes", "petTypeById"}, allEntries = true) public void savePetType(PetType petType) throws DataAccessException { petTypeRepository.save(petType); } @Override @Transactional + @CacheEvict(value = {"petTypes", "petTypeById"}, allEntries = true) public void deletePetType(PetType petType) throws DataAccessException { petTypeRepository.delete(petType); } @Override @Transactional(readOnly = true) + @Cacheable(value = "specialtyById", key = "#specialtyId", unless = "#result == null") public Specialty findSpecialtyById(int specialtyId) { return findEntityById(() -> specialtyRepository.findById(specialtyId)); } @Override @Transactional(readOnly = true) + @Cacheable("specialties") public Collection findAllSpecialties() throws DataAccessException { return specialtyRepository.findAll(); } @Override @Transactional + @CacheEvict(value = {"specialties", "specialtyById", "specialtiesByNameIn"}, allEntries = true) public void saveSpecialty(Specialty specialty) throws DataAccessException { specialtyRepository.save(specialty); } @Override @Transactional + @CacheEvict(value = {"specialties", "specialtyById", "specialtiesByNameIn"}, allEntries = true) public void deleteSpecialty(Specialty specialty) throws DataAccessException { specialtyRepository.delete(specialty); } @Override @Transactional(readOnly = true) + @Cacheable("petTypes") public Collection findPetTypes() throws DataAccessException 
{ return petRepository.findPetTypes(); } @Override @Transactional(readOnly = true) + @Cacheable(value = "ownerById", key = "#id", unless = "#result == null") public Owner findOwnerById(int id) throws DataAccessException { return findEntityById(() -> ownerRepository.findById(id)); } @Override @Transactional(readOnly = true) + @Cacheable(value = "petById", key = "#id", unless = "#result == null") public Pet findPetById(int id) throws DataAccessException { return findEntityById(() -> petRepository.findById(id)); } @Override @Transactional + @CacheEvict(value = {"pets", "petById"}, allEntries = true) public void savePet(Pet pet) throws DataAccessException { pet.setType(findPetTypeById(pet.getType().getId())); petRepository.save(pet); @@ -203,6 +229,7 @@ public void savePet(Pet pet) throws DataAccessException { @Override @Transactional + @CacheEvict(value = {"visits", "visitById", "visitsByPetId"}, allEntries = true) public void saveVisit(Visit visit) throws DataAccessException { visitRepository.save(visit); @@ -210,12 +237,14 @@ public void saveVisit(Visit visit) throws DataAccessException { @Override @Transactional(readOnly = true) + @Cacheable("vets") public Collection findVets() throws DataAccessException { return vetRepository.findAll(); } @Override @Transactional + @CacheEvict(value = {"owners", "ownerById", "ownersByLastName"}, allEntries = true) public void saveOwner(Owner owner) throws DataAccessException { ownerRepository.save(owner); @@ -223,18 +252,21 @@ public void saveOwner(Owner owner) throws DataAccessException { @Override @Transactional(readOnly = true) + @Cacheable(value = "ownersByLastName", key = "#lastName") public Collection findOwnerByLastName(String lastName) throws DataAccessException { return ownerRepository.findByLastName(lastName); } @Override @Transactional(readOnly = true) + @Cacheable(value = "visitsByPetId", key = "#petId") public Collection findVisitsByPetId(int petId) { return visitRepository.findByPetId(petId); } @Override 
 @Transactional(readOnly = true) + @Cacheable(value = "specialtiesByNameIn", key = "#names") public List<Specialty> findSpecialtiesByNameIn(Set<String> names) { return findEntityById(() -> specialtyRepository.findSpecialtiesByNameIn(names)); } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index bad0e5933..fdefabc28 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -48,3 +48,6 @@ logging.level.org.springframework=INFO # by default the authentication is disabled petclinic.security.enable=false +# Hybrid cache configuration +petclinic.cache.hybrid.enabled=false + diff --git a/src/test/java/org/springframework/samples/petclinic/cache/HybridCacheIntegrationTest.java b/src/test/java/org/springframework/samples/petclinic/cache/HybridCacheIntegrationTest.java new file mode 100644 index 000000000..32eb938fb --- /dev/null +++ b/src/test/java/org/springframework/samples/petclinic/cache/HybridCacheIntegrationTest.java @@ -0,0 +1,358 @@ +/* + * Copyright 2002-2017 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.samples.petclinic.cache; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cache.Cache; +import org.springframework.cache.CacheManager; +import org.springframework.samples.petclinic.model.*; +import org.springframework.samples.petclinic.service.ClinicService; +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; +import org.springframework.test.context.TestPropertySource; +import org.springframework.transaction.annotation.Transactional; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +import java.time.LocalDate; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * Integration tests for hybrid caching with L1 (Caffeine) and L2 (Redis). 
+ */ +@SpringBootTest +@Testcontainers +@TestPropertySource(properties = { + "petclinic.cache.hybrid.enabled=true" +}) +public class HybridCacheIntegrationTest { + + @Container + static GenericContainer<?> redis = new GenericContainer<>(DockerImageName.parse("redis:7-alpine")) + .withExposedPorts(6379); + + @DynamicPropertySource + static void redisProperties(DynamicPropertyRegistry registry) { + registry.add("spring.data.redis.host", redis::getHost); + registry.add("spring.data.redis.port", redis::getFirstMappedPort); + } + + @Autowired + private ClinicService clinicService; + + @Autowired + private CacheManager cacheManager; + + @Autowired + private CacheManager l1CacheManager; + + @Autowired + private CacheManager l2CacheManager; + + @BeforeEach + public void setUp() { + // Clear all caches before each test + for (String cacheName : cacheManager.getCacheNames()) { + Cache cache = cacheManager.getCache(cacheName); + if (cache != null) { + cache.clear(); + } + } + } + + @Test + public void testHybridCacheManagerIsConfigured() { + assertThat(cacheManager).isNotNull(); + assertThat(cacheManager).isInstanceOf(HybridCacheManager.class); + assertThat(l1CacheManager).isNotNull(); + assertThat(l2CacheManager).isNotNull(); + } + + @Test + @Transactional + public void testReadThroughCachingForVets() { + // First call - should hit database and cache result + Collection<Vet> vets1 = clinicService.findAllVets(); + assertThat(vets1).isNotEmpty(); + + // Verify data is cached in both L1 and L2 + Cache hybridCache = cacheManager.getCache("vets"); + Cache l1Cache = l1CacheManager.getCache("vets"); + Cache l2Cache = l2CacheManager.getCache("vets"); + + assertThat(hybridCache).isNotNull(); + assertThat(l1Cache).isNotNull(); + assertThat(l2Cache).isNotNull(); + + // Second call - should hit cache + Collection<Vet> vets2 = clinicService.findAllVets(); + assertThat(vets2).isEqualTo(vets1); + } + + @Test + @Transactional + public void testCacheByIdOperations() { + // Get a vet by ID + Vet vet = 
clinicService.findVetById(1); + assertThat(vet).isNotNull(); + + // Verify cached + Cache l1Cache = l1CacheManager.getCache("vetById"); + Cache l2Cache = l2CacheManager.getCache("vetById"); + + assertThat(l1Cache).isNotNull(); + assertThat(l2Cache).isNotNull(); + + // Get from cache + Vet cachedVet = clinicService.findVetById(1); + assertThat(cachedVet).isNotNull(); + assertThat(cachedVet.getId()).isEqualTo(vet.getId()); + } + + @Test + @Transactional + public void testCacheEvictionOnUpdate() { + // Load vets into cache + Collection<Vet> vets = clinicService.findAllVets(); + assertThat(vets).isNotEmpty(); + + // Verify cache is populated + Cache hybridCache = cacheManager.getCache("vets"); + assertThat(hybridCache).isNotNull(); + + // Update a vet - should evict cache + Vet vet = vets.iterator().next(); + vet.setFirstName("Updated"); + clinicService.saveVet(vet); + + // Verify cache was cleared - need to fetch fresh data + Collection<Vet> vetsAfterUpdate = clinicService.findAllVets(); + assertThat(vetsAfterUpdate).isNotEmpty(); + } + + @Test + @Transactional + public void testOwnerCaching() { + // Test findOwnerById caching + Owner owner = clinicService.findOwnerById(1); + assertThat(owner).isNotNull(); + + Cache l1Cache = l1CacheManager.getCache("ownerById"); + assertThat(l1Cache).isNotNull(); + + // Get from cache + Owner cachedOwner = clinicService.findOwnerById(1); + assertThat(cachedOwner).isNotNull(); + assertThat(cachedOwner.getId()).isEqualTo(owner.getId()); + } + + @Test + @Transactional + public void testOwnerByLastNameCaching() { + // Load owners by last name + Collection<Owner> owners = clinicService.findOwnerByLastName("Davis"); + assertThat(owners).isNotEmpty(); + + Cache l1Cache = l1CacheManager.getCache("ownersByLastName"); + Cache l2Cache = l2CacheManager.getCache("ownersByLastName"); + + assertThat(l1Cache).isNotNull(); + assertThat(l2Cache).isNotNull(); + + // Get from cache + Collection<Owner> cachedOwners = clinicService.findOwnerByLastName("Davis"); + 
assertThat(cachedOwners).hasSize(owners.size()); + } + + @Test + @Transactional + public void testPetCaching() { + // Test pet caching + Collection<Pet> pets = clinicService.findAllPets(); + assertThat(pets).isNotEmpty(); + + Pet pet = pets.iterator().next(); + Pet cachedPet = clinicService.findPetById(pet.getId()); + assertThat(cachedPet).isNotNull(); + } + + @Test + @Transactional + public void testPetTypeCaching() { + // Test pet type caching + Collection<PetType> petTypes = clinicService.findAllPetTypes(); + assertThat(petTypes).isNotEmpty(); + + PetType petType = petTypes.iterator().next(); + PetType cachedPetType = clinicService.findPetTypeById(petType.getId()); + assertThat(cachedPetType).isNotNull(); + assertThat(cachedPetType.getId()).isEqualTo(petType.getId()); + } + + @Test + @Transactional + public void testSpecialtyCaching() { + // Test specialty caching + Collection<Specialty> specialties = clinicService.findAllSpecialties(); + assertThat(specialties).isNotEmpty(); + + Specialty specialty = specialties.iterator().next(); + Specialty cachedSpecialty = clinicService.findSpecialtyById(specialty.getId()); + assertThat(cachedSpecialty).isNotNull(); + assertThat(cachedSpecialty.getId()).isEqualTo(specialty.getId()); + } + + @Test + @Transactional + public void testVisitCaching() { + // Test visit caching + Collection<Visit> visits = clinicService.findAllVisits(); + assertThat(visits).isNotEmpty(); + + Visit visit = visits.iterator().next(); + Visit cachedVisit = clinicService.findVisitById(visit.getId()); + assertThat(cachedVisit).isNotNull(); + assertThat(cachedVisit.getId()).isEqualTo(visit.getId()); + } + + @Test + @Transactional + public void testVisitsByPetIdCaching() { + // Get all visits to find a pet with visits + Collection<Visit> allVisits = clinicService.findAllVisits(); + assertThat(allVisits).isNotEmpty(); + + Visit visit = allVisits.iterator().next(); + Integer petId = visit.getPet().getId(); + + // Test caching of visits by pet ID + Collection<Visit> visitsByPet = 
clinicService.findVisitsByPetId(petId); + assertThat(visitsByPet).isNotEmpty(); + + // Get from cache + Collection<Visit> cachedVisitsByPet = clinicService.findVisitsByPetId(petId); + assertThat(cachedVisitsByPet).hasSize(visitsByPet.size()); + } + + @Test + @Transactional + public void testL1ToL2Fallback() { + // Load data into cache + Vet vet = clinicService.findVetById(1); + assertThat(vet).isNotNull(); + + // Clear L1 cache only + Cache l1Cache = l1CacheManager.getCache("vetById"); + assertThat(l1Cache).isNotNull(); + l1Cache.clear(); + + // Access should fall back to L2 and repopulate L1 + Vet vetFromL2 = clinicService.findVetById(1); + assertThat(vetFromL2).isNotNull(); + assertThat(vetFromL2.getId()).isEqualTo(vet.getId()); + } + + @Test + @Transactional + public void testWriteThroughOnSave() { + // Create a new specialty + Specialty specialty = new Specialty(); + specialty.setName("Test Specialty"); + + // Save should write through to both caches + clinicService.saveSpecialty(specialty); + + // Verify cache was evicted (write-through with evict strategy) + Cache l1Cache = l1CacheManager.getCache("specialties"); + Cache l2Cache = l2CacheManager.getCache("specialties"); + + assertThat(l1Cache).isNotNull(); + assertThat(l2Cache).isNotNull(); + + // Load data - should fetch fresh from database + Collection<Specialty> specialties = clinicService.findAllSpecialties(); + assertThat(specialties).isNotEmpty(); + } + + @Test + @Transactional + public void testCacheConsistencyAcrossLayers() { + // Load owner + Owner owner = clinicService.findOwnerById(1); + assertThat(owner).isNotNull(); + + // Verify both layers have the data + Cache l1Cache = l1CacheManager.getCache("ownerById"); + Cache l2Cache = l2CacheManager.getCache("ownerById"); + + assertThat(l1Cache).isNotNull(); + assertThat(l2Cache).isNotNull(); + + Cache.ValueWrapper l1Value = l1Cache.get(1); + Cache.ValueWrapper l2Value = l2Cache.get(1); + + assertThat(l1Value).isNotNull(); + assertThat(l2Value).isNotNull(); + + // Update 
owner - should evict both caches + owner.setFirstName("Updated Name"); + clinicService.saveOwner(owner); + + // Verify both caches are cleared + l1Value = l1Cache.get(1); + l2Value = l2Cache.get(1); + + // After eviction, caches should be empty or the get should trigger a reload + // Depending on implementation, we just verify the update worked + Owner updatedOwner = clinicService.findOwnerById(1); + assertThat(updatedOwner).isNotNull(); + assertThat(updatedOwner.getFirstName()).isEqualTo("Updated Name"); + } + + @Test + @Transactional + public void testSpecialtiesByNameInCaching() { + // Load some specialties first + Collection<Specialty> allSpecialties = clinicService.findAllSpecialties(); + assertThat(allSpecialties).isNotEmpty(); + + // Get first specialty name + Specialty specialty = allSpecialties.iterator().next(); + Set<String> names = new HashSet<>(); + names.add(specialty.getName()); + + // Test caching with name set + var specialties = clinicService.findSpecialtiesByNameIn(names); + assertThat(specialties).isNotEmpty(); + + // Verify caching + Cache l1Cache = l1CacheManager.getCache("specialtiesByNameIn"); + assertThat(l1Cache).isNotNull(); + + // Get from cache + var cachedSpecialties = clinicService.findSpecialtiesByNameIn(names); + assertThat(cachedSpecialties).hasSize(specialties.size()); + } +} \ No newline at end of file diff --git a/src/test/java/org/springframework/samples/petclinic/service/clinicService/AbstractClinicServiceTests.java b/src/test/java/org/springframework/samples/petclinic/service/clinicService/AbstractClinicServiceTests.java index 501270c98..70b8c76a3 100644 --- a/src/test/java/org/springframework/samples/petclinic/service/clinicService/AbstractClinicServiceTests.java +++ b/src/test/java/org/springframework/samples/petclinic/service/clinicService/AbstractClinicServiceTests.java @@ -15,8 +15,12 @@ */ package org.springframework.samples.petclinic.service.clinicService; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; 
import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.Cache; +import org.springframework.cache.CacheManager; import org.springframework.samples.petclinic.model.*; import org.springframework.samples.petclinic.service.ClinicService; import org.springframework.samples.petclinic.util.EntityUtils; @@ -56,6 +60,21 @@ abstract class AbstractClinicServiceTests { @Autowired protected ClinicService clinicService; + @Autowired + protected CacheManager cacheManager; + + @BeforeEach + @AfterEach + void clearCache() { + // Clear all caches before and after each test to prevent cache pollution between tests + for (String cacheName : cacheManager.getCacheNames()) { + Cache cache = cacheManager.getCache(cacheName); + if (cache != null) { + cache.clear(); + } + } + } + @Test void shouldFindOwnersByLastName() { Collection<Owner> owners = this.clinicService.findOwnerByLastName("Davis"); @@ -500,6 +519,4 @@ void shouldFindSpecialtiesByNameIn() { && actual.getId().equals(expected.getId()))).isTrue(); } } - - void clearCache() {} } diff --git a/src/test/resources/application.properties b/src/test/resources/application.properties index f57025fb5..3e1338509 100644 --- a/src/test/resources/application.properties +++ b/src/test/resources/application.properties @@ -41,3 +41,6 @@ security.ignored=/** basic.authentication.enabled=true petclinic.security.enable=true +# Hybrid cache configuration +petclinic.cache.hybrid.enabled=false +