diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index b6e31bc9..10fe822b 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -1,43 +1,683 @@ -name: Maven Build +name: Run Tests on: push: - branches: [ "main", "feature/*", "bugfix/*" ] - + branches: [ "main", "scenario/*", "eval/*", "feature/*" ] pull_request: - branches: ["main"] + branches: [ "main", "scenario/*", "eval/*", "feature/*" ] + issue_comment: + types: [created] jobs: - build: + # ──────────── 1. collect and process tests ──────────── + collect-process-tests: runs-on: ubuntu-latest - env: - DOCKER_IMAGE_NAME: ${{ secrets.DOCKERHUB_USERNAME }}/ft-feature-service + permissions: + contents: read + outputs: + fail_to_pass: ${{ steps.combine.outputs.fail_to_pass }} + pass_to_pass: ${{ steps.combine.outputs.pass_to_pass }} + tests: ${{ steps.combine.outputs.tests }} + comment_id: ${{ steps.combine.outputs.comment_id }} + test_args: ${{ steps.combine.outputs.test_args }} + java_version: ${{ steps.combine.outputs.java_version }} + if: ${{ github.event_name != 'issue_comment' || contains(github.event.comment.body, 'FAIL_TO_PASS') || contains(github.event.comment.body, 'PASS_TO_PASS') }} steps: - uses: actions/checkout@v4 - - name: Set up Java - uses: actions/setup-java@v4 - with: - java-version: 24 - distribution: 'temurin' - cache: 'maven' + # ─── 1.1 collect issue numbers based on event type ─── + - name: Collect issue numbers based on event type + id: collect_issues + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ github.event.pull_request.number }} + REPO: ${{ github.repository }} + run: | + # Initialize issue numbers variable + ISSUE_NUMBERS="" + + # Handle different event types + if [[ "${{ github.event_name }}" == "pull_request" ]]; then + echo "Collecting issue numbers from commits in PR #$PR_NUMBER" + + # Get all commits in the PR + COMMITS=$(gh api repos/$REPO/pulls/$PR_NUMBER/commits --jq '.[].sha') + + # Initialize an empty 
array for issue numbers + ISSUE_NUMBERS_ARRAY=() + + # For each commit, extract linked issue numbers + for COMMIT in $COMMITS; do + echo "Processing commit $COMMIT" + + # Get commit message + COMMIT_MSG=$(gh api repos/$REPO/commits/$COMMIT --jq '.commit.message') + + # Extract issue numbers using regex (e.g., #123, fixes #456, etc.) + ISSUES=$(echo "$COMMIT_MSG" | grep -o '#[0-9]\+' | sed 's/#//') + + if [ -n "$ISSUES" ]; then + echo "Found issues in commit $COMMIT: $ISSUES" + # Add to our array + for ISSUE in $ISSUES; do + ISSUE_NUMBERS_ARRAY+=("$ISSUE") + done + fi + done + + # Remove duplicates and create JSON array + UNIQUE_ISSUES=$(echo "${ISSUE_NUMBERS_ARRAY[@]}" | tr ' ' '\n' | sort -u) + + if [ -z "$UNIQUE_ISSUES" ]; then + echo "No issue numbers found in commit messages, using PR number as fallback" + ISSUE_NUMBERS="[\"${{ github.event.pull_request.number }}\"]" + else + # Convert to JSON array + ISSUE_NUMBERS=$(echo "$UNIQUE_ISSUES" | jq -R . | jq -s .) + fi + elif [[ "${{ github.event_name }}" == "push" ]]; then + echo "Extracting issue numbers from commit message" + + # Get commit message + COMMIT_MSG="${{ github.event.head_commit.message }}" + + # Extract issue numbers using regex (e.g., #123, fixes #456, etc.) + ISSUES=$(echo "$COMMIT_MSG" | grep -o '#[0-9]\+' | sed 's/#//') + + if [ -n "$ISSUES" ]; then + echo "Found issues in commit message: $ISSUES" + + # Initialize an empty array for issue numbers + ISSUE_NUMBERS_ARRAY=() + + # Add to our array + for ISSUE in $ISSUES; do + ISSUE_NUMBERS_ARRAY+=("$ISSUE") + done + + # Remove duplicates and create JSON array + UNIQUE_ISSUES=$(echo "${ISSUE_NUMBERS_ARRAY[@]}" | tr ' ' '\n' | sort -u) + + # Convert to JSON array + ISSUE_NUMBERS=$(echo "$UNIQUE_ISSUES" | jq -R . | jq -s .) 
+ else + echo "No issue numbers found in commit message, using empty array as fallback" + ISSUE_NUMBERS="[\"\"]" + fi + elif [[ "${{ github.event_name }}" == "issue_comment" ]]; then + echo "Using issue number from comment event" + ISSUE_NUMBERS="[\"${{ github.event.issue.number }}\"]" + else + echo "Using fallback issue number from inputs" + ISSUE_NUMBERS="[\"\"]" + fi + + echo "Found issue numbers: $ISSUE_NUMBERS" + # Escape the JSON string for GitHub Actions output + ESCAPED_ISSUE_NUMBERS=$(echo "$ISSUE_NUMBERS" | jq -c .) + echo "issue_numbers=$ESCAPED_ISSUE_NUMBERS" >> $GITHUB_OUTPUT + + # ─── 1.2 extract test names from issues ─── + - name: Extract test names for issues + id: extract_tests + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBERS: ${{ steps.collect_issues.outputs.issue_numbers }} + REPO: ${{ github.repository }} + run: | + # Initialize arrays for test results + FAIL_TO_PASS=() + PASS_TO_PASS=() + TESTS=() + COMMENT_ID="" + + # Process each issue number + for ISSUE_NUMBER in $(echo $ISSUE_NUMBERS | jq -r '.[]'); do + if [[ -z "$ISSUE_NUMBER" || "$ISSUE_NUMBER" == "null" ]]; then + continue + fi + + echo "Processing issue #$ISSUE_NUMBER" + + # Function to extract FAIL_TO_PASS and PASS_TO_PASS from text + extract_test_fields() { + local text="$1" + local fail_to_pass="" + local pass_to_pass="" + + if [[ -n "$text" ]]; then + # Find FAIL_TO_PASS pattern + if [[ "$text" =~ FAIL_TO_PASS:[[:space:]]*([^$'\n']+) ]]; then + fail_to_pass="${BASH_REMATCH[1]}" + fi + + # Find PASS_TO_PASS pattern + if [[ "$text" =~ PASS_TO_PASS:[[:space:]]*([^$'\n']+) ]]; then + pass_to_pass="${BASH_REMATCH[1]}" + fi + fi + + echo "$fail_to_pass|$pass_to_pass" + } + + # First check issue comments + echo "Checking issue comments for test fields..." 
+ COMMENTS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/comments --jq '.[] | {id: .id, body: .body, created_at: .created_at}') + + if [[ -n "$COMMENTS" ]]; then + # Process all comments to find the latest one with FAIL_TO_PASS or PASS_TO_PASS + LATEST_COMMENT_WITH_VALUES="" + LATEST_COMMENT_ID="" + LATEST_FAIL_TO_PASS="" + LATEST_PASS_TO_PASS="" + + while IFS= read -r COMMENT; do + COMMENT_BODY=$(echo "$COMMENT" | jq -r '.body') + CURRENT_COMMENT_ID=$(echo "$COMMENT" | jq -r '.id') + + RESULT=$(extract_test_fields "$COMMENT_BODY") + IFS='|' read -r COMMENT_FAIL COMMENT_PASS <<< "$RESULT" + + if [[ -n "$COMMENT_FAIL" || -n "$COMMENT_PASS" ]]; then + LATEST_COMMENT_WITH_VALUES="$COMMENT" + LATEST_COMMENT_ID="$CURRENT_COMMENT_ID" + + if [[ -n "$COMMENT_FAIL" ]]; then + LATEST_FAIL_TO_PASS="$COMMENT_FAIL" + echo "Found FAIL_TO_PASS in issue comment $CURRENT_COMMENT_ID: $COMMENT_FAIL" + fi + + if [[ -n "$COMMENT_PASS" ]]; then + LATEST_PASS_TO_PASS="$COMMENT_PASS" + echo "Found PASS_TO_PASS in issue comment $CURRENT_COMMENT_ID: $COMMENT_PASS" + fi + fi + done <<< "$COMMENTS" + + # Use values from the latest comment + if [[ -n "$LATEST_COMMENT_WITH_VALUES" ]]; then + COMMENT_ID="$LATEST_COMMENT_ID" + + if [[ -n "$LATEST_FAIL_TO_PASS" ]]; then + FAIL_TO_PASS=("$LATEST_FAIL_TO_PASS") + echo "Using FAIL_TO_PASS from latest comment $COMMENT_ID: $LATEST_FAIL_TO_PASS" + fi + + if [[ -n "$LATEST_PASS_TO_PASS" ]]; then + PASS_TO_PASS=("$LATEST_PASS_TO_PASS") + echo "Using PASS_TO_PASS from latest comment $COMMENT_ID: $LATEST_PASS_TO_PASS" + fi + fi + fi + + # If not found in comments, check commit messages + if [[ ${#FAIL_TO_PASS[@]} -eq 0 && ${#PASS_TO_PASS[@]} -eq 0 ]]; then + echo "Checking commit messages for test fields..." 
+ + # Get linked commit IDs + COMMIT_IDS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "referenced" and .commit_id != null) | .commit_id') + + if [[ -z "$COMMIT_IDS" ]]; then + echo "No directly linked commits found, checking PRs..." + + # Try to get commits from PRs + PR_NUMBERS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null) | .source.issue.number') + + if [[ -n "$PR_NUMBERS" ]]; then + for PR in $PR_NUMBERS; do + echo "Fetching commits from PR #$PR..." + PR_COMMITS=$(gh api repos/$REPO/pulls/$PR/commits --jq '.[].sha') + + if [[ -n "$PR_COMMITS" ]]; then + COMMIT_IDS="$COMMIT_IDS"$'\n'"$PR_COMMITS" + fi + done + fi + fi + + # Process commit messages to find the latest one with FAIL_TO_PASS or PASS_TO_PASS + if [[ -n "$COMMIT_IDS" ]]; then + # Variables to track the latest commit with values + LATEST_COMMIT_ID="" + LATEST_COMMIT_DATE="" + LATEST_COMMIT_FAIL="" + LATEST_COMMIT_PASS="" + + while IFS= read -r COMMIT_ID; do + if [[ -z "$COMMIT_ID" ]]; then + continue + fi + + echo "Fetching message for commit: $COMMIT_ID" + COMMIT_DATA=$(gh api repos/$REPO/commits/$COMMIT_ID --jq '{message: .commit.message, date: .commit.author.date}') + COMMIT_MSG=$(echo "$COMMIT_DATA" | jq -r '.message') + COMMIT_DATE=$(echo "$COMMIT_DATA" | jq -r '.date') + + if [[ -n "$COMMIT_MSG" ]]; then + RESULT=$(extract_test_fields "$COMMIT_MSG") + IFS='|' read -r COMMIT_FAIL COMMIT_PASS <<< "$RESULT" + + if [[ -n "$COMMIT_FAIL" || -n "$COMMIT_PASS" ]]; then + # Check if this commit is newer than our current latest + if [[ -z "$LATEST_COMMIT_DATE" || "$COMMIT_DATE" > "$LATEST_COMMIT_DATE" ]]; then + LATEST_COMMIT_ID="$COMMIT_ID" + LATEST_COMMIT_DATE="$COMMIT_DATE" + LATEST_COMMIT_FAIL="$COMMIT_FAIL" + LATEST_COMMIT_PASS="$COMMIT_PASS" + + if [[ -n "$COMMIT_FAIL" ]]; then + echo "Found FAIL_TO_PASS in commit $COMMIT_ID: $COMMIT_FAIL" + fi + + if [[ -n "$COMMIT_PASS" ]]; 
then + echo "Found PASS_TO_PASS in commit $COMMIT_ID: $COMMIT_PASS" + fi + fi + fi + fi + done <<< "$COMMIT_IDS" + + # Use values from the latest commit + if [[ -n "$LATEST_COMMIT_ID" ]]; then + if [[ -n "$LATEST_COMMIT_FAIL" ]]; then + FAIL_TO_PASS=("$LATEST_COMMIT_FAIL") + echo "Using FAIL_TO_PASS from latest commit $LATEST_COMMIT_ID: $LATEST_COMMIT_FAIL" + fi + + if [[ -n "$LATEST_COMMIT_PASS" ]]; then + PASS_TO_PASS=("$LATEST_COMMIT_PASS") + echo "Using PASS_TO_PASS from latest commit $LATEST_COMMIT_ID: $LATEST_COMMIT_PASS" + fi + fi + fi + fi + done + + # Convert arrays to comma-separated strings + FAIL_TO_PASS_STR=$(IFS=,; echo "${FAIL_TO_PASS[*]}") + PASS_TO_PASS_STR=$(IFS=,; echo "${PASS_TO_PASS[*]}") + + # Convert to JSON arrays if not empty + if [[ -n "$FAIL_TO_PASS_STR" ]]; then + FAIL_TO_PASS_JSON=$(echo "$FAIL_TO_PASS_STR" | jq -R -c 'split(",") | map(select(length > 0))') + else + FAIL_TO_PASS_JSON="[]" + fi + + if [[ -n "$PASS_TO_PASS_STR" ]]; then + PASS_TO_PASS_JSON=$(echo "$PASS_TO_PASS_STR" | jq -R -c 'split(",") | map(select(length > 0))') + else + PASS_TO_PASS_JSON="[]" + fi + + # Combine tests + if [[ -n "$FAIL_TO_PASS_STR" || -n "$PASS_TO_PASS_STR" ]]; then + TESTS_STR="$FAIL_TO_PASS_STR,$PASS_TO_PASS_STR" + TESTS_STR=$(echo "$TESTS_STR" | sed 's/^,//;s/,$//') + fi + + # Output results + echo "fail_to_pass=$FAIL_TO_PASS_JSON" >> $GITHUB_OUTPUT + echo "pass_to_pass=$PASS_TO_PASS_JSON" >> $GITHUB_OUTPUT + echo "tests=$TESTS_STR" >> $GITHUB_OUTPUT + echo "comment_id=$COMMENT_ID" >> $GITHUB_OUTPUT + + # ─── 1.3 extract metadata (optional) ─── + - name: Extract metadata fields + id: extract_metadata + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBERS: ${{ steps.collect_issues.outputs.issue_numbers }} + REPO: ${{ github.repository }} + run: | + set -e + TEST_ARGS="" + JAVA_VERSION="" + + parse_metadata() { + local text="$1" + local json + if [[ -n "$text" && "$text" =~ METADATA:[[:space:]]*(\{.*\}) ]]; then + 
json="${BASH_REMATCH[1]}" + # Normalize quotes for jq if needed + TA=$(echo "$json" | jq -r '."test_args" // empty' 2>/dev/null || true) + JV=$(echo "$json" | jq -r '."java-version" // empty' 2>/dev/null || true) + if [[ -n "$TA" ]]; then TEST_ARGS="$TA"; fi + if [[ -n "$JV" ]]; then JAVA_VERSION="$JV"; fi + fi + } - - name: Make Maven wrapper executable - run: chmod +x mvnw + for ISSUE_NUMBER in $(echo $ISSUE_NUMBERS | jq -r '.[]'); do + if [[ -z "$ISSUE_NUMBER" || "$ISSUE_NUMBER" == "null" ]]; then + continue + fi - - name: Build with Maven - run: ./mvnw -ntp verify + # Check issue comments (latest first) + COMMENTS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/comments --jq '.[] | {id: .id, body: .body, created_at: .created_at}' || true) + if [[ -n "$COMMENTS" ]]; then + LATEST_COMMENT_WITH_VALUES="" + while IFS= read -r COMMENT; do + COMMENT_BODY=$(echo "$COMMENT" | jq -r '.body') + if [[ -n "$COMMENT_BODY" ]]; then + parse_metadata "$COMMENT_BODY" + fi + done <<< "$COMMENTS" + fi + + if [[ -z "$TEST_ARGS" && -z "$JAVA_VERSION" ]]; then + # Scan linked commits + COMMIT_IDS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "referenced" and .commit_id != null) | .commit_id' || true) + if [[ -z "$COMMIT_IDS" ]]; then + PR_NUMBERS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null) | .source.issue.number' || true) + if [[ -n "$PR_NUMBERS" ]]; then + for PR in $PR_NUMBERS; do + PR_COMMITS=$(gh api repos/$REPO/pulls/$PR/commits --jq '.[].sha' || true) + [[ -n "$PR_COMMITS" ]] && COMMIT_IDS="$COMMIT_IDS"$'\n'"$PR_COMMITS" + done + fi + fi + if [[ -n "$COMMIT_IDS" ]]; then + while IFS= read -r COMMIT_ID; do + [[ -z "$COMMIT_ID" ]] && continue + COMMIT_MSG=$(gh api repos/$REPO/commits/$COMMIT_ID --jq '.commit.message' || true) + parse_metadata "$COMMIT_MSG" + done <<< "$COMMIT_IDS" + fi + fi + + # Break after first issue with metadata found + if [[ -n 
"$TEST_ARGS" || -n "$JAVA_VERSION" ]]; then + break + fi + done + + echo "test_args=$TEST_ARGS" >> $GITHUB_OUTPUT + echo "java_version=$JAVA_VERSION" >> $GITHUB_OUTPUT + + # ─── 1.4 combine test results ─── + - name: Combine test results + id: combine + shell: bash + run: | + # Just pass through the outputs from extract_tests + echo "fail_to_pass=${{ steps.extract_tests.outputs.fail_to_pass }}" >> $GITHUB_OUTPUT + echo "pass_to_pass=${{ steps.extract_tests.outputs.pass_to_pass }}" >> $GITHUB_OUTPUT + echo "tests=${{ steps.extract_tests.outputs.tests }}" >> $GITHUB_OUTPUT + echo "comment_id=${{ steps.extract_tests.outputs.comment_id }}" >> $GITHUB_OUTPUT + echo "test_args=${{ steps.extract_metadata.outputs.test_args }}" >> $GITHUB_OUTPUT + echo "java_version=${{ steps.extract_metadata.outputs.java_version }}" >> $GITHUB_OUTPUT + + # ─── 1.4 check if FAIL_TO_PASS or PASS_TO_PASS found ─── + - name: Check if FAIL_TO_PASS or PASS_TO_PASS found + if: ${{ github.event_name == 'pull_request' && steps.combine.outputs.fail_to_pass == '[]' && steps.combine.outputs.pass_to_pass == '[]' }} + shell: bash + run: | + echo "::error::FAIL_TO_PASS or PASS_TO_PASS not found in commit messages or issue comments, please add FAIL_TO_PASS or PASS_TO_PASS to issue comment" + exit 1 + + # ──────────── 2. 
Run tests and handle comments ──────────── + run-tests-and-comments: + needs: collect-process-tests + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + if: ${{ always() && (github.event_name != 'pull_request' || needs.collect-process-tests.outputs.fail_to_pass != '[]' || needs.collect-process-tests.outputs.pass_to_pass != '[]') }} + outputs: + comment_id: ${{ steps.create_comment.outputs.comment_id }} + status: ${{ job.status }} + steps: + - uses: actions/checkout@v4 - - if: ${{ github.ref == 'refs/heads/main' }} - name: Login to Docker Hub - uses: docker/login-action@v3 + # Step 1: Create placeholder comment + - name: Create placeholder issue comment + id: create_comment + if: ${{ github.event_name == 'push' || github.event_name == 'issue_comment' }} + uses: actions/github-script@v7 + env: + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_NAME: ${{ github.workflow }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + COMMENT_ID: ${{ needs.collect-process-tests.outputs.comment_id }} with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} + github-token: ${{ secrets.GITHUB_TOKEN }} + result-encoding: string + script: | + const issuePat = /#(\d+)/g; + let issueNum = null, m; + + // • PR context + if (context.payload.pull_request) { + const whole = `${context.payload.pull_request.title}\n${context.payload.pull_request.body}`; + if ((m = issuePat.exec(whole)) !== null) issueNum = +m[1]; + } + + // • Push context + if (!issueNum && context.payload.commits) { + for (const c of context.payload.commits) { + if ((m = issuePat.exec(c.message)) !== null) { issueNum = +m[1]; break; } + } + } + + // • Issue comment context + if (!issueNum && context.payload.issue) { + issueNum = context.payload.issue.number; + } + + if (!issueNum) { core.info('No #issue reference 
found.'); return; } + + let bodyContent = ''; + + if (!process.env.COMMENT_ID){ + if (process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { + // Parse JSON array and convert to comma-separated string + core.info('FAIL_TO_PASS: '+process.env.FAIL_TO_PASS); + const failToPassArray = JSON.parse(process.env.FAIL_TO_PASS); + const failToPassString = failToPassArray.join(', '); + bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; + } + + if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { + // Parse JSON array and convert to comma-separated string + const passToPassArray = JSON.parse(process.env.PASS_TO_PASS); + const passToPassString = passToPassArray.join(', '); + bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; + } + } + + bodyContent += `\n⏳ **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** has **started**…`; + + // If we have an existing comment ID, update it instead of creating a new one + if (false && process.env.COMMENT_ID) { + try { + // Get existing comment body + const { data: existingComment } = await github.rest.issues.getComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID) + }); + + // Append new content to existing body + const updatedBody = existingComment.body + '\n' + bodyContent; + + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID), + body: updatedBody + }); + core.setOutput('comment_id', process.env.COMMENT_ID); + return; + } catch (error) { + core.warning(`Failed to update comment ${process.env.COMMENT_ID}: ${error.message}`); + // Fall through to create a new comment + } + } + + // Create a new comment + const { data: comment } = await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: issueNum, + body: bodyContent + }); + core.setOutput('comment_id', comment.id.toString()); + + # Step 2: Prepare 
parameters for dataset verification + - name: Prepare dataset verification parameters + id: prepare_params + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + REPO: ${{ github.repository }} + CT_TEST_ARGS: ${{ needs.collect-process-tests.outputs.test_args }} + CT_JAVA_VERSION: ${{ needs.collect-process-tests.outputs.java_version }} + run: | + set -e + # Determine base and head SHAs + if [[ "$EVENT_NAME" == "pull_request" ]]; then + BASE_SHA="${{ github.event.pull_request.base.sha }}" + HEAD_SHA="${{ github.event.pull_request.head.sha }}" + ISSUE_NUMBER="${{ github.event.pull_request.number }}" + elif [[ "$EVENT_NAME" == "push" ]]; then + BASE_SHA="${{ github.event.before }}" + HEAD_SHA="${{ github.sha }}" + ISSUE_NUMBER="$(echo "${{ github.event.head_commit.message }}" | grep -oE '#[0-9]+' | head -n1 | tr -d '#')" + elif [[ "$EVENT_NAME" == "issue_comment" ]]; then + BASE_SHA="$(git rev-parse HEAD~1 || echo "")" + HEAD_SHA="${{ github.sha }}" + ISSUE_NUMBER="${{ github.event.issue.number }}" + else + BASE_SHA="$(git rev-parse HEAD~1 || echo "")" + HEAD_SHA="${{ github.sha }}" + ISSUE_NUMBER="" + fi + echo "Base: $BASE_SHA" + echo "Head: $HEAD_SHA" + + # Ensure we have history + git fetch --prune --unshallow || true + git fetch --all --tags || true + + # Generate patches + PATCH_CONTENT="$(git diff $BASE_SHA $HEAD_SHA -- . 
':(exclude)src/test*' || true)" + TEST_PATCH_CONTENT="$(git diff $BASE_SHA $HEAD_SHA -- 'src/test*' || true)" + + # Derived parameters + TEST_ARGS="${CT_TEST_ARGS}" + JAVA_VERSION="${CT_JAVA_VERSION}" + # Normalize nulls and set defaults + if [[ "$TEST_ARGS" == "null" ]]; then TEST_ARGS=""; fi + if [[ -z "$JAVA_VERSION" || "$JAVA_VERSION" == "null" ]]; then JAVA_VERSION="24"; fi + OWNER="${{ github.repository_owner }}" + REPO_NAME="${REPO#*/}" + if [[ -n "$ISSUE_NUMBER" ]]; then + INSTANCE_ID="${OWNER}__${REPO_NAME}__${ISSUE_NUMBER}" + else + INSTANCE_ID="" + fi + + # Export as outputs (with multiline values) + { + echo "base_sha=$BASE_SHA" + echo "head_sha=$HEAD_SHA" + echo "instance_id=$INSTANCE_ID" + } >> "$GITHUB_OUTPUT" - - if: ${{ github.ref == 'refs/heads/main' }} - name: Build and Publish Docker Image + echo "PATCH<<EOF" >> $GITHUB_OUTPUT + echo "$PATCH_CONTENT" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + echo "TEST_PATCH<<EOF" >> $GITHUB_OUTPUT + echo "$TEST_PATCH_CONTENT" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + + echo "test_args=$TEST_ARGS" >> $GITHUB_OUTPUT + echo "java_version=$JAVA_VERSION" >> $GITHUB_OUTPUT + + # Step 3: Run dataset verifier script + - name: Run dataset verifier + id: run_verifier + shell: bash + env: + REPO: ${{ github.repository }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + PATCH: ${{ steps.prepare_params.outputs.PATCH }} + TEST_PATCH: ${{ steps.prepare_params.outputs.TEST_PATCH }} + COMMIT: ${{ steps.prepare_params.outputs.base_sha }} + TEST_ARGS: ${{ steps.prepare_params.outputs.test_args }} + JAVA_VERSION: ${{ steps.prepare_params.outputs.java_version }} + INSTANCE_ID: ${{ steps.prepare_params.outputs.instance_id }} run: | - ./mvnw spring-boot:build-image -DskipTests - echo "Pushing the image $DOCKER_IMAGE_NAME to Docker Hub..." 
- docker push --all-tags $DOCKER_IMAGE_NAME \ No newline at end of file + set -e + chmod +x infrastructure/shared/.github/workflows/verify_java_dataset_instance.sh + OUTPUT_FILE="$(mktemp)" + infrastructure/shared/.github/workflows/verify_java_dataset_instance.sh \ + "$REPO" \ + "$COMMIT" \ + "$PATCH" \ + "$TEST_PATCH" \ + "$FAIL_TO_PASS" \ + "$PASS_TO_PASS" \ + "$TEST_ARGS" \ + "true" \ + "$JAVA_VERSION" \ + "$INSTANCE_ID" \ + false \ + true | tee "$OUTPUT_FILE" + VERDICT="$(tail -n1 "$OUTPUT_FILE")" + echo "verdict=$VERDICT" >> $GITHUB_OUTPUT + if [[ "$VERDICT" == "✅" ]]; then + echo "result=success" >> $GITHUB_OUTPUT + echo "emoji=✅" >> $GITHUB_OUTPUT + echo "reason=All checks passed" >> $GITHUB_OUTPUT + else + echo "result=failure" >> $GITHUB_OUTPUT + echo "emoji=❌" >> $GITHUB_OUTPUT + CLEAN_REASON="${VERDICT#❌ }" + echo "reason=$CLEAN_REASON" >> $GITHUB_OUTPUT + fi + + # Step 4: Update comment with final status from verifier + - name: Update issue comment with final status + if: ${{ always() && (github.event_name == 'push' || github.event_name == 'issue_comment') }} + uses: actions/github-script@v7 + env: + COMMENT_ID: ${{ steps.create_comment.outputs.comment_id }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_NAME: ${{ github.workflow }} + RESULT: ${{ steps.run_verifier.outputs.result }} + EMOJI: ${{ steps.run_verifier.outputs.emoji }} + REASON: ${{ steps.run_verifier.outputs.reason }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + if (!process.env.COMMENT_ID) { core.info('No comment to update.'); return; } + let bodyContent = ''; + if (!process.env.COMMENT_ID){ + if (process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { + const failToPassArray = JSON.parse(process.env.FAIL_TO_PASS); + const failToPassString = 
failToPassArray.join(', '); + bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; + } + if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { + const passToPassArray = JSON.parse(process.env.PASS_TO_PASS); + const passToPassString = passToPassArray.join(', '); + bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; + } + } + const emoji = process.env.EMOJI || '🟡'; + const reason = process.env.REASON ? `: ${process.env.REASON}` : ''; + bodyContent += `\n${emoji} **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** finished${reason}`; + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID), + body: bodyContent + }); \ No newline at end of file diff --git a/.github/workflows/pr-label-management.yml b/.github/workflows/pr-label-management.yml new file mode 100644 index 00000000..24773ffb --- /dev/null +++ b/.github/workflows/pr-label-management.yml @@ -0,0 +1,174 @@ +name: PR Label Management + +on: + pull_request: + types: [opened, reopened, synchronize] + pull_request_review: + types: [submitted] + +jobs: + manage-labels: + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: write + issues: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Add Review label on PR creation + if: github.event_name == 'pull_request' + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + // Add Review label to the PR + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + labels: ['Review'] + }); + + console.log(`Added Review label to PR #${context.payload.pull_request.number}`); + + // Extract related issue numbers from PR title and body + const prText = `${context.payload.pull_request.title} ${context.payload.pull_request.body || ''}`; + const issuePattern = /#(\d+)/g; + const 
relatedIssues = new Set(); + let match; + + while ((match = issuePattern.exec(prText)) !== null) { + relatedIssues.add(match[1]); + } + + // Also check commit messages for issue references + const commits = await github.rest.pulls.listCommits({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number + }); + + for (const commit of commits.data) { + const commitMessage = commit.commit.message; + while ((match = issuePattern.exec(commitMessage)) !== null) { + relatedIssues.add(match[1]); + } + } + + // Add Review label to all related issues + for (const issueNumber of relatedIssues) { + try { + // Check if issue exists + await github.rest.issues.get({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber) + }); + + // Add Review label to the issue + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber), + labels: ['Review'] + }); + + console.log(`Added Review label to related issue #${issueNumber}`); + } catch (error) { + console.log(`Error processing issue #${issueNumber}: ${error.message}`); + } + } + + - name: Handle PR approval + if: github.event_name == 'pull_request_review' && github.event.review.state == 'approved' + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const prNumber = context.payload.pull_request.number; + + // Remove Review label and add Verified label to the PR + try { + // First try to remove the Review label + await github.rest.issues.removeLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + name: 'Review' + }); + console.log(`Removed Review label from PR #${prNumber}`); + } catch (error) { + console.log(`Note: Review label might not exist on PR #${prNumber}: ${error.message}`); + } + + // Add Verified label to the PR + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: 
context.repo.repo, + issue_number: prNumber, + labels: ['Verified'] + }); + console.log(`Added Verified label to PR #${prNumber}`); + + // Extract related issue numbers from PR title and body + const prText = `${context.payload.pull_request.title} ${context.payload.pull_request.body || ''}`; + const issuePattern = /#(\d+)/g; + const relatedIssues = new Set(); + let match; + + while ((match = issuePattern.exec(prText)) !== null) { + relatedIssues.add(match[1]); + } + + // Also check commit messages for issue references + const commits = await github.rest.pulls.listCommits({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: prNumber + }); + + for (const commit of commits.data) { + const commitMessage = commit.commit.message; + while ((match = issuePattern.exec(commitMessage)) !== null) { + relatedIssues.add(match[1]); + } + } + + // Update labels on all related issues + for (const issueNumber of relatedIssues) { + try { + // Check if issue exists + await github.rest.issues.get({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber) + }); + + // Try to remove Review label from the issue + try { + await github.rest.issues.removeLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber), + name: 'Review' + }); + console.log(`Removed Review label from related issue #${issueNumber}`); + } catch (error) { + console.log(`Note: Review label might not exist on issue #${issueNumber}: ${error.message}`); + } + + // Add Verified label to the issue + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: parseInt(issueNumber), + labels: ['Verified'] + }); + console.log(`Added Verified label to related issue #${issueNumber}`); + } catch (error) { + console.log(`Error processing issue #${issueNumber}: ${error.message}`); + } + } \ No newline at end of file diff --git a/.github/workflows/verify_java_dataset_instance.sh 
b/.github/workflows/verify_java_dataset_instance.sh new file mode 100755 index 00000000..4f36af07 --- /dev/null +++ b/.github/workflows/verify_java_dataset_instance.sh @@ -0,0 +1,1139 @@ +name: Run Tests + +on: + push: + branches: [ "main", "scenario/*", "eval/*", "feature/*" ] + pull_request: + branches: [ "main", "scenario/*", "eval/*", "feature/*" ] + issue_comment: + types: [created] + +jobs: + # ──────────── 1. collect and process tests ──────────── + collect-process-tests: + runs-on: ubuntu-latest + permissions: + contents: read + outputs: + fail_to_pass: ${{ steps.combine.outputs.fail_to_pass }} + pass_to_pass: ${{ steps.combine.outputs.pass_to_pass }} + tests: ${{ steps.combine.outputs.tests }} + comment_id: ${{ steps.combine.outputs.comment_id }} + test_args: ${{ steps.combine.outputs.test_args }} + java_version: ${{ steps.combine.outputs.java_version }} + issue_numbers: ${{ steps.collect_issues.outputs.issue_numbers }} + if: ${{ github.event_name != 'issue_comment' || contains(github.event.comment.body, 'FAIL_TO_PASS') || contains(github.event.comment.body, 'PASS_TO_PASS') }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # ─── 1.1 collect issue numbers based on event type ─── + - name: Collect issue numbers based on event type + id: collect_issues + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ github.event.pull_request.number }} + REPO: ${{ github.repository }} + run: | + # Initialize issue numbers variable + ISSUE_NUMBERS="" + + # Handle different event types + if [[ "${{ github.event_name }}" == "pull_request" ]]; then + echo "Collecting issue numbers from commits in PR #$PR_NUMBER" + + # Get all commits in the PR + COMMITS=$(gh api repos/$REPO/pulls/$PR_NUMBER/commits --jq '.[].sha') + + # Initialize an empty array for issue numbers + ISSUE_NUMBERS_ARRAY=() + + # For each commit, extract linked issue numbers + for COMMIT in $COMMITS; do + echo "Processing commit $COMMIT" + + # Get commit message + 
COMMIT_MSG=$(gh api repos/$REPO/commits/$COMMIT --jq '.commit.message') + + # Extract issue numbers using regex (e.g., #123, fixes #456, etc.) + ISSUES=$(echo "$COMMIT_MSG" | grep -o '#[0-9]\+' | sed 's/#//') + + if [ -n "$ISSUES" ]; then + echo "Found issues in commit $COMMIT: $ISSUES" + # Add to our array + for ISSUE in $ISSUES; do + ISSUE_NUMBERS_ARRAY+=("$ISSUE") + done + fi + done + + # Remove duplicates and create JSON array + UNIQUE_ISSUES=$(echo "${ISSUE_NUMBERS_ARRAY[@]}" | tr ' ' '\n' | sort -u) + + if [ -z "$UNIQUE_ISSUES" ]; then + echo "No issue numbers found in commit messages, using PR number as fallback" + ISSUE_NUMBERS="[\"${{ github.event.pull_request.number }}\"]" + else + # Convert to JSON array + ISSUE_NUMBERS=$(echo "$UNIQUE_ISSUES" | jq -R . | jq -s .) + fi + elif [[ "${{ github.event_name }}" == "push" ]]; then + echo "Extracting issue numbers from commit message" + + # Get commit message + COMMIT_MSG="${{ github.event.head_commit.message }}" + + # Extract issue numbers using regex (e.g., #123, fixes #456, etc.) + ISSUES=$(echo "$COMMIT_MSG" | grep -o '#[0-9]\+' | sed 's/#//') + + if [ -n "$ISSUES" ]; then + echo "Found issues in commit message: $ISSUES" + + # Initialize an empty array for issue numbers + ISSUE_NUMBERS_ARRAY=() + + # Add to our array + for ISSUE in $ISSUES; do + ISSUE_NUMBERS_ARRAY+=("$ISSUE") + done + + # Remove duplicates and create JSON array + UNIQUE_ISSUES=$(echo "${ISSUE_NUMBERS_ARRAY[@]}" | tr ' ' '\n' | sort -u) + + # Convert to JSON array + ISSUE_NUMBERS=$(echo "$UNIQUE_ISSUES" | jq -R . | jq -s .) 
+ else + echo "No issue numbers found in commit message, using empty array as fallback" + ISSUE_NUMBERS="[\"\"]" + fi + elif [[ "${{ github.event_name }}" == "issue_comment" ]]; then + echo "Using issue number from comment event" + ISSUE_NUMBERS="[\"${{ github.event.issue.number }}\"]" + else + echo "Using fallback issue number from inputs" + ISSUE_NUMBERS="[\"\"]" + fi + + echo "Found issue numbers: $ISSUE_NUMBERS" + # Escape the JSON string for GitHub Actions output + ESCAPED_ISSUE_NUMBERS=$(echo "$ISSUE_NUMBERS" | jq -c .) + echo "issue_numbers=$ESCAPED_ISSUE_NUMBERS" >> $GITHUB_OUTPUT + + # ─── 1.2 extract test names from issues ─── + - name: Extract test names for issues + id: extract_tests + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBERS: ${{ steps.collect_issues.outputs.issue_numbers }} + REPO: ${{ github.repository }} + run: | + # Initialize arrays for test results + FAIL_TO_PASS=() + PASS_TO_PASS=() + TESTS=() + COMMENT_ID="" + + # Process each issue number + for ISSUE_NUMBER in $(echo $ISSUE_NUMBERS | jq -r '.[]'); do + if [[ -z "$ISSUE_NUMBER" || "$ISSUE_NUMBER" == "null" ]]; then + continue + fi + + echo "Processing issue #$ISSUE_NUMBER" + + # Function to extract FAIL_TO_PASS and PASS_TO_PASS from text + extract_test_fields() { + local text="$1" + local fail_to_pass="" + local pass_to_pass="" + + if [[ -n "$text" ]]; then + # Find FAIL_TO_PASS pattern + if [[ "$text" =~ FAIL_TO_PASS:[[:space:]]*([^$'\n']+) ]]; then + fail_to_pass="${BASH_REMATCH[1]}" + fi + + # Find PASS_TO_PASS pattern + if [[ "$text" =~ PASS_TO_PASS:[[:space:]]*([^$'\n']+) ]]; then + pass_to_pass="${BASH_REMATCH[1]}" + fi + fi + + echo "$fail_to_pass|$pass_to_pass" + } + + # First check issue comments + echo "Checking issue comments for test fields..." 
+ COMMENTS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/comments --jq '.[] | {id: .id, body: .body, created_at: .created_at}') + + if [[ -n "$COMMENTS" ]]; then + # Process all comments to find the latest one with FAIL_TO_PASS or PASS_TO_PASS + LATEST_COMMENT_WITH_VALUES="" + LATEST_COMMENT_ID="" + LATEST_FAIL_TO_PASS="" + LATEST_PASS_TO_PASS="" + + while IFS= read -r COMMENT; do + COMMENT_BODY=$(echo "$COMMENT" | jq -r '.body') + CURRENT_COMMENT_ID=$(echo "$COMMENT" | jq -r '.id') + + RESULT=$(extract_test_fields "$COMMENT_BODY") + IFS='|' read -r COMMENT_FAIL COMMENT_PASS <<< "$RESULT" + + if [[ -n "$COMMENT_FAIL" || -n "$COMMENT_PASS" ]]; then + LATEST_COMMENT_WITH_VALUES="$COMMENT" + LATEST_COMMENT_ID="$CURRENT_COMMENT_ID" + + if [[ -n "$COMMENT_FAIL" ]]; then + LATEST_FAIL_TO_PASS="$COMMENT_FAIL" + echo "Found FAIL_TO_PASS in issue comment $CURRENT_COMMENT_ID: $COMMENT_FAIL" + fi + + if [[ -n "$COMMENT_PASS" ]]; then + LATEST_PASS_TO_PASS="$COMMENT_PASS" + echo "Found PASS_TO_PASS in issue comment $CURRENT_COMMENT_ID: $COMMENT_PASS" + fi + fi + done <<< "$COMMENTS" + + # Use values from the latest comment + if [[ -n "$LATEST_COMMENT_WITH_VALUES" ]]; then + COMMENT_ID="$LATEST_COMMENT_ID" + + if [[ -n "$LATEST_FAIL_TO_PASS" ]]; then + FAIL_TO_PASS=("$LATEST_FAIL_TO_PASS") + echo "Using FAIL_TO_PASS from latest comment $COMMENT_ID: $LATEST_FAIL_TO_PASS" + fi + + if [[ -n "$LATEST_PASS_TO_PASS" ]]; then + PASS_TO_PASS=("$LATEST_PASS_TO_PASS") + echo "Using PASS_TO_PASS from latest comment $COMMENT_ID: $LATEST_PASS_TO_PASS" + fi + fi + fi + + # If not found in comments, check commit messages + if [[ ${#FAIL_TO_PASS[@]} -eq 0 && ${#PASS_TO_PASS[@]} -eq 0 ]]; then + echo "Checking commit messages for test fields..." 
+ + # Get linked commit IDs + COMMIT_IDS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "referenced" and .commit_id != null) | .commit_id') + + if [[ -z "$COMMIT_IDS" ]]; then + echo "No directly linked commits found, checking PRs..." + + # Try to get commits from PRs + PR_NUMBERS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null) | .source.issue.number') + + if [[ -n "$PR_NUMBERS" ]]; then + for PR in $PR_NUMBERS; do + echo "Fetching commits from PR #$PR..." + PR_COMMITS=$(gh api repos/$REPO/pulls/$PR/commits --jq '.[].sha') + + if [[ -n "$PR_COMMITS" ]]; then + COMMIT_IDS="$COMMIT_IDS"$'\n'"$PR_COMMITS" + fi + done + fi + fi + + # Process commit messages to find the latest one with FAIL_TO_PASS or PASS_TO_PASS + if [[ -n "$COMMIT_IDS" ]]; then + # Variables to track the latest commit with values + LATEST_COMMIT_ID="" + LATEST_COMMIT_DATE="" + LATEST_COMMIT_FAIL="" + LATEST_COMMIT_PASS="" + + while IFS= read -r COMMIT_ID; do + if [[ -z "$COMMIT_ID" ]]; then + continue + fi + + echo "Fetching message for commit: $COMMIT_ID" + COMMIT_DATA=$(gh api repos/$REPO/commits/$COMMIT_ID --jq '{message: .commit.message, date: .commit.author.date}') + COMMIT_MSG=$(echo "$COMMIT_DATA" | jq -r '.message') + COMMIT_DATE=$(echo "$COMMIT_DATA" | jq -r '.date') + + if [[ -n "$COMMIT_MSG" ]]; then + RESULT=$(extract_test_fields "$COMMIT_MSG") + IFS='|' read -r COMMIT_FAIL COMMIT_PASS <<< "$RESULT" + + if [[ -n "$COMMIT_FAIL" || -n "$COMMIT_PASS" ]]; then + # Check if this commit is newer than our current latest + if [[ -z "$LATEST_COMMIT_DATE" || "$COMMIT_DATE" > "$LATEST_COMMIT_DATE" ]]; then + LATEST_COMMIT_ID="$COMMIT_ID" + LATEST_COMMIT_DATE="$COMMIT_DATE" + LATEST_COMMIT_FAIL="$COMMIT_FAIL" + LATEST_COMMIT_PASS="$COMMIT_PASS" + + if [[ -n "$COMMIT_FAIL" ]]; then + echo "Found FAIL_TO_PASS in commit $COMMIT_ID: $COMMIT_FAIL" + fi + + if [[ -n "$COMMIT_PASS" ]]; 
then + echo "Found PASS_TO_PASS in commit $COMMIT_ID: $COMMIT_PASS" + fi + fi + fi + fi + done <<< "$COMMIT_IDS" + + # Use values from the latest commit + if [[ -n "$LATEST_COMMIT_ID" ]]; then + if [[ -n "$LATEST_COMMIT_FAIL" ]]; then + FAIL_TO_PASS=("$LATEST_COMMIT_FAIL") + echo "Using FAIL_TO_PASS from latest commit $LATEST_COMMIT_ID: $LATEST_COMMIT_FAIL" + fi + + if [[ -n "$LATEST_COMMIT_PASS" ]]; then + PASS_TO_PASS=("$LATEST_COMMIT_PASS") + echo "Using PASS_TO_PASS from latest commit $LATEST_COMMIT_ID: $LATEST_COMMIT_PASS" + fi + fi + fi + fi + done + + # Convert arrays to comma-separated strings + FAIL_TO_PASS_STR=$(IFS=,; echo "${FAIL_TO_PASS[*]}") + PASS_TO_PASS_STR=$(IFS=,; echo "${PASS_TO_PASS[*]}") + + # Convert to JSON arrays if not empty + if [[ -n "$FAIL_TO_PASS_STR" ]]; then + FAIL_TO_PASS_JSON=$(echo "$FAIL_TO_PASS_STR" | jq -R -c 'split(",") | map(select(length > 0))') + else + FAIL_TO_PASS_JSON="[]" + fi + + if [[ -n "$PASS_TO_PASS_STR" ]]; then + PASS_TO_PASS_JSON=$(echo "$PASS_TO_PASS_STR" | jq -R -c 'split(",") | map(select(length > 0))') + else + PASS_TO_PASS_JSON="[]" + fi + + # Combine tests + if [[ -n "$FAIL_TO_PASS_STR" || -n "$PASS_TO_PASS_STR" ]]; then + TESTS_STR="$FAIL_TO_PASS_STR,$PASS_TO_PASS_STR" + TESTS_STR=$(echo "$TESTS_STR" | sed 's/^,//;s/,$//') + fi + + # Output results + echo "fail_to_pass=$FAIL_TO_PASS_JSON" >> $GITHUB_OUTPUT + echo "pass_to_pass=$PASS_TO_PASS_JSON" >> $GITHUB_OUTPUT + echo "tests=$TESTS_STR" >> $GITHUB_OUTPUT + echo "comment_id=$COMMENT_ID" >> $GITHUB_OUTPUT + + # ─── 1.3 extract metadata (optional) ─── + - name: Extract metadata fields + id: extract_metadata + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ISSUE_NUMBERS: ${{ steps.collect_issues.outputs.issue_numbers }} + REPO: ${{ github.repository }} + run: | + set -e + TEST_ARGS="" + JAVA_VERSION="" + + parse_metadata() { + local text="$1" + local json + if [[ -n "$text" && "$text" =~ METADATA:[[:space:]]*(\{.*\}) ]]; then + 
json="${BASH_REMATCH[1]}" + # Normalize quotes for jq if needed + TA=$(echo "$json" | jq -r '."test_args" // empty' 2>/dev/null || true) + JV=$(echo "$json" | jq -r '."java-version" // empty' 2>/dev/null || true) + if [[ -n "$TA" ]]; then TEST_ARGS="$TA"; fi + if [[ -n "$JV" ]]; then JAVA_VERSION="$JV"; fi + fi + } + + for ISSUE_NUMBER in $(echo $ISSUE_NUMBERS | jq -r '.[]'); do + if [[ -z "$ISSUE_NUMBER" || "$ISSUE_NUMBER" == "null" ]]; then + continue + fi + + # Check issue comments (latest first) + COMMENTS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/comments --jq '.[] | {id: .id, body: .body, created_at: .created_at}' || true) + if [[ -n "$COMMENTS" ]]; then + LATEST_COMMENT_WITH_VALUES="" + while IFS= read -r COMMENT; do + COMMENT_BODY=$(echo "$COMMENT" | jq -r '.body') + if [[ -n "$COMMENT_BODY" ]]; then + parse_metadata "$COMMENT_BODY" + fi + done <<< "$COMMENTS" + fi + + if [[ -z "$TEST_ARGS" && -z "$JAVA_VERSION" ]]; then + # Scan linked commits + COMMIT_IDS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "referenced" and .commit_id != null) | .commit_id' || true) + if [[ -z "$COMMIT_IDS" ]]; then + PR_NUMBERS=$(gh api repos/$REPO/issues/$ISSUE_NUMBER/timeline --jq '.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null) | .source.issue.number' || true) + if [[ -n "$PR_NUMBERS" ]]; then + for PR in $PR_NUMBERS; do + PR_COMMITS=$(gh api repos/$REPO/pulls/$PR/commits --jq '.[].sha' || true) + [[ -n "$PR_COMMITS" ]] && COMMIT_IDS="$COMMIT_IDS"$'\n'"$PR_COMMITS" + done + fi + fi + if [[ -n "$COMMIT_IDS" ]]; then + while IFS= read -r COMMIT_ID; do + [[ -z "$COMMIT_ID" ]] && continue + COMMIT_MSG=$(gh api repos/$REPO/commits/$COMMIT_ID --jq '.commit.message' || true) + parse_metadata "$COMMIT_MSG" + done <<< "$COMMIT_IDS" + fi + fi + + # Break after first issue with metadata found + if [[ -n "$TEST_ARGS" || -n "$JAVA_VERSION" ]]; then + break + fi + done + + echo "test_args=$TEST_ARGS" >> $GITHUB_OUTPUT + 
echo "java_version=$JAVA_VERSION" >> $GITHUB_OUTPUT + + # ─── 1.4 combine test results ─── + - name: Combine test results + id: combine + shell: bash + run: | + # Just pass through the outputs from extract_tests + echo "fail_to_pass=${{ steps.extract_tests.outputs.fail_to_pass }}" >> $GITHUB_OUTPUT + echo "pass_to_pass=${{ steps.extract_tests.outputs.pass_to_pass }}" >> $GITHUB_OUTPUT + echo "tests=${{ steps.extract_tests.outputs.tests }}" >> $GITHUB_OUTPUT + echo "comment_id=${{ steps.extract_tests.outputs.comment_id }}" >> $GITHUB_OUTPUT + echo "test_args=${{ steps.extract_metadata.outputs.test_args }}" >> $GITHUB_OUTPUT + echo "java_version=${{ steps.extract_metadata.outputs.java_version }}" >> $GITHUB_OUTPUT + + # ─── 1.4 check if FAIL_TO_PASS or PASS_TO_PASS found ─── + - name: Check if FAIL_TO_PASS or PASS_TO_PASS found + if: ${{ github.event_name == 'pull_request' && steps.combine.outputs.fail_to_pass == '[]' && steps.combine.outputs.pass_to_pass == '[]' }} + shell: bash + run: | + echo "::error::FAIL_TO_PASS or PASS_TO_PASS not found in commit messages or issue comments, please add FAIL_TO_PASS or PASS_TO_PASS to issue comment" + exit 1 + + # ──────────── 2. 
Run tests and handle comments ──────────── + run-tests-and-comments: + needs: collect-process-tests + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + if: ${{ false }} + outputs: + comment_id: ${{ steps.create_comment.outputs.comment_id }} + status: ${{ job.status }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # Step 1: Create placeholder comment + - name: Create placeholder issue comment + id: create_comment + if: ${{ github.event_name == 'push' || github.event_name == 'issue_comment' }} + uses: actions/github-script@v7 + env: + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_NAME: ${{ github.workflow }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + COMMENT_ID: ${{ needs.collect-process-tests.outputs.comment_id }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + result-encoding: string + script: | + const issuePat = /#(\d+)/g; + let issueNum = null, m; + + // • PR context + if (context.payload.pull_request) { + const whole = `${context.payload.pull_request.title}\n${context.payload.pull_request.body}`; + if ((m = issuePat.exec(whole)) !== null) issueNum = +m[1]; + } + + // • Push context + if (!issueNum && context.payload.commits) { + for (const c of context.payload.commits) { + if ((m = issuePat.exec(c.message)) !== null) { issueNum = +m[1]; break; } + } + } + + // • Issue comment context + if (!issueNum && context.payload.issue) { + issueNum = context.payload.issue.number; + } + + if (!issueNum) { core.info('No #issue reference found.'); return; } + + let bodyContent = ''; + + if (!process.env.COMMENT_ID){ + if (process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { + // Parse JSON array and convert to comma-separated string + core.info('FAIL_TO_PASS: '+process.env.FAIL_TO_PASS); + const failToPassArray = JSON.parse(process.env.FAIL_TO_PASS); + 
const failToPassString = failToPassArray.join(', '); + bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; + } + + if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { + // Parse JSON array and convert to comma-separated string + const passToPassArray = JSON.parse(process.env.PASS_TO_PASS); + const passToPassString = passToPassArray.join(', '); + bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; + } + } + + bodyContent += `\n⏳ **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** has **started**…`; + + // If we have an existing comment ID, update it instead of creating a new one + if (false && process.env.COMMENT_ID) { + try { + // Get existing comment body + const { data: existingComment } = await github.rest.issues.getComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID) + }); + + // Append new content to existing body + const updatedBody = existingComment.body + '\n' + bodyContent; + + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID), + body: updatedBody + }); + core.setOutput('comment_id', process.env.COMMENT_ID); + return; + } catch (error) { + core.warning(`Failed to update comment ${process.env.COMMENT_ID}: ${error.message}`); + // Fall through to create a new comment + } + } + + // Create a new comment + const { data: comment } = await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: issueNum, + body: bodyContent + }); + core.setOutput('comment_id', comment.id.toString()); + + # Step 2: Prepare parameters for dataset verification + - name: Prepare dataset verification parameters + id: prepare_params + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + REPO: ${{ github.repository }} + CT_TEST_ARGS: ${{ needs.collect-process-tests.outputs.test_args }} + CT_JAVA_VERSION: ${{ 
needs.collect-process-tests.outputs.java_version }} + run: | + set -e + # Ensure we have history before computing SHAs + git fetch --prune --unshallow || true + git fetch --all --tags || true + + # Determine base and head SHAs + if [[ "$EVENT_NAME" == "pull_request" ]]; then + BASE_SHA="${{ github.event.pull_request.base.sha }}" + HEAD_SHA="${{ github.event.pull_request.head.sha }}" + ISSUE_NUMBER="${{ github.event.pull_request.number }}" + elif [[ "$EVENT_NAME" == "push" ]]; then + BASE_SHA="${{ github.event.before }}" + HEAD_SHA="${{ github.sha }}" + ISSUE_NUMBER="$(echo "${{ github.event.head_commit.message }}" | grep -oE '#[0-9]+' | head -n1 | tr -d '#')" + elif [[ "$EVENT_NAME" == "issue_comment" ]]; then + HEAD_SHA="${{ github.sha }}" + BASE_SHA="$(git rev-parse HEAD~1 2>/dev/null || true)" + if [[ -z "$BASE_SHA" ]]; then + if git rev-parse --verify origin/main >/dev/null 2>&1; then + BASE_SHA="$(git merge-base HEAD origin/main || true)" + elif git rev-parse --verify origin/master >/dev/null 2>&1; then + BASE_SHA="$(git merge-base HEAD origin/master || true)" + fi + fi + if [[ -z "$BASE_SHA" ]]; then + BASE_SHA="$HEAD_SHA" + fi + ISSUE_NUMBER="${{ github.event.issue.number }}" + else + HEAD_SHA="${{ github.sha }}" + BASE_SHA="$(git rev-parse HEAD~1 2>/dev/null || true)" + if [[ -z "$BASE_SHA" ]]; then BASE_SHA="$HEAD_SHA"; fi + ISSUE_NUMBER="" + fi + echo "Base: $BASE_SHA" + echo "Head: $HEAD_SHA" + + # Build patches only from commits related to the issue(s) + export GH_TOKEN="${{ secrets.GITHUB_TOKEN }}" + ISSUE_NUMBERS_JSON='${{ needs.collect-process-tests.outputs.issue_numbers }}' + + REPO_FULL="$REPO" + + # Helper: classify per-file diffs into source vs test + classify_and_append() { + awk -v source_out="$1" -v tests_out="$2" ' + function basename(p, n, arr){ n=split(p, arr, "/"); return arr[n]; } + function is_test_path(p, pl, bl) { + pl=tolower(p); bl=tolower(basename(p)); + return (index(pl, "/test/") || index(pl, "/tests/") || index(pl, "/src/test/") 
|| index(pl, "/main/test/") || + index(pl, "/spec/") || index(pl, "/specs/") || index(pl, "__tests__") || index(pl, "__test__") || + pl ~ /_test[._]/ || pl ~ /_spec[._]/ || bl ~ /^test_/ || bl ~ /tests\./ || bl ~ /test\./ || bl ~ /spec\./); + } + /^diff --git / { + if (block != "") { if (current_is_test) print block >> tests_out; else print block >> source_out; } + path=""; if (match($0, /^diff --git a\/([^ ]+) b\//, m)) { path=m[1]; } + current_is_test=is_test_path(path); block=$0 "\n"; next; + } + { block = block $0 "\n"; } + END { if (block != "") { if (current_is_test) print block >> tests_out; else print block >> source_out; } } + ' + } + + TMP_COMMITS="$(mktemp)" + + if [[ -n "$ISSUE_NUMBERS_JSON" && "$ISSUE_NUMBERS_JSON" != "null" ]]; then + for ISSUE in $(echo "$ISSUE_NUMBERS_JSON" | jq -r '.[]?'); do + [[ -z "$ISSUE" || "$ISSUE" == "null" ]] && continue + + # From timeline referenced commits + COMMITS=$(gh api repos/$REPO_FULL/issues/$ISSUE/timeline --jq '.[] | select(.event == "referenced" and .commit_id != null) | .commit_id' || true) + for C in $COMMITS; do + [[ -z "$C" ]] && continue + DATE=$(gh api repos/$REPO_FULL/commits/$C --jq '.commit.committer.date' 2>/dev/null || true) + [[ -n "$DATE" ]] && echo "$C $DATE" >> "$TMP_COMMITS" + done + + # From issue body + BODY=$(gh api repos/$REPO_FULL/issues/$ISSUE --jq '.body' 2>/dev/null || true) + if [[ -n "$BODY" ]]; then + for C in $(echo "$BODY" | grep -oE '\b[0-9a-f]{40}\b' | sort -u); do + DATE=$(gh api repos/$REPO_FULL/commits/$C --jq '.commit.committer.date' 2>/dev/null || true) + [[ -n "$DATE" ]] && echo "$C $DATE" >> "$TMP_COMMITS" + done + fi + + # From comments + while IFS= read -r COMMENT; do + [[ -z "$COMMENT" ]] && continue + for C in $(echo "$COMMENT" | grep -oE '\b[0-9a-f]{40}\b' | sort -u); do + DATE=$(gh api repos/$REPO_FULL/commits/$C --jq '.commit.committer.date' 2>/dev/null || true) + [[ -n "$DATE" ]] && echo "$C $DATE" >> "$TMP_COMMITS" + done + done <<< "$(gh api 
repos/$REPO_FULL/issues/$ISSUE/comments --jq '.[].body' 2>/dev/null || true)" + + # From cross-referenced PRs + PRS=$(gh api repos/$REPO_FULL/issues/$ISSUE/timeline --jq '.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null) | .source.issue.number' 2>/dev/null || true) + for PR in $PRS; do + PR_COMMITS=$(gh api repos/$REPO_FULL/pulls/$PR/commits --jq '.[].sha' 2>/dev/null || true) + for C in $PR_COMMITS; do + DATE=$(gh api repos/$REPO_FULL/commits/$C --jq '.commit.committer.date' 2>/dev/null || true) + [[ -n "$DATE" ]] && echo "$C $DATE" >> "$TMP_COMMITS" + done + done + done + fi + + PATCH_CONTENT=""; TEST_PATCH_CONTENT="" + if [[ -s "$TMP_COMMITS" ]]; then + # Dedupe by SHA, keep first date, then sort by date asc + SORTED=$(awk '!seen[$1]++{print}' "$TMP_COMMITS" | sort -k2) + EARLIEST_SHA=$(echo "$SORTED" | head -n1 | awk '{print $1}') + LATEST_SHA=$(echo "$SORTED" | tail -n1 | awk '{print $1}') + if [[ -n "$EARLIEST_SHA" ]]; then + BASE_PARENT=$(gh api repos/$REPO_FULL/commits/$EARLIEST_SHA --jq '.parents[0].sha' 2>/dev/null || true) + [[ -n "$BASE_PARENT" ]] && BASE_SHA="$BASE_PARENT" + fi + [[ -n "$LATEST_SHA" ]] && HEAD_SHA="$LATEST_SHA" + + SRC_FILE="$(mktemp)"; TEST_FILE="$(mktemp)"; : > "$SRC_FILE"; : > "$TEST_FILE" + while read -r SHA DATE; do + [[ -z "$SHA" ]] && continue + PATCH_TEXT=$(gh api repos/$REPO_FULL/commits/$SHA -H 'Accept: application/vnd.github.v3.patch' 2>/dev/null || true) + if [[ -n "$PATCH_TEXT" ]]; then + echo "$PATCH_TEXT" | classify_and_append "$SRC_FILE" "$TEST_FILE" + fi + done <<< "$SORTED" + PATCH_CONTENT="$(cat "$SRC_FILE" || true)" + TEST_PATCH_CONTENT="$(cat "$TEST_FILE" || true)" + rm -f "$SRC_FILE" "$TEST_FILE" + else + # Fallback to full diff between BASE and HEAD + FULL_DIFF="$(git diff "$BASE_SHA" "$HEAD_SHA" || true)" + SRC_FILE="$(mktemp)"; TEST_FILE="$(mktemp)"; : > "$SRC_FILE"; : > "$TEST_FILE" + echo "$FULL_DIFF" | classify_and_append "$SRC_FILE" "$TEST_FILE" + PATCH_CONTENT="$(cat 
"$SRC_FILE" || true)" + TEST_PATCH_CONTENT="$(cat "$TEST_FILE" || true)" + rm -f "$SRC_FILE" "$TEST_FILE" + fi + rm -f "$TMP_COMMITS" 2>/dev/null || true + + # Derived parameters + TEST_ARGS="${CT_TEST_ARGS}" + JAVA_VERSION="${CT_JAVA_VERSION}" + if [[ "$TEST_ARGS" == "null" ]]; then TEST_ARGS=""; fi + if [[ -z "$JAVA_VERSION" || "$JAVA_VERSION" == "null" ]]; then JAVA_VERSION="24"; fi + OWNER="${{ github.repository_owner }}"; REPO_NAME="${REPO#*/}" + if [[ -n "$ISSUE_NUMBER" ]]; then INSTANCE_ID="${OWNER}__${REPO_NAME}__${ISSUE_NUMBER}"; else INSTANCE_ID=""; fi + + { echo "base_sha=$BASE_SHA"; echo "head_sha=$HEAD_SHA"; echo "instance_id=$INSTANCE_ID"; } >> "$GITHUB_OUTPUT" + { echo "PATCH<> "$GITHUB_OUTPUT" + { echo "TEST_PATCH<> "$GITHUB_OUTPUT" + echo "test_args=$TEST_ARGS" >> "$GITHUB_OUTPUT" + echo "java_version=$JAVA_VERSION" >> "$GITHUB_OUTPUT" + + # Step 3: Run dataset verifier script + - name: Run dataset verifier + id: run_verifier + shell: bash + env: + REPO: ${{ github.repository }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + PATCH: ${{ steps.prepare_params.outputs.PATCH }} + TEST_PATCH: ${{ steps.prepare_params.outputs.TEST_PATCH }} + COMMIT: ${{ steps.prepare_params.outputs.base_sha }} + TEST_ARGS: ${{ steps.prepare_params.outputs.test_args }} + JAVA_VERSION: ${{ steps.prepare_params.outputs.java_version }} + INSTANCE_ID: ${{ steps.prepare_params.outputs.instance_id }} + run: | + set -e + chmod +x .github/workflows/verify_java_dataset_instance.sh + OUTPUT_FILE="$(mktemp)" + .github/workflows/verify_java_dataset_instance.sh \ + "$REPO" \ + "$COMMIT" \ + "$PATCH" \ + "$TEST_PATCH" \ + "$FAIL_TO_PASS" \ + "$PASS_TO_PASS" \ + "$TEST_ARGS" \ + "true" \ + "$JAVA_VERSION" \ + "$INSTANCE_ID" \ + false \ + true | tee "$OUTPUT_FILE" + VERDICT="$(tail -n1 "$OUTPUT_FILE")" + echo "verdict=$VERDICT" >> $GITHUB_OUTPUT + if [[ "$VERDICT" == "✅" ]]; then + echo 
"result=success" >> $GITHUB_OUTPUT + echo "emoji=✅" >> $GITHUB_OUTPUT + echo "reason=All checks passed" >> $GITHUB_OUTPUT + else + echo "result=failure" >> $GITHUB_OUTPUT + echo "emoji=❌" >> $GITHUB_OUTPUT + CLEAN_REASON="${VERDICT#❌ }" + echo "reason=$CLEAN_REASON" >> $GITHUB_OUTPUT + fi + + # Step 4: Update comment with final status from verifier + - name: Update issue comment with final status + if: ${{ always() && (github.event_name == 'push' || github.event_name == 'issue_comment') }} + uses: actions/github-script@v7 + env: + COMMENT_ID: ${{ steps.create_comment.outputs.comment_id }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_NAME: ${{ github.workflow }} + RESULT: ${{ steps.run_verifier.outputs.result }} + EMOJI: ${{ steps.run_verifier.outputs.emoji }} + REASON: ${{ steps.run_verifier.outputs.reason }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + if (!process.env.COMMENT_ID) { core.info('No comment to update.'); return; } + let bodyContent = ''; + if (!process.env.COMMENT_ID){ + if (process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { + const failToPassArray = JSON.parse(process.env.FAIL_TO_PASS); + const failToPassString = failToPassArray.join(', '); + bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; + } + if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { + const passToPassArray = JSON.parse(process.env.PASS_TO_PASS); + const passToPassString = passToPassArray.join(', '); + bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; + } + } + const emoji = process.env.EMOJI || '🟡'; + const reason = process.env.REASON ? 
`: ${process.env.REASON}` : ''; + bodyContent += `\n${emoji} **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** finished${reason}`; + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID), + body: bodyContent + }); + + # ──────────── 2.a Create placeholder comment (split) ──────────── + create-comment: + needs: collect-process-tests + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + if: ${{ github.event_name == 'push' || github.event_name == 'issue_comment' }} + outputs: + comment_id: ${{ steps.create_comment.outputs.comment_id }} + steps: + - name: Create placeholder issue comment + id: create_comment + uses: actions/github-script@v7 + env: + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_NAME: ${{ github.workflow }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + COMMENT_ID: ${{ needs.collect-process-tests.outputs.comment_id }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + result-encoding: string + script: | + const issuePat = /#(\d+)/g; + let issueNum = null, m; + + if (context.payload.pull_request) { + const whole = `${context.payload.pull_request.title}\n${context.payload.pull_request.body}`; + if ((m = issuePat.exec(whole)) !== null) issueNum = +m[1]; + } + if (!issueNum && context.payload.commits) { + for (const c of context.payload.commits) { + if ((m = issuePat.exec(c.message)) !== null) { issueNum = +m[1]; break; } + } + } + if (!issueNum && context.payload.issue) { + issueNum = context.payload.issue.number; + } + if (!issueNum) { core.info('No #issue reference found.'); return; } + + let bodyContent = ''; + if (!process.env.COMMENT_ID){ + if (process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { + const failToPassArray = JSON.parse(process.env.FAIL_TO_PASS); + const 
failToPassString = failToPassArray.join(', '); + bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; + } + if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { + const passToPassArray = JSON.parse(process.env.PASS_TO_PASS); + const passToPassString = passToPassArray.join(', '); + bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; + } + } + bodyContent += `\n⏳ **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** has **started**…`; + + const { data: comment } = await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: issueNum, + body: bodyContent + }); + core.setOutput('comment_id', comment.id.toString()); + + # ──────────── 2.b Prepare params (split) ──────────── + prepare-params: + needs: collect-process-tests + runs-on: ubuntu-latest + permissions: + contents: read + if: ${{ github.event_name != 'pull_request' || needs.collect-process-tests.outputs.fail_to_pass != '[]' || needs.collect-process-tests.outputs.pass_to_pass != '[]' }} + outputs: + base_sha: ${{ steps.prepare_params.outputs.base_sha }} + head_sha: ${{ steps.prepare_params.outputs.head_sha }} + PATCH: ${{ steps.prepare_params.outputs.PATCH }} + TEST_PATCH: ${{ steps.prepare_params.outputs.TEST_PATCH }} + test_args: ${{ steps.prepare_params.outputs.test_args }} + java_version: ${{ steps.prepare_params.outputs.java_version }} + is_maven: ${{ steps.prepare_params.outputs.is_maven }} + instance_id: ${{ steps.prepare_params.outputs.instance_id }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Prepare dataset verification parameters + id: prepare_params + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + REPO: ${{ github.repository }} + CT_TEST_ARGS: ${{ needs.collect-process-tests.outputs.test_args }} + CT_JAVA_VERSION: ${{ needs.collect-process-tests.outputs.java_version }} + run: | + set -e + git fetch --prune --unshallow || true + git fetch --all --tags || true + + if [[ "$EVENT_NAME" == 
"pull_request" ]]; then + BASE_SHA="${{ github.event.pull_request.base.sha }}" + HEAD_SHA="${{ github.event.pull_request.head.sha }}" + ISSUE_NUMBER="${{ github.event.pull_request.number }}" + elif [[ "$EVENT_NAME" == "push" ]]; then + BASE_SHA="${{ github.event.before }}" + HEAD_SHA="${{ github.sha }}" + ISSUE_NUMBER="$(echo "${{ github.event.head_commit.message }}" | grep -oE '#[0-9]+' | head -n1 | tr -d '#')" + elif [[ "$EVENT_NAME" == "issue_comment" ]]; then + HEAD_SHA="${{ github.sha }}" + BASE_SHA="$(git rev-parse HEAD~1 2>/dev/null || true)" + if [[ -z "$BASE_SHA" ]]; then + if git rev-parse --verify origin/main >/dev/null 2>&1; then + BASE_SHA="$(git merge-base HEAD origin/main || true)" + elif git rev-parse --verify origin/master >/dev/null 2>&1; then + BASE_SHA="$(git merge-base HEAD origin/master || true)" + fi + fi + if [[ -z "$BASE_SHA" ]]; then + BASE_SHA="$HEAD_SHA" + fi + ISSUE_NUMBER="${{ github.event.issue.number }}" + else + HEAD_SHA="${{ github.sha }}" + BASE_SHA="$(git rev-parse HEAD~1 2>/dev/null || true)" + if [[ -z "$BASE_SHA" ]]; then BASE_SHA="$HEAD_SHA"; fi + ISSUE_NUMBER="" + fi + echo "Base: $BASE_SHA" + echo "Head: $HEAD_SHA" + + # Build patches only from commits related to the issue(s) + export GH_TOKEN="${{ secrets.GITHUB_TOKEN }}" + ISSUE_NUMBERS_JSON='${{ needs.collect-process-tests.outputs.issue_numbers }}' + + REPO_FULL="$REPO" + + classify_and_append() { + awk -v source_out="$1" -v tests_out="$2" ' + function basename(p, n, arr){ n=split(p, arr, "/"); return arr[n]; } + function is_test_path(p, pl, bl) { + pl=tolower(p); bl=tolower(basename(p)); + return (index(pl, "/test/") || index(pl, "/tests/") || index(pl, "/src/test/") || index(pl, "/main/test/") || + index(pl, "/spec/") || index(pl, "/specs/") || index(pl, "__tests__") || index(pl, "__test__") || + pl ~ /_test[._]/ || pl ~ /_spec[._]/ || bl ~ /^test_/ || bl ~ /tests\./ || bl ~ /test\./ || bl ~ /spec\./); + } + /^diff --git / { + if (block != "") { if (current_is_test) 
print block >> tests_out; else print block >> source_out; } + path=""; if (match($0, /^diff --git a\/([^ ]+) b\//, m)) { path=m[1]; } + current_is_test=is_test_path(path); block=$0 "\n"; next; + } + { block = block $0 "\n"; } + END { if (block != "") { if (current_is_test) print block >> tests_out; else print block >> source_out; } } + ' + } + + TMP_COMMITS="$(mktemp)" + + if [[ -n "$ISSUE_NUMBERS_JSON" && "$ISSUE_NUMBERS_JSON" != "null" ]]; then + for ISSUE in $(echo "$ISSUE_NUMBERS_JSON" | jq -r '.[]?'); do + [[ -z "$ISSUE" || "$ISSUE" == "null" ]] && continue + + COMMITS=$(gh api repos/$REPO_FULL/issues/$ISSUE/timeline --jq '.[] | select(.event == "referenced" and .commit_id != null) | .commit_id' || true) + for C in $COMMITS; do + [[ -z "$C" ]] && continue + DATE=$(gh api repos/$REPO_FULL/commits/$C --jq '.commit.committer.date' 2>/dev/null || true) + [[ -n "$DATE" ]] && echo "$C $DATE" >> "$TMP_COMMITS" + done + + BODY=$(gh api repos/$REPO_FULL/issues/$ISSUE --jq '.body' 2>/dev/null || true) + if [[ -n "$BODY" ]]; then + for C in $(echo "$BODY" | grep -oE '\b[0-9a-f]{40}\b' | sort -u); do + DATE=$(gh api repos/$REPO_FULL/commits/$C --jq '.commit.committer.date' 2>/dev/null || true) + [[ -n "$DATE" ]] && echo "$C $DATE" >> "$TMP_COMMITS" + done + fi + + while IFS= read -r COMMENT; do + [[ -z "$COMMENT" ]] && continue + for C in $(echo "$COMMENT" | grep -oE '\b[0-9a-f]{40}\b' | sort -u); do + DATE=$(gh api repos/$REPO_FULL/commits/$C --jq '.commit.committer.date' 2>/dev/null || true) + [[ -n "$DATE" ]] && echo "$C $DATE" >> "$TMP_COMMITS" + done + done <<< "$(gh api repos/$REPO_FULL/issues/$ISSUE/comments --jq '.[].body' 2>/dev/null || true)" + + PRS=$(gh api repos/$REPO_FULL/issues/$ISSUE/timeline --jq '.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null) | .source.issue.number' 2>/dev/null || true) + for PR in $PRS; do + PR_COMMITS=$(gh api repos/$REPO_FULL/pulls/$PR/commits --jq '.[].sha' 2>/dev/null || true) + for C in 
$PR_COMMITS; do + DATE=$(gh api repos/$REPO_FULL/commits/$C --jq '.commit.committer.date' 2>/dev/null || true) + [[ -n "$DATE" ]] && echo "$C $DATE" >> "$TMP_COMMITS" + done + done + done + fi + + PATCH_CONTENT=""; TEST_PATCH_CONTENT="" + if [[ -s "$TMP_COMMITS" ]]; then + SORTED=$(awk '!seen[$1]++{print}' "$TMP_COMMITS" | sort -k2) + EARLIEST_SHA=$(echo "$SORTED" | head -n1 | awk '{print $1}') + LATEST_SHA=$(echo "$SORTED" | tail -n1 | awk '{print $1}') + if [[ -n "$EARLIEST_SHA" ]]; then + BASE_PARENT=$(gh api repos/$REPO_FULL/commits/$EARLIEST_SHA --jq '.parents[0].sha' 2>/dev/null || true) + [[ -n "$BASE_PARENT" ]] && BASE_SHA="$BASE_PARENT" + fi + [[ -n "$LATEST_SHA" ]] && HEAD_SHA="$LATEST_SHA" + + SRC_FILE="$(mktemp)"; TEST_FILE="$(mktemp)"; : > "$SRC_FILE"; : > "$TEST_FILE" + while read -r SHA DATE; do + [[ -z "$SHA" ]] && continue + PATCH_TEXT=$(gh api repos/$REPO_FULL/commits/$SHA -H 'Accept: application/vnd.github.v3.patch' 2>/dev/null || true) + if [[ -n "$PATCH_TEXT" ]]; then + echo "$PATCH_TEXT" | classify_and_append "$SRC_FILE" "$TEST_FILE" + fi + done <<< "$SORTED" + PATCH_CONTENT="$(cat "$SRC_FILE" || true)" + TEST_PATCH_CONTENT="$(cat "$TEST_FILE" || true)" + rm -f "$SRC_FILE" "$TEST_FILE" + else + FULL_DIFF="$(git diff "$BASE_SHA" "$HEAD_SHA" || true)" + SRC_FILE="$(mktemp)"; TEST_FILE="$(mktemp)"; : > "$SRC_FILE"; : > "$TEST_FILE" + echo "$FULL_DIFF" | classify_and_append "$SRC_FILE" "$TEST_FILE" + PATCH_CONTENT="$(cat "$SRC_FILE" || true)" + TEST_PATCH_CONTENT="$(cat "$TEST_FILE" || true)" + rm -f "$SRC_FILE" "$TEST_FILE" + fi + rm -f "$TMP_COMMITS" 2>/dev/null || true + + TEST_ARGS="${CT_TEST_ARGS}"; JAVA_VERSION="${CT_JAVA_VERSION}" + if [[ "$TEST_ARGS" == "null" ]]; then TEST_ARGS=""; fi + if [[ -z "$JAVA_VERSION" || "$JAVA_VERSION" == "null" ]]; then JAVA_VERSION="24"; fi + OWNER="${{ github.repository_owner }}"; REPO_NAME="${REPO#*/}" + if [[ -n "$ISSUE_NUMBER" ]]; then INSTANCE_ID="${OWNER}__${REPO_NAME}__${ISSUE_NUMBER}"; else 
INSTANCE_ID=""; fi + + # Detect build tool (Maven vs Gradle) + IS_MAVEN="true" + if git ls-files | grep -E '(^|/)(settings\.gradle(\.kts)?)$' -q; then + IS_MAVEN="false" + elif git ls-files | grep -E '(^|/)pom\.xml$' -q; then + IS_MAVEN="true" + elif [[ -f "settings.gradle" || -f "settings.gradle.kts" ]]; then + IS_MAVEN="false" + elif [[ -f "pom.xml" ]]; then + IS_MAVEN="true" + fi + + { echo "base_sha=$BASE_SHA"; echo "head_sha=$HEAD_SHA"; echo "instance_id=$INSTANCE_ID"; } >> "$GITHUB_OUTPUT" + { echo "PATCH<<PATCH_EOF"; echo "$PATCH_CONTENT"; echo "PATCH_EOF"; } >> "$GITHUB_OUTPUT" + { echo "TEST_PATCH<<TEST_PATCH_EOF"; echo "$TEST_PATCH_CONTENT"; echo "TEST_PATCH_EOF"; } >> "$GITHUB_OUTPUT" + echo "test_args=$TEST_ARGS" >> "$GITHUB_OUTPUT" + echo "java_version=$JAVA_VERSION" >> "$GITHUB_OUTPUT" + echo "is_maven=$IS_MAVEN" >> "$GITHUB_OUTPUT" + + # ──────────── 2.c Run verifier (split) ──────────── + run-verifier: + needs: [collect-process-tests, prepare-params] + runs-on: ubuntu-latest + permissions: + contents: read + if: ${{ github.event_name != 'pull_request' || needs.collect-process-tests.outputs.fail_to_pass != '[]' || needs.collect-process-tests.outputs.pass_to_pass != '[]' }} + outputs: + verdict: ${{ steps.run_verifier.outputs.verdict }} + result: ${{ steps.run_verifier.outputs.result }} + emoji: ${{ steps.run_verifier.outputs.emoji }} + reason: ${{ steps.run_verifier.outputs.reason }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Run dataset verifier + id: run_verifier + shell: bash + env: + REPO: ${{ github.repository }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + PATCH: ${{ needs.prepare-params.outputs.PATCH }} + TEST_PATCH: ${{ needs.prepare-params.outputs.TEST_PATCH }} + COMMIT: ${{ needs.prepare-params.outputs.base_sha }} + TEST_ARGS: ${{ needs.prepare-params.outputs.test_args }} + JAVA_VERSION: ${{ needs.prepare-params.outputs.java_version }} + INSTANCE_ID: ${{ needs.prepare-params.outputs.instance_id }} + IS_MAVEN: ${{ 
needs.prepare-params.outputs.is_maven }} + run: | + set -e + chmod +x .github/workflows/verify_java_dataset_instance.sh + OUTPUT_FILE="$(mktemp)" + .github/workflows/verify_java_dataset_instance.sh \ + "$REPO" \ + "$COMMIT" \ + "$PATCH" \ + "$TEST_PATCH" \ + "$FAIL_TO_PASS" \ + "$PASS_TO_PASS" \ + "$TEST_ARGS" \ + "$IS_MAVEN" \ + "$JAVA_VERSION" \ + "$INSTANCE_ID" \ + false \ + true | tee "$OUTPUT_FILE" + VERDICT="$(tail -n1 "$OUTPUT_FILE")" + echo "verdict=$VERDICT" >> $GITHUB_OUTPUT + if [[ "$VERDICT" == "✅" ]]; then + echo "result=success" >> $GITHUB_OUTPUT + echo "emoji=✅" >> $GITHUB_OUTPUT + echo "reason=All checks passed" >> $GITHUB_OUTPUT + else + echo "result=failure" >> $GITHUB_OUTPUT + echo "emoji=❌" >> $GITHUB_OUTPUT + CLEAN_REASON="${VERDICT#❌ }" + echo "reason=$CLEAN_REASON" >> $GITHUB_OUTPUT + fi + + # ──────────── 2.d Update comment (split) ──────────── + update-comment: + needs: [collect-process-tests, create-comment, run-verifier] + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + if: ${{ always() && (github.event_name == 'push' || github.event_name == 'issue_comment') }} + steps: + - name: Update issue comment with final status + uses: actions/github-script@v7 + env: + COMMENT_ID: ${{ needs.create-comment.outputs.comment_id }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + WORKFLOW_NAME: ${{ github.workflow }} + RESULT: ${{ needs.run-verifier.outputs.result }} + EMOJI: ${{ needs.run-verifier.outputs.emoji }} + REASON: ${{ needs.run-verifier.outputs.reason }} + FAIL_TO_PASS: ${{ needs.collect-process-tests.outputs.fail_to_pass }} + PASS_TO_PASS: ${{ needs.collect-process-tests.outputs.pass_to_pass }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + if (!process.env.COMMENT_ID) { core.info('No comment to update.'); return; } + let bodyContent = ''; + if (process.env.COMMENT_ID){ + if (process.env.FAIL_TO_PASS && process.env.FAIL_TO_PASS !== '[]') { + const 
failToPassArray = JSON.parse(process.env.FAIL_TO_PASS); + const failToPassString = failToPassArray.join(', '); + bodyContent += `FAIL_TO_PASS: ${failToPassString}\n`; + } + if (process.env.PASS_TO_PASS && process.env.PASS_TO_PASS !== '[]') { + const passToPassArray = JSON.parse(process.env.PASS_TO_PASS); + const passToPassString = passToPassArray.join(', '); + bodyContent += `PASS_TO_PASS: ${passToPassString}\n`; + } + } + const emoji = process.env.EMOJI || '🟡'; + const reason = process.env.REASON ? `: ${process.env.REASON}` : ''; + bodyContent += `\n${emoji} **[${process.env.WORKFLOW_NAME}](${process.env.RUN_URL})** finished${reason}`; + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: Number(process.env.COMMENT_ID), + body: bodyContent + }); diff --git a/README.md b/README.md index 3d3d82e9..afe1dfee 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ The feature-service microservice manages products, releases and features. 
* Maven, JUnit 5, Testcontainers ## Prerequisites -* JDK 21 or later +* JDK 24 or later * Docker ([installation instructions](https://docs.docker.com/engine/install/)) * [IntelliJ IDEA](https://www.jetbrains.com/idea/) * PostgreSQL and Keycloak diff --git a/pom.xml b/pom.xml index dc8f63fc..e96d6f51 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,9 @@ 2025.0.0 2.8.9 1.6.3 - 2.45.0 + 3.0.0 + 2.74.0 + 1.21.4 sivaprasadreddy/ft-feature-service @@ -141,6 +143,21 @@ rest-assured test + + org.testcontainers + rabbitmq + test + + + org.springframework.amqp + spring-rabbit-test + test + + + org.awaitility + awaitility + test + @@ -216,7 +233,7 @@ - 2.50.0 + ${palantir-java-format.version} diff --git a/src/main/java/com/sivalabs/ft/features/api/GlobalExceptionHandler.java b/src/main/java/com/sivalabs/ft/features/api/GlobalExceptionHandler.java index 0c15088e..75ec490e 100644 --- a/src/main/java/com/sivalabs/ft/features/api/GlobalExceptionHandler.java +++ b/src/main/java/com/sivalabs/ft/features/api/GlobalExceptionHandler.java @@ -2,12 +2,14 @@ import static org.springframework.http.HttpStatus.*; +import com.fasterxml.jackson.databind.exc.InvalidFormatException; import com.sivalabs.ft.features.domain.exceptions.BadRequestException; import com.sivalabs.ft.features.domain.exceptions.ResourceNotFoundException; import java.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.ProblemDetail; +import org.springframework.http.converter.HttpMessageNotReadableException; import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.RestControllerAdvice; @@ -42,4 +44,23 @@ ProblemDetail handle(BadRequestException e) { problemDetail.setProperty("timestamp", Instant.now()); return problemDetail; } + + @ExceptionHandler(HttpMessageNotReadableException.class) + ProblemDetail handle(HttpMessageNotReadableException ex) { + String message = "Invalid request body"; + + Throwable cause = 
ex.getCause(); + if (cause instanceof InvalidFormatException ife) { + if (ife.getTargetType() != null && ife.getTargetType().isEnum()) { + String invalidValue = String.valueOf(ife.getValue()); + String enumName = ife.getTargetType().getSimpleName(); + message = "Invalid value '%s' for enum %s.".formatted(invalidValue, enumName); + } + } + + ProblemDetail problemDetail = ProblemDetail.forStatusAndDetail(BAD_REQUEST, message); + problemDetail.setTitle("Bad Request"); + problemDetail.setProperty("timestamp", Instant.now()); + return problemDetail; + } } diff --git a/src/main/java/com/sivalabs/ft/features/api/controllers/FeatureController.java b/src/main/java/com/sivalabs/ft/features/api/controllers/FeatureController.java index 9ca87c1f..df1b334c 100644 --- a/src/main/java/com/sivalabs/ft/features/api/controllers/FeatureController.java +++ b/src/main/java/com/sivalabs/ft/features/api/controllers/FeatureController.java @@ -174,7 +174,12 @@ void updateFeature(@PathVariable String code, @RequestBody UpdateFeaturePayload payload.status(), payload.releaseCode(), payload.assignedTo(), - username); + username, + payload.plannedCompletionAt(), + payload.actualCompletionAt(), + payload.featurePlanningStatus(), + payload.featureOwner(), + payload.blockageReason()); featureService.updateFeature(cmd); } diff --git a/src/main/java/com/sivalabs/ft/features/api/deserializer/FeaturePlanningStatusDeserializer.java b/src/main/java/com/sivalabs/ft/features/api/deserializer/FeaturePlanningStatusDeserializer.java new file mode 100644 index 00000000..acf1c030 --- /dev/null +++ b/src/main/java/com/sivalabs/ft/features/api/deserializer/FeaturePlanningStatusDeserializer.java @@ -0,0 +1,21 @@ +package com.sivalabs.ft.features.api.deserializer; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonMappingException; +import 
com.sivalabs.ft.features.domain.models.FeaturePlanningStatus; +import java.io.IOException; + +public class FeaturePlanningStatusDeserializer extends JsonDeserializer { + + @Override + public FeaturePlanningStatus deserialize(JsonParser parser, DeserializationContext context) throws IOException { + String value = parser.getText(); + try { + return FeaturePlanningStatus.valueOf(value); + } catch (IllegalArgumentException e) { + throw new JsonMappingException(parser, "Invalid featurePlanningStatus value: '%s'.".formatted(value)); + } + } +} diff --git a/src/main/java/com/sivalabs/ft/features/api/models/UpdateFeaturePayload.java b/src/main/java/com/sivalabs/ft/features/api/models/UpdateFeaturePayload.java index 0775576a..a50ccc26 100644 --- a/src/main/java/com/sivalabs/ft/features/api/models/UpdateFeaturePayload.java +++ b/src/main/java/com/sivalabs/ft/features/api/models/UpdateFeaturePayload.java @@ -1,12 +1,21 @@ package com.sivalabs.ft.features.api.models; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.sivalabs.ft.features.api.deserializer.FeaturePlanningStatusDeserializer; +import com.sivalabs.ft.features.domain.models.FeaturePlanningStatus; import com.sivalabs.ft.features.domain.models.FeatureStatus; import jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.Size; +import java.time.Instant; public record UpdateFeaturePayload( @NotEmpty(message = "Title is required") @Size(max = 500, message = "Title cannot exceed 500 characters") String title, String description, String releaseCode, String assignedTo, - FeatureStatus status) {} + FeatureStatus status, + Instant plannedCompletionAt, + Instant actualCompletionAt, + @JsonDeserialize(using = FeaturePlanningStatusDeserializer.class) FeaturePlanningStatus featurePlanningStatus, + String featureOwner, + String blockageReason) {} diff --git a/src/main/java/com/sivalabs/ft/features/domain/Commands.java b/src/main/java/com/sivalabs/ft/features/domain/Commands.java 
index a591fe5a..9b5e0449 100644 --- a/src/main/java/com/sivalabs/ft/features/domain/Commands.java +++ b/src/main/java/com/sivalabs/ft/features/domain/Commands.java @@ -1,5 +1,6 @@ package com.sivalabs.ft.features.domain; +import com.sivalabs.ft.features.domain.models.FeaturePlanningStatus; import com.sivalabs.ft.features.domain.models.FeatureStatus; import com.sivalabs.ft.features.domain.models.ReleaseStatus; import java.time.Instant; @@ -36,7 +37,12 @@ public record UpdateFeatureCommand( FeatureStatus status, String releaseCode, String assignedTo, - String updatedBy) {} + String updatedBy, + Instant plannedCompletionAt, + Instant actualCompletionAt, + FeaturePlanningStatus featurePlanningStatus, + String featureOwner, + String blockageReason) {} public record DeleteFeatureCommand(String code, String deletedBy) {} diff --git a/src/main/java/com/sivalabs/ft/features/domain/FeatureRepository.java b/src/main/java/com/sivalabs/ft/features/domain/FeatureRepository.java index c67caca2..a419bdd0 100644 --- a/src/main/java/com/sivalabs/ft/features/domain/FeatureRepository.java +++ b/src/main/java/com/sivalabs/ft/features/domain/FeatureRepository.java @@ -21,8 +21,8 @@ interface FeatureRepository extends ListCrudRepository { void deleteByCode(String code); @Modifying - @Query("delete from Feature f where f.release.code = :code") - void deleteByReleaseCode(String code); + @Query("update Feature f set f.release = null where f.release.code = :code") + void unsetRelease(String code); boolean existsByCode(String code); diff --git a/src/main/java/com/sivalabs/ft/features/domain/FeatureService.java b/src/main/java/com/sivalabs/ft/features/domain/FeatureService.java index a884564f..70a53811 100644 --- a/src/main/java/com/sivalabs/ft/features/domain/FeatureService.java +++ b/src/main/java/com/sivalabs/ft/features/domain/FeatureService.java @@ -124,6 +124,11 @@ public void updateFeature(UpdateFeatureCommand cmd) { feature.setStatus(cmd.status()); 
feature.setUpdatedBy(cmd.updatedBy()); feature.setUpdatedAt(Instant.now()); + feature.setPlannedCompletionAt(cmd.plannedCompletionAt()); + feature.setActualCompletionAt(cmd.actualCompletionAt()); + feature.setFeaturePlanningStatus(cmd.featurePlanningStatus()); + feature.setFeatureOwner(cmd.featureOwner()); + feature.setBlockageReason(cmd.blockageReason()); featureRepository.save(feature); eventPublisher.publishFeatureUpdatedEvent(feature); } diff --git a/src/main/java/com/sivalabs/ft/features/domain/ReleaseService.java b/src/main/java/com/sivalabs/ft/features/domain/ReleaseService.java index 85dbd16a..f51cb015 100644 --- a/src/main/java/com/sivalabs/ft/features/domain/ReleaseService.java +++ b/src/main/java/com/sivalabs/ft/features/domain/ReleaseService.java @@ -5,6 +5,7 @@ import com.sivalabs.ft.features.domain.dtos.ReleaseDto; import com.sivalabs.ft.features.domain.entities.Product; import com.sivalabs.ft.features.domain.entities.Release; +import com.sivalabs.ft.features.domain.exceptions.ResourceNotFoundException; import com.sivalabs.ft.features.domain.mappers.ReleaseMapper; import com.sivalabs.ft.features.domain.models.ReleaseStatus; import java.time.Instant; @@ -81,9 +82,9 @@ public void updateRelease(UpdateReleaseCommand cmd) { @Transactional public void deleteRelease(String code) { if (!releaseRepository.existsByCode(code)) { - throw new IllegalArgumentException("Release with code " + code + " not found"); + throw new ResourceNotFoundException("Release with code " + code + " not found"); } - featureRepository.deleteByReleaseCode(code); + featureRepository.unsetRelease(code); releaseRepository.deleteByCode(code); } } diff --git a/src/main/java/com/sivalabs/ft/features/domain/dtos/FeatureDto.java b/src/main/java/com/sivalabs/ft/features/domain/dtos/FeatureDto.java index 3fcddfb4..4ea962b0 100644 --- a/src/main/java/com/sivalabs/ft/features/domain/dtos/FeatureDto.java +++ b/src/main/java/com/sivalabs/ft/features/domain/dtos/FeatureDto.java @@ -1,5 +1,6 @@ 
package com.sivalabs.ft.features.domain.dtos; +import com.sivalabs.ft.features.domain.models.FeaturePlanningStatus; import com.sivalabs.ft.features.domain.models.FeatureStatus; import java.io.Serializable; import java.time.Instant; @@ -16,7 +17,12 @@ public record FeatureDto( String createdBy, Instant createdAt, String updatedBy, - Instant updatedAt) + Instant updatedAt, + Instant plannedCompletionAt, + Instant actualCompletionAt, + FeaturePlanningStatus featurePlanningStatus, + String featureOwner, + String blockageReason) implements Serializable { public FeatureDto makeFavorite(boolean favorite) { @@ -32,6 +38,11 @@ public FeatureDto makeFavorite(boolean favorite) { createdBy, createdAt, updatedBy, - updatedAt); + updatedAt, + plannedCompletionAt, + actualCompletionAt, + featurePlanningStatus, + featureOwner, + blockageReason); } } diff --git a/src/main/java/com/sivalabs/ft/features/domain/entities/Feature.java b/src/main/java/com/sivalabs/ft/features/domain/entities/Feature.java index ed6e163a..b52da133 100644 --- a/src/main/java/com/sivalabs/ft/features/domain/entities/Feature.java +++ b/src/main/java/com/sivalabs/ft/features/domain/entities/Feature.java @@ -1,5 +1,6 @@ package com.sivalabs.ft.features.domain.entities; +import com.sivalabs.ft.features.domain.models.FeaturePlanningStatus; import com.sivalabs.ft.features.domain.models.FeatureStatus; import jakarta.persistence.*; import jakarta.validation.constraints.NotNull; @@ -53,6 +54,22 @@ public class Feature { @Column(name = "updated_at") private Instant updatedAt; + @Column(name = "planned_completion_at") + private Instant plannedCompletionAt; + + @Column(name = "actual_completion_at") + private Instant actualCompletionAt; + + @Column(name = "feature_planning_status", length = 50) + @Enumerated(EnumType.STRING) + private FeaturePlanningStatus featurePlanningStatus; + + @Size(max = 255) @Column(name = "feature_owner") + private String featureOwner; + + @Column(name = "blockage_reason", length = 
Integer.MAX_VALUE) + private String blockageReason; + public Long getId() { return id; } @@ -148,4 +165,44 @@ public Instant getUpdatedAt() { public void setUpdatedAt(Instant updatedAt) { this.updatedAt = updatedAt; } + + public Instant getPlannedCompletionAt() { + return plannedCompletionAt; + } + + public void setPlannedCompletionAt(Instant plannedCompletionAt) { + this.plannedCompletionAt = plannedCompletionAt; + } + + public Instant getActualCompletionAt() { + return actualCompletionAt; + } + + public void setActualCompletionAt(Instant actualCompletionAt) { + this.actualCompletionAt = actualCompletionAt; + } + + public FeaturePlanningStatus getFeaturePlanningStatus() { + return featurePlanningStatus; + } + + public void setFeaturePlanningStatus(FeaturePlanningStatus featurePlanningStatus) { + this.featurePlanningStatus = featurePlanningStatus; + } + + public String getFeatureOwner() { + return featureOwner; + } + + public void setFeatureOwner(String featureOwner) { + this.featureOwner = featureOwner; + } + + public String getBlockageReason() { + return blockageReason; + } + + public void setBlockageReason(String blockageReason) { + this.blockageReason = blockageReason; + } } diff --git a/src/main/java/com/sivalabs/ft/features/domain/models/FeaturePlanningStatus.java b/src/main/java/com/sivalabs/ft/features/domain/models/FeaturePlanningStatus.java new file mode 100644 index 00000000..ff138c4d --- /dev/null +++ b/src/main/java/com/sivalabs/ft/features/domain/models/FeaturePlanningStatus.java @@ -0,0 +1,8 @@ +package com.sivalabs.ft.features.domain.models; + +public enum FeaturePlanningStatus { + NOT_STARTED, + IN_PROGRESS, + DONE, + BLOCKED +} diff --git a/src/main/resources/db/migration/V5__add_feature_planning_fields.sql b/src/main/resources/db/migration/V5__add_feature_planning_fields.sql new file mode 100644 index 00000000..2d9d823c --- /dev/null +++ b/src/main/resources/db/migration/V5__add_feature_planning_fields.sql @@ -0,0 +1,9 @@ +alter table features + 
add column planned_completion_at timestamp, + add column actual_completion_at timestamp, + add column feature_planning_status varchar(50), + add column feature_owner varchar(255), + add column blockage_reason text; + +create index idx_features_feature_planning_status on features (feature_planning_status); +create index idx_features_feature_owner on features (feature_owner); diff --git a/src/test/java/com/sivalabs/ft/features/api/controllers/FeatureControllerTests.java b/src/test/java/com/sivalabs/ft/features/api/controllers/FeatureControllerTests.java index 04b2726c..37c33e01 100644 --- a/src/test/java/com/sivalabs/ft/features/api/controllers/FeatureControllerTests.java +++ b/src/test/java/com/sivalabs/ft/features/api/controllers/FeatureControllerTests.java @@ -5,6 +5,7 @@ import com.sivalabs.ft.features.AbstractIT; import com.sivalabs.ft.features.WithMockOAuth2User; import com.sivalabs.ft.features.domain.dtos.FeatureDto; +import com.sivalabs.ft.features.domain.models.FeaturePlanningStatus; import com.sivalabs.ft.features.domain.models.FeatureStatus; import org.junit.jupiter.api.Test; import org.springframework.http.HttpStatus; @@ -123,4 +124,152 @@ void shouldDeleteFeature() { var getResult = mvc.get().uri("/api/features/{code}", "IDEA-2").exchange(); assertThat(getResult).hasStatus(HttpStatus.NOT_FOUND); } + + @Test + @WithMockOAuth2User(username = "user") + void shouldUpdateFeatureWithPlanningFields() { + var payload = + """ + { + "title": "Feature With Planning", + "description": "Description", + "status": "IN_PROGRESS", + "plannedCompletionAt": "2026-06-01T00:00:00Z", + "actualCompletionAt": null, + "featurePlanningStatus": "IN_PROGRESS", + "featureOwner": "planning.owner", + "blockageReason": null + } + """; + + var result = mvc.put() + .uri("/api/features/{code}", "IDEA-1") + .contentType(MediaType.APPLICATION_JSON) + .content(payload) + .exchange(); + assertThat(result).hasStatusOk(); + + var updatedFeature = mvc.get().uri("/api/features/{code}", 
"IDEA-1").exchange(); + assertThat(updatedFeature) + .hasStatusOk() + .bodyJson() + .convertTo(FeatureDto.class) + .satisfies(dto -> { + assertThat(dto.featurePlanningStatus()).isEqualTo(FeaturePlanningStatus.IN_PROGRESS); + assertThat(dto.featureOwner()).isEqualTo("planning.owner"); + assertThat(dto.plannedCompletionAt()).isNotNull(); + assertThat(dto.blockageReason()).isNull(); + }); + } + + @Test + @WithMockOAuth2User(username = "user") + void shouldUpdateFeatureWithBlockedPlanningStatus() { + var payload = + """ + { + "title": "Blocked Feature", + "description": "Description", + "status": "ON_HOLD", + "featurePlanningStatus": "BLOCKED", + "blockageReason": "Waiting for external API" + } + """; + + var result = mvc.put() + .uri("/api/features/{code}", "IDEA-1") + .contentType(MediaType.APPLICATION_JSON) + .content(payload) + .exchange(); + assertThat(result).hasStatusOk(); + + var updatedFeature = mvc.get().uri("/api/features/{code}", "IDEA-1").exchange(); + assertThat(updatedFeature) + .hasStatusOk() + .bodyJson() + .convertTo(FeatureDto.class) + .satisfies(dto -> { + assertThat(dto.featurePlanningStatus()).isEqualTo(FeaturePlanningStatus.BLOCKED); + assertThat(dto.blockageReason()).isEqualTo("Waiting for external API"); + }); + } + + @Test + void shouldReturnPlanningFieldsInGetFeaturesByRelease() { + var result = mvc.get() + .uri("/api/features?releaseCode={code}", "IDEA-2023.3.8") + .exchange(); + assertThat(result) + .hasStatusOk() + .bodyJson() + .extractingPath("$.size()") + .asNumber() + .isEqualTo(2); + } + + @Test + @WithMockOAuth2User(username = "user") + void shouldReturnBadRequestForInvalidFeaturePlanningStatus() { + var payload = + """ + { + "title": "Feature with Invalid Status", + "description": "Feature with non-existent planning status", + "status": "NEW", + "featurePlanningStatus": "INVALID_STATUS" + } + """; + + var result = mvc.put() + .uri("/api/features/{code}", "IDEA-1") + .contentType(MediaType.APPLICATION_JSON) + .content(payload) + 
.exchange(); + + assertThat(result).hasStatus(HttpStatus.BAD_REQUEST); + } + + @Test + @WithMockOAuth2User(username = "user") + void shouldReturnBadRequestForBlankFeaturePlanningStatus() { + var payload = + """ + { + "title": "Feature with Invalid Status", + "description": "Feature with non-existent planning status", + "status": "NEW", + "featurePlanningStatus": " " + } + """; + + var result = mvc.put() + .uri("/api/features/{code}", "IDEA-1") + .contentType(MediaType.APPLICATION_JSON) + .content(payload) + .exchange(); + + assertThat(result).hasStatus(HttpStatus.BAD_REQUEST); + } + + @Test + @WithMockOAuth2User(username = "user") + void shouldReturnBadRequestForEmptyFeaturePlanningStatus() { + var payload = + """ + { + "title": "Feature with Invalid Status", + "description": "Feature with non-existent planning status", + "status": "NEW", + "featurePlanningStatus": "" + } + """; + + var result = mvc.put() + .uri("/api/features/{code}", "IDEA-1") + .contentType(MediaType.APPLICATION_JSON) + .content(payload) + .exchange(); + + assertThat(result).hasStatus(HttpStatus.BAD_REQUEST); + } } diff --git a/src/test/java/com/sivalabs/ft/features/domain/FeatureEntityTest.java b/src/test/java/com/sivalabs/ft/features/domain/FeatureEntityTest.java new file mode 100644 index 00000000..ff74f241 --- /dev/null +++ b/src/test/java/com/sivalabs/ft/features/domain/FeatureEntityTest.java @@ -0,0 +1,63 @@ +package com.sivalabs.ft.features.domain; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.sivalabs.ft.features.domain.entities.Feature; +import com.sivalabs.ft.features.domain.models.FeaturePlanningStatus; +import com.sivalabs.ft.features.domain.models.FeatureStatus; +import java.time.Instant; +import org.junit.jupiter.api.Test; + +class FeatureEntityTest { + + @Test + void shouldSetAndGetPlanningFields() { + Feature feature = new Feature(); + Instant planned = Instant.parse("2026-06-01T00:00:00Z"); + Instant actual = Instant.parse("2026-06-15T00:00:00Z"); + 
+ feature.setPlannedCompletionAt(planned); + feature.setActualCompletionAt(actual); + feature.setFeaturePlanningStatus(FeaturePlanningStatus.IN_PROGRESS); + feature.setFeatureOwner("owner.user"); + feature.setBlockageReason("Waiting for dependencies"); + + assertThat(feature.getPlannedCompletionAt()).isEqualTo(planned); + assertThat(feature.getActualCompletionAt()).isEqualTo(actual); + assertThat(feature.getFeaturePlanningStatus()).isEqualTo(FeaturePlanningStatus.IN_PROGRESS); + assertThat(feature.getFeatureOwner()).isEqualTo("owner.user"); + assertThat(feature.getBlockageReason()).isEqualTo("Waiting for dependencies"); + } + + @Test + void shouldHaveNullPlanningFieldsByDefault() { + Feature feature = new Feature(); + + assertThat(feature.getPlannedCompletionAt()).isNull(); + assertThat(feature.getActualCompletionAt()).isNull(); + assertThat(feature.getFeaturePlanningStatus()).isNull(); + assertThat(feature.getFeatureOwner()).isNull(); + assertThat(feature.getBlockageReason()).isNull(); + } + + @Test + void shouldSupportAllPlanningStatusValues() { + Feature feature = new Feature(); + + for (FeaturePlanningStatus status : FeaturePlanningStatus.values()) { + feature.setFeaturePlanningStatus(status); + assertThat(feature.getFeaturePlanningStatus()).isEqualTo(status); + } + } + + @Test + void shouldSetBlockedStatus() { + Feature feature = new Feature(); + feature.setStatus(FeatureStatus.ON_HOLD); + feature.setFeaturePlanningStatus(FeaturePlanningStatus.BLOCKED); + feature.setBlockageReason("External dependency not ready"); + + assertThat(feature.getFeaturePlanningStatus()).isEqualTo(FeaturePlanningStatus.BLOCKED); + assertThat(feature.getBlockageReason()).isEqualTo("External dependency not ready"); + } +} diff --git a/src/test/java/com/sivalabs/ft/features/domain/FeatureRepositoryTest.java b/src/test/java/com/sivalabs/ft/features/domain/FeatureRepositoryTest.java new file mode 100644 index 00000000..d02d6445 --- /dev/null +++ 
b/src/test/java/com/sivalabs/ft/features/domain/FeatureRepositoryTest.java @@ -0,0 +1,92 @@ +package com.sivalabs.ft.features.domain; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.sivalabs.ft.features.TestcontainersConfiguration; +import com.sivalabs.ft.features.domain.entities.Feature; +import com.sivalabs.ft.features.domain.models.FeaturePlanningStatus; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest; +import org.springframework.context.annotation.Import; + +@DataJpaTest +@Import(TestcontainersConfiguration.class) +class FeatureRepositoryTest { + + @Autowired + private FeatureRepository featureRepository; + + @Test + void shouldFindFeatureByCode() { + var feature = featureRepository.findByCode("IDEA-1"); + assertThat(feature).isPresent(); + assertThat(feature.get().getCode()).isEqualTo("IDEA-1"); + } + + @Test + void shouldPersistPlanningFields() { + Feature feature = featureRepository.findByCode("IDEA-1").orElseThrow(); + Instant planned = Instant.now().plus(30, ChronoUnit.DAYS).truncatedTo(ChronoUnit.MICROS); + Instant actual = Instant.now().plus(45, ChronoUnit.DAYS).truncatedTo(ChronoUnit.MICROS); + + feature.setPlannedCompletionAt(planned); + feature.setActualCompletionAt(actual); + feature.setFeaturePlanningStatus(FeaturePlanningStatus.IN_PROGRESS); + feature.setFeatureOwner("planning.owner"); + feature.setBlockageReason(null); + featureRepository.save(feature); + + Feature saved = featureRepository.findByCode("IDEA-1").orElseThrow(); + assertThat(saved.getPlannedCompletionAt()).isEqualTo(planned); + assertThat(saved.getActualCompletionAt()).isEqualTo(actual); + assertThat(saved.getFeaturePlanningStatus()).isEqualTo(FeaturePlanningStatus.IN_PROGRESS); + assertThat(saved.getFeatureOwner()).isEqualTo("planning.owner"); + 
assertThat(saved.getBlockageReason()).isNull(); + } + + @Test + void shouldPersistBlockedStatusWithReason() { + Feature feature = featureRepository.findByCode("IDEA-1").orElseThrow(); + feature.setFeaturePlanningStatus(FeaturePlanningStatus.BLOCKED); + feature.setBlockageReason("Waiting for API contract"); + featureRepository.save(feature); + + Feature saved = featureRepository.findByCode("IDEA-1").orElseThrow(); + assertThat(saved.getFeaturePlanningStatus()).isEqualTo(FeaturePlanningStatus.BLOCKED); + assertThat(saved.getBlockageReason()).isEqualTo("Waiting for API contract"); + } + + @Test + void shouldPersistDoneStatus() { + Feature feature = featureRepository.findByCode("GO-3").orElseThrow(); + Instant completedAt = Instant.now().truncatedTo(ChronoUnit.MICROS); + feature.setFeaturePlanningStatus(FeaturePlanningStatus.DONE); + feature.setActualCompletionAt(completedAt); + featureRepository.save(feature); + + Feature saved = featureRepository.findByCode("GO-3").orElseThrow(); + assertThat(saved.getFeaturePlanningStatus()).isEqualTo(FeaturePlanningStatus.DONE); + assertThat(saved.getActualCompletionAt()).isEqualTo(completedAt); + } + + @Test + void shouldSupportNullPlanningFields() { + Feature feature = featureRepository.findByCode("IDEA-1").orElseThrow(); + feature.setPlannedCompletionAt(null); + feature.setActualCompletionAt(null); + feature.setFeaturePlanningStatus(null); + feature.setFeatureOwner(null); + feature.setBlockageReason(null); + featureRepository.save(feature); + + Feature saved = featureRepository.findByCode("IDEA-1").orElseThrow(); + assertThat(saved.getPlannedCompletionAt()).isNull(); + assertThat(saved.getActualCompletionAt()).isNull(); + assertThat(saved.getFeaturePlanningStatus()).isNull(); + assertThat(saved.getFeatureOwner()).isNull(); + assertThat(saved.getBlockageReason()).isNull(); + } +}