Bot Detection #367
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # ___ _ _ | |
| # / _ \ | | (_) | |
| # | |_| | __ _ ___ _ __ | |_ _ ___ | |
| # | _ |/ _` |/ _ \ '_ \| __| |/ __| | |
| # | | | | (_| | __/ | | | |_| | (__ | |
| # \_| |_/\__, |\___|_| |_|\__|_|\___| | |
| # __/ | | |
| # _ _ |___/ | |
| # | | | | / _| | | |
| # | | | | ___ _ __ _ __| |_| | _____ ____ | |
| # | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| | |
| # \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ | |
| # \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ | |
| # | |
| # This file was automatically generated by gh-aw. DO NOT EDIT. | |
| # | |
| # To update this file, edit the corresponding .md file and run: | |
| # gh aw compile | |
| # Not all edits will cause changes to this file. | |
| # | |
| # For more information: https://github.github.com/gh-aw/introduction/overview/ | |
| # | |
| # Investigates suspicious repository activity and maintains a single triage issue | |
| # | |
| # gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"fd48550f1cc134361d5d1824e474b278a25c7d7713560fdb20546fc3c63eaf58","strict":true,"agent_id":"copilot"} | |
| name: "Bot Detection" | |
| "on": | |
| schedule: | |
| - cron: "25 */6 * * *" | |
| # Friendly format: every 6h (scattered) | |
| workflow_dispatch: | |
| inputs: | |
| aw_context: | |
| default: "" | |
| description: Agent caller context (used internally by Agentic Workflows). | |
| required: false | |
| type: string | |
| permissions: {} | |
| concurrency: | |
| group: "gh-aw-${{ github.workflow }}" | |
| run-name: "Bot Detection" | |
| jobs: | |
| activation: | |
| needs: precompute | |
| if: needs.precompute.outputs.action != 'none' | |
| runs-on: ubuntu-slim | |
| permissions: | |
| contents: read | |
| outputs: | |
| comment_id: "" | |
| comment_repo: "" | |
| lockdown_check_failed: ${{ steps.generate_aw_info.outputs.lockdown_check_failed == 'true' }} | |
| model: ${{ steps.generate_aw_info.outputs.model }} | |
| secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} | |
| steps: | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| - name: Generate agentic run info | |
| id: generate_aw_info | |
| env: | |
| GH_AW_INFO_ENGINE_ID: "copilot" | |
| GH_AW_INFO_ENGINE_NAME: "GitHub Copilot CLI" | |
| GH_AW_INFO_MODEL: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || 'auto' }} | |
| GH_AW_INFO_VERSION: "latest" | |
| GH_AW_INFO_AGENT_VERSION: "latest" | |
| GH_AW_INFO_WORKFLOW_NAME: "Bot Detection" | |
| GH_AW_INFO_EXPERIMENTAL: "false" | |
| GH_AW_INFO_SUPPORTS_TOOLS_ALLOWLIST: "true" | |
| GH_AW_INFO_STAGED: "false" | |
| GH_AW_INFO_ALLOWED_DOMAINS: '["defaults"]' | |
| GH_AW_INFO_FIREWALL_ENABLED: "true" | |
| GH_AW_INFO_AWF_VERSION: "v0.25.13" | |
| GH_AW_INFO_AWMG_VERSION: "" | |
| GH_AW_INFO_FIREWALL_TYPE: "squid" | |
| GH_AW_COMPILED_STRICT: "true" | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/generate_aw_info.cjs'); | |
| await main(core, context); | |
| - name: Validate COPILOT_GITHUB_TOKEN secret | |
| id: validate-secret | |
| run: ${RUNNER_TEMP}/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default | |
| env: | |
| COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | |
| - name: Checkout .github and .agents folders | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| persist-credentials: false | |
| sparse-checkout: | | |
| .github | |
| .agents | |
| actions/setup | |
| sparse-checkout-cone-mode: true | |
| fetch-depth: 1 | |
| - name: Check workflow lock file | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_WORKFLOW_FILE: "bot-detection.lock.yml" | |
| GH_AW_CONTEXT_WORKFLOW_REF: "${{ github.workflow_ref }}" | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/check_workflow_timestamp_api.cjs'); | |
| await main(); | |
| - name: Create prompt with built-in context | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_SAFE_OUTPUTS: ${{ runner.temp }}/gh-aw/safeoutputs/outputs.jsonl | |
| GH_AW_GITHUB_ACTOR: ${{ github.actor }} | |
| GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} | |
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} | |
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} | |
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} | |
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | |
| GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} | |
| GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ACTION: ${{ needs.precompute.outputs.action }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_BODY: ${{ needs.precompute.outputs.issue_body }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_NUMBER: ${{ needs.precompute.outputs.issue_number }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_TITLE: ${{ needs.precompute.outputs.issue_title }} | |
| # poutine:ignore untrusted_checkout_exec | |
| run: | | |
| bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh | |
| { | |
| cat << 'GH_AW_PROMPT_87df6df872752ee5_EOF' | |
| <system> | |
| GH_AW_PROMPT_87df6df872752ee5_EOF | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md" | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md" | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md" | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md" | |
| cat << 'GH_AW_PROMPT_87df6df872752ee5_EOF' | |
| <safe-output-tools> | |
| Tools: create_issue, update_issue, missing_tool, missing_data, noop | |
| </safe-output-tools> | |
| <github-context> | |
| The following GitHub context information is available for this workflow: | |
| {{#if __GH_AW_GITHUB_ACTOR__ }} | |
| - **actor**: __GH_AW_GITHUB_ACTOR__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_REPOSITORY__ }} | |
| - **repository**: __GH_AW_GITHUB_REPOSITORY__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_WORKSPACE__ }} | |
| - **workspace**: __GH_AW_GITHUB_WORKSPACE__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} | |
| - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} | |
| - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} | |
| - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} | |
| - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_RUN_ID__ }} | |
| - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ | |
| {{/if}} | |
| </github-context> | |
| GH_AW_PROMPT_87df6df872752ee5_EOF | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md" | |
| cat << 'GH_AW_PROMPT_87df6df872752ee5_EOF' | |
| </system> | |
| {{#runtime-import .github/workflows/bot-detection.md}} | |
| GH_AW_PROMPT_87df6df872752ee5_EOF | |
| } > "$GH_AW_PROMPT" | |
| - name: Interpolate variables and render templates | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ACTION: ${{ needs.precompute.outputs.action }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_BODY: ${{ needs.precompute.outputs.issue_body }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_NUMBER: ${{ needs.precompute.outputs.issue_number }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_TITLE: ${{ needs.precompute.outputs.issue_title }} | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/interpolate_prompt.cjs'); | |
| await main(); | |
| - name: Substitute placeholders | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_GITHUB_ACTOR: ${{ github.actor }} | |
| GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} | |
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} | |
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} | |
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} | |
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | |
| GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} | |
| GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ACTION: ${{ needs.precompute.outputs.action }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_BODY: ${{ needs.precompute.outputs.issue_body }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_NUMBER: ${{ needs.precompute.outputs.issue_number }} | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_TITLE: ${{ needs.precompute.outputs.issue_title }} | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const substitutePlaceholders = require('${{ runner.temp }}/gh-aw/actions/substitute_placeholders.cjs'); | |
| // Call the substitution function | |
| return await substitutePlaceholders({ | |
| file: process.env.GH_AW_PROMPT, | |
| substitutions: { | |
| GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, | |
| GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, | |
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, | |
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, | |
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, | |
| GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, | |
| GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, | |
| GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ACTION: process.env.GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ACTION, | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_BODY: process.env.GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_BODY, | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_NUMBER: process.env.GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_NUMBER, | |
| GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_TITLE: process.env.GH_AW_NEEDS_PRECOMPUTE_OUTPUTS_ISSUE_TITLE | |
| } | |
| }); | |
| - name: Validate prompt placeholders | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| # poutine:ignore untrusted_checkout_exec | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/validate_prompt_placeholders.sh | |
| - name: Print prompt | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| # poutine:ignore untrusted_checkout_exec | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/print_prompt_summary.sh | |
| - name: Upload activation artifact | |
| if: success() | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: activation | |
| path: | | |
| /tmp/gh-aw/aw_info.json | |
| /tmp/gh-aw/aw-prompts/prompt.txt | |
| retention-days: 1 | |
| agent: | |
| needs: | |
| - activation | |
| - precompute | |
| if: needs.precompute.outputs.action != 'none' | |
| runs-on: ubuntu-latest | |
| permissions: | |
| actions: read | |
| contents: read | |
| issues: read | |
| pull-requests: read | |
| concurrency: | |
| group: "gh-aw-copilot-${{ github.workflow }}" | |
| env: | |
| DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} | |
| GH_AW_ASSETS_ALLOWED_EXTS: "" | |
| GH_AW_ASSETS_BRANCH: "" | |
| GH_AW_ASSETS_MAX_SIZE_KB: 0 | |
| GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs | |
| GH_AW_WORKFLOW_ID_SANITIZED: botdetection | |
| outputs: | |
| checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} | |
| effective_tokens: ${{ steps.parse-mcp-gateway.outputs.effective_tokens }} | |
| has_patch: ${{ steps.collect_output.outputs.has_patch }} | |
| inference_access_error: ${{ steps.detect-inference-error.outputs.inference_access_error || 'false' }} | |
| model: ${{ needs.activation.outputs.model }} | |
| output: ${{ steps.collect_output.outputs.output }} | |
| output_types: ${{ steps.collect_output.outputs.output_types }} | |
| steps: | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| - name: Set runtime paths | |
| id: set-runtime-paths | |
| run: | | |
| echo "GH_AW_SAFE_OUTPUTS=${RUNNER_TEMP}/gh-aw/safeoutputs/outputs.jsonl" >> "$GITHUB_OUTPUT" | |
| echo "GH_AW_SAFE_OUTPUTS_CONFIG_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" >> "$GITHUB_OUTPUT" | |
| echo "GH_AW_SAFE_OUTPUTS_TOOLS_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/tools.json" >> "$GITHUB_OUTPUT" | |
| - name: Checkout repository | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| persist-credentials: false | |
| - name: Create gh-aw temp directory | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/create_gh_aw_tmp_dir.sh | |
| - name: Configure gh CLI for GitHub Enterprise | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/configure_gh_for_ghe.sh | |
| env: | |
| GH_TOKEN: ${{ github.token }} | |
| - name: Configure Git credentials | |
| env: | |
| REPO_NAME: ${{ github.repository }} | |
| SERVER_URL: ${{ github.server_url }} | |
| run: | | |
| git config --global user.email "github-actions[bot]@users.noreply.github.com" | |
| git config --global user.name "github-actions[bot]" | |
| git config --global am.keepcr true | |
| # Re-authenticate git with GitHub token | |
| SERVER_URL_STRIPPED="${SERVER_URL#https://}" | |
| git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" | |
| echo "Git configured with standard GitHub Actions identity" | |
| - name: Checkout PR branch | |
| id: checkout-pr | |
| if: | | |
| github.event.pull_request || github.event.issue.pull_request | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| with: | |
| github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/checkout_pr_branch.cjs'); | |
| await main(); | |
| - name: Install GitHub Copilot CLI | |
| run: ${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh latest | |
| - name: Install AWF binary | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh v0.25.13 | |
| - name: Determine automatic lockdown mode for GitHub MCP Server | |
| id: determine-automatic-lockdown | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} | |
| GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} | |
| with: | |
| script: | | |
| const determineAutomaticLockdown = require('${{ runner.temp }}/gh-aw/actions/determine_automatic_lockdown.cjs'); | |
| await determineAutomaticLockdown(github, context, core); | |
| - name: Download container images | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.25.13 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.13 ghcr.io/github/gh-aw-firewall/squid:0.25.13 ghcr.io/github/gh-aw-mcpg:v0.2.12 ghcr.io/github/github-mcp-server:v0.32.0 node:lts-alpine | |
| - name: Write Safe Outputs Config | |
| run: | | |
| mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs | |
| mkdir -p /tmp/gh-aw/safeoutputs | |
| mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs | |
| cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_9b11c10810696f2f_EOF' | |
| {"create_issue":{"labels":["security","bot-detection"],"max":1},"mentions":{"allowed":["pelikhan"]},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"},"update_issue":{"allow_body":true,"max":1,"target":"*"}} | |
| GH_AW_SAFE_OUTPUTS_CONFIG_9b11c10810696f2f_EOF | |
| - name: Write Safe Outputs Tools | |
| run: | | |
| cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_06c03da36d7fe1ee_EOF' | |
| { | |
| "description_suffixes": { | |
| "create_issue": " CONSTRAINTS: Maximum 1 issue(s) can be created. Labels [\"security\" \"bot-detection\"] will be automatically added.", | |
| "update_issue": " CONSTRAINTS: Maximum 1 issue(s) can be updated. Target: *. Body updates are allowed." | |
| }, | |
| "repo_params": {}, | |
| "dynamic_tools": [] | |
| } | |
| GH_AW_SAFE_OUTPUTS_TOOLS_META_06c03da36d7fe1ee_EOF | |
| cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_2c8694789036f98b_EOF' | |
| { | |
| "create_issue": { | |
| "defaultMax": 1, | |
| "fields": { | |
| "body": { | |
| "required": true, | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 65000 | |
| }, | |
| "labels": { | |
| "type": "array", | |
| "itemType": "string", | |
| "itemSanitize": true, | |
| "itemMaxLength": 128 | |
| }, | |
| "parent": { | |
| "issueOrPRNumber": true | |
| }, | |
| "repo": { | |
| "type": "string", | |
| "maxLength": 256 | |
| }, | |
| "temporary_id": { | |
| "type": "string" | |
| }, | |
| "title": { | |
| "required": true, | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 128 | |
| } | |
| } | |
| }, | |
| "missing_data": { | |
| "defaultMax": 20, | |
| "fields": { | |
| "alternatives": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 256 | |
| }, | |
| "context": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 256 | |
| }, | |
| "data_type": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 128 | |
| }, | |
| "reason": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 256 | |
| } | |
| } | |
| }, | |
| "missing_tool": { | |
| "defaultMax": 20, | |
| "fields": { | |
| "alternatives": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 512 | |
| }, | |
| "reason": { | |
| "required": true, | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 256 | |
| }, | |
| "tool": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 128 | |
| } | |
| } | |
| }, | |
| "noop": { | |
| "defaultMax": 1, | |
| "fields": { | |
| "message": { | |
| "required": true, | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 65000 | |
| } | |
| } | |
| }, | |
| "update_issue": { | |
| "defaultMax": 1, | |
| "fields": { | |
| "assignees": { | |
| "type": "array", | |
| "itemType": "string", | |
| "itemSanitize": true, | |
| "itemMaxLength": 39 | |
| }, | |
| "body": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 65000 | |
| }, | |
| "issue_number": { | |
| "issueOrPRNumber": true | |
| }, | |
| "labels": { | |
| "type": "array", | |
| "itemType": "string", | |
| "itemSanitize": true, | |
| "itemMaxLength": 128 | |
| }, | |
| "milestone": { | |
| "optionalPositiveInteger": true | |
| }, | |
| "operation": { | |
| "type": "string", | |
| "enum": [ | |
| "replace", | |
| "append", | |
| "prepend", | |
| "replace-island" | |
| ] | |
| }, | |
| "repo": { | |
| "type": "string", | |
| "maxLength": 256 | |
| }, | |
| "status": { | |
| "type": "string", | |
| "enum": [ | |
| "open", | |
| "closed" | |
| ] | |
| }, | |
| "title": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 128 | |
| } | |
| }, | |
| "customValidation": "requiresOneOf:status,title,body" | |
| } | |
| } | |
| GH_AW_SAFE_OUTPUTS_VALIDATION_2c8694789036f98b_EOF | |
| node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs | |
| - name: Generate Safe Outputs MCP Server Config | |
| id: safe-outputs-config | |
| run: | | |
| # Generate a secure random API key (360 bits of entropy, 40+ chars) | |
| # Mask immediately to prevent timing vulnerabilities | |
| API_KEY=$(openssl rand -base64 45 | tr -d '/+=') | |
| echo "::add-mask::${API_KEY}" | |
| PORT=3001 | |
| # Set outputs for next steps | |
| { | |
| echo "safe_outputs_api_key=${API_KEY}" | |
| echo "safe_outputs_port=${PORT}" | |
| } >> "$GITHUB_OUTPUT" | |
| echo "Safe Outputs MCP server will run on port ${PORT}" | |
| - name: Start Safe Outputs MCP HTTP Server | |
| id: safe-outputs-start | |
| env: | |
| DEBUG: '*' | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} | |
| GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} | |
| GH_AW_SAFE_OUTPUTS_TOOLS_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/tools.json | |
| GH_AW_SAFE_OUTPUTS_CONFIG_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/config.json | |
| GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs | |
| run: | | |
| # Environment variables are set above to prevent template injection | |
| export DEBUG | |
| export GH_AW_SAFE_OUTPUTS | |
| export GH_AW_SAFE_OUTPUTS_PORT | |
| export GH_AW_SAFE_OUTPUTS_API_KEY | |
| export GH_AW_SAFE_OUTPUTS_TOOLS_PATH | |
| export GH_AW_SAFE_OUTPUTS_CONFIG_PATH | |
| export GH_AW_MCP_LOG_DIR | |
| bash ${RUNNER_TEMP}/gh-aw/actions/start_safe_outputs_server.sh | |
| - name: Start MCP Gateway | |
| id: start-mcp-gateway | |
| env: | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} | |
| GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} | |
| GITHUB_MCP_GUARD_MIN_INTEGRITY: ${{ steps.determine-automatic-lockdown.outputs.min_integrity }} | |
| GITHUB_MCP_GUARD_REPOS: ${{ steps.determine-automatic-lockdown.outputs.repos }} | |
| GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| run: | | |
| set -eo pipefail | |
| mkdir -p /tmp/gh-aw/mcp-config | |
| # Export gateway environment variables for MCP config and gateway script | |
| export MCP_GATEWAY_PORT="80" | |
| export MCP_GATEWAY_DOMAIN="host.docker.internal" | |
| MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') | |
| echo "::add-mask::${MCP_GATEWAY_API_KEY}" | |
| export MCP_GATEWAY_API_KEY | |
| export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" | |
| mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" | |
| export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" | |
| export DEBUG="*" | |
| export GH_AW_ENGINE="copilot" | |
| export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.12' | |
| mkdir -p /home/runner/.copilot | |
| cat << GH_AW_MCP_CONFIG_fbf069f346e13146_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh | |
| { | |
| "mcpServers": { | |
| "github": { | |
| "type": "stdio", | |
| "container": "ghcr.io/github/github-mcp-server:v0.32.0", | |
| "env": { | |
| "GITHUB_HOST": "\${GITHUB_SERVER_URL}", | |
| "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", | |
| "GITHUB_READ_ONLY": "1", | |
| "GITHUB_TOOLSETS": "context,repos,issues,pull_requests" | |
| }, | |
| "guard-policies": { | |
| "allow-only": { | |
| "min-integrity": "$GITHUB_MCP_GUARD_MIN_INTEGRITY", | |
| "repos": "$GITHUB_MCP_GUARD_REPOS" | |
| } | |
| } | |
| }, | |
| "safeoutputs": { | |
| "type": "http", | |
| "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", | |
| "headers": { | |
| "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" | |
| }, | |
| "guard-policies": { | |
| "write-sink": { | |
| "accept": [ | |
| "*" | |
| ] | |
| } | |
| } | |
| } | |
| }, | |
| "gateway": { | |
| "port": $MCP_GATEWAY_PORT, | |
| "domain": "${MCP_GATEWAY_DOMAIN}", | |
| "apiKey": "${MCP_GATEWAY_API_KEY}", | |
| "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" | |
| } | |
| } | |
| GH_AW_MCP_CONFIG_fbf069f346e13146_EOF | |
| - name: Download activation artifact | |
| uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 | |
| with: | |
| name: activation | |
| path: /tmp/gh-aw | |
| - name: Clean git credentials | |
| continue-on-error: true | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/clean_git_credentials.sh | |
| - name: Execute GitHub Copilot CLI | |
| id: agentic_execution | |
| # Copilot CLI tool arguments (sorted): | |
| timeout-minutes: 10 | |
| run: | | |
| set -o pipefail | |
| touch /tmp/gh-aw/agent-step-summary.md | |
| # shellcheck disable=SC1003 | |
| sudo -E awf --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --env-all --exclude-env COPILOT_GITHUB_TOKEN --exclude-env GITHUB_MCP_SERVER_TOKEN --exclude-env MCP_GATEWAY_API_KEY --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.13 --skip-pull --enable-api-proxy \ | |
| -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-all-tools --allow-all-paths --add-dir "${GITHUB_WORKSPACE}" --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log | |
| env: | |
| COPILOT_AGENT_RUNNER_TYPE: STANDALONE | |
| COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | |
| COPILOT_MODEL: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} | |
| GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json | |
| GH_AW_PHASE: agent | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| GH_AW_VERSION: dev | |
| GITHUB_API_URL: ${{ github.api_url }} | |
| GITHUB_AW: true | |
| GITHUB_HEAD_REF: ${{ github.head_ref }} | |
| GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| GITHUB_REF_NAME: ${{ github.ref_name }} | |
| GITHUB_SERVER_URL: ${{ github.server_url }} | |
| GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md | |
| GITHUB_WORKSPACE: ${{ github.workspace }} | |
| GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com | |
| GIT_AUTHOR_NAME: github-actions[bot] | |
| GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com | |
| GIT_COMMITTER_NAME: github-actions[bot] | |
| XDG_CONFIG_HOME: /home/runner | |
| - name: Detect inference access error | |
| id: detect-inference-error | |
| if: always() | |
| continue-on-error: true | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/detect_inference_access_error.sh | |
| - name: Configure Git credentials | |
| env: | |
| REPO_NAME: ${{ github.repository }} | |
| SERVER_URL: ${{ github.server_url }} | |
| run: | | |
| git config --global user.email "github-actions[bot]@users.noreply.github.com" | |
| git config --global user.name "github-actions[bot]" | |
| git config --global am.keepcr true | |
| # Re-authenticate git with GitHub token | |
| SERVER_URL_STRIPPED="${SERVER_URL#https://}" | |
| git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" | |
| echo "Git configured with standard GitHub Actions identity" | |
| - name: Copy Copilot session state files to logs | |
| if: always() | |
| continue-on-error: true | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/copy_copilot_session_state.sh | |
| - name: Stop MCP Gateway | |
| if: always() | |
| continue-on-error: true | |
| env: | |
| MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} | |
| MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} | |
| GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} | |
| run: | | |
| bash ${RUNNER_TEMP}/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" | |
| - name: Redact secrets in logs | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/redact_secrets.cjs'); | |
| await main(); | |
| env: | |
| GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' | |
| SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | |
| SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} | |
| SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} | |
| SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| - name: Append agent step summary | |
| if: always() | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/append_agent_step_summary.sh | |
| - name: Copy Safe Outputs | |
| if: always() | |
| env: | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| run: | | |
| mkdir -p /tmp/gh-aw | |
| cp "$GH_AW_SAFE_OUTPUTS" /tmp/gh-aw/safeoutputs.jsonl 2>/dev/null || true | |
| - name: Ingest agent output | |
| id: collect_output | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com" | |
| GITHUB_SERVER_URL: ${{ github.server_url }} | |
| GITHUB_API_URL: ${{ github.api_url }} | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/collect_ndjson_output.cjs'); | |
| await main(); | |
| - name: Parse agent logs for step summary | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_copilot_log.cjs'); | |
| await main(); | |
| - name: Parse MCP Gateway logs for step summary | |
| if: always() | |
| id: parse-mcp-gateway | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_mcp_gateway_log.cjs'); | |
| await main(); | |
| - name: Print firewall logs | |
| if: always() | |
| continue-on-error: true | |
| env: | |
| AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs | |
| run: | | |
| # Fix permissions on firewall logs so they can be uploaded as artifacts | |
| # AWF runs with sudo, creating files owned by root | |
| sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true | |
| # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) | |
| if command -v awf &> /dev/null; then | |
| awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" | |
| else | |
| echo 'AWF binary not installed, skipping firewall log summary' | |
| fi | |
| - name: Parse token usage for step summary | |
| if: always() | |
| continue-on-error: true | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/parse_token_usage.sh | |
| - name: Write agent output placeholder if missing | |
| if: always() | |
| run: | | |
| if [ ! -f /tmp/gh-aw/agent_output.json ]; then | |
| echo '{"items":[]}' > /tmp/gh-aw/agent_output.json | |
| fi | |
| - name: Upload agent artifacts | |
| if: always() | |
| continue-on-error: true | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: agent | |
| path: | | |
| /tmp/gh-aw/aw-prompts/prompt.txt | |
| /tmp/gh-aw/sandbox/agent/logs/ | |
| /tmp/gh-aw/redacted-urls.log | |
| /tmp/gh-aw/mcp-logs/ | |
| /tmp/gh-aw/agent-stdio.log | |
| /tmp/gh-aw/agent/ | |
| /tmp/gh-aw/safeoutputs.jsonl | |
| /tmp/gh-aw/agent_output.json | |
| if-no-files-found: ignore | |
| - name: Upload firewall audit logs | |
| if: always() | |
| continue-on-error: true | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: firewall-audit-logs | |
| path: | | |
| /tmp/gh-aw/sandbox/firewall/logs/ | |
| /tmp/gh-aw/sandbox/firewall/audit/ | |
| if-no-files-found: ignore | |
  # Post-run reporting job: summarizes agent output, records missing tools,
  # and surfaces agent failures as issues. Runs after the main pipeline.
  conclusion:
    needs:
      - activation
      - agent
      - precompute
      - safe_outputs
    # always() keeps this job eligible even when upstream jobs failed or were
    # cancelled; the extra condition skips it only when the agent never ran
    # at all (unless the lockdown check itself failed and must be reported).
    if: always() && (needs.agent.result != 'skipped' || needs.activation.outputs.lockdown_check_failed == 'true')
    runs-on: ubuntu-slim
    permissions:
      contents: read
      # issues: write is needed so the no-op / missing-tool / failure
      # handlers below can create or update report issues.
      issues: write
    concurrency:
      # Serialize conclusion runs per workflow; never cancel an in-flight
      # report, so every run's outcome gets recorded.
      group: "gh-aw-conclusion-bot-detection"
      cancel-in-progress: false
    outputs:
      noop_message: ${{ steps.noop.outputs.noop_message }}
      tools_reported: ${{ steps.missing_tool.outputs.tools_reported }}
      total_count: ${{ steps.missing_tool.outputs.total_count }}
    steps:
      # Fetch only the shared gh-aw "actions" helper folder (sparse checkout)
      # without persisting credentials into the git config.
      - name: Checkout actions folder
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          repository: github/gh-aw
          sparse-checkout: |
            actions
          persist-credentials: false
      - name: Setup Scripts
        uses: ./actions/setup
        with:
          destination: ${{ runner.temp }}/gh-aw/actions
      # The artifact may not exist if the agent job failed early;
      # continue-on-error lets later steps handle the missing-output case.
      - name: Download agent output artifact
        id: download-agent-output
        continue-on-error: true
        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
        with:
          name: agent
          path: /tmp/gh-aw/
      # Only exported when the download succeeded; downstream steps read
      # GH_AW_AGENT_OUTPUT and tolerate it being empty.
      - name: Setup agent output environment variable
        id: setup-agent-output-env
        if: steps.download-agent-output.outcome == 'success'
        run: |
          mkdir -p /tmp/gh-aw/
          find "/tmp/gh-aw/" -type f -print
          echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT"
      # Handles "nothing to report" runs: at most 1 no-op message, reported
      # as an issue (GH_AW_NOOP_REPORT_AS_ISSUE).
      - name: Process No-Op Messages
        id: noop
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        env:
          GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
          GH_AW_NOOP_MAX: "1"
          GH_AW_WORKFLOW_NAME: "Bot Detection"
          GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
          GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
          GH_AW_NOOP_REPORT_AS_ISSUE: "true"
        with:
          # Prefer the dedicated gh-aw token; fall back to the default job token.
          github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
          script: |
            const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
            setupGlobals(core, github, context, exec, io);
            const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_noop_message.cjs');
            await main();
      # Records any tools the agent reported as missing, optionally opening
      # an issue so they can be added to the workflow's toolset.
      - name: Record Missing Tool
        id: missing_tool
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        env:
          GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
          GH_AW_MISSING_TOOL_CREATE_ISSUE: "true"
          GH_AW_WORKFLOW_NAME: "Bot Detection"
        with:
          github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
          script: |
            const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
            setupGlobals(core, github, context, exec, io);
            const { main } = require('${{ runner.temp }}/gh-aw/actions/missing_tool.cjs');
            await main();
      # Always runs (even after earlier step failures) so agent failures are
      # reported as issues with full diagnostic context from upstream jobs.
      - name: Handle Agent Failure
        id: handle_agent_failure
        if: always()
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        env:
          GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }}
          GH_AW_WORKFLOW_NAME: "Bot Detection"
          GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
          GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }}
          GH_AW_WORKFLOW_ID: "bot-detection"
          GH_AW_ENGINE_ID: "copilot"
          GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.activation.outputs.secret_verification_result }}
          GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }}
          GH_AW_INFERENCE_ACCESS_ERROR: ${{ needs.agent.outputs.inference_access_error }}
          GH_AW_LOCKDOWN_CHECK_FAILED: ${{ needs.activation.outputs.lockdown_check_failed }}
          GH_AW_GROUP_REPORTS: "false"
          GH_AW_FAILURE_REPORT_AS_ISSUE: "true"
          GH_AW_TIMEOUT_MINUTES: "10"
        with:
          github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
          script: |
            const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
            setupGlobals(core, github, context, exec, io);
            const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_agent_failure.cjs');
            await main();
| precompute: | |
| runs-on: ubuntu-latest | |
| permissions: | |
| actions: read | |
| contents: read | |
| issues: read | |
| pull-requests: read | |
| outputs: | |
| action: ${{ steps.precompute.outputs.action }} | |
| issue_body: ${{ steps.precompute.outputs.issue_body }} | |
| issue_number: ${{ steps.precompute.outputs.issue_number }} | |
| issue_title: ${{ steps.precompute.outputs.issue_title }} | |
| steps: | |
| - name: Configure GH_HOST for enterprise compatibility | |
| id: ghes-host-config | |
| shell: bash | |
| run: | | |
| # Derive GH_HOST from GITHUB_SERVER_URL so the gh CLI targets the correct | |
| # GitHub instance (GHES/GHEC). On github.com this is a harmless no-op. | |
| GH_HOST="${GITHUB_SERVER_URL#https://}" | |
| GH_HOST="${GH_HOST#http://}" | |
| echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV" | |
| - name: Precompute deterministic findings | |
| id: precompute | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { owner, repo } = context.repo; | |
| const HOURS_BACK = 6; | |
| const ISSUE_TITLE = "🔎 Activity Signals: Review Queue"; | |
| const MIN_ACCOUNT_AGE_DAYS = 14; | |
| const MAX_PR = 50; | |
| const MAX_COMMENT_EXAMPLES = 10; | |
| const MAX_TOUCHED_FILES = 10; | |
| const ALLOWED_DOMAINS = new Set([ | |
| // GitHub docs + blog | |
| "docs.github.com", | |
| "github.blog", | |
| // Marketplace + package registries | |
| "marketplace.visualstudio.com", | |
| "npmjs.com", | |
| "pkg.go.dev", | |
| // Language vendor sites | |
| "golang.org", | |
| "go.dev", | |
| "nodejs.org", | |
| ]); | |
| const ALLOWED_ACCOUNTS = new Set([ | |
| // Bots and service accounts | |
| "github-actions[bot]", | |
| "dependabot[bot]", | |
| "renovate[bot]", | |
| "copilot", | |
| "copilot-swe-agent", | |
| ]); | |
| const TRUSTED_ORGS = [owner]; | |
| const MEMBER_ACCOUNTS = new Set(); | |
/**
 * Read an environment variable expected to contain a JSON array.
 * @param {string} envName - name of the environment variable to read
 * @returns {Array} the parsed array; [] when unset, malformed, or not an array
 */
function parseJsonList(envName) {
  const raw = process.env[envName];
  if (!raw) return [];
  try {
    const value = JSON.parse(raw);
    if (Array.isArray(value)) return value;
  } catch {
    // Malformed JSON is treated the same as "not configured".
  }
  return [];
}
/** Convert any Date-constructible value to an ISO-8601 UTC string. */
function toISO(d) {
  const date = new Date(d);
  return date.toISOString();
}
/**
 * Normalize free text for near-duplicate detection: URLs removed,
 * lowercased, whitespace collapsed, trimmed, capped at 240 chars.
 * Falsy input yields "".
 * @param {*} s - arbitrary text (or anything stringifiable)
 * @returns {string} the normalized fingerprint
 */
function normalizeForDup(s) {
  const base = (s || "").toString();
  const withoutUrls = base.replace(/https?:\/\/\S+/g, "");
  const collapsed = withoutUrls.toLowerCase().replace(/\s+/g, " ").trim();
  return collapsed.slice(0, 240);
}
/**
 * Extract lowercase hostnames from every http(s) URL found in free text.
 * Trailing sentence punctuation (".", ",", "!", "'", ...) is stripped from
 * each match before parsing: the URL regex's [^\s)\]]+ tail would otherwise
 * keep it, yielding bogus distinct hostnames (e.g. "evil.example,") or
 * failed parses, which breaks exact-host correlation downstream.
 * @param {string} text - free-form text (e.g. issue/PR title + body)
 * @returns {string[]} hostnames in order of appearance (duplicates kept)
 */
function extractDomains(text) {
  const domains = [];
  const urlRe = /https?:\/\/[^\s)\]]+/g;
  const matches = text.match(urlRe) || [];
  for (const raw of matches) {
    // Trim punctuation that belongs to the surrounding prose, not the URL.
    const candidate = raw.replace(/[.,;:!?'"]+$/, "");
    try {
      const u = new URL(candidate);
      domains.push(u.hostname.toLowerCase());
    } catch {
      // ignore parse failures
    }
  }
  return domains;
}
/**
 * Decide whether a hostname counts as "external" (neither a first-party
 * GitHub host nor in the script-level ALLOWED_DOMAINS allowlist).
 * @param {string} host - lowercase hostname
 * @returns {boolean|string} truthy when external; falsy otherwise
 */
function isExternalDomain(host) {
  // First-party GitHub hosts never count as external.
  const githubHosts = [
    "github.com",
    "raw.githubusercontent.com",
    "avatars.githubusercontent.com",
    "api.github.com",
  ];
  return host && !githubHosts.includes(host) && !ALLOWED_DOMAINS.has(host);
}
/**
 * Check whether a login is allowlisted (bot/service account or a known
 * member/contributor). Comparison is case-insensitive; both backing sets
 * hold lowercase logins.
 * @param {*} login - GitHub login (may be null/undefined)
 * @returns {boolean} true when the account should be skipped by scoring
 */
function isAllowedAccount(login) {
  const name = String(login || "").toLowerCase();
  if (ALLOWED_ACCOUNTS.has(name)) return true;
  return MEMBER_ACCOUNTS.has(name);
}
/**
 * Add every repository collaborator (lowercased login) to MEMBER_ACCOUNTS.
 * Best-effort: any API failure leaves the allowlist unchanged.
 */
async function loadMemberAccounts() {
  try {
    const page = await github.paginate(github.rest.repos.listCollaborators, {
      owner,
      repo,
      per_page: 100,
    });
    page
      .map(entry => entry?.login)
      .filter(Boolean)
      .forEach(login => MEMBER_ACCOUNTS.add(String(login).toLowerCase()));
  } catch {
    // A failed lookup just means a smaller allowlist.
  }
}
/**
 * Add every repository contributor (lowercased login) to MEMBER_ACCOUNTS.
 * Best-effort: any API failure leaves the allowlist unchanged.
 */
async function loadContributorAccounts() {
  try {
    const page = await github.paginate(github.rest.repos.listContributors, {
      owner,
      repo,
      per_page: 100,
    });
    for (const entry of page) {
      const login = entry?.login;
      if (!login) continue;
      MEMBER_ACCOUNTS.add(String(login).toLowerCase());
    }
  } catch {
    // A failed lookup just means a smaller allowlist.
  }
}
/**
 * Add every member of each trusted org (lowercased login) to
 * MEMBER_ACCOUNTS. Each org is fetched independently so one failing
 * lookup does not block the others.
 */
async function loadOrgMembers() {
  for (const org of TRUSTED_ORGS) {
    try {
      const page = await github.paginate(github.rest.orgs.listMembers, {
        org,
        per_page: 100,
      });
      page
        .map(entry => entry?.login)
        .filter(Boolean)
        .forEach(login => MEMBER_ACCOUNTS.add(String(login).toLowerCase()));
    } catch {
      // Continue with the remaining orgs on failure.
    }
  }
}
/**
 * Check whether a hostname is a known URL shortener (shortened links
 * hide their true destination, a common spam/scam signal).
 * @param {string} host - lowercase hostname
 * @returns {boolean} true when the host is a shortener service
 */
function isShortener(host) {
  return ["bit.ly", "tinyurl.com", "t.co", "is.gd", "goo.gl"].includes(host);
}
/**
 * Check whether a URL points to a binary/archive download hosted outside
 * GitHub (external host per isExternalDomain AND a known binary extension).
 * Unparseable strings are never flagged.
 * @param {string} urlStr - candidate URL text
 * @returns {boolean} true when the URL looks like an off-GitHub binary link
 */
function isNonGitHubBinaryUrl(urlStr) {
  const binaryExtensions = [".exe", ".msi", ".pkg", ".dmg", ".zip", ".tar.gz"];
  try {
    const parsed = new URL(urlStr);
    if (!isExternalDomain(parsed.hostname.toLowerCase())) return false;
    const path = parsed.pathname.toLowerCase();
    return binaryExtensions.some(ext => path.endsWith(ext));
  } catch {
    return false;
  }
}
/**
 * Fetch this workflow run's own creation timestamp, used as the
 * deterministic end of the analysis window (stable across reruns).
 * @returns {Promise<Date>} the run's created_at as a Date
 */
async function getRunCreatedAt() {
  const response = await github.rest.actions.getWorkflowRun({
    owner,
    repo,
    run_id: context.runId,
  });
  return new Date(response.data.created_at);
}
| const end = await getRunCreatedAt(); | |
| const start = new Date(end.getTime() - HOURS_BACK * 60 * 60 * 1000); | |
| for (const domain of parseJsonList("BOT_DETECTION_ALLOWED_DOMAINS")) { | |
| if (domain) ALLOWED_DOMAINS.add(String(domain).toLowerCase()); | |
| } | |
| await loadMemberAccounts(); | |
| await loadContributorAccounts(); | |
| await loadOrgMembers(); | |
| // Search issues + PRs updated in window (API requires is:issue or is:pull-request) | |
| const qBase = `repo:${owner}/${repo} updated:>=${toISO(start)}`; | |
| const rawItems = []; | |
| for (const scope of ["is:issue", "is:pull-request"]) { | |
| const search = await github.rest.search.issuesAndPullRequests({ | |
| q: `${qBase} ${scope}`, | |
| per_page: 100, | |
| sort: "updated", | |
| order: "desc", | |
| }); | |
| rawItems.push(...(search.data.items || [])); | |
| } | |
| const seen = new Set(); | |
| let skippedNoLogin = 0; | |
| let skippedAllowed = 0; | |
| const skippedAllowedLogins = new Set(); | |
| const MAX_LOGGED_SKIPPED = 10; | |
| const items = rawItems | |
| .filter(i => new Date(i.updated_at) >= start && new Date(i.updated_at) <= end) | |
| .map(i => ({ | |
| number: i.number, | |
| title: i.title || "", | |
| body: i.body || "", | |
| url: i.html_url, | |
| created_at: i.created_at, | |
| updated_at: i.updated_at, | |
| is_pr: Boolean(i.pull_request), | |
| author: i.user?.login || "", | |
| })) | |
| .filter(i => { | |
| if (seen.has(i.url)) return false; | |
| seen.add(i.url); | |
| return true; | |
| }); | |
| // Deterministic ordering for any downstream processing | |
| items.sort((a, b) => { | |
| const at = a.updated_at.localeCompare(b.updated_at); | |
| if (at !== 0) return at; | |
| const an = a.number - b.number; | |
| if (an !== 0) return an; | |
| return a.url.localeCompare(b.url); | |
| }); | |
| // Collect per-author signals | |
| const perAuthor = new Map(); | |
| const domainAccounts = new Map(); // domain -> Set(logins) | |
| const userCreatedAt = new Map(); | |
/**
 * Cache a user's account-creation date in the script-level userCreatedAt
 * map. A failed lookup is cached as null so it is never retried.
 * Empty/missing logins and already-cached logins are no-ops.
 * @param {string} login - GitHub login to resolve
 */
async function ensureUserCreatedAt(login) {
  if (!login) return;
  if (userCreatedAt.has(login)) return;
  let createdAt = null;
  try {
    const response = await github.rest.users.getByUsername({ username: login });
    createdAt = new Date(response.data.created_at);
  } catch {
    // Lookup failed (deleted user, rate limit, ...); remember the failure.
  }
  userCreatedAt.set(login, createdAt);
}
/**
 * Get the per-author signal accumulator for a login, lazily creating a
 * zeroed record in the script-level perAuthor map on first sight.
 * @param {string} login - GitHub login
 * @returns {object} the (possibly new) mutable accumulator record
 */
function ensureAuthor(login) {
  const existing = perAuthor.get(login);
  if (existing) return existing;
  const fresh = {
    login,
    itemCount: 0,
    prCount: 0,
    issueCount: 0,
    commentCount: 0,
    reviewCount: 0,
    accountAgeDays: null, // stays null until the user lookup resolves
    externalDomains: new Set(),
    hasShortener: false,
    hasNonGitHubBinary: false,
    touchesWorkflows: false,
    touchesCI: false,
    touchesDeps: false,
    dupTexts: new Map(), // normalized text -> occurrence count
    exampleItems: [],
    touchedFiles: new Set(),
    examples: [],
  };
  perAuthor.set(login, fresh);
  return fresh;
}
| for (const it of items) { | |
| const login = it.author; | |
| if (!login) { | |
| skippedNoLogin += 1; | |
| continue; | |
| } | |
| if (isAllowedAccount(login)) { | |
| skippedAllowed += 1; | |
| if (skippedAllowedLogins.size < MAX_LOGGED_SKIPPED) { | |
| skippedAllowedLogins.add(login); | |
| } | |
| continue; | |
| } | |
| const s = ensureAuthor(login); | |
| await ensureUserCreatedAt(login); | |
| s.itemCount += 1; | |
| if (it.is_pr) s.prCount += 1; | |
| else s.issueCount += 1; | |
| if (s.exampleItems.length < 5) { | |
| s.exampleItems.push({ | |
| title: it.title || "", | |
| url: it.url, | |
| is_pr: it.is_pr, | |
| number: it.number, | |
| }); | |
| } | |
| if (s.examples.length < 5) { | |
| s.examples.push({ url: it.url, is_pr: it.is_pr, number: it.number }); | |
| } | |
| const text = `${it.title}\n\n${it.body}`; | |
| const domains = extractDomains(text); | |
| for (const host of domains) { | |
| if (!host) continue; | |
| if (isExternalDomain(host)) { | |
| s.externalDomains.add(host); | |
| if (!domainAccounts.has(host)) domainAccounts.set(host, new Set()); | |
| domainAccounts.get(host).add(login); | |
| } | |
| if (isShortener(host)) s.hasShortener = true; | |
| } | |
| // Non-GitHub binary/download links | |
| const urlRe = /https?:\/\/[^\s)\]]+/g; | |
| const urlMatches = (text.match(urlRe) || []); | |
| for (const u of urlMatches) { | |
| if (isNonGitHubBinaryUrl(u)) { | |
| s.hasNonGitHubBinary = true; | |
| } | |
| } | |
| // Duplicate-ish content detection (within items we fetched) | |
| const norm = normalizeForDup(text); | |
| if (norm) { | |
| s.dupTexts.set(norm, (s.dupTexts.get(norm) || 0) + 1); | |
| } | |
| } | |
| // PR comments + reviews (deterministic and bounded) | |
| const prItems = items.filter(i => i.is_pr).slice(0, MAX_PR); | |
| for (const it of prItems) { | |
| const login = it.author; | |
| if (login) { | |
| if (isAllowedAccount(login)) continue; | |
| await ensureUserCreatedAt(login); | |
| } | |
| let issueComments = []; | |
| try { | |
| let total = 0; | |
| issueComments = await github.paginate( | |
| github.rest.issues.listComments, | |
| { | |
| owner, | |
| repo, | |
| issue_number: it.number, | |
| per_page: 100, | |
| }, | |
| (response, done) => { | |
| const remaining = 500 - total; | |
| if (remaining <= 0) { | |
| done(); | |
| return []; | |
| } | |
| if (total + response.data.length >= 500) { | |
| total = 500; | |
| done(); | |
| return response.data.slice(0, remaining); | |
| } | |
| total += response.data.length; | |
| return response.data; | |
| } | |
| ); | |
| } catch { | |
| // ignore | |
| } | |
| let reviewComments = []; | |
| try { | |
| let total = 0; | |
| reviewComments = await github.paginate( | |
| github.rest.pulls.listReviewComments, | |
| { | |
| owner, | |
| repo, | |
| pull_number: it.number, | |
| per_page: 100, | |
| }, | |
| (response, done) => { | |
| const remaining = 500 - total; | |
| if (remaining <= 0) { | |
| done(); | |
| return []; | |
| } | |
| if (total + response.data.length >= 500) { | |
| total = 500; | |
| done(); | |
| return response.data.slice(0, remaining); | |
| } | |
| total += response.data.length; | |
| return response.data; | |
| } | |
| ); | |
| } catch { | |
| // ignore | |
| } | |
| let reviews = []; | |
| try { | |
| let total = 0; | |
| reviews = await github.paginate( | |
| github.rest.pulls.listReviews, | |
| { | |
| owner, | |
| repo, | |
| pull_number: it.number, | |
| per_page: 100, | |
| }, | |
| (response, done) => { | |
| const remaining = 500 - total; | |
| if (remaining <= 0) { | |
| done(); | |
| return []; | |
| } | |
| if (total + response.data.length >= 500) { | |
| total = 500; | |
| done(); | |
| return response.data.slice(0, remaining); | |
| } | |
| total += response.data.length; | |
| return response.data; | |
| } | |
| ); | |
| } catch { | |
| // ignore | |
| } | |
| const commentCandidates = [...issueComments, ...reviewComments] | |
| .filter(c => c?.created_at) | |
| .filter(c => new Date(c.created_at) >= start && new Date(c.created_at) <= end) | |
| .sort((a, b) => a.created_at.localeCompare(b.created_at)); | |
| for (const c of commentCandidates) { | |
| const commenter = c.user?.login || ""; | |
| if (!commenter) continue; | |
| if (isAllowedAccount(commenter)) continue; | |
| await ensureUserCreatedAt(commenter); | |
| const s = ensureAuthor(commenter); | |
| s.commentCount += 1; | |
| if (s.examples.length < MAX_COMMENT_EXAMPLES) { | |
| s.examples.push({ url: c.html_url, is_pr: true, number: it.number }); | |
| } | |
| } | |
| const reviewCandidates = reviews | |
| .map(r => ({ | |
| user: r.user, | |
| submitted_at: r.submitted_at || r.submittedAt, | |
| url: r.html_url || `${it.url}#pullrequestreview-${r.id}`, | |
| })) | |
| .filter(r => r.submitted_at) | |
| .filter(r => new Date(r.submitted_at) >= start && new Date(r.submitted_at) <= end) | |
| .sort((a, b) => a.submitted_at.localeCompare(b.submitted_at)); | |
| for (const r of reviewCandidates) { | |
| const reviewer = r.user?.login || ""; | |
| if (!reviewer) continue; | |
| if (isAllowedAccount(reviewer)) continue; | |
| await ensureUserCreatedAt(reviewer); | |
| const s = ensureAuthor(reviewer); | |
| s.reviewCount += 1; | |
| if (s.examples.length < MAX_COMMENT_EXAMPLES) { | |
| s.examples.push({ url: r.url, is_pr: true, number: it.number }); | |
| } | |
| } | |
| } | |
| // PR file touches (sensitive paths) - deterministic and bounded | |
| for (const it of prItems) { | |
| const login = it.author; | |
| if (!login) continue; | |
| if (isAllowedAccount(login)) continue; | |
| const s = ensureAuthor(login); | |
| try { | |
| let total = 0; | |
| const files = await github.paginate( | |
| github.rest.pulls.listFiles, | |
| { | |
| owner, | |
| repo, | |
| pull_number: it.number, | |
| per_page: 100, | |
| }, | |
| (response, done) => { | |
| const remaining = 500 - total; | |
| if (remaining <= 0) { | |
| done(); | |
| return []; | |
| } | |
| if (total + response.data.length >= 500) { | |
| total = 500; | |
| done(); | |
| return response.data.slice(0, remaining); | |
| } | |
| total += response.data.length; | |
| return response.data; | |
| } | |
| ); | |
| const filenames = files.map(f => f.filename); | |
| for (const fn of filenames) { | |
| if (s.touchedFiles.size < MAX_TOUCHED_FILES) s.touchedFiles.add(fn); | |
| if (fn.startsWith(".github/workflows/") || fn.startsWith(".github/actions/")) s.touchesWorkflows = true; | |
| if (fn === "Dockerfile" || fn === "Makefile" || fn.startsWith("scripts/") || fn.startsWith("actions/")) s.touchesCI = true; | |
| if ( | |
| fn === "package.json" || | |
| fn === "package-lock.json" || | |
| fn === "pnpm-lock.yaml" || | |
| fn === "yarn.lock" || | |
| fn === "go.mod" || | |
| fn === "go.sum" || | |
| fn.startsWith("requirements") | |
| ) { | |
| s.touchesDeps = true; | |
| } | |
| } | |
| } catch (e) { | |
| // If file listing fails, do not infer. | |
| } | |
| } | |
// Score + severity
// Turn each raw per-author accumulator into a scored record. Weights are
// additive, capped at 100, then bucketed: >=20 High, >=10 Medium, >=1 Low.
const accounts = Array.from(perAuthor.values()).map(s => {
  // Account age in whole days relative to the window end; left null when
  // the earlier user lookup failed (cached null in userCreatedAt).
  if (userCreatedAt.has(s.login)) {
    const createdAt = userCreatedAt.get(s.login);
    if (createdAt) {
      const now = new Date(end);
      s.accountAgeDays = Math.max(0, Math.floor((now - createdAt) / (24 * 60 * 60 * 1000)));
    }
  }
  let score = 0;
  const extDomains = Array.from(s.externalDomains);
  // 3 points per distinct external domain, capped at 9.
  score += Math.min(9, extDomains.length * 3);
  if (s.hasShortener) score += 8; // URL shortener hides link destination
  if (s.hasNonGitHubBinary) score += 10; // off-GitHub binary download link
  if (s.touchesWorkflows) score += 15; // edits workflow/action definitions
  if (s.touchesCI) score += 10; // edits build/CI scripts
  if (s.touchesDeps) score += 6; // edits dependency manifests/lockfiles
  if (s.itemCount >= 5) score += 6; // burst of items inside one window
  if (s.accountAgeDays !== null && s.accountAgeDays < MIN_ACCOUNT_AGE_DAYS) score += 8;
  // Three or more near-identical texts from one author suggests templated spam.
  let hasDup3 = false;
  for (const [, c] of s.dupTexts) {
    if (c >= 3) {
      hasDup3 = true;
      break;
    }
  }
  if (hasDup3) score += 8;
  score = Math.min(100, score);
  let severity = "None";
  if (score >= 20) severity = "High";
  else if (score >= 10) severity = "Medium";
  else if (score >= 1) severity = "Low";
  // Deterministic signal summary
  const signals = [];
  if (extDomains.length > 0) signals.push(`external_domains=${extDomains.length}`);
  if (s.hasShortener) signals.push("shortener");
  if (s.hasNonGitHubBinary) signals.push("non_github_binary_link");
  if (s.touchesWorkflows) signals.push("touches_workflows");
  if (s.touchesCI) signals.push("touches_ci_or_scripts");
  if (s.touchesDeps) signals.push("touches_dependencies");
  if (s.itemCount >= 5) signals.push(`burst_items=${s.itemCount}`);
  if (hasDup3) signals.push("dup_text>=3");
  if (s.commentCount > 0) signals.push(`comments=${s.commentCount}`);
  if (s.reviewCount > 0) signals.push(`reviews=${s.reviewCount}`);
  if (s.accountAgeDays !== null && s.accountAgeDays < MIN_ACCOUNT_AGE_DAYS) {
    signals.push(`new_account=${s.accountAgeDays}d`);
  }
  // Arrays are sorted so the rendered report is byte-stable across runs.
  return {
    login: s.login,
    risk_score: score,
    severity,
    signals,
    external_domains: extDomains.sort((a, b) => a.localeCompare(b)),
    pr_count: s.prCount,
    issue_count: s.issueCount,
    comment_count: s.commentCount,
    review_count: s.reviewCount,
    example_items: s.exampleItems,
    touched_files: Array.from(s.touchedFiles).sort((a, b) => a.localeCompare(b)),
    examples: s.examples,
  };
});
| // Stable sorting | |
| accounts.sort((a, b) => { | |
| if (b.risk_score !== a.risk_score) return b.risk_score - a.risk_score; | |
| return a.login.localeCompare(b.login); | |
| }); | |
| const domains = Array.from(domainAccounts.entries()) | |
| .map(([domain, logins]) => ({ domain, count: logins.size, accounts: Array.from(logins).sort((a, b) => a.localeCompare(b)) })) | |
| .sort((a, b) => { | |
| if (b.count !== a.count) return b.count - a.count; | |
| return a.domain.localeCompare(b.domain); | |
| }); | |
| const topSeverity = accounts.find(a => a.severity !== "None")?.severity || "None"; | |
| // Calculate metrics for observability and decision logic | |
| const highRiskAccounts = accounts.filter(a => a.risk_score >= 10).length; | |
| const multiAccountDomains = domains.filter(d => d.count >= 2).length; | |
| const hasFindings = highRiskAccounts > 0 || multiAccountDomains > 0; | |
| // Log analysis summary for observability | |
| const skippedNames = Array.from(skippedAllowedLogins).sort((a, b) => a.localeCompare(b)); | |
| const skippedLabel = skippedNames.length > 0 ? skippedNames.map(n => `@${n}`).join(", ") : "none"; | |
| const analyzedNames = accounts.slice(0, 10).map(a => `@${a.login}`).join(", ") || "none"; | |
| const domainSamples = domains.slice(0, 10).map(d => d.domain).join(", ") || "none"; | |
| core.info("Summary:"); | |
| core.info(`- Window: ${toISO(start)} -> ${toISO(end)}`); | |
| core.info(`- Items: raw=${rawItems.length}, in_window+dedup=${items.length}`); | |
| core.info(`- PRs scanned: ${prItems.length} (max ${MAX_PR})`); | |
| core.info(`- Skipped (no login): ${skippedNoLogin}`); | |
| core.info(`- Skipped (allowlisted): ${skippedAllowed} [${skippedLabel}]`); | |
| core.info(`- Accounts analyzed: ${accounts.length} [${analyzedNames}]`); | |
| core.info(`- Risk >= 10: ${highRiskAccounts}`); | |
| core.info(`- External domains: total=${domains.length}, shared>=2=${multiAccountDomains} [${domainSamples}]`); | |
| core.info(`- Decision: has_findings=${hasFindings} (will ${hasFindings ? "run" : "skip"} agent job)`); | |
| core.info("Detailed report:"); | |
| if (domains.length === 0) { | |
| core.info("- Domains: none"); | |
| } else { | |
| core.info("- Domains:"); | |
| for (const d of domains) { | |
| const logins = d.accounts.map(login => `@${login}`).join(", ") || "none"; | |
| core.info(` - ${d.domain}: accounts=${d.count} [${logins}]`); | |
| } | |
| } | |
| if (accounts.length === 0) { | |
| core.info("- Accounts: none"); | |
| } else { | |
| core.info("- Accounts:"); | |
| for (const a of accounts) { | |
| const signalsText = a.signals.join(", ") || "none"; | |
| const domainsText = (a.external_domains || []).join(", ") || "none"; | |
| const touchedText = (a.touched_files || []).join(", ") || "none"; | |
| core.info(` - @${a.login}: score=${a.risk_score}, severity=${a.severity}, signals=[${signalsText}]`); | |
| core.info(` - activity: pr=${a.pr_count || 0}, issue=${a.issue_count || 0}, comment=${a.comment_count || 0}, review=${a.review_count || 0}`); | |
| core.info(` - external_domains: ${domainsText}`); | |
| core.info(` - touched_files: ${touchedText}`); | |
| if (a.example_items && a.example_items.length > 0) { | |
| const itemLines = a.example_items | |
| .map(item => { | |
| const label = item.is_pr ? `PR #${item.number}` : `Issue #${item.number}`; | |
| const title = item.title ? ` "${item.title}"` : ""; | |
| return `${label}${title}`; | |
| }) | |
| .join("; "); | |
| core.info(` - examples: ${itemLines}`); | |
| } | |
| if (a.examples && a.examples.length > 0) { | |
| core.info(" - evidence:"); | |
| for (const ex of a.examples) { | |
| core.info(` - ${ex.url}`); | |
| } | |
| } | |
| } | |
| } | |
| // Find existing triage issue (exact title match) | |
| let existingIssueNumber = ""; | |
| try { | |
| const openIssues = await github.rest.issues.listForRepo({ | |
| owner, | |
| repo, | |
| state: "open", | |
| per_page: 100, | |
| }); | |
| const existing = (openIssues.data || []).find(i => (i.title || "") === ISSUE_TITLE); | |
| if (existing?.number) existingIssueNumber = String(existing.number); | |
| } catch (e) { | |
| // ignore | |
| } | |
// Render the deterministic markdown body of the triage issue. When
// includeMention is true (fresh issue), a maintainer @-mention is prepended
// so they are notified exactly once, on creation.
function renderBody(includeMention) {
  const out = [];
  if (includeMention) out.push("@pelikhan", "");
  out.push(`**Window:** ${toISO(start)} → ${toISO(end)}`, `**Assessment:** ${topSeverity}`, "");
  // No findings: short-circuit with a single-line body.
  if (!hasFindings) {
    out.push("No meaningful suspicious activity detected in this window.");
    return out.join("\n");
  }
  // External-domain table: at most 20 rows, at most 5 logins shown per row.
  if (domains.length > 0) {
    out.push("## Domains (external)", "", "| Domain | Accounts | Logins |", "| --- | ---: | --- |");
    for (const entry of domains.slice(0, 20)) {
      const maxLogins = 5;
      const visible = entry.accounts.slice(0, maxLogins).map(login => `@${login}`);
      let overflow = "";
      if (entry.accounts.length > maxLogins) {
        overflow = ` +${entry.accounts.length - maxLogins} more`;
      }
      out.push(`| ${entry.domain} | ${entry.count} | ${visible.join(", ")}${overflow} |`);
    }
    out.push("");
  }
  // One section per severity bucket, highest first; 25 accounts max each.
  const bySeverity = sev => accounts.filter(acct => acct.severity === sev);
  const renderAccounts = (heading, bucket) => {
    if (bucket.length === 0) return;
    out.push(`## ${heading}`, "");
    for (const acct of bucket.slice(0, 25)) {
      out.push(`- @${acct.login} — score=${acct.risk_score} — ${acct.signals.join(", ")}`);
      const changeParts = [];
      // Up to two sample items, e.g. `PR #12 "title"; Issue #7 "title"`.
      if (acct.example_items && acct.example_items.length > 0) {
        const samples = acct.example_items.slice(0, 2).map(item => {
          const label = item.is_pr ? `PR #${item.number}` : `Issue #${item.number}`;
          const suffix = item.title ? ` "${item.title}"` : "";
          return `${label}${suffix}`;
        });
        changeParts.push(samples.join("; "));
      }
      if (acct.touched_files && acct.touched_files.length > 0) {
        changeParts.push(`files: ${acct.touched_files.slice(0, 6).join(", ")}`);
      }
      if (changeParts.length > 0) {
        out.push(` - Change summary: ${changeParts.join("; ")}`);
      }
      out.push(` - Activity summary: ${acct.pr_count || 0} PR, ${acct.issue_count || 0} issue, ${acct.comment_count || 0} comment, ${acct.review_count || 0} review`);
      // Evidence URLs collapse into a <details> block, capped at 5 links.
      if (acct.examples && acct.examples.length > 0) {
        out.push(" <details><summary>Evidence</summary>", "");
        for (const ex of acct.examples.slice(0, 5)) {
          out.push(` - ${ex.url}`);
        }
        if (acct.examples.length > 5) {
          out.push(` - ... and ${acct.examples.length - 5} more`);
        }
        out.push("", " </details>");
      }
    }
    out.push("");
  };
  renderAccounts("Accounts (High)", bySeverity("High"));
  renderAccounts("Accounts (Medium)", bySeverity("Medium"));
  renderAccounts("Accounts (Low)", bySeverity("Low"));
  out.push("## Notes", "", "- This report is computed deterministically from GitHub Search + PR file listings + PR comments/reviews within the window.");
  return out.join("\n");
}
// Decide whether to create a fresh triage issue, update the existing one, or
// do nothing, then publish that decision as step outputs for later jobs.
let action = "none";
let issueBody = "";
let issueNumber = "";
if (hasFindings && existingIssueNumber) {
  // Update in place; no @-mention so watchers are not re-pinged.
  action = "update";
  issueNumber = existingIssueNumber;
  issueBody = renderBody(false);
} else if (hasFindings) {
  // First report for this title: create and @-mention the maintainer once.
  action = "create";
  issueBody = renderBody(true);
}
core.setOutput("action", action);
core.setOutput("issue_number", issueNumber);
core.setOutput("issue_title", ISSUE_TITLE);
core.setOutput("issue_body", issueBody);
| safe_outputs: | |
| needs: agent | |
| if: (!cancelled()) && needs.agent.result != 'skipped' | |
| runs-on: ubuntu-slim | |
| permissions: | |
| contents: read | |
| issues: write | |
| timeout-minutes: 15 | |
| env: | |
| GH_AW_CALLER_WORKFLOW_ID: "${{ github.repository }}/bot-detection" | |
| GH_AW_EFFECTIVE_TOKENS: ${{ needs.agent.outputs.effective_tokens }} | |
| GH_AW_ENGINE_ID: "copilot" | |
| GH_AW_ENGINE_MODEL: ${{ needs.agent.outputs.model }} | |
| GH_AW_WORKFLOW_ID: "bot-detection" | |
| GH_AW_WORKFLOW_NAME: "Bot Detection" | |
| outputs: | |
| code_push_failure_count: ${{ steps.process_safe_outputs.outputs.code_push_failure_count }} | |
| code_push_failure_errors: ${{ steps.process_safe_outputs.outputs.code_push_failure_errors }} | |
| create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} | |
| create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} | |
| created_issue_number: ${{ steps.process_safe_outputs.outputs.created_issue_number }} | |
| created_issue_url: ${{ steps.process_safe_outputs.outputs.created_issue_url }} | |
| process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} | |
| process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} | |
| steps: | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| - name: Download agent output artifact | |
| id: download-agent-output | |
| continue-on-error: true | |
| uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 | |
| with: | |
| name: agent | |
| path: /tmp/gh-aw/ | |
| - name: Setup agent output environment variable | |
| id: setup-agent-output-env | |
| if: steps.download-agent-output.outcome == 'success' | |
| run: | | |
| mkdir -p /tmp/gh-aw/ | |
| find "/tmp/gh-aw/" -type f -print | |
| echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" | |
| - name: Configure GH_HOST for enterprise compatibility | |
| id: ghes-host-config | |
| shell: bash | |
| run: | | |
| # Derive GH_HOST from GITHUB_SERVER_URL so the gh CLI targets the correct | |
| # GitHub instance (GHES/GHEC). On github.com this is a harmless no-op. | |
| GH_HOST="${GITHUB_SERVER_URL#https://}" | |
| GH_HOST="${GH_HOST#http://}" | |
| echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV" | |
| - name: Process Safe Outputs | |
| id: process_safe_outputs | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} | |
| GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com" | |
| GITHUB_SERVER_URL: ${{ github.server_url }} | |
| GITHUB_API_URL: ${{ github.api_url }} | |
| GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"labels\":[\"security\",\"bot-detection\"],\"max\":1},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"},\"update_issue\":{\"allow_body\":true,\"max\":1,\"target\":\"*\"}}" | |
| with: | |
| github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/safe_output_handler_manager.cjs'); | |
| await main(); | |
| - name: Upload Safe Output Items | |
| if: always() | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: safe-output-items | |
| path: /tmp/gh-aw/safe-output-items.jsonl | |
| if-no-files-found: ignore | |