Issue Monster #3335
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # ___ _ _ | |
| # / _ \ | | (_) | |
| # | |_| | __ _ ___ _ __ | |_ _ ___ | |
| # | _ |/ _` |/ _ \ '_ \| __| |/ __| | |
| # | | | | (_| | __/ | | | |_| | (__ | |
| # \_| |_/\__, |\___|_| |_|\__|_|\___| | |
| # __/ | | |
| # _ _ |___/ | |
| # | | | | / _| | | |
| # | | | | ___ _ __ _ __| |_| | _____ ____ | |
| # | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| | |
| # \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ | |
| # \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ |___/ | |
| # | |
| # This file was automatically generated by gh-aw. DO NOT EDIT. | |
| # | |
| # To update this file, edit the corresponding .md file and run: | |
| # gh aw compile | |
| # Not all edits will cause changes to this file. | |
| # | |
| # For more information: https://github.github.com/gh-aw/introduction/overview/ | |
| # | |
| # The Cookie Monster of issues - assigns issues to Copilot coding agent one at a time | |
| # | |
| # Resolved workflow manifest: | |
| # Imports: | |
| # - shared/activation-app.md | |
| # | |
| # gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"19b8330a2613e4324b1dcd2142962666f9312f81dc5e579f7f3b79a213e5e4a9","strict":true,"agent_id":"copilot","agent_model":"gpt-5.1-codex-mini"} | |
| name: "Issue Monster" | |
| "on": | |
| # permissions: # Permissions applied to pre-activation job | |
| # issues: read | |
| # pull-requests: read | |
| schedule: | |
| - cron: "*/30 * * * *" | |
| # Friendly format: every 30m | |
| # skip-if-check-failing: # Skip-if-check-failing processed as check status gate in pre-activation job | |
| # allow-pending: true | |
| # include: | |
| # - build | |
| # - test | |
| # - lint-go | |
| # - lint-js | |
| # skip-if-match: # Skip-if-match processed as search check in pre-activation job | |
| # max: 5 | |
| # query: is:pr is:open is:draft author:app/copilot-swe-agent | |
| # skip-if-no-match: is:issue is:open # Skip-if-no-match processed as search check in pre-activation job | |
| # steps: # Steps injected into pre-activation job | |
| # - id: search | |
| # name: Search for candidate issues | |
| # uses: actions/github-script@v8 | |
| # with: | |
| # script: | | |
| # const { owner, repo } = context.repo; | |
| # | |
| # try { | |
| # // Check for recent rate-limited PRs to avoid scheduling more work during rate limiting | |
| # core.info('Checking for recent rate-limited PRs...'); | |
| # const rateLimitCheckDate = new Date(); | |
| # rateLimitCheckDate.setHours(rateLimitCheckDate.getHours() - 1); // Check last hour | |
| # // Format as YYYY-MM-DDTHH:MM:SS for GitHub search API | |
| # const rateLimitCheckISO = rateLimitCheckDate.toISOString().split('.')[0] + 'Z'; | |
| # | |
| # const recentPRsQuery = `is:pr author:app/copilot-swe-agent created:>${rateLimitCheckISO} repo:${owner}/${repo}`; | |
| # const recentPRsResponse = await github.rest.search.issuesAndPullRequests({ | |
| # q: recentPRsQuery, | |
| # per_page: 10, | |
| # sort: 'created', | |
| # order: 'desc' | |
| # }); | |
| # | |
| # core.info(`Found ${recentPRsResponse.data.total_count} recent Copilot PRs to check for rate limiting`); | |
| # | |
| # // Check if any recent PRs have rate limit indicators | |
| # let rateLimitDetected = false; | |
| # for (const pr of recentPRsResponse.data.items) { | |
| # try { | |
| # const prTimelineQuery = ` | |
| # query($owner: String!, $repo: String!, $number: Int!) { | |
| # repository(owner: $owner, name: $repo) { | |
| # pullRequest(number: $number) { | |
| # timelineItems(first: 50, itemTypes: [ISSUE_COMMENT]) { | |
| # nodes { | |
| # __typename | |
| # ... on IssueComment { | |
| # body | |
| # createdAt | |
| # } | |
| # } | |
| # } | |
| # } | |
| # } | |
| # } | |
| # `; | |
| # | |
| # const prTimelineResult = await github.graphql(prTimelineQuery, { | |
| # owner, | |
| # repo, | |
| # number: pr.number | |
| # }); | |
| # | |
| # const comments = prTimelineResult?.repository?.pullRequest?.timelineItems?.nodes || []; | |
| # const rateLimitPattern = /rate limit|API rate limit|secondary rate limit|abuse detection|429|too many requests/i; | |
| # | |
| # for (const comment of comments) { | |
| # if (comment.body && rateLimitPattern.test(comment.body)) { | |
| # core.warning(`Rate limiting detected in PR #${pr.number}: ${comment.body.substring(0, 200)}`); | |
| # rateLimitDetected = true; | |
| # break; | |
| # } | |
| # } | |
| # | |
| # if (rateLimitDetected) break; | |
| # } catch (error) { | |
| # core.warning(`Could not check PR #${pr.number} for rate limiting: ${error.message}`); | |
| # } | |
| # } | |
| # | |
| # if (rateLimitDetected) { | |
| # core.warning('🛑 Rate limiting detected in recent PRs. Skipping issue assignment to prevent further rate limit issues.'); | |
| # core.setOutput('issue_count', 0); | |
| # core.setOutput('issue_numbers', ''); | |
| # core.setOutput('issue_list', ''); | |
| # core.setOutput('has_issues', 'false'); | |
| # return; | |
| # } | |
| # | |
| # core.info('✓ No rate limiting detected. Proceeding with issue search.'); | |
| # | |
| # // Labels that indicate an issue should NOT be auto-assigned | |
| # const excludeLabels = [ | |
| # 'wontfix', | |
| # 'duplicate', | |
| # 'invalid', | |
| # 'question', | |
| # 'discussion', | |
| # 'needs-discussion', | |
| # 'blocked', | |
| # 'on-hold', | |
| # 'waiting-for-feedback', | |
| # 'needs-more-info', | |
| # 'no-bot', | |
| # 'no-campaign' | |
| # ]; | |
| # | |
| # // Labels that indicate an issue is a GOOD candidate for auto-assignment | |
| # const priorityLabels = [ | |
| # 'good first issue', | |
| # 'good-first-issue', | |
| # 'bug', | |
| # 'enhancement', | |
| # 'feature', | |
| # 'documentation', | |
| # 'tech-debt', | |
| # 'refactoring', | |
| # 'performance', | |
| # 'security' | |
| # ]; | |
| # | |
| # // Search for open issues with "cookie" label and without excluded labels | |
| # // The "cookie" label indicates issues that are approved work queue items from automated workflows | |
| # const query = `is:issue is:open repo:${owner}/${repo} label:cookie -label:"${excludeLabels.join('" -label:"')}"`; | |
| # core.info(`Searching: ${query}`); | |
| # const response = await github.rest.search.issuesAndPullRequests({ | |
| # q: query, | |
| # per_page: 100, | |
| # sort: 'created', | |
| # order: 'desc' | |
| # }); | |
| # core.info(`Found ${response.data.total_count} total issues matching basic criteria`); | |
| # | |
| # // Fetch full details for each issue to get labels, assignees, sub-issues, and linked PRs | |
| # // Track integrity-filtered issues to emit a diagnostic summary | |
| # const integrityFilteredIssues = []; | |
| # const issuesWithDetails = (await Promise.all( | |
| # response.data.items.map(async (issue) => { | |
| # // Fetch full issue details — some issues may be blocked by integrity policy | |
| # let fullIssue; | |
| # try { | |
| # fullIssue = await github.rest.issues.get({ | |
| # owner, | |
| # repo, | |
| # issue_number: issue.number | |
| # }); | |
| # } catch (fetchError) { | |
| # // Integrity-filtered issues (403/451) or other transient errors should be | |
| # // skipped individually rather than failing the entire batch | |
| # const status = fetchError.status || fetchError.response?.status; | |
| # // 403 = Forbidden (integrity policy), 451 = Unavailable For Legal Reasons | |
| # const isIntegrityBlock = status === 403 || status === 451 || | |
| # /\bintegrity\b/i.test(fetchError.message || ''); | |
| # const errorSummary = (fetchError.message || String(fetchError)).slice(0, 120); | |
| # if (isIntegrityBlock) { | |
| # integrityFilteredIssues.push(issue.number); | |
| # core.warning(`⚠️ Skipping issue #${issue.number}: blocked by integrity policy (HTTP ${status || 'unknown'}): ${errorSummary}`); | |
| # } else { | |
| # core.warning(`⚠️ Skipping issue #${issue.number}: could not fetch details (HTTP ${status || 'unknown'}): ${errorSummary}`); | |
| # } | |
| # return null; | |
| # } | |
| # | |
| # // Check if this issue has sub-issues and linked PRs using GraphQL | |
| # let subIssuesCount = 0; | |
| # let linkedPRs = []; | |
| # try { | |
| # const issueDetailsQuery = ` | |
| # query($owner: String!, $repo: String!, $number: Int!) { | |
| # repository(owner: $owner, name: $repo) { | |
| # issue(number: $number) { | |
| # subIssues { | |
| # totalCount | |
| # } | |
| # timelineItems(first: 100, itemTypes: [CROSS_REFERENCED_EVENT]) { | |
| # nodes { | |
| # ... on CrossReferencedEvent { | |
| # source { | |
| # __typename | |
| # ... on PullRequest { | |
| # number | |
| # state | |
| # isDraft | |
| # author { | |
| # login | |
| # } | |
| # } | |
| # } | |
| # } | |
| # } | |
| # } | |
| # } | |
| # } | |
| # } | |
| # `; | |
| # const issueDetailsResult = await github.graphql(issueDetailsQuery, { | |
| # owner, | |
| # repo, | |
| # number: issue.number | |
| # }); | |
| # | |
| # subIssuesCount = issueDetailsResult?.repository?.issue?.subIssues?.totalCount || 0; | |
| # | |
| # // Extract linked PRs from timeline | |
| # const timelineItems = issueDetailsResult?.repository?.issue?.timelineItems?.nodes || []; | |
| # linkedPRs = timelineItems | |
| # .filter(item => item?.source?.__typename === 'PullRequest') | |
| # .map(item => ({ | |
| # number: item.source.number, | |
| # state: item.source.state, | |
| # isDraft: item.source.isDraft, | |
| # author: item.source.author?.login | |
| # })); | |
| # | |
| # core.info(`Issue #${issue.number} has ${linkedPRs.length} linked PR(s)`); | |
| # } catch (error) { | |
| # // If GraphQL query fails, continue with defaults | |
| # core.warning(`Could not check details for #${issue.number}: ${error.message}`); | |
| # } | |
| # | |
| # return { | |
| # ...fullIssue.data, | |
| # subIssuesCount, | |
| # linkedPRs | |
| # }; | |
| # }) | |
| # )).filter(Boolean); // Remove null entries (integrity-filtered or otherwise skipped) | |
| # | |
| # // Emit diagnostic summary for integrity-filtered issues | |
| # if (integrityFilteredIssues.length > 0) { | |
| # core.warning(`🛡️ Integrity filter diagnostic: ${integrityFilteredIssues.length} issue(s) were skipped due to integrity policy: #${integrityFilteredIssues.join(', #')}. These issues will be excluded from this run.`); | |
| # } | |
| # | |
| # // Filter and score issues | |
| # const scoredIssues = issuesWithDetails | |
| # .filter(issue => { | |
| # // Exclude issues that already have assignees | |
| # if (issue.assignees && issue.assignees.length > 0) { | |
| # core.info(`Skipping #${issue.number}: already has assignees`); | |
| # return false; | |
| # } | |
| # | |
| # // Exclude issues with excluded labels (double check) | |
| # const issueLabels = issue.labels.map(l => l.name.toLowerCase()); | |
| # if (issueLabels.some(label => excludeLabels.map(l => l.toLowerCase()).includes(label))) { | |
| # core.info(`Skipping #${issue.number}: has excluded label`); | |
| # return false; | |
| # } | |
| # | |
| # // Exclude issues with campaign labels (campaign:*) | |
| # // Campaign items are managed by campaign orchestrators | |
| # if (issueLabels.some(label => label.startsWith('campaign:'))) { | |
| # core.info(`Skipping #${issue.number}: has campaign label (managed by campaign orchestrator)`); | |
| # return false; | |
| # } | |
| # | |
| # // Exclude issues that have sub-issues (parent/organizing issues) | |
| # if (issue.subIssuesCount > 0) { | |
| # core.info(`Skipping #${issue.number}: has ${issue.subIssuesCount} sub-issue(s) - parent issues are used for organizing, not tasks`); | |
| # return false; | |
| # } | |
| # | |
| # // Exclude issues with closed PRs (treat as complete) | |
| # const closedPRs = issue.linkedPRs?.filter(pr => pr.state === 'CLOSED' || pr.state === 'MERGED') || []; | |
| # if (closedPRs.length > 0) { | |
| # core.info(`Skipping #${issue.number}: has ${closedPRs.length} closed/merged PR(s) - treating as complete`); | |
| # return false; | |
| # } | |
| # | |
| # // Exclude issues with open PRs from Copilot coding agent | |
| # const openCopilotPRs = issue.linkedPRs?.filter(pr => | |
| # pr.state === 'OPEN' && | |
| # (pr.author === 'copilot-swe-agent' || pr.author?.includes('copilot')) | |
| # ) || []; | |
| # if (openCopilotPRs.length > 0) { | |
| # core.info(`Skipping #${issue.number}: has ${openCopilotPRs.length} open PR(s) from Copilot - already being worked on`); | |
| # return false; | |
| # } | |
| # | |
| # return true; | |
| # }) | |
| # .map(issue => { | |
| # const issueLabels = issue.labels.map(l => l.name.toLowerCase()); | |
| # let score = 0; | |
| # | |
| # // Score based on priority labels (higher score = higher priority) | |
| # if (issueLabels.includes('good first issue') || issueLabels.includes('good-first-issue')) { | |
| # score += 50; | |
| # } | |
| # if (issueLabels.includes('bug')) { | |
| # score += 40; | |
| # } | |
| # if (issueLabels.includes('security')) { | |
| # score += 45; | |
| # } | |
| # if (issueLabels.includes('documentation')) { | |
| # score += 35; | |
| # } | |
| # if (issueLabels.includes('enhancement') || issueLabels.includes('feature')) { | |
| # score += 30; | |
| # } | |
| # if (issueLabels.includes('performance')) { | |
| # score += 25; | |
| # } | |
| # if (issueLabels.includes('tech-debt') || issueLabels.includes('refactoring')) { | |
| # score += 20; | |
| # } | |
| # | |
| # // Bonus for issues with clear labels (any priority label) | |
| # if (issueLabels.some(label => priorityLabels.map(l => l.toLowerCase()).includes(label))) { | |
| # score += 10; | |
| # } | |
| # | |
| # // Age bonus: older issues get slight priority (days old / 10) | |
| # const ageInDays = Math.floor((Date.now() - new Date(issue.created_at)) / (1000 * 60 * 60 * 24)); | |
| # score += Math.min(ageInDays / 10, 20); // Cap age bonus at 20 points | |
| # | |
| # return { | |
| # number: issue.number, | |
| # title: issue.title, | |
| # labels: issue.labels.map(l => l.name), | |
| # created_at: issue.created_at, | |
| # score | |
| # }; | |
| # }) | |
| # .sort((a, b) => b.score - a.score); // Sort by score descending | |
| # | |
| # // Format output | |
| # const issueList = scoredIssues.map(i => { | |
| # const labelStr = i.labels.length > 0 ? ` [${i.labels.join(', ')}]` : ''; | |
| # return `#${i.number}: ${i.title}${labelStr} (score: ${i.score.toFixed(1)})`; | |
| # }).join('\n'); | |
| # | |
| # const issueNumbers = scoredIssues.map(i => i.number).join(','); | |
| # | |
| # core.info(`Total candidate issues after filtering: ${scoredIssues.length}`); | |
| # if (scoredIssues.length > 0) { | |
| # core.info(`Top candidates:\n${issueList.split('\n').slice(0, 10).join('\n')}`); | |
| # } | |
| # | |
| # core.setOutput('issue_count', scoredIssues.length); | |
| # core.setOutput('issue_numbers', issueNumbers); | |
| # core.setOutput('issue_list', issueList); | |
| # | |
| # if (scoredIssues.length === 0) { | |
| # core.info('🍽️ No suitable candidate issues - the plate is empty!'); | |
| # core.setOutput('has_issues', 'false'); | |
| # } else { | |
| # core.setOutput('has_issues', 'true'); | |
| # } | |
| # } catch (error) { | |
| # core.error(`Error searching for issues: ${error.message}`); | |
| # core.setOutput('issue_count', 0); | |
| # core.setOutput('issue_numbers', ''); | |
| # core.setOutput('issue_list', ''); | |
| # core.setOutput('has_issues', 'false'); | |
| # } | |
| workflow_dispatch: | |
| inputs: | |
| aw_context: | |
| default: "" | |
| description: Agent caller context (used internally by Agentic Workflows). | |
| required: false | |
| type: string | |
| permissions: {} | |
| concurrency: | |
| group: "gh-aw-${{ github.workflow }}-${{ github.event.issue.number || github.event.discussion.number || github.run_id }}" | |
| run-name: "Issue Monster" | |
| jobs: | |
| activation: | |
| needs: pre_activation | |
| if: needs.pre_activation.outputs.activated == 'true' && (needs.pre_activation.outputs.has_issues == 'true') | |
| runs-on: ubuntu-slim | |
| permissions: | |
| contents: read | |
| outputs: | |
| comment_id: "" | |
| comment_repo: "" | |
| lockdown_check_failed: ${{ steps.generate_aw_info.outputs.lockdown_check_failed == 'true' }} | |
| model: ${{ steps.generate_aw_info.outputs.model }} | |
| secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} | |
| steps: | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| - name: Generate agentic run info | |
| id: generate_aw_info | |
| env: | |
| GH_AW_INFO_ENGINE_ID: "copilot" | |
| GH_AW_INFO_ENGINE_NAME: "GitHub Copilot CLI" | |
| GH_AW_INFO_MODEL: "gpt-5.1-codex-mini" | |
| GH_AW_INFO_VERSION: "latest" | |
| GH_AW_INFO_AGENT_VERSION: "latest" | |
| GH_AW_INFO_WORKFLOW_NAME: "Issue Monster" | |
| GH_AW_INFO_EXPERIMENTAL: "false" | |
| GH_AW_INFO_SUPPORTS_TOOLS_ALLOWLIST: "true" | |
| GH_AW_INFO_STAGED: "false" | |
| GH_AW_INFO_ALLOWED_DOMAINS: '["defaults"]' | |
| GH_AW_INFO_FIREWALL_ENABLED: "true" | |
| GH_AW_INFO_AWF_VERSION: "v0.25.0" | |
| GH_AW_INFO_AWMG_VERSION: "" | |
| GH_AW_INFO_FIREWALL_TYPE: "squid" | |
| GH_AW_COMPILED_STRICT: "true" | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/generate_aw_info.cjs'); | |
| await main(core, context); | |
| - name: Validate COPILOT_GITHUB_TOKEN secret | |
| id: validate-secret | |
| run: ${RUNNER_TEMP}/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default | |
| env: | |
| COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | |
| - name: Checkout .github and .agents folders | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| persist-credentials: false | |
| sparse-checkout: | | |
| .github | |
| .agents | |
| actions/setup | |
| sparse-checkout-cone-mode: true | |
| fetch-depth: 1 | |
| - name: Check workflow file timestamps | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_WORKFLOW_FILE: "issue-monster.lock.yml" | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/check_workflow_timestamp_api.cjs'); | |
| await main(); | |
| - name: Create prompt with built-in context | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_SAFE_OUTPUTS: ${{ runner.temp }}/gh-aw/safeoutputs/outputs.jsonl | |
| GH_AW_GITHUB_ACTOR: ${{ github.actor }} | |
| GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} | |
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} | |
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} | |
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} | |
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | |
| GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} | |
| GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_COUNT: ${{ needs.pre_activation.outputs.issue_count }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_LIST: ${{ needs.pre_activation.outputs.issue_list }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_NUMBERS: ${{ needs.pre_activation.outputs.issue_numbers }} | |
| # poutine:ignore untrusted_checkout_exec | |
| run: | | |
| bash ${RUNNER_TEMP}/gh-aw/actions/create_prompt_first.sh | |
| { | |
| cat << 'GH_AW_PROMPT_EOF' | |
| <system> | |
| GH_AW_PROMPT_EOF | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/xpia.md" | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/temp_folder_prompt.md" | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/markdown.md" | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/safe_outputs_prompt.md" | |
| cat << 'GH_AW_PROMPT_EOF' | |
| <safe-output-tools> | |
| Tools: add_comment(max:3), assign_to_agent(max:3), missing_tool, missing_data, noop | |
| </safe-output-tools> | |
| <github-context> | |
| The following GitHub context information is available for this workflow: | |
| {{#if __GH_AW_GITHUB_ACTOR__ }} | |
| - **actor**: __GH_AW_GITHUB_ACTOR__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_REPOSITORY__ }} | |
| - **repository**: __GH_AW_GITHUB_REPOSITORY__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_WORKSPACE__ }} | |
| - **workspace**: __GH_AW_GITHUB_WORKSPACE__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} | |
| - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} | |
| - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} | |
| - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} | |
| - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ | |
| {{/if}} | |
| {{#if __GH_AW_GITHUB_RUN_ID__ }} | |
| - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ | |
| {{/if}} | |
| </github-context> | |
| GH_AW_PROMPT_EOF | |
| cat "${RUNNER_TEMP}/gh-aw/prompts/github_mcp_tools_with_safeoutputs_prompt.md" | |
| cat << 'GH_AW_PROMPT_EOF' | |
| </system> | |
| GH_AW_PROMPT_EOF | |
| cat << 'GH_AW_PROMPT_EOF' | |
| {{#runtime-import .github/workflows/shared/activation-app.md}} | |
| GH_AW_PROMPT_EOF | |
| cat << 'GH_AW_PROMPT_EOF' | |
| {{#runtime-import .github/workflows/issue-monster.md}} | |
| GH_AW_PROMPT_EOF | |
| } > "$GH_AW_PROMPT" | |
| - name: Interpolate variables and render templates | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_COUNT: ${{ needs.pre_activation.outputs.issue_count }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_LIST: ${{ needs.pre_activation.outputs.issue_list }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_NUMBERS: ${{ needs.pre_activation.outputs.issue_numbers }} | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/interpolate_prompt.cjs'); | |
| await main(); | |
| - name: Substitute placeholders | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_GITHUB_ACTOR: ${{ github.actor }} | |
| GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} | |
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} | |
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} | |
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} | |
| GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} | |
| GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} | |
| GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_COUNT: ${{ needs.pre_activation.outputs.issue_count }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_LIST: ${{ needs.pre_activation.outputs.issue_list }} | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_NUMBERS: ${{ needs.pre_activation.outputs.issue_numbers }} | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const substitutePlaceholders = require('${{ runner.temp }}/gh-aw/actions/substitute_placeholders.cjs'); | |
| // Call the substitution function | |
| return await substitutePlaceholders({ | |
| file: process.env.GH_AW_PROMPT, | |
| substitutions: { | |
| GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, | |
| GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, | |
| GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, | |
| GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, | |
| GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, | |
| GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, | |
| GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, | |
| GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED, | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_COUNT: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_COUNT, | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_LIST: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_LIST, | |
| GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_NUMBERS: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ISSUE_NUMBERS | |
| } | |
| }); | |
| - name: Validate prompt placeholders | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| # poutine:ignore untrusted_checkout_exec | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/validate_prompt_placeholders.sh | |
| - name: Print prompt | |
| env: | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| # poutine:ignore untrusted_checkout_exec | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/print_prompt_summary.sh | |
| - name: Upload activation artifact | |
| if: success() | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: activation | |
| path: | | |
| /tmp/gh-aw/aw_info.json | |
| /tmp/gh-aw/aw-prompts/prompt.txt | |
| retention-days: 1 | |
| agent: | |
| needs: activation | |
| runs-on: ubuntu-latest | |
| permissions: | |
| contents: read | |
| issues: read | |
| pull-requests: read | |
| env: | |
| DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} | |
| GH_AW_ASSETS_ALLOWED_EXTS: "" | |
| GH_AW_ASSETS_BRANCH: "" | |
| GH_AW_ASSETS_MAX_SIZE_KB: 0 | |
| GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs | |
| GH_AW_WORKFLOW_ID_SANITIZED: issuemonster | |
| outputs: | |
| checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} | |
| has_patch: ${{ steps.collect_output.outputs.has_patch }} | |
| inference_access_error: ${{ steps.detect-inference-error.outputs.inference_access_error || 'false' }} | |
| model: ${{ needs.activation.outputs.model }} | |
| output: ${{ steps.collect_output.outputs.output }} | |
| output_types: ${{ steps.collect_output.outputs.output_types }} | |
| steps: | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| - name: Set runtime paths | |
| id: set-runtime-paths | |
| run: | | |
| echo "GH_AW_SAFE_OUTPUTS=${RUNNER_TEMP}/gh-aw/safeoutputs/outputs.jsonl" >> "$GITHUB_OUTPUT" | |
| echo "GH_AW_SAFE_OUTPUTS_CONFIG_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/config.json" >> "$GITHUB_OUTPUT" | |
| echo "GH_AW_SAFE_OUTPUTS_TOOLS_PATH=${RUNNER_TEMP}/gh-aw/safeoutputs/tools.json" >> "$GITHUB_OUTPUT" | |
| - name: Checkout repository | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| persist-credentials: false | |
| - name: Create gh-aw temp directory | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/create_gh_aw_tmp_dir.sh | |
| - name: Configure gh CLI for GitHub Enterprise | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/configure_gh_for_ghe.sh | |
| env: | |
| GH_TOKEN: ${{ github.token }} | |
| - name: Configure Git credentials | |
| env: | |
| REPO_NAME: ${{ github.repository }} | |
| SERVER_URL: ${{ github.server_url }} | |
| run: | | |
| git config --global user.email "github-actions[bot]@users.noreply.github.com" | |
| git config --global user.name "github-actions[bot]" | |
| git config --global am.keepcr true | |
| # Re-authenticate git with GitHub token | |
| SERVER_URL_STRIPPED="${SERVER_URL#https://}" | |
| git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" | |
| echo "Git configured with standard GitHub Actions identity" | |
| - name: Checkout PR branch | |
| id: checkout-pr | |
| if: | | |
| github.event.pull_request || github.event.issue.pull_request | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| with: | |
| github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/checkout_pr_branch.cjs'); | |
| await main(); | |
| - name: Install GitHub Copilot CLI | |
| run: ${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh latest | |
| env: | |
| GH_HOST: github.com | |
| - name: Install AWF binary | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh v0.25.0 | |
| - name: Parse integrity filter lists | |
| id: parse-guard-vars | |
| env: | |
| GH_AW_BLOCKED_USERS_VAR: ${{ vars.GH_AW_GITHUB_BLOCKED_USERS || '' }} | |
| GH_AW_APPROVAL_LABELS_VAR: ${{ vars.GH_AW_GITHUB_APPROVAL_LABELS || '' }} | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/parse_guard_list.sh | |
| - name: Download container images | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.25.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.0 ghcr.io/github/gh-aw-firewall/squid:0.25.0 ghcr.io/github/gh-aw-mcpg:v0.2.6 ghcr.io/github/github-mcp-server:v0.32.0 node:lts-alpine | |
| - name: Write Safe Outputs Config | |
| run: | | |
| mkdir -p ${RUNNER_TEMP}/gh-aw/safeoutputs | |
| mkdir -p /tmp/gh-aw/safeoutputs | |
| mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs | |
| cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' | |
| {"add_comment":{"max":3,"target":"*"},"assign_to_agent":{"allowed":["copilot"],"max":3,"target":"*"},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}} | |
| GH_AW_SAFE_OUTPUTS_CONFIG_EOF | |
| - name: Write Safe Outputs Tools | |
| run: | | |
| cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/tools_meta.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_META_EOF' | |
| { | |
| "description_suffixes": { | |
| "add_comment": " CONSTRAINTS: Maximum 3 comment(s) can be added. Target: *.", | |
| "assign_to_agent": " CONSTRAINTS: Maximum 3 issue(s) can be assigned to agent." | |
| }, | |
| "repo_params": {}, | |
| "dynamic_tools": [] | |
| } | |
| GH_AW_SAFE_OUTPUTS_TOOLS_META_EOF | |
| cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' | |
| { | |
| "add_comment": { | |
| "defaultMax": 1, | |
| "fields": { | |
| "body": { | |
| "required": true, | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 65000 | |
| }, | |
| "item_number": { | |
| "issueOrPRNumber": true | |
| }, | |
| "repo": { | |
| "type": "string", | |
| "maxLength": 256 | |
| } | |
| } | |
| }, | |
| "assign_to_agent": { | |
| "defaultMax": 1, | |
| "fields": { | |
| "agent": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 128 | |
| }, | |
| "issue_number": { | |
| "issueNumberOrTemporaryId": true | |
| }, | |
| "pull_number": { | |
| "optionalPositiveInteger": true | |
| }, | |
| "pull_request_repo": { | |
| "type": "string", | |
| "maxLength": 256 | |
| }, | |
| "repo": { | |
| "type": "string", | |
| "maxLength": 256 | |
| } | |
| }, | |
| "customValidation": "requiresOneOf:issue_number,pull_number" | |
| }, | |
| "missing_data": { | |
| "defaultMax": 20, | |
| "fields": { | |
| "alternatives": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 256 | |
| }, | |
| "context": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 256 | |
| }, | |
| "data_type": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 128 | |
| }, | |
| "reason": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 256 | |
| } | |
| } | |
| }, | |
| "missing_tool": { | |
| "defaultMax": 20, | |
| "fields": { | |
| "alternatives": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 512 | |
| }, | |
| "reason": { | |
| "required": true, | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 256 | |
| }, | |
| "tool": { | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 128 | |
| } | |
| } | |
| }, | |
| "noop": { | |
| "defaultMax": 1, | |
| "fields": { | |
| "message": { | |
| "required": true, | |
| "type": "string", | |
| "sanitize": true, | |
| "maxLength": 65000 | |
| } | |
| } | |
| } | |
| } | |
| GH_AW_SAFE_OUTPUTS_VALIDATION_EOF | |
| node ${RUNNER_TEMP}/gh-aw/actions/generate_safe_outputs_tools.cjs | |
| - name: Generate Safe Outputs MCP Server Config | |
| id: safe-outputs-config | |
| run: | | |
| # Generate a secure random API key (45 random bytes, ~360 bits before stripping; 40+ chars) | |
| # Mask immediately so the key is redacted from logs before any later step can print it | |
| API_KEY=$(openssl rand -base64 45 | tr -d '/+=') | |
| echo "::add-mask::${API_KEY}" | |
| PORT=3001 | |
| # Set outputs for next steps | |
| { | |
| echo "safe_outputs_api_key=${API_KEY}" | |
| echo "safe_outputs_port=${PORT}" | |
| } >> "$GITHUB_OUTPUT" | |
| echo "Safe Outputs MCP server will run on port ${PORT}" | |
| - name: Start Safe Outputs MCP HTTP Server | |
| id: safe-outputs-start | |
| env: | |
| DEBUG: '*' | |
| GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} | |
| GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} | |
| GH_AW_SAFE_OUTPUTS_TOOLS_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/tools.json | |
| GH_AW_SAFE_OUTPUTS_CONFIG_PATH: ${{ runner.temp }}/gh-aw/safeoutputs/config.json | |
| GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs | |
| run: | | |
| # Environment variables are set above to prevent template injection | |
| export DEBUG | |
| export GH_AW_SAFE_OUTPUTS_PORT | |
| export GH_AW_SAFE_OUTPUTS_API_KEY | |
| export GH_AW_SAFE_OUTPUTS_TOOLS_PATH | |
| export GH_AW_SAFE_OUTPUTS_CONFIG_PATH | |
| export GH_AW_MCP_LOG_DIR | |
| bash ${RUNNER_TEMP}/gh-aw/actions/start_safe_outputs_server.sh | |
| - name: Start MCP Gateway | |
| id: start-mcp-gateway | |
| env: | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} | |
| GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} | |
| GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| run: | | |
| set -eo pipefail | |
| mkdir -p /tmp/gh-aw/mcp-config | |
| # Export gateway environment variables for MCP config and gateway script | |
| export MCP_GATEWAY_PORT="80" | |
| export MCP_GATEWAY_DOMAIN="host.docker.internal" | |
| MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') | |
| echo "::add-mask::${MCP_GATEWAY_API_KEY}" | |
| export MCP_GATEWAY_API_KEY | |
| export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" | |
| mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" | |
| export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" | |
| export DEBUG="*" | |
| export GH_AW_ENGINE="copilot" | |
| export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.2.6' | |
| mkdir -p /home/runner/.copilot | |
| cat << GH_AW_MCP_CONFIG_EOF | bash ${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.sh | |
| { | |
| "mcpServers": { | |
| "github": { | |
| "type": "stdio", | |
| "container": "ghcr.io/github/github-mcp-server:v0.32.0", | |
| "env": { | |
| "GITHUB_HOST": "\${GITHUB_SERVER_URL}", | |
| "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", | |
| "GITHUB_READ_ONLY": "1", | |
| "GITHUB_TOOLSETS": "context,repos,issues,pull_requests" | |
| }, | |
| "guard-policies": { | |
| "allow-only": { | |
| "approval-labels": ${{ steps.parse-guard-vars.outputs.approval_labels }}, | |
| "blocked-users": ${{ steps.parse-guard-vars.outputs.blocked_users }}, | |
| "min-integrity": "approved", | |
| "repos": "all" | |
| } | |
| } | |
| }, | |
| "safeoutputs": { | |
| "type": "http", | |
| "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", | |
| "headers": { | |
| "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" | |
| }, | |
| "guard-policies": { | |
| "write-sink": { | |
| "accept": [ | |
| "*" | |
| ] | |
| } | |
| } | |
| } | |
| }, | |
| "gateway": { | |
| "port": $MCP_GATEWAY_PORT, | |
| "domain": "${MCP_GATEWAY_DOMAIN}", | |
| "apiKey": "${MCP_GATEWAY_API_KEY}", | |
| "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" | |
| } | |
| } | |
| GH_AW_MCP_CONFIG_EOF | |
| - name: Download activation artifact | |
| uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 | |
| with: | |
| name: activation | |
| path: /tmp/gh-aw | |
| - name: Clean git credentials | |
| continue-on-error: true | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/clean_git_credentials.sh | |
| - name: Execute GitHub Copilot CLI | |
| id: agentic_execution | |
| # Copilot CLI tool arguments (sorted): | |
| timeout-minutes: 30 | |
| run: | | |
| set -o pipefail | |
| touch /tmp/gh-aw/agent-step-summary.md | |
| # shellcheck disable=SC1003 | |
| sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --allow-domains "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com" --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.0 --skip-pull --enable-api-proxy \ | |
| -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --allow-all-paths --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log | |
| env: | |
| COPILOT_AGENT_RUNNER_TYPE: STANDALONE | |
| COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | |
| COPILOT_MODEL: gpt-5.1-codex-mini | |
| GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json | |
| GH_AW_PHASE: agent | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| GH_AW_VERSION: dev | |
| GITHUB_API_URL: ${{ github.api_url }} | |
| GITHUB_AW: true | |
| GITHUB_HEAD_REF: ${{ github.head_ref }} | |
| GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| GITHUB_REF_NAME: ${{ github.ref_name }} | |
| GITHUB_SERVER_URL: ${{ github.server_url }} | |
| GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md | |
| GITHUB_WORKSPACE: ${{ github.workspace }} | |
| GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com | |
| GIT_AUTHOR_NAME: github-actions[bot] | |
| GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com | |
| GIT_COMMITTER_NAME: github-actions[bot] | |
| XDG_CONFIG_HOME: /home/runner | |
| - name: Detect inference access error | |
| id: detect-inference-error | |
| if: always() | |
| continue-on-error: true | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/detect_inference_access_error.sh | |
| - name: Configure Git credentials | |
| env: | |
| REPO_NAME: ${{ github.repository }} | |
| SERVER_URL: ${{ github.server_url }} | |
| run: | | |
| git config --global user.email "github-actions[bot]@users.noreply.github.com" | |
| git config --global user.name "github-actions[bot]" | |
| git config --global am.keepcr true | |
| # Re-authenticate git with GitHub token | |
| SERVER_URL_STRIPPED="${SERVER_URL#https://}" | |
| git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" | |
| echo "Git configured with standard GitHub Actions identity" | |
| - name: Copy Copilot session state files to logs | |
| if: always() | |
| continue-on-error: true | |
| run: | | |
| # Copy Copilot session state files to logs folder for artifact collection | |
| # This ensures they are in /tmp/gh-aw/ where secret redaction can scan them | |
| SESSION_STATE_DIR="$HOME/.copilot/session-state" | |
| LOGS_DIR="/tmp/gh-aw/sandbox/agent/logs" | |
| if [ -d "$SESSION_STATE_DIR" ]; then | |
| echo "Copying Copilot session state files from $SESSION_STATE_DIR to $LOGS_DIR" | |
| mkdir -p "$LOGS_DIR" | |
| cp -v "$SESSION_STATE_DIR"/*.jsonl "$LOGS_DIR/" 2>/dev/null || true | |
| echo "Session state files copied successfully" | |
| else | |
| echo "No session-state directory found at $SESSION_STATE_DIR" | |
| fi | |
| - name: Stop MCP Gateway | |
| if: always() | |
| continue-on-error: true | |
| env: | |
| MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} | |
| MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} | |
| GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} | |
| run: | | |
| bash ${RUNNER_TEMP}/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" | |
| - name: Redact secrets in logs | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/redact_secrets.cjs'); | |
| await main(); | |
| env: | |
| GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' | |
| SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | |
| SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} | |
| SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} | |
| SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| - name: Append agent step summary | |
| if: always() | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/append_agent_step_summary.sh | |
| - name: Copy Safe Outputs | |
| if: always() | |
| env: | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| run: | | |
| mkdir -p /tmp/gh-aw | |
| cp "$GH_AW_SAFE_OUTPUTS" /tmp/gh-aw/safeoutputs.jsonl 2>/dev/null || true | |
| - name: Ingest agent output | |
| id: collect_output | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_SAFE_OUTPUTS: ${{ steps.set-runtime-paths.outputs.GH_AW_SAFE_OUTPUTS }} | |
| GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com" | |
| GITHUB_SERVER_URL: ${{ github.server_url }} | |
| GITHUB_API_URL: ${{ github.api_url }} | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/collect_ndjson_output.cjs'); | |
| await main(); | |
| - name: Parse agent logs for step summary | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_copilot_log.cjs'); | |
| await main(); | |
| - name: Parse MCP Gateway logs for step summary | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_mcp_gateway_log.cjs'); | |
| await main(); | |
| - name: Print firewall logs | |
| if: always() | |
| continue-on-error: true | |
| env: | |
| AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs | |
| run: | | |
| # Fix permissions on firewall logs so they can be uploaded as artifacts | |
| # AWF runs with sudo, creating files owned by root | |
| sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true | |
| # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) | |
| if command -v awf &> /dev/null; then | |
| awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" | |
| else | |
| echo 'AWF binary not installed, skipping firewall log summary' | |
| fi | |
| - name: Write agent output placeholder if missing | |
| if: always() | |
| run: | | |
| if [ ! -f /tmp/gh-aw/agent_output.json ]; then | |
| echo '{"items":[]}' > /tmp/gh-aw/agent_output.json | |
| fi | |
| - name: Upload agent artifacts | |
| if: always() | |
| continue-on-error: true | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: agent | |
| path: | | |
| /tmp/gh-aw/aw-prompts/prompt.txt | |
| /tmp/gh-aw/sandbox/agent/logs/ | |
| /tmp/gh-aw/redacted-urls.log | |
| /tmp/gh-aw/mcp-logs/ | |
| /tmp/gh-aw/proxy-logs/ | |
| !/tmp/gh-aw/proxy-logs/proxy-tls/ | |
| /tmp/gh-aw/agent-stdio.log | |
| /tmp/gh-aw/agent/ | |
| /tmp/gh-aw/safeoutputs.jsonl | |
| /tmp/gh-aw/agent_output.json | |
| /tmp/gh-aw/aw-*.patch | |
| if-no-files-found: ignore | |
| - name: Upload firewall audit logs | |
| if: always() | |
| continue-on-error: true | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: firewall-audit-logs | |
| path: | | |
| /tmp/gh-aw/sandbox/firewall/logs/ | |
| /tmp/gh-aw/sandbox/firewall/audit/ | |
| if-no-files-found: ignore | |
| conclusion: | |
| needs: | |
| - activation | |
| - agent | |
| - detection | |
| - safe_outputs | |
| if: always() && (needs.agent.result != 'skipped' || needs.activation.outputs.lockdown_check_failed == 'true') | |
| runs-on: ubuntu-slim | |
| permissions: | |
| contents: read | |
| discussions: write | |
| issues: write | |
| pull-requests: write | |
| concurrency: | |
| group: "gh-aw-conclusion-issue-monster" | |
| cancel-in-progress: false | |
| outputs: | |
| noop_message: ${{ steps.noop.outputs.noop_message }} | |
| tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} | |
| total_count: ${{ steps.missing_tool.outputs.total_count }} | |
| steps: | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| - name: Download agent output artifact | |
| id: download-agent-output | |
| continue-on-error: true | |
| uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 | |
| with: | |
| name: agent | |
| path: /tmp/gh-aw/ | |
| - name: Setup agent output environment variable | |
| id: setup-agent-output-env | |
| if: steps.download-agent-output.outcome == 'success' | |
| run: | | |
| mkdir -p /tmp/gh-aw/ | |
| find "/tmp/gh-aw/" -type f -print | |
| echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" | |
| - name: Process No-Op Messages | |
| id: noop | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} | |
| GH_AW_NOOP_MAX: "1" | |
| GH_AW_WORKFLOW_NAME: "Issue Monster" | |
| with: | |
| github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/noop.cjs'); | |
| await main(); | |
| - name: Record Missing Tool | |
| id: missing_tool | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} | |
| GH_AW_WORKFLOW_NAME: "Issue Monster" | |
| with: | |
| github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/missing_tool.cjs'); | |
| await main(); | |
| - name: Handle Agent Failure | |
| id: handle_agent_failure | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} | |
| GH_AW_WORKFLOW_NAME: "Issue Monster" | |
| GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} | |
| GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} | |
| GH_AW_WORKFLOW_ID: "issue-monster" | |
| GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.activation.outputs.secret_verification_result }} | |
| GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} | |
| GH_AW_INFERENCE_ACCESS_ERROR: ${{ needs.agent.outputs.inference_access_error }} | |
| GH_AW_ASSIGNMENT_ERRORS: ${{ needs.safe_outputs.outputs.assign_to_agent_assignment_errors }} | |
| GH_AW_ASSIGNMENT_ERROR_COUNT: ${{ needs.safe_outputs.outputs.assign_to_agent_assignment_error_count }} | |
| GH_AW_LOCKDOWN_CHECK_FAILED: ${{ needs.activation.outputs.lockdown_check_failed }} | |
| GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🍪 *Om nom nom by [{workflow_name}]({run_url})*{history_link}\",\"runStarted\":\"🍪 ISSUE! ISSUE! [{workflow_name}]({run_url}) hungry for issues on this {event_type}! Om nom nom...\",\"runSuccess\":\"🍪 YUMMY! [{workflow_name}]({run_url}) ate the issues! That was DELICIOUS! Me want MORE! 😋\",\"runFailure\":\"🍪 Aww... [{workflow_name}]({run_url}) {status}. No cookie for monster today... 😢\"}" | |
| GH_AW_GROUP_REPORTS: "false" | |
| GH_AW_FAILURE_REPORT_AS_ISSUE: "true" | |
| GH_AW_TIMEOUT_MINUTES: "30" | |
| with: | |
| github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_agent_failure.cjs'); | |
| await main(); | |
| - name: Handle No-Op Message | |
| id: handle_noop_message | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} | |
| GH_AW_WORKFLOW_NAME: "Issue Monster" | |
| GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} | |
| GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} | |
| GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} | |
| GH_AW_NOOP_REPORT_AS_ISSUE: "true" | |
| with: | |
| github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/handle_noop_message.cjs'); | |
| await main(); | |
| detection: | |
| needs: agent | |
| if: always() && needs.agent.result != 'skipped' | |
| runs-on: ubuntu-latest | |
| permissions: | |
| contents: read | |
| outputs: | |
| detection_conclusion: ${{ steps.detection_conclusion.outputs.conclusion }} | |
| detection_success: ${{ steps.detection_conclusion.outputs.success }} | |
| steps: | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| - name: Download agent output artifact | |
| id: download-agent-output | |
| continue-on-error: true | |
| uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 | |
| with: | |
| name: agent | |
| path: /tmp/gh-aw/ | |
| - name: Setup agent output environment variable | |
| id: setup-agent-output-env | |
| if: steps.download-agent-output.outcome == 'success' | |
| run: | | |
| mkdir -p /tmp/gh-aw/ | |
| find "/tmp/gh-aw/" -type f -print | |
| echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" | |
| # --- Threat Detection --- | |
| - name: Download container images | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.25.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.25.0 ghcr.io/github/gh-aw-firewall/squid:0.25.0 | |
| - name: Check if detection needed | |
| id: detection_guard | |
| if: always() | |
| env: | |
| OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }} | |
| HAS_PATCH: ${{ needs.agent.outputs.has_patch }} | |
| run: | | |
| if [[ -n "$OUTPUT_TYPES" || "$HAS_PATCH" == "true" ]]; then | |
| echo "run_detection=true" >> "$GITHUB_OUTPUT" | |
| echo "Detection will run: output_types=$OUTPUT_TYPES, has_patch=$HAS_PATCH" | |
| else | |
| echo "run_detection=false" >> "$GITHUB_OUTPUT" | |
| echo "Detection skipped: no agent outputs or patches to analyze" | |
| fi | |
| - name: Clear MCP configuration for detection | |
| if: always() && steps.detection_guard.outputs.run_detection == 'true' | |
| run: | | |
| rm -f /tmp/gh-aw/mcp-config/mcp-servers.json | |
| rm -f /home/runner/.copilot/mcp-config.json | |
| rm -f "$GITHUB_WORKSPACE/.gemini/settings.json" | |
| - name: Prepare threat detection files | |
| if: always() && steps.detection_guard.outputs.run_detection == 'true' | |
| run: | | |
| mkdir -p /tmp/gh-aw/threat-detection/aw-prompts | |
| cp /tmp/gh-aw/aw-prompts/prompt.txt /tmp/gh-aw/threat-detection/aw-prompts/prompt.txt 2>/dev/null || true | |
| cp /tmp/gh-aw/agent_output.json /tmp/gh-aw/threat-detection/agent_output.json 2>/dev/null || true | |
| for f in /tmp/gh-aw/aw-*.patch; do | |
| [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true | |
| done | |
| echo "Prepared threat detection files:" | |
| ls -la /tmp/gh-aw/threat-detection/ 2>/dev/null || true | |
| - name: Setup threat detection | |
| if: always() && steps.detection_guard.outputs.run_detection == 'true' | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| WORKFLOW_NAME: "Issue Monster" | |
| WORKFLOW_DESCRIPTION: "The Cookie Monster of issues - assigns issues to Copilot coding agent one at a time" | |
| HAS_PATCH: ${{ needs.agent.outputs.has_patch }} | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/setup_threat_detection.cjs'); | |
| await main(); | |
| - name: Ensure threat-detection directory and log | |
| if: always() && steps.detection_guard.outputs.run_detection == 'true' | |
| run: | | |
| mkdir -p /tmp/gh-aw/threat-detection | |
| touch /tmp/gh-aw/threat-detection/detection.log | |
| - name: Install GitHub Copilot CLI | |
| run: ${RUNNER_TEMP}/gh-aw/actions/install_copilot_cli.sh latest | |
| env: | |
| GH_HOST: github.com | |
| - name: Install AWF binary | |
| run: bash ${RUNNER_TEMP}/gh-aw/actions/install_awf_binary.sh v0.25.0 | |
| - name: Execute GitHub Copilot CLI | |
| if: always() && steps.detection_guard.outputs.run_detection == 'true' | |
| id: detection_agentic_execution | |
| # Copilot CLI tool arguments (sorted): | |
| timeout-minutes: 20 | |
| run: | | |
| set -o pipefail | |
| touch /tmp/gh-aw/agent-step-summary.md | |
| # shellcheck disable=SC1003 | |
| sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount "${RUNNER_TEMP}/gh-aw:${RUNNER_TEMP}/gh-aw:ro" --mount "${RUNNER_TEMP}/gh-aw:/host${RUNNER_TEMP}/gh-aw:ro" --allow-domains "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,github.com,host.docker.internal,telemetry.enterprise.githubcopilot.com" --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --audit-dir /tmp/gh-aw/sandbox/firewall/audit --enable-host-access --image-tag 0.25.0 --skip-pull --enable-api-proxy \ | |
| -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log | |
| env: | |
| COPILOT_AGENT_RUNNER_TYPE: STANDALONE | |
| COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} | |
| COPILOT_MODEL: gpt-5.1-codex-mini | |
| GH_AW_PHASE: detection | |
| GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt | |
| GH_AW_VERSION: dev | |
| GITHUB_API_URL: ${{ github.api_url }} | |
| GITHUB_AW: true | |
| GITHUB_HEAD_REF: ${{ github.head_ref }} | |
| GITHUB_REF_NAME: ${{ github.ref_name }} | |
| GITHUB_SERVER_URL: ${{ github.server_url }} | |
| GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md | |
| GITHUB_WORKSPACE: ${{ github.workspace }} | |
| GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com | |
| GIT_AUTHOR_NAME: github-actions[bot] | |
| GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com | |
| GIT_COMMITTER_NAME: github-actions[bot] | |
| XDG_CONFIG_HOME: /home/runner | |
| - name: Upload threat detection log | |
| if: always() && steps.detection_guard.outputs.run_detection == 'true' | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: detection | |
| path: /tmp/gh-aw/threat-detection/detection.log | |
| if-no-files-found: ignore | |
| - name: Parse and conclude threat detection | |
| id: detection_conclusion | |
| if: always() | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| RUN_DETECTION: ${{ steps.detection_guard.outputs.run_detection }} | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/parse_threat_detection_results.cjs'); | |
| await main(); | |
| pre_activation: | |
| runs-on: ubuntu-slim | |
| permissions: | |
| contents: read | |
| issues: read | |
| pull-requests: read | |
| outputs: | |
| activated: ${{ steps.check_membership.outputs.is_team_member == 'true' && steps.check_skip_if_match.outputs.skip_check_ok == 'true' && steps.check_skip_if_no_match.outputs.skip_no_match_check_ok == 'true' && steps.check_skip_if_check_failing.outputs.skip_if_check_failing_ok == 'true' }} | |
| has_issues: ${{ steps.search.outputs.has_issues }} | |
| issue_count: ${{ steps.search.outputs.issue_count }} | |
| issue_list: ${{ steps.search.outputs.issue_list }} | |
| issue_numbers: ${{ steps.search.outputs.issue_numbers }} | |
| matched_command: '' | |
| search_result: ${{ steps.search.outcome }} | |
| steps: | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| # Gate 1: only activate if the workflow's actor has one of the required | |
| # repository roles. Result surfaces as outputs.is_team_member. | |
| - name: Check team membership for workflow | |
| id: check_membership | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_REQUIRED_ROLES: admin,maintainer,write | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/check_membership.cjs'); | |
| await main(); | |
| # Gate 2: skip the run when too many open draft Copilot PRs already exist | |
| # (query below, threshold GH_AW_SKIP_MAX_MATCHES). Sets skip_check_ok. | |
| - name: Check skip-if-match query | |
| id: check_skip_if_match | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_SKIP_QUERY: "is:pr is:open is:draft author:app/copilot-swe-agent" | |
| GH_AW_WORKFLOW_NAME: "Issue Monster" | |
| GH_AW_SKIP_MAX_MATCHES: "5" | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/check_skip_if_match.cjs'); | |
| await main(); | |
| # Gate 3: skip the run when there is no work at all — requires at least | |
| # GH_AW_SKIP_MIN_MATCHES open issue(s). Sets skip_no_match_check_ok. | |
| - name: Check skip-if-no-match query | |
| id: check_skip_if_no_match | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_SKIP_QUERY: "is:issue is:open" | |
| GH_AW_WORKFLOW_NAME: "Issue Monster" | |
| GH_AW_SKIP_MIN_MATCHES: "1" | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/check_skip_if_no_match.cjs'); | |
| await main(); | |
| # Gate 4: skip the run when any of the listed CI checks is failing. | |
| # Pending checks are tolerated (GH_AW_SKIP_CHECK_ALLOW_PENDING). | |
| - name: Check skip-if-check-failing | |
| id: check_skip_if_check_failing | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_SKIP_CHECK_INCLUDE: "[\"build\",\"test\",\"lint-go\",\"lint-js\"]" | |
| GH_AW_SKIP_CHECK_ALLOW_PENDING: "true" | |
| with: | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/check_skip_if_check_failing.cjs'); | |
| await main(); | |
| # Core step: search, filter, and score candidate "cookie" issues, then | |
| # publish has_issues / issue_count / issue_numbers / issue_list outputs. | |
| # The inline script below is self-contained (no shared .cjs helpers). | |
| - name: Search for candidate issues | |
| id: search | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| with: | |
| script: | | |
| const { owner, repo } = context.repo; | |
| try { | |
| // Check for recent rate-limited PRs to avoid scheduling more work during rate limiting | |
| core.info('Checking for recent rate-limited PRs...'); | |
| const rateLimitCheckDate = new Date(); | |
| rateLimitCheckDate.setHours(rateLimitCheckDate.getHours() - 1); // Check last hour | |
| // Format as YYYY-MM-DDTHH:MM:SS for GitHub search API | |
| const rateLimitCheckISO = rateLimitCheckDate.toISOString().split('.')[0] + 'Z'; | |
| const recentPRsQuery = `is:pr author:app/copilot-swe-agent created:>${rateLimitCheckISO} repo:${owner}/${repo}`; | |
| const recentPRsResponse = await github.rest.search.issuesAndPullRequests({ | |
| q: recentPRsQuery, | |
| per_page: 10, | |
| sort: 'created', | |
| order: 'desc' | |
| }); | |
| core.info(`Found ${recentPRsResponse.data.total_count} recent Copilot PRs to check for rate limiting`); | |
| // Check if any recent PRs have rate limit indicators | |
| let rateLimitDetected = false; | |
| for (const pr of recentPRsResponse.data.items) { | |
| try { | |
| // Pull the first 50 issue comments from the PR timeline via GraphQL. | |
| const prTimelineQuery = ` | |
| query($owner: String!, $repo: String!, $number: Int!) { | |
| repository(owner: $owner, name: $repo) { | |
| pullRequest(number: $number) { | |
| timelineItems(first: 50, itemTypes: [ISSUE_COMMENT]) { | |
| nodes { | |
| __typename | |
| ... on IssueComment { | |
| body | |
| createdAt | |
| } | |
| } | |
| } | |
| } | |
| } | |
| } | |
| `; | |
| const prTimelineResult = await github.graphql(prTimelineQuery, { | |
| owner, | |
| repo, | |
| number: pr.number | |
| }); | |
| const comments = prTimelineResult?.repository?.pullRequest?.timelineItems?.nodes || []; | |
| // Heuristic, case-insensitive match for common rate-limit phrasings. | |
| const rateLimitPattern = /rate limit|API rate limit|secondary rate limit|abuse detection|429|too many requests/i; | |
| for (const comment of comments) { | |
| if (comment.body && rateLimitPattern.test(comment.body)) { | |
| core.warning(`Rate limiting detected in PR #${pr.number}: ${comment.body.substring(0, 200)}`); | |
| rateLimitDetected = true; | |
| break; | |
| } | |
| } | |
| if (rateLimitDetected) break; | |
| } catch (error) { | |
| // Best-effort: a PR we cannot inspect is logged and skipped, not fatal. | |
| core.warning(`Could not check PR #${pr.number} for rate limiting: ${error.message}`); | |
| } | |
| } | |
| if (rateLimitDetected) { | |
| // Emit an explicit "no work" output set so downstream jobs stay quiet. | |
| core.warning('🛑 Rate limiting detected in recent PRs. Skipping issue assignment to prevent further rate limit issues.'); | |
| core.setOutput('issue_count', 0); | |
| core.setOutput('issue_numbers', ''); | |
| core.setOutput('issue_list', ''); | |
| core.setOutput('has_issues', 'false'); | |
| return; | |
| } | |
| core.info('✓ No rate limiting detected. Proceeding with issue search.'); | |
| // Labels that indicate an issue should NOT be auto-assigned | |
| const excludeLabels = [ | |
| 'wontfix', | |
| 'duplicate', | |
| 'invalid', | |
| 'question', | |
| 'discussion', | |
| 'needs-discussion', | |
| 'blocked', | |
| 'on-hold', | |
| 'waiting-for-feedback', | |
| 'needs-more-info', | |
| 'no-bot', | |
| 'no-campaign' | |
| ]; | |
| // Labels that indicate an issue is a GOOD candidate for auto-assignment | |
| // (used later for scoring; weights are assigned per label below). | |
| const priorityLabels = [ | |
| 'good first issue', | |
| 'good-first-issue', | |
| 'bug', | |
| 'enhancement', | |
| 'feature', | |
| 'documentation', | |
| 'tech-debt', | |
| 'refactoring', | |
| 'performance', | |
| 'security' | |
| ]; | |
| // Search for open issues with "cookie" label and without excluded labels | |
| // The "cookie" label indicates issues that are approved work queue items from automated workflows | |
| // The join() expands to -label:"a" -label:"b" ... for every excluded label. | |
| const query = `is:issue is:open repo:${owner}/${repo} label:cookie -label:"${excludeLabels.join('" -label:"')}"`; | |
| core.info(`Searching: ${query}`); | |
| const response = await github.rest.search.issuesAndPullRequests({ | |
| q: query, | |
| per_page: 100, | |
| sort: 'created', | |
| order: 'desc' | |
| }); | |
| core.info(`Found ${response.data.total_count} total issues matching basic criteria`); | |
| // Fetch full details for each issue to get labels, assignees, sub-issues, and linked PRs | |
| // Track integrity-filtered issues to emit a diagnostic summary | |
| const integrityFilteredIssues = []; | |
| // NOTE(review): all per-issue REST + GraphQL fetches below run concurrently | |
| // (up to 100 at once via Promise.all) — confirm this stays within API | |
| // rate limits for larger repositories. | |
| const issuesWithDetails = (await Promise.all( | |
| response.data.items.map(async (issue) => { | |
| // Fetch full issue details — some issues may be blocked by integrity policy | |
| let fullIssue; | |
| try { | |
| fullIssue = await github.rest.issues.get({ | |
| owner, | |
| repo, | |
| issue_number: issue.number | |
| }); | |
| } catch (fetchError) { | |
| // Integrity-filtered issues (403/451) or other transient errors should be | |
| // skipped individually rather than failing the entire batch | |
| const status = fetchError.status || fetchError.response?.status; | |
| // 403 = Forbidden (integrity policy), 451 = Unavailable For Legal Reasons | |
| const isIntegrityBlock = status === 403 || status === 451 || | |
| /\bintegrity\b/i.test(fetchError.message || ''); | |
| const errorSummary = (fetchError.message || String(fetchError)).slice(0, 120); | |
| if (isIntegrityBlock) { | |
| integrityFilteredIssues.push(issue.number); | |
| core.warning(`⚠️ Skipping issue #${issue.number}: blocked by integrity policy (HTTP ${status || 'unknown'}): ${errorSummary}`); | |
| } else { | |
| core.warning(`⚠️ Skipping issue #${issue.number}: could not fetch details (HTTP ${status || 'unknown'}): ${errorSummary}`); | |
| } | |
| // null entries are dropped by the .filter(Boolean) below. | |
| return null; | |
| } | |
| // Check if this issue has sub-issues and linked PRs using GraphQL | |
| let subIssuesCount = 0; | |
| let linkedPRs = []; | |
| try { | |
| const issueDetailsQuery = ` | |
| query($owner: String!, $repo: String!, $number: Int!) { | |
| repository(owner: $owner, name: $repo) { | |
| issue(number: $number) { | |
| subIssues { | |
| totalCount | |
| } | |
| timelineItems(first: 100, itemTypes: [CROSS_REFERENCED_EVENT]) { | |
| nodes { | |
| ... on CrossReferencedEvent { | |
| source { | |
| __typename | |
| ... on PullRequest { | |
| number | |
| state | |
| isDraft | |
| author { | |
| login | |
| } | |
| } | |
| } | |
| } | |
| } | |
| } | |
| } | |
| } | |
| } | |
| `; | |
| const issueDetailsResult = await github.graphql(issueDetailsQuery, { | |
| owner, | |
| repo, | |
| number: issue.number | |
| }); | |
| subIssuesCount = issueDetailsResult?.repository?.issue?.subIssues?.totalCount || 0; | |
| // Extract linked PRs from timeline | |
| const timelineItems = issueDetailsResult?.repository?.issue?.timelineItems?.nodes || []; | |
| linkedPRs = timelineItems | |
| .filter(item => item?.source?.__typename === 'PullRequest') | |
| .map(item => ({ | |
| number: item.source.number, | |
| state: item.source.state, | |
| isDraft: item.source.isDraft, | |
| author: item.source.author?.login | |
| })); | |
| core.info(`Issue #${issue.number} has ${linkedPRs.length} linked PR(s)`); | |
| } catch (error) { | |
| // If GraphQL query fails, continue with defaults | |
| core.warning(`Could not check details for #${issue.number}: ${error.message}`); | |
| } | |
| // Merge REST payload with the GraphQL-derived extras used by the filter. | |
| return { | |
| ...fullIssue.data, | |
| subIssuesCount, | |
| linkedPRs | |
| }; | |
| }) | |
| )).filter(Boolean); // Remove null entries (integrity-filtered or otherwise skipped) | |
| // Emit diagnostic summary for integrity-filtered issues | |
| if (integrityFilteredIssues.length > 0) { | |
| core.warning(`🛡️ Integrity filter diagnostic: ${integrityFilteredIssues.length} issue(s) were skipped due to integrity policy: #${integrityFilteredIssues.join(', #')}. These issues will be excluded from this run.`); | |
| } | |
| // Filter and score issues | |
| const scoredIssues = issuesWithDetails | |
| .filter(issue => { | |
| // Exclude issues that already have assignees | |
| if (issue.assignees && issue.assignees.length > 0) { | |
| core.info(`Skipping #${issue.number}: already has assignees`); | |
| return false; | |
| } | |
| // Exclude issues with excluded labels (double check) | |
| const issueLabels = issue.labels.map(l => l.name.toLowerCase()); | |
| if (issueLabels.some(label => excludeLabels.map(l => l.toLowerCase()).includes(label))) { | |
| core.info(`Skipping #${issue.number}: has excluded label`); | |
| return false; | |
| } | |
| // Exclude issues with campaign labels (campaign:*) | |
| // Campaign items are managed by campaign orchestrators | |
| if (issueLabels.some(label => label.startsWith('campaign:'))) { | |
| core.info(`Skipping #${issue.number}: has campaign label (managed by campaign orchestrator)`); | |
| return false; | |
| } | |
| // Exclude issues that have sub-issues (parent/organizing issues) | |
| if (issue.subIssuesCount > 0) { | |
| core.info(`Skipping #${issue.number}: has ${issue.subIssuesCount} sub-issue(s) - parent issues are used for organizing, not tasks`); | |
| return false; | |
| } | |
| // Exclude issues with closed PRs (treat as complete) | |
| const closedPRs = issue.linkedPRs?.filter(pr => pr.state === 'CLOSED' || pr.state === 'MERGED') || []; | |
| if (closedPRs.length > 0) { | |
| core.info(`Skipping #${issue.number}: has ${closedPRs.length} closed/merged PR(s) - treating as complete`); | |
| return false; | |
| } | |
| // Exclude issues with open PRs from Copilot coding agent | |
| const openCopilotPRs = issue.linkedPRs?.filter(pr => | |
| pr.state === 'OPEN' && | |
| (pr.author === 'copilot-swe-agent' || pr.author?.includes('copilot')) | |
| ) || []; | |
| if (openCopilotPRs.length > 0) { | |
| core.info(`Skipping #${issue.number}: has ${openCopilotPRs.length} open PR(s) from Copilot - already being worked on`); | |
| return false; | |
| } | |
| return true; | |
| }) | |
| .map(issue => { | |
| const issueLabels = issue.labels.map(l => l.name.toLowerCase()); | |
| let score = 0; | |
| // Score based on priority labels (higher score = higher priority) | |
| // Weights: good-first-issue 50 > security 45 > bug 40 > docs 35 > | |
| // enhancement/feature 30 > performance 25 > tech-debt/refactoring 20. | |
| if (issueLabels.includes('good first issue') || issueLabels.includes('good-first-issue')) { | |
| score += 50; | |
| } | |
| if (issueLabels.includes('bug')) { | |
| score += 40; | |
| } | |
| if (issueLabels.includes('security')) { | |
| score += 45; | |
| } | |
| if (issueLabels.includes('documentation')) { | |
| score += 35; | |
| } | |
| if (issueLabels.includes('enhancement') || issueLabels.includes('feature')) { | |
| score += 30; | |
| } | |
| if (issueLabels.includes('performance')) { | |
| score += 25; | |
| } | |
| if (issueLabels.includes('tech-debt') || issueLabels.includes('refactoring')) { | |
| score += 20; | |
| } | |
| // Bonus for issues with clear labels (any priority label) | |
| if (issueLabels.some(label => priorityLabels.map(l => l.toLowerCase()).includes(label))) { | |
| score += 10; | |
| } | |
| // Age bonus: older issues get slight priority (days old / 10) | |
| const ageInDays = Math.floor((Date.now() - new Date(issue.created_at)) / (1000 * 60 * 60 * 24)); | |
| score += Math.min(ageInDays / 10, 20); // Cap age bonus at 20 points | |
| // Keep only the fields the output formatter needs. | |
| return { | |
| number: issue.number, | |
| title: issue.title, | |
| labels: issue.labels.map(l => l.name), | |
| created_at: issue.created_at, | |
| score | |
| }; | |
| }) | |
| .sort((a, b) => b.score - a.score); // Sort by score descending | |
| // Format output | |
| // One line per candidate: "#N: title [labels] (score: S.S)". | |
| const issueList = scoredIssues.map(i => { | |
| const labelStr = i.labels.length > 0 ? ` [${i.labels.join(', ')}]` : ''; | |
| return `#${i.number}: ${i.title}${labelStr} (score: ${i.score.toFixed(1)})`; | |
| }).join('\n'); | |
| const issueNumbers = scoredIssues.map(i => i.number).join(','); | |
| core.info(`Total candidate issues after filtering: ${scoredIssues.length}`); | |
| if (scoredIssues.length > 0) { | |
| // Log only the ten highest-scoring candidates. | |
| core.info(`Top candidates:\n${issueList.split('\n').slice(0, 10).join('\n')}`); | |
| } | |
| core.setOutput('issue_count', scoredIssues.length); | |
| core.setOutput('issue_numbers', issueNumbers); | |
| core.setOutput('issue_list', issueList); | |
| if (scoredIssues.length === 0) { | |
| core.info('🍽️ No suitable candidate issues - the plate is empty!'); | |
| core.setOutput('has_issues', 'false'); | |
| } else { | |
| core.setOutput('has_issues', 'true'); | |
| } | |
| } catch (error) { | |
| // On any unexpected failure, degrade to a "no issues" output set instead | |
| // of failing the step, so the workflow run concludes cleanly. | |
| core.error(`Error searching for issues: ${error.message}`); | |
| core.setOutput('issue_count', 0); | |
| core.setOutput('issue_numbers', ''); | |
| core.setOutput('issue_list', ''); | |
| core.setOutput('has_issues', 'false'); | |
| } | |
| # Post-agent job: processes the agent's safe-output items (comments, | |
| # assignments, etc.). Runs only when the agent ran and threat detection | |
| # succeeded. | |
| safe_outputs: | |
| needs: | |
| - agent | |
| - detection | |
| if: (!cancelled()) && needs.agent.result != 'skipped' && needs.detection.result == 'success' | |
| runs-on: ubuntu-slim | |
| permissions: | |
| contents: read | |
| discussions: write | |
| issues: write | |
| pull-requests: write | |
| timeout-minutes: 15 | |
| env: | |
| GH_AW_CALLER_WORKFLOW_ID: "${{ github.repository }}/issue-monster" | |
| GH_AW_ENGINE_ID: "copilot" | |
| GH_AW_ENGINE_MODEL: "gpt-5.1-codex-mini" | |
| # JSON-encoded message templates ("\u003e" is an escaped ">" quote marker). | |
| GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🍪 *Om nom nom by [{workflow_name}]({run_url})*{history_link}\",\"runStarted\":\"🍪 ISSUE! ISSUE! [{workflow_name}]({run_url}) hungry for issues on this {event_type}! Om nom nom...\",\"runSuccess\":\"🍪 YUMMY! [{workflow_name}]({run_url}) ate the issues! That was DELICIOUS! Me want MORE! 😋\",\"runFailure\":\"🍪 Aww... [{workflow_name}]({run_url}) {status}. No cookie for monster today... 😢\"}" | |
| GH_AW_WORKFLOW_ID: "issue-monster" | |
| GH_AW_WORKFLOW_NAME: "Issue Monster" | |
| outputs: | |
| assign_to_agent_assigned: ${{ steps.assign_to_agent.outputs.assigned }} | |
| assign_to_agent_assignment_error_count: ${{ steps.assign_to_agent.outputs.assignment_error_count }} | |
| assign_to_agent_assignment_errors: ${{ steps.assign_to_agent.outputs.assignment_errors }} | |
| code_push_failure_count: ${{ steps.process_safe_outputs.outputs.code_push_failure_count }} | |
| code_push_failure_errors: ${{ steps.process_safe_outputs.outputs.code_push_failure_errors }} | |
| comment_id: ${{ steps.process_safe_outputs.outputs.comment_id }} | |
| comment_url: ${{ steps.process_safe_outputs.outputs.comment_url }} | |
| create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} | |
| create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} | |
| process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} | |
| process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} | |
| steps: | |
| # Same bootstrap as pre_activation: sparse-checkout the gh-aw actions/ | |
| # folder and stage the shared helper scripts under runner.temp. | |
| - name: Checkout actions folder | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| with: | |
| repository: github/gh-aw | |
| sparse-checkout: | | |
| actions | |
| persist-credentials: false | |
| - name: Setup Scripts | |
| uses: ./actions/setup | |
| with: | |
| destination: ${{ runner.temp }}/gh-aw/actions | |
| # Pull the agent job's output artifact; continue-on-error tolerates a | |
| # missing artifact (the env-setup step below is gated on success). | |
| - name: Download agent output artifact | |
| id: download-agent-output | |
| continue-on-error: true | |
| uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 | |
| with: | |
| name: agent | |
| path: /tmp/gh-aw/ | |
| - name: Setup agent output environment variable | |
| id: setup-agent-output-env | |
| if: steps.download-agent-output.outcome == 'success' | |
| run: | | |
| mkdir -p /tmp/gh-aw/ | |
| # List downloaded files for debugging, then publish the expected path. | |
| find "/tmp/gh-aw/" -type f -print | |
| echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/agent_output.json" >> "$GITHUB_OUTPUT" | |
| - name: Configure GH_HOST for enterprise compatibility | |
| id: ghes-host-config | |
| shell: bash | |
| run: | | |
| # Derive GH_HOST from GITHUB_SERVER_URL so the gh CLI targets the correct | |
| # GitHub instance (GHES/GHEC). On github.com this is a harmless no-op. | |
| GH_HOST="${GITHUB_SERVER_URL#https://}" | |
| GH_HOST="${GH_HOST#http://}" | |
| echo "GH_HOST=${GH_HOST}" >> "$GITHUB_ENV" | |
| # Dispatch all safe-output items through the handler manager, constrained | |
| # by the per-handler limits in GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG. | |
| - name: Process Safe Outputs | |
| id: process_safe_outputs | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} | |
| GH_AW_ALLOWED_DOMAINS: "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com" | |
| GITHUB_SERVER_URL: ${{ github.server_url }} | |
| GITHUB_API_URL: ${{ github.api_url }} | |
| GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"max\":3,\"target\":\"*\"},\"assign_to_agent\":{\"allowed\":[\"copilot\"],\"max\":3,\"target\":\"*\"},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"}}" | |
| with: | |
| # Prefer the dedicated gh-aw token when configured; fall back to the | |
| # default workflow token. | |
| github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/safe_output_handler_manager.cjs'); | |
| await main(); | |
| # Assign issues to the Copilot coding agent, but only when the agent job | |
| # actually emitted an `assign_to_agent` output item. | |
| - name: Assign to Agent | |
| id: assign_to_agent | |
| if: (!cancelled()) && needs.agent.result != 'skipped' && contains(needs.agent.outputs.output_types, 'assign_to_agent') | |
| uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| GH_AW_AGENT_OUTPUT: ${{ steps.setup-agent-output-env.outputs.GH_AW_AGENT_OUTPUT }} | |
| GH_AW_AGENT_MAX_COUNT: 3 | |
| GH_AW_AGENT_TARGET: "*" | |
| GH_AW_AGENT_ALLOWED: "copilot" | |
| with: | |
| # Token precedence: agent-specific > gh-aw shared > default workflow token. | |
| github-token: ${{ secrets.GH_AW_AGENT_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} | |
| script: | | |
| const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs'); | |
| setupGlobals(core, github, context, exec, io); | |
| const { main } = require('${{ runner.temp }}/gh-aw/actions/assign_to_agent.cjs'); | |
| await main(); | |
| # Always archive the processed safe-output items for auditing; absence of | |
| # the file is not an error (if-no-files-found: ignore). | |
| - name: Upload Safe Output Items | |
| if: always() | |
| uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 | |
| with: | |
| name: safe-output-items | |
| path: /tmp/gh-aw/safe-output-items.jsonl | |
| if-no-files-found: ignore | |