mirror of
https://github.com/OpenRouterTeam/spawn.git
synced 2026-05-03 06:10:21 +00:00
* fix: pin all GitHub Actions to commit SHAs and version-lock tools Addresses supply chain hardening findings from issue #2982: - Pin all 6 GitHub Actions to full commit SHAs with version comments: - actions/checkout@v4 → SHA 34e1148... - oven-sh/setup-bun@v2 → SHA 0c5077e... - actions/github-script@v7 → SHA f28e40c... - docker/login-action@v3 → SHA c94ce9f... - docker/build-push-action@v6 → SHA 10e90e3... - hashicorp/setup-packer@main → SHA c3d53c5... (v3.2.0) - Pin Packer version: latest → 1.15.0 (in packer-snapshots.yml) - Pin bun version: latest → 1.3.11 (in agent-tarballs.yml) - Pin shellcheck: replace apt-get (no version) with pinned download of v0.10.0 from GitHub releases with SHA256 integrity check These changes eliminate the primary LiteLLM-style attack vector: a compromised action maintainer can no longer force-push malicious code to an existing tag and have it run in CI. Fixes #2982 Agent: issue-fixer Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com> * fix: exclude import aliases from no-type-assertion lint rule The `JsNamedImportSpecifier` exclusion prevents `import { foo as bar }` patterns from being flagged as type assertions. Previously, any `as` keyword in import/export statements triggered the ban because the GritQL pattern `$value as $type` matched import specifiers as well as actual TypeScript type assertions. This also removes the `as _foo` import aliases in the script-failure-guidance test file (replaced with direct imports + distinctly-named wrapper functions) which were the original manifestation of this bug. All 1944 tests pass. Biome check clean across 169 files. Agent: issue-fixer Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com> --------- Co-authored-by: B <6723574+louisgv@users.noreply.github.com> Co-authored-by: Claude Sonnet 4.5 <noreply@anthropic.com>
182 lines
6.7 KiB
YAML
182 lines
6.7 KiB
YAML
---
# Builds DigitalOcean snapshot images for each agent with Packer, prunes old
# snapshots, and submits the newest image to the DO Marketplace vendor portal.
# Runs nightly and on manual dispatch (optionally for a single agent).
name: Packer Snapshots

on:
  schedule:
    # Nightly at 4 AM UTC (before tarball build at 5 AM)
    - cron: "0 4 * * *"
  workflow_dispatch:
    inputs:
      agent:
        description: "Single agent to build (leave empty for all)"
        required: false
        type: string

# Least privilege: the workflow only reads the repo; all cloud access goes
# through the DO_API_TOKEN secret.
permissions:
  contents: read

jobs:
  # Produce the build matrix: either the single requested agent, or every
  # agent key found in packer/agents.json.
  matrix:
    name: Generate matrix
    runs-on: ubuntu-latest
    outputs:
      include: ${{ steps.set.outputs.include }}
    steps:
      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
      - id: set
        run: |
          SINGLE_AGENT="${SINGLE_AGENT_INPUT}"

          if [ -n "$SINGLE_AGENT" ]; then
            AGENTS=$(jq -nc --arg agent "$SINGLE_AGENT" '[$agent]')
          else
            AGENTS=$(jq -c 'keys' packer/agents.json)
          fi

          # Build a flat include array: [{agent, cloud}, ...]
          INCLUDE=$(jq -nc --argjson agents "$AGENTS" \
            '[$agents[] as $a | {agent: $a, cloud: "digitalocean"}]')
          echo "include=${INCLUDE}" >> "$GITHUB_OUTPUT"
        env:
          # Passed through env (not interpolated into the script) so an
          # attacker-controlled input can't inject shell.
          SINGLE_AGENT_INPUT: ${{ inputs.agent }}

  # One build per matrix entry; fail-fast is off so one agent's failure
  # doesn't cancel the others.
  build:
    name: "digitalocean/${{ matrix.agent }}"
    needs: matrix
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.matrix.outputs.include) }}
    steps:
      - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4

      # Look up this agent's tier and install commands from agents.json,
      # defaulting to "minimal" / [] when absent.
      - name: Read agent config
        id: config
        run: |
          TIER=$(jq -r --arg a "$AGENT_NAME" '.[$a].tier // "minimal"' packer/agents.json)
          INSTALL=$(jq -c --arg a "$AGENT_NAME" '.[$a].install // []' packer/agents.json)
          echo "tier=${TIER}" >> "$GITHUB_OUTPUT"
          echo "install=${INSTALL}" >> "$GITHUB_OUTPUT"
        env:
          AGENT_NAME: ${{ matrix.agent }}

      - name: Setup Packer
        uses: hashicorp/setup-packer@c3d53c525d422944e50ee27b840746d6522b08de # v3.2.0
        with:
          # Version-locked for supply-chain hardening (see issue #2982).
          version: "1.15.0"

      - name: Init Packer plugins
        run: packer init packer/digitalocean.pkr.hcl

      # Write the per-agent variables file consumed by `packer build` below.
      - name: Generate variables file
        run: |
          jq -n \
            --arg token "$DO_API_TOKEN" \
            --arg agent "$AGENT_NAME" \
            --arg tier "$TIER" \
            --argjson install "$INSTALL_COMMANDS" \
            '{
              do_api_token: $token,
              agent_name: $agent,
              cloud_init_tier: $tier,
              install_commands: $install
            }' > packer/auto.pkrvars.json
        env:
          DO_API_TOKEN: ${{ secrets.DO_API_TOKEN }}
          AGENT_NAME: ${{ matrix.agent }}
          TIER: ${{ steps.config.outputs.tier }}
          INSTALL_COMMANDS: ${{ steps.config.outputs.install }}

      - name: Build snapshot
        run: packer build -var-file=packer/auto.pkrvars.json packer/digitalocean.pkr.hcl

      # When a workflow is cancelled, Packer is killed before it can destroy
      # the temporary builder droplet — leaving orphaned instances.
      - name: Destroy orphaned builder droplets
        if: cancelled()
        run: |
          # Filter by spawn-packer tag to avoid destroying builder droplets from other workflows
          DROPLET_IDS=$(curl -s -H "Authorization: Bearer ${DO_API_TOKEN}" \
            "https://api.digitalocean.com/v2/droplets?per_page=200&tag_name=spawn-packer" \
            | jq -r '.droplets[].id')

          if [ -z "$DROPLET_IDS" ]; then
            echo "No orphaned packer builder droplets found"
            exit 0
          fi

          for ID in $DROPLET_IDS; do
            echo "Destroying orphaned builder droplet: ${ID}"
            curl -s -X DELETE -H "Authorization: Bearer ${DO_API_TOKEN}" \
              "https://api.digitalocean.com/v2/droplets/${ID}" || true
          done
        env:
          DO_API_TOKEN: ${{ secrets.DO_API_TOKEN }}

      # Keep only the newest snapshot per agent: sort by created_at desc,
      # drop the first (newest) entry, delete the rest. Deletions are
      # best-effort (`|| true`) so one stale image can't fail the job.
      - name: Cleanup old snapshots
        if: success()
        run: |
          PREFIX="spawn-${AGENT_NAME}-"
          SNAPSHOTS=$(curl -s -H "Authorization: Bearer ${DO_API_TOKEN}" \
            "https://api.digitalocean.com/v2/images?private=true&per_page=100" \
            | jq -r --arg prefix "$PREFIX" \
              '[.images[] | select(.name | startswith($prefix))] | sort_by(.created_at) | reverse | .[1:] | .[].id')

          for ID in $SNAPSHOTS; do
            echo "Deleting old snapshot: ${ID}"
            curl -s -X DELETE -H "Authorization: Bearer ${DO_API_TOKEN}" \
              "https://api.digitalocean.com/v2/images/${ID}" || true
          done
        env:
          DO_API_TOKEN: ${{ secrets.DO_API_TOKEN }}
          AGENT_NAME: ${{ matrix.agent }}

      # Optional: submit the fresh snapshot to the DO Marketplace. Skips
      # cleanly when the MARKETPLACE_APP_IDS secret or this agent's app ID
      # is absent.
      - name: Submit to DO Marketplace
        if: success()
        run: |
          # Skip if no marketplace app IDs configured
          if [ -z "$MARKETPLACE_APP_IDS" ]; then
            echo "No MARKETPLACE_APP_IDS secret — skipping marketplace submission"
            exit 0
          fi

          # Look up this agent's app ID from the JSON map
          APP_ID=$(echo "$MARKETPLACE_APP_IDS" | jq -r --arg a "$AGENT_NAME" '.[$a] // empty')
          if [ -z "$APP_ID" ]; then
            echo "No marketplace app ID for agent ${AGENT_NAME} — skipping"
            exit 0
          fi

          # Extract snapshot ID from Packer manifest
          # artifact_id format is "region:snapshot_id" (e.g. "sfo3:12345678")
          IMG_ID=$(jq '.builds[-1].artifact_id | split(":")[1] | tonumber' packer/manifest.json)
          if [ -z "$IMG_ID" ] || [ "$IMG_ID" = "null" ]; then
            echo "Failed to extract snapshot ID from manifest"
            exit 1
          fi

          echo "Submitting snapshot ${IMG_ID} for ${AGENT_NAME} (app: ${APP_ID})"

          # PATCH the Vendor API — updates go to "pending" review.
          # 400 = app already pending/in-review (expected for nightly runs), not an error.
          HTTP_CODE=$(curl -s -o /tmp/mp-response.json -w "%{http_code}" \
            -X PATCH \
            -H "Content-Type: application/json" \
            -H "Authorization: Bearer ${DO_API_TOKEN}" \
            -d "$(jq -n \
              --arg reason "Nightly rebuild — $(date -u '+%Y-%m-%d')" \
              --argjson imageId "$IMG_ID" \
              '{reasonForUpdate: $reason, imageId: $imageId}')" \
            "https://api.digitalocean.com/api/v1/vendor-portal/apps/${APP_ID}")

          case "$HTTP_CODE" in
            200) echo "Marketplace submission accepted (pending review)" ;;
            400) echo "App already pending review — skipping (expected for nightly runs)" ;;
            *) echo "Marketplace API returned ${HTTP_CODE}:"
               cat /tmp/mp-response.json
               exit 1 ;;
          esac
        env:
          DO_API_TOKEN: ${{ secrets.DO_API_TOKEN }}
          AGENT_NAME: ${{ matrix.agent }}
          MARKETPLACE_APP_IDS: ${{ secrets.MARKETPLACE_APP_IDS }}