revert: remove Packer snapshot pipeline (#2205)

DO snapshots are private and account-scoped — users on different
accounts cannot see snapshots built by the CI token. Docker images
are the better approach for cross-account pre-built agents.

Removes: packer/, packer-snapshots workflow, snapshot lookup code,
and snapshot test. Reverts DO CLI to plain cloud-init flow.

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Ahmed Abushagur 2026-03-04 23:48:52 -08:00 committed by GitHub
parent 96ffb3e201
commit 07c2c08e3a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 11 additions and 547 deletions

View file

@ -1,98 +0,0 @@
# Nightly Packer builds of pre-provisioned DigitalOcean snapshots, one per agent.
name: Packer DigitalOcean Snapshots

on:
  schedule:
    # 4 AM UTC daily (before docker at 6 AM)
    - cron: "0 4 * * *"
  workflow_dispatch:
    inputs:
      agent:
        description: "Single agent to build (leave empty for all)"
        required: false
        type: string

permissions:
  contents: read

jobs:
  # Resolve the build matrix: the single requested agent, or every key
  # of packer/agents.json when no input was given.
  matrix:
    runs-on: ubuntu-latest
    outputs:
      agents: ${{ steps.set-matrix.outputs.agents }}
    steps:
      - uses: actions/checkout@v4
      - id: set-matrix
        env:
          AGENT_INPUT: ${{ inputs.agent }}
        run: |
          if [ -n "${AGENT_INPUT}" ]; then
            echo "agents=$(jq -cn --arg a "${AGENT_INPUT}" '[$a]')" >> "$GITHUB_OUTPUT"
          else
            echo "agents=$(jq -c 'keys' packer/agents.json)" >> "$GITHUB_OUTPUT"
          fi

  build:
    needs: matrix
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 3
      fail-fast: false
      matrix:
        agent: ${{ fromJson(needs.matrix.outputs.agents) }}
    steps:
      - uses: actions/checkout@v4
      - name: Setup Packer
        uses: hashicorp/setup-packer@main
        with:
          version: latest
      # Render the per-agent var file from agents.json (tier + install commands).
      - name: Generate Packer var file
        env:
          DIGITALOCEAN_TOKEN: ${{ secrets.DO_API_TOKEN }}
          AGENT_NAME: ${{ matrix.agent }}
        run: |
          jq -n \
            --arg token "${DIGITALOCEAN_TOKEN}" \
            --arg agent "${AGENT_NAME}" \
            --arg tier "$(jq -r --arg a "${AGENT_NAME}" '.[$a].tier' packer/agents.json)" \
            --argjson install "$(jq --arg a "${AGENT_NAME}" '.[$a].install' packer/agents.json)" \
            '{
              do_api_token: $token,
              agent_name: $agent,
              cloud_init_tier: $tier,
              install_commands: $install
            }' > packer/build.auto.pkrvars.json
      - name: Packer init
        working-directory: packer
        run: packer init digitalocean.pkr.hcl
      - name: Packer build
        working-directory: packer
        run: packer build -var-file=build.auto.pkrvars.json digitalocean.pkr.hcl

  # Always runs (even on build failure) so stale snapshots never pile up.
  cleanup:
    needs: [matrix, build]
    if: always()
    runs-on: ubuntu-latest
    strategy:
      matrix:
        agent: ${{ fromJson(needs.matrix.outputs.agents) }}
    steps:
      - name: Delete old snapshots
        env:
          DO_API_TOKEN: ${{ secrets.DO_API_TOKEN }}
          AGENT_NAME: ${{ matrix.agent }}
        run: |
          # List snapshots for this agent, sorted by creation date; keep only
          # the newest ( .[1:] drops it from the delete list).
          snapshots=$(curl -s -H "Authorization: Bearer ${DO_API_TOKEN}" \
            "https://api.digitalocean.com/v2/images?private=true&per_page=50&tag_name=spawn-${AGENT_NAME}" \
            | jq -r '.images | sort_by(.created_at) | reverse | .[1:] | .[].id')
          for id in $snapshots; do
            echo "Deleting old snapshot: ${id}"
            curl -s -X DELETE -H "Authorization: Bearer ${DO_API_TOKEN}" \
              "https://api.digitalocean.com/v2/images/${id}"
          done

View file

@ -1,6 +1,6 @@
{
"name": "@openrouter/spawn",
"version": "0.12.23",
"version": "0.12.24",
"type": "module",
"bin": {
"spawn": "cli.js"

View file

@ -1,110 +0,0 @@
import { describe, it, expect, beforeEach, afterEach, mock } from "bun:test";

// We test findSpawnSnapshot by importing from the module.
// The function uses the module-level doToken + doApi, so we mock fetch.
const originalFetch = globalThis.fetch;

// Replace the global fetch with a bun mock for a single test case.
const stubFetch = (impl: () => Promise<Response>) => {
  globalThis.fetch = mock(impl);
};

// Import fresh inside each test to avoid module-level state issues.
const loadSubject = async () => {
  const { findSpawnSnapshot } = await import("../digitalocean/digitalocean");
  return findSpawnSnapshot;
};

describe("findSpawnSnapshot", () => {
  // findSpawnSnapshot requires doToken to be set, which happens via ensureDoToken.
  // Since doToken is module-private, we test the function's behavior via fetch mocking.
  beforeEach(() => {
    globalThis.fetch = originalFetch;
  });

  afterEach(() => {
    globalThis.fetch = originalFetch;
  });

  it("returns latest snapshot ID when API returns multiple images", async () => {
    stubFetch(() =>
      Promise.resolve(
        new Response(
          JSON.stringify({
            images: [
              { id: 111, created_at: "2026-03-01T00:00:00Z", name: "spawn-claude-20260301" },
              { id: 222, created_at: "2026-03-03T00:00:00Z", name: "spawn-claude-20260303" },
              { id: 333, created_at: "2026-03-02T00:00:00Z", name: "spawn-claude-20260302" },
            ],
          }),
        ),
      ),
    );
    const findSpawnSnapshot = await loadSubject();
    // Should return the latest (ID 222, created 2026-03-03)
    expect(await findSpawnSnapshot("claude")).toBe("222");
  });

  it("returns null when no images found", async () => {
    stubFetch(() => Promise.resolve(new Response(JSON.stringify({ images: [] }))));
    const findSpawnSnapshot = await loadSubject();
    expect(await findSpawnSnapshot("claude")).toBeNull();
  });

  it("returns null on API error (graceful fallback)", async () => {
    stubFetch(() => Promise.resolve(new Response("Unauthorized", { status: 401 })));
    const findSpawnSnapshot = await loadSubject();
    expect(await findSpawnSnapshot("claude")).toBeNull();
  });

  it("returns null when images have no valid ID", async () => {
    stubFetch(() =>
      Promise.resolve(
        new Response(
          JSON.stringify({
            images: [{ id: "not-a-number", created_at: "2026-03-01T00:00:00Z" }],
          }),
        ),
      ),
    );
    const findSpawnSnapshot = await loadSubject();
    expect(await findSpawnSnapshot("claude")).toBeNull();
  });

  it("returns null on network failure", async () => {
    stubFetch(() => Promise.reject(new Error("Network error")));
    const findSpawnSnapshot = await loadSubject();
    expect(await findSpawnSnapshot("claude")).toBeNull();
  });
});

View file

@ -750,56 +750,6 @@ export async function promptDoRegion(): Promise<string> {
return selectFromList(items, "DigitalOcean region", DEFAULT_DO_REGION);
}
// ─── Snapshot Lookup ─────────────────────────────────────────────────────────
/**
 * Find the latest pre-built Packer snapshot for an agent.
 *
 * Queries the DO images API for private images tagged `spawn-<agent>` and
 * picks the most recently created one.
 *
 * @param agentName - agent identifier used in the snapshot tag
 * @returns the numeric image ID as a string, or null if none found / on error
 */
export async function findSpawnSnapshot(agentName: string): Promise<string | null> {
  try {
    const raw = await doApi("GET", `/images?private=true&per_page=50&tag_name=spawn-${agentName}`, undefined, 1);
    const parsed = parseJsonObj(raw);
    const candidates = toObjectArray(parsed?.images);
    if (candidates.length === 0) {
      return null;
    }
    // Newest first — ISO-8601 created_at strings compare chronologically.
    const newestFirst = [...candidates].sort((left, right) => {
      const leftDate = isString(left.created_at) ? left.created_at : "";
      const rightDate = isString(right.created_at) ? right.created_at : "";
      return rightDate.localeCompare(leftDate);
    });
    const id = newestFirst[0].id;
    if (!isNumber(id) || id <= 0) {
      return null;
    }
    logInfo(`Found pre-built snapshot for ${agentName} (ID: ${id})`);
    return String(id);
  } catch {
    // Best-effort lookup: any API/parse failure falls back to "no snapshot".
    return null;
  }
}
// ─── SSH-Only Wait (for snapshot-based boots) ────────────────────────────────
/**
* Wait for SSH to become available without waiting for cloud-init.
* Used when booting from a pre-built snapshot (no cloud-init needed).
*/
export async function waitForSshOnly(ip?: string): Promise<void> {
const serverIp = ip || doServerIp;
const selectedKeys = await ensureSshKeys();
const keyOpts = getSshKeyOpts(selectedKeys);
await sharedWaitForSsh({
host: serverIp,
user: "root",
maxAttempts: 36,
extraSshOpts: keyOpts,
});
logInfo("SSH available (snapshot boot — skipping cloud-init)");
}
// ─── Provisioning ────────────────────────────────────────────────────────────
function getCloudInitUserdata(tier: CloudInitTier = "full"): string {
@ -833,21 +783,17 @@ export async function createServer(
tier?: CloudInitTier,
dropletSize?: string,
region?: string,
snapshotId?: string,
): Promise<void> {
const size = dropletSize || process.env.DO_DROPLET_SIZE || "s-2vcpu-4gb";
const effectiveRegion = region || process.env.DO_REGION || "nyc3";
const image = snapshotId ? Number(snapshotId) : "ubuntu-24-04-x64";
const image = "ubuntu-24-04-x64";
if (!validateRegionName(effectiveRegion)) {
logError("Invalid DO_REGION");
throw new Error("Invalid region");
}
const imageLabel = snapshotId ? `snapshot ${snapshotId}` : "ubuntu-24-04-x64";
logStep(
`Creating DigitalOcean droplet '${name}' (size: ${size}, region: ${effectiveRegion}, image: ${imageLabel})...`,
);
logStep(`Creating DigitalOcean droplet '${name}' (size: ${size}, region: ${effectiveRegion})...`);
// Get all SSH key IDs
const keysText = await doApi("GET", "/account/keys");
@ -856,20 +802,17 @@ export async function createServer(
.map((k) => (isNumber(k.id) ? k.id : 0))
.filter((n) => n > 0);
const dropletBody: Record<string, unknown> = {
const userdata = getCloudInitUserdata(tier);
const body = JSON.stringify({
name,
region: effectiveRegion,
size,
image,
ssh_keys: sshKeyIds,
user_data: userdata,
backups: false,
monitoring: false,
};
// Only include cloud-init userdata when booting from a base image (not a snapshot)
if (!snapshotId) {
dropletBody.user_data = getCloudInitUserdata(tier);
}
const body = JSON.stringify(dropletBody);
});
const createText = await doApi("POST", "/droplets", body);
const createData = parseJsonObj(createText);

View file

@ -10,8 +10,6 @@ import {
createServer as createDroplet,
getServerName,
waitForCloudInit,
waitForSshOnly,
findSpawnSnapshot,
runServer,
uploadFile,
interactiveSession,
@ -20,7 +18,7 @@ import { agents, resolveAgent } from "./agents";
import { saveLaunchCmd } from "../history.js";
import { runOrchestration } from "../shared/orchestrate";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { logInfo, logStep } from "../shared/ui";
import { logStep } from "../shared/ui";
async function main() {
const agentName = process.argv[2];
@ -34,7 +32,6 @@ async function main() {
let dropletSize = "";
let region = "";
let snapshotId: string | null = null;
const cloud: CloudOrchestrator = {
cloudName: "digitalocean",
@ -47,8 +44,6 @@ async function main() {
await promptSpawnName();
const usedBrowserAuth = await ensureDoToken();
await ensureSshKey();
// Look for a pre-built snapshot after auth (needs valid token)
snapshotId = await findSpawnSnapshot(agentName);
if (usedBrowserAuth) {
logStep("Next step: OpenRouter authentication (opening browser in 5s)...");
await new Promise((r) => setTimeout(r, 5000));
@ -59,31 +54,17 @@ async function main() {
region = await promptDoRegion();
},
async createServer(name: string) {
await createDroplet(name, agent.cloudInitTier, dropletSize, region, snapshotId || undefined);
await createDroplet(name, agent.cloudInitTier, dropletSize, region);
},
getServerName,
async waitForReady() {
if (snapshotId) {
await waitForSshOnly();
} else {
await waitForCloudInit();
}
await waitForCloudInit();
},
interactiveSession,
saveLaunchCmd,
};
// When using a snapshot, skip the agent install step (already pre-installed)
const effectiveAgent = snapshotId
? {
...agent,
install: async () => {
logInfo("Agent pre-installed (snapshot)");
},
}
: agent;
await runOrchestration(cloud, effectiveAgent, agentName);
await runOrchestration(cloud, agent, agentName);
}
main().catch((err) => {

2
packer/.gitignore vendored
View file

@ -1,2 +0,0 @@
*.auto.pkrvars.json
.packer.d/

View file

@ -1,45 +0,0 @@
{
"claude": {
"tier": "minimal",
"install": [
"curl -fsSL https://claude.ai/install.sh | bash || npm install -g @anthropic-ai/claude-code"
]
},
"codex": {
"tier": "node",
"install": [
"npm install -g @openai/codex"
]
},
"openclaw": {
"tier": "full",
"install": [
"npm install -g openclaw"
]
},
"opencode": {
"tier": "minimal",
"install": [
"curl -fsSL https://opencode.ai/install | bash"
]
},
"kilocode": {
"tier": "node",
"install": [
"npm install -g @kilocode/cli"
]
},
"zeroclaw": {
"tier": "minimal",
"install": [
"fallocate -l 4G /swapfile && chmod 600 /swapfile && mkswap /swapfile && swapon /swapfile",
"curl -LsSf https://raw.githubusercontent.com/zeroclaw-labs/zeroclaw/a117be64fdaa31779204beadf2942c8aef57d0e5/scripts/bootstrap.sh | bash -s -- --install-rust --install-system-deps --prefer-prebuilt"
]
},
"hermes": {
"tier": "minimal",
"install": [
"curl -fsSL https://raw.githubusercontent.com/NousResearch/hermes-agent/main/scripts/install.sh | bash"
]
}
}

View file

@ -1,127 +0,0 @@
packer {
  required_plugins {
    digitalocean = {
      version = ">= 1.4.0"
      source  = "github.com/digitalocean/digitalocean"
    }
  }
}

# ─── Variables ───────────────────────────────────────────────────────────────

variable "do_api_token" {
  type        = string
  sensitive   = true
  description = "DigitalOcean API token"
}

variable "agent_name" {
  type        = string
  description = "Agent identifier (e.g. claude, codex, openclaw)"
}

variable "cloud_init_tier" {
  type        = string
  default     = "full"
  description = "Package tier: minimal, node, bun, full"
}

variable "install_commands" {
  type        = list(string)
  default     = []
  description = "Shell commands to install the agent"
}

variable "region" {
  type        = string
  default     = "nyc3"
  description = "Build region"
}

variable "size" {
  type        = string
  default     = "s-2vcpu-4gb"
  description = "Droplet size for the build VM"
}

variable "base_image" {
  type        = string
  default     = "ubuntu-24-04-x64"
  description = "Base image slug"
}

# ─── Locals ──────────────────────────────────────────────────────────────────

locals {
  # e.g. "spawn-claude-20260304"
  snapshot_name = "spawn-${var.agent_name}-${formatdate("YYYYMMDD", timestamp())}"
}

# ─── Source ──────────────────────────────────────────────────────────────────

source "digitalocean" "agent" {
  api_token     = var.do_api_token
  image         = var.base_image
  region        = var.region
  size          = var.size
  ssh_username  = "root"
  snapshot_name = local.snapshot_name
  snapshot_regions = [
    "nyc1", "nyc3", "sfo3", "ams3", "sgp1",
    "lon1", "fra1", "tor1", "blr1", "syd1",
  ]
  tags = ["spawn", "spawn-${var.agent_name}"]
}

# ─── Build ───────────────────────────────────────────────────────────────────

build {
  sources = ["source.digitalocean.agent"]

  # 1. System update
  provisioner "shell" {
    inline = [
      "export DEBIAN_FRONTEND=noninteractive",
      "apt-get update -y",
      "apt-get upgrade -y -o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold'",
    ]
  }

  # 2. Tier packages + runtimes
  provisioner "shell" {
    script = "scripts/tier-${var.cloud_init_tier}.sh"
  }

  # 3. Agent install (15 min timeout, 2 retries via wrapper)
  provisioner "shell" {
    inline            = var.install_commands
    timeout           = "15m"
    max_retries       = 2
    expect_disconnect = false
    environment_vars = [
      "HOME=/root",
      "DEBIAN_FRONTEND=noninteractive",
    ]
  }

  # 4. Marker file + PATH setup
  provisioner "shell" {
    inline = [
      "echo 'agent=${var.agent_name}' > /root/.spawn-snapshot",
      "echo 'built=${formatdate("YYYY-MM-DD", timestamp())}' >> /root/.spawn-snapshot",
      "for rc in /root/.bashrc /root/.zshrc; do grep -q '.bun/bin' \"$rc\" 2>/dev/null || echo 'export PATH=\"$HOME/.local/bin:$HOME/.bun/bin:$PATH\"' >> \"$rc\"; done",
    ]
  }

  # 5. Cleanup
  provisioner "shell" {
    inline = [
      "apt-get clean",
      "rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*",
      "rm -f /var/log/cloud-init*.log /var/log/syslog /var/log/auth.log",
      "truncate -s 0 /var/log/lastlog /var/log/wtmp /var/log/btmp 2>/dev/null || true",
      "sync",
    ]
  }
}

View file

@ -1,22 +0,0 @@
#!/bin/bash
# Tier provisioning: base CLI packages plus the Bun runtime.
# Assumes `apt-get update` already ran (Packer's first provisioner) — TODO confirm.
set -eo pipefail
export DEBIAN_FRONTEND=noninteractive

# Base packages; --no-install-recommends keeps the snapshot small.
packages=(curl unzip git ca-certificates zsh)
apt-get install -y --no-install-recommends "${packages[@]}"

# Bun — install only when not already present.
command -v bun >/dev/null 2>&1 || curl --proto '=https' -fsSL https://bun.sh/install | bash
ln -sf /root/.bun/bin/bun /usr/local/bin/bun 2>/dev/null || true

# PATH setup: ensure ~/.local/bin and ~/.bun/bin are exported in both shells.
for rc in /root/.bashrc /root/.zshrc; do
  grep -q '.bun/bin' "$rc" 2>/dev/null || printf 'export PATH="$HOME/.local/bin:$HOME/.bun/bin:$PATH"\n' >> "$rc"
done

View file

@ -1,26 +0,0 @@
#!/bin/bash
# Tier provisioning: build toolchain plus both Node.js 22 and Bun runtimes.
# Assumes `apt-get update` already ran (Packer's first provisioner) — TODO confirm.
set -eo pipefail
export DEBIAN_FRONTEND=noninteractive

# Base packages + compiler toolchain; --no-install-recommends keeps the image small.
packages=(curl unzip git ca-certificates zsh build-essential)
apt-get install -y --no-install-recommends "${packages[@]}"

# Node.js 22 via n
curl --proto '=https' -fsSL https://raw.githubusercontent.com/tj/n/master/bin/n | bash -s install 22

# Bun — install only when not already present.
command -v bun >/dev/null 2>&1 || curl --proto '=https' -fsSL https://bun.sh/install | bash
ln -sf /root/.bun/bin/bun /usr/local/bin/bun 2>/dev/null || true

# PATH setup: ensure ~/.local/bin and ~/.bun/bin are exported in both shells.
for rc in /root/.bashrc /root/.zshrc; do
  grep -q '.bun/bin' "$rc" 2>/dev/null || printf 'export PATH="$HOME/.local/bin:$HOME/.bun/bin:$PATH"\n' >> "$rc"
done

View file

@ -1,10 +0,0 @@
#!/bin/bash
# Tier provisioning (minimal): only the tools the agent install scripts need.
# Assumes `apt-get update` already ran (Packer's first provisioner) — TODO confirm.
set -eo pipefail
export DEBIAN_FRONTEND=noninteractive

apt-get install -y --no-install-recommends curl unzip git ca-certificates

View file

@ -1,20 +0,0 @@
#!/bin/bash
# Tier provisioning: base packages, compiler toolchain, and Node.js 22 (no Bun).
# Assumes `apt-get update` already ran (Packer's first provisioner) — TODO confirm.
set -eo pipefail
export DEBIAN_FRONTEND=noninteractive

# Base packages + toolchain; --no-install-recommends keeps the image small.
packages=(curl unzip git ca-certificates zsh build-essential)
apt-get install -y --no-install-recommends "${packages[@]}"

# Node.js 22 via n
curl --proto '=https' -fsSL https://raw.githubusercontent.com/tj/n/master/bin/n | bash -s install 22

# PATH setup — also pre-seeds the .bun/bin entry shared across tiers.
for rc in /root/.bashrc /root/.zshrc; do
  grep -q '.bun/bin' "$rc" 2>/dev/null || printf 'export PATH="$HOME/.local/bin:$HOME/.bun/bin:$PATH"\n' >> "$rc"
done