fix: add unique spawn IDs to prevent history record corruption (#2235)

* fix: add unique spawn IDs to prevent history record corruption

History records were matched by heuristic ("most recent record for this
cloud without a connection"), which caused saveVmConnection and
saveLaunchCmd to overwrite the wrong record during concurrent or failed
spawns.

Fix: every SpawnRecord now has a unique `id` (UUID). All history
operations (saveVmConnection, saveLaunchCmd, removeRecord,
markRecordDeleted, mergeLastConnection) match by id when available,
falling back to the old heuristic for pre-migration records.

The orchestrator (TS path) now creates the history record AFTER server
creation succeeds, not before — so failed provisions don't leave orphan
entries.

Also adds "Remove from history" option to the spawn ls action picker,
restoring the ability to soft-delete entries without destroying the VM.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

* test: add 18 unit tests for spawn ID history behavior

Tests cover:
- generateSpawnId returns unique UUIDs
- saveSpawnRecord auto-generates id when not provided
- saveVmConnection matches by spawnId (not heuristic)
- saveVmConnection does not cross-contaminate concurrent spawns
- saveVmConnection falls back to heuristic without spawnId
- saveLaunchCmd matches by spawnId (not heuristic)
- saveLaunchCmd falls back without spawnId
- removeRecord matches by id, not by timestamp+agent+cloud
- removeRecord handles duplicate timestamps correctly
- removeRecord falls back for legacy records without id
- markRecordDeleted targets correct record by id
- mergeLastConnection uses spawn_id from last-connection.json
- mergeLastConnection falls back to heuristic without spawn_id

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

* style: enable biome import sorting with grouped imports

Adds organizeImports to biome assist config with groups:
1. Type imports
2. Node built-ins
3. Third-party packages
4. @openrouter/* packages
5. Aliases

Auto-fixed import order and lint issues across all TypeScript files,
including .claude/skills/ and packages/cli/src/.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
L 2026-03-06 02:27:03 -05:00 committed by GitHub
parent 699df354a9
commit 65a81edc57
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
118 changed files with 1951 additions and 1780 deletions

View file

@ -6,8 +6,8 @@
*/
import { execFileSync } from "node:child_process";
import { dirname } from "node:path";
import { existsSync } from "node:fs";
import { dirname } from "node:path";
import { FilePathInput, parseStdin } from "./schemas.ts";
const raw = await Bun.stdin.text();

View file

@ -9,8 +9,8 @@
*/
import { execFileSync } from "node:child_process";
import { dirname, resolve } from "node:path";
import { existsSync, readFileSync } from "node:fs";
import { dirname, resolve } from "node:path";
const file = process.env.CLAUDE_FILE;
if (!file) {

View file

@ -19,19 +19,13 @@
* REPO_ROOT Repository root for manifest.json (default: cwd)
*/
import { createHmac, randomUUID, timingSafeEqual } from "crypto";
import {
readFileSync,
writeFileSync,
existsSync,
mkdirSync,
unlinkSync,
} from "fs";
import { join } from "path";
import { homedir } from "os";
import { createHmac, randomUUID, timingSafeEqual } from "node:crypto";
import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
// --- Config ---
const PORT = parseInt(process.env.KEY_SERVER_PORT ?? "8081", 10);
const PORT = Number.parseInt(process.env.KEY_SERVER_PORT ?? "8081", 10);
const SECRET = process.env.KEY_SERVER_SECRET ?? "";
const RESEND_API_KEY = process.env.RESEND_API_KEY ?? "";
const KEY_REQUEST_EMAIL = process.env.KEY_REQUEST_EMAIL ?? "";
@ -58,7 +52,10 @@ if (!KEY_SERVER_HOST) {
// --- Data paths ---
const CONFIG_DIR = join(homedir(), ".config", "spawn");
mkdirSync(CONFIG_DIR, { recursive: true, mode: 0o700 });
mkdirSync(CONFIG_DIR, {
recursive: true,
mode: 0o700,
});
const DATA_FILE = join(CONFIG_DIR, "key-requests.json");
// --- Types ---
@ -87,29 +84,46 @@ interface DataStore {
// --- Rate limiting (in-memory, auto-cleanup every 30 min) ---
const rateMaps = {
ip: new Map<string, { count: number; resetAt: number }>(),
batch: new Map<string, { count: number; resetAt: number }>(),
ip: new Map<
string,
{
count: number;
resetAt: number;
}
>(),
batch: new Map<
string,
{
count: number;
resetAt: number;
}
>(),
};
setInterval(() => {
const now = Date.now();
for (const m of Object.values(rateMaps))
for (const [k, v] of m) if (v.resetAt < now) m.delete(k);
for (const m of Object.values(rateMaps)) {
for (const [k, v] of m) {
if (v.resetAt < now) {
m.delete(k);
}
}
}
}, 30 * 60_000).unref?.();
function rateCheck(
key: string,
map: typeof rateMaps.ip,
max: number,
windowMs: number
): number | null {
function rateCheck(key: string, map: typeof rateMaps.ip, max: number, windowMs: number): number | null {
const now = Date.now();
const e = map.get(key);
if (!e || e.resetAt < now) {
map.set(key, { count: 1, resetAt: now + windowMs });
map.set(key, {
count: 1,
resetAt: now + windowMs,
});
return null;
}
if (e.count >= max) return Math.ceil((e.resetAt - now) / 1000);
if (e.count >= max) {
return Math.ceil((e.resetAt - now) / 1000);
}
e.count++;
return null;
}
@ -119,44 +133,46 @@ function load(): DataStore {
try {
return JSON.parse(readFileSync(DATA_FILE, "utf-8"));
} catch {
return { batches: [] };
return {
batches: [],
};
}
}
function save(d: DataStore) {
writeFileSync(DATA_FILE, JSON.stringify(d, null, 2), { mode: 0o600 });
writeFileSync(DATA_FILE, JSON.stringify(d, null, 2), {
mode: 0o600,
});
}
function cleanup(d: DataStore) {
const now = Date.now();
const week = 7 * 86400_000;
d.batches = d.batches.filter((b) => {
if (
b.providers.every((p) => p.status === "fulfilled") &&
now - b.emailedAt > week
)
if (b.providers.every((p) => p.status === "fulfilled") && now - b.emailedAt > week) {
return false;
if (
b.expiresAt < now &&
b.providers.every((p) => p.status === "pending")
)
}
if (b.expiresAt < now && b.providers.every((p) => p.status === "pending")) {
return false;
}
return true;
});
}
// --- HMAC signing ---
function signHmac(id: string, exp: number) {
return createHmac("sha256", SECRET)
.update(`${id}:${exp}`)
.digest("hex");
return createHmac("sha256", SECRET).update(`${id}:${exp}`).digest("hex");
}
function verifyHmac(id: string, sig: string, exp: string) {
const e = parseInt(exp);
if (isNaN(e) || e <= Date.now()) return false;
const e = Number.parseInt(exp, 10);
if (Number.isNaN(e) || e <= Date.now()) {
return false;
}
const expected = signHmac(id, e);
if (sig.length !== expected.length) return false;
if (sig.length !== expected.length) {
return false;
}
return timingSafeEqual(Buffer.from(sig), Buffer.from(expected));
}
@ -164,7 +180,9 @@ function verifyHmac(id: string, sig: string, exp: string) {
function isAuthed(req: Request) {
const given = req.headers.get("Authorization") ?? "";
const expected = `Bearer ${SECRET}`;
if (given.length !== expected.length) return false;
if (given.length !== expected.length) {
return false;
}
return timingSafeEqual(Buffer.from(given), Buffer.from(expected));
}
@ -173,24 +191,31 @@ const SAFE_PROVIDER_RE = /^[a-z0-9][a-z0-9._-]{0,63}$/;
// --- Manifest parsing ---
function getClouds() {
const m = JSON.parse(
readFileSync(join(REPO_ROOT, "manifest.json"), "utf-8")
);
const m = JSON.parse(readFileSync(join(REPO_ROOT, "manifest.json"), "utf-8"));
const result = new Map<
string,
{ name: string; envVars: string[]; helpUrl: string }
{
name: string;
envVars: string[];
helpUrl: string;
}
>();
for (const [k, c] of Object.entries(
m.clouds as Record<string, any>
)) {
for (const [k, c] of Object.entries(m.clouds as Record<string, any>)) {
const auth: string = c.auth ?? "";
if (/\b(login|configure|setup)\b/i.test(auth)) continue;
if (/\b(login|configure|setup)\b/i.test(auth)) {
continue;
}
const vars = auth
.split(/\s*\+\s*/)
.map((s: string) => s.trim())
.filter(Boolean);
if (vars.length)
result.set(k, { name: c.name ?? k, envVars: vars, helpUrl: c.url ?? "" });
if (vars.length) {
result.set(k, {
name: c.name ?? k,
envVars: vars,
helpUrl: c.url ?? "",
});
}
}
return result;
}
@ -200,8 +225,7 @@ async function sendEmail(batch: KeyBatch, url: string): Promise<boolean> {
const pending = batch.providers.filter((p) => p.status === "pending");
const lines = pending
.map(
(p) =>
`\u2022 ${p.providerName} \u2014 ${p.envVars.map((v) => v.name).join(", ")}\n Get key from: ${p.helpUrl}`
(p) => `\u2022 ${p.providerName} \u2014 ${p.envVars.map((v) => v.name).join(", ")}\n Get key from: ${p.helpUrl}`,
)
.join("\n\n");
const count = pending.length;
@ -210,9 +234,11 @@ async function sendEmail(batch: KeyBatch, url: string): Promise<boolean> {
const html = `<p>The Spawn QA bot needs API keys for:</p>${pending
.map(
(p) =>
`<p><b>${esc(p.providerName)}</b> \u2014 ${p.envVars.map((v) => esc(v.name)).join(", ")}<br><a href="${esc(p.helpUrl)}">Get key</a></p>`
`<p><b>${esc(p.providerName)}</b> \u2014 ${p.envVars.map((v) => esc(v.name)).join(", ")}<br><a href="${esc(p.helpUrl)}">Get key</a></p>`,
)
.join("")}<p><a href="${esc(url)}"><b>Submit API Keys</b></a> (expires 24h)</p><p>Fill in what you have, leave others blank.</p>`;
.join(
"",
)}<p><a href="${esc(url)}"><b>Submit API Keys</b></a> (expires 24h)</p><p>Fill in what you have, leave others blank.</p>`;
try {
const r = await fetch("https://api.resend.com/emails", {
@ -223,16 +249,16 @@ async function sendEmail(batch: KeyBatch, url: string): Promise<boolean> {
},
body: JSON.stringify({
from: KEY_FROM_EMAIL,
to: [KEY_REQUEST_EMAIL],
to: [
KEY_REQUEST_EMAIL,
],
subject,
text,
html,
}),
});
if (!r.ok) {
console.error(
`[key-server] Resend ${r.status}: ${await r.text()}`
);
console.error(`[key-server] Resend ${r.status}: ${await r.text()}`);
return false;
}
return true;
@ -244,20 +270,20 @@ async function sendEmail(batch: KeyBatch, url: string): Promise<boolean> {
// --- HTML helpers ---
function esc(s: string) {
return s
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;");
return s.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/"/g, "&quot;");
}
function formPage(
batch: KeyBatch,
msg?: { text: string; error: boolean }
msg?: {
text: string;
error: boolean;
},
): string {
const pending = batch.providers.filter((p) => p.status === "pending");
const done = batch.providers.filter((p) => p.status === "fulfilled");
const css = `*{box-sizing:border-box}body{font-family:system-ui,-apple-system,sans-serif;background:#0f172a;color:#e2e8f0;display:flex;justify-content:center;padding:2rem;margin:0}main{max-width:600px;width:100%}h1{text-align:center;margin-bottom:.5rem}.sub{text-align:center;color:#94a3b8;margin-top:0}.card{background:#1e293b;border-radius:8px;padding:1.25rem;margin:1rem 0}.card h3{margin:0 0 .25rem;color:#f8fafc}.card a{color:#38bdf8;font-size:.875rem}label{display:block;margin-top:.75rem;font-size:.875rem;color:#94a3b8}input{width:100%;padding:.5rem;margin-top:.25rem;background:#0f172a;border:1px solid #334155;border-radius:4px;color:#e2e8f0;font-family:monospace;font-size:.875rem}input:focus{outline:none;border-color:#38bdf8}button{display:block;width:100%;padding:.75rem;margin-top:1.5rem;background:#2563eb;color:#fff;border:none;border-radius:6px;font-size:1rem;cursor:pointer}button:hover{background:#1d4ed8}.ok{text-align:center;color:#22c55e;font-size:.875rem}.msg{text-align:center;padding:1rem;border-radius:6px;margin:1rem 0}.msg.s{background:#14532d;color:#22c55e}.msg.e{background:#450a0a;color:#ef4444}`;
const css =
"*{box-sizing:border-box}body{font-family:system-ui,-apple-system,sans-serif;background:#0f172a;color:#e2e8f0;display:flex;justify-content:center;padding:2rem;margin:0}main{max-width:600px;width:100%}h1{text-align:center;margin-bottom:.5rem}.sub{text-align:center;color:#94a3b8;margin-top:0}.card{background:#1e293b;border-radius:8px;padding:1.25rem;margin:1rem 0}.card h3{margin:0 0 .25rem;color:#f8fafc}.card a{color:#38bdf8;font-size:.875rem}label{display:block;margin-top:.75rem;font-size:.875rem;color:#94a3b8}input{width:100%;padding:.5rem;margin-top:.25rem;background:#0f172a;border:1px solid #334155;border-radius:4px;color:#e2e8f0;font-family:monospace;font-size:.875rem}input:focus{outline:none;border-color:#38bdf8}button{display:block;width:100%;padding:.75rem;margin-top:1.5rem;background:#2563eb;color:#fff;border:none;border-radius:6px;font-size:1rem;cursor:pointer}button:hover{background:#1d4ed8}.ok{text-align:center;color:#22c55e;font-size:.875rem}.msg{text-align:center;padding:1rem;border-radius:6px;margin:1rem 0}.msg.s{background:#14532d;color:#22c55e}.msg.e{background:#450a0a;color:#ef4444}";
if (pending.length === 0) {
return `<!DOCTYPE html><html><head><meta charset="utf-8"><meta name="referrer" content="no-referrer"><title>Keys Complete</title><style>${css}</style></head><body><main><h1 style="color:#22c55e">All Keys Submitted</h1><p class="sub">${done.length} provider key${done.length !== 1 ? "s" : ""} saved. The next QA cycle will pick them up.</p></main></body></html>`;
@ -269,9 +295,9 @@ function formPage(
`<div class="card"><h3>${esc(p.providerName)}</h3><a href="${esc(p.helpUrl)}" target="_blank" rel="noopener">Get key</a>${p.envVars
.map(
(v) =>
`<label>${esc(v.name)}<input type="text" name="${esc(p.provider)}__${esc(v.name)}" autocomplete="off" spellcheck="false"></label>`
`<label>${esc(v.name)}<input type="text" name="${esc(p.provider)}__${esc(v.name)}" autocomplete="off" spellcheck="false"></label>`,
)
.join("")}</div>`
.join("")}</div>`,
)
.join("");
@ -279,9 +305,7 @@ function formPage(
done.length > 0
? `<p class="ok">${done.length} provider${done.length !== 1 ? "s" : ""} already submitted.</p>`
: "";
const msgHtml = msg
? `<div class="msg ${msg.error ? "e" : "s"}">${esc(msg.text)}</div>`
: "";
const msgHtml = msg ? `<div class="msg ${msg.error ? "e" : "s"}">${esc(msg.text)}</div>` : "";
return `<!DOCTYPE html><html><head><meta charset="utf-8"><meta name="referrer" content="no-referrer"><meta name="viewport" content="width=device-width,initial-scale=1"><title>Spawn QA — API Keys</title><style>${css}</style></head><body><main><h1>Spawn QA — API Keys</h1><p class="sub">Fill in what you have. Leave others blank. You can return later.</p>${msgHtml}${doneNote}<form method="POST">${cards}<button type="submit">Submit Keys</button></form></main></body></html>`;
}
@ -289,26 +313,38 @@ function formPage(
// --- Config file operations ---
function saveKeys(provider: string, vars: Record<string, string>) {
const cfgPath = join(CONFIG_DIR, `${provider}.json`);
const data: Record<string, string> = { ...vars };
const data: Record<string, string> = {
...vars,
};
// Backward compat: single-var clouds also get api_key/token fields
if (Object.keys(vars).length === 1) {
const v = Object.values(vars)[0];
data.api_key = v;
data.token = v;
}
writeFileSync(cfgPath, JSON.stringify(data, null, 2), { mode: 0o600 });
writeFileSync(cfgPath, JSON.stringify(data, null, 2), {
mode: 0o600,
});
console.log(`[key-server] Saved ${provider} config`);
}
function validKeyVal(v: string) {
// Enforce reasonable length: API keys are typically 20-200 chars
if (v.length < 8 || v.length > 512) return false;
if (v.length < 8 || v.length > 512) {
return false;
}
// Block control characters (U+0000–U+001F, U+007F–U+009F)
if (/[\x00-\x1f\x7f-\x9f]/.test(v)) return false;
if (/[\x00-\x1f\x7f-\x9f]/.test(v)) {
return false;
}
// Block shell metacharacters
if (/[;&'"<>|$`\\(){}]/.test(v)) return false;
if (/[;&'"<>|$`\\(){}]/.test(v)) {
return false;
}
// Must be printable ASCII only (API keys don't contain non-ASCII)
if (!/^[\x20-\x7e]+$/.test(v)) return false;
if (!/^[\x20-\x7e]+$/.test(v)) {
return false;
}
return true;
}
@ -321,8 +357,7 @@ const HTML_HEADERS: Record<string, string> = {
};
// --- UUID regex ---
const UUID_RE =
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/;
const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/;
// --- Server ---
const server = Bun.serve({
@ -337,30 +372,36 @@ const server = Bun.serve({
cleanup(d);
return Response.json({
status: "ok",
pending: d.batches.reduce(
(n, b) => n + b.providers.filter((x) => x.status === "pending").length,
0
),
fulfilled: d.batches.reduce(
(n, b) =>
n + b.providers.filter((x) => x.status === "fulfilled").length,
0
),
pending: d.batches.reduce((n, b) => n + b.providers.filter((x) => x.status === "pending").length, 0),
fulfilled: d.batches.reduce((n, b) => n + b.providers.filter((x) => x.status === "fulfilled").length, 0),
batches: d.batches.length,
});
}
// POST /request-batch (authed)
if (req.method === "POST" && path === "/request-batch") {
if (!isAuthed(req))
return Response.json({ error: "unauthorized" }, { status: 401 });
if (!isAuthed(req)) {
return Response.json(
{
error: "unauthorized",
},
{
status: 401,
},
);
}
const body = await req.json().catch(() => null);
if (!body?.providers?.length)
if (!body?.providers?.length) {
return Response.json(
{ error: "providers array required" },
{ status: 400 }
{
error: "providers array required",
},
{
status: 400,
},
);
}
const clouds = getClouds();
const d = load();
@ -374,11 +415,7 @@ const server = Bun.serve({
for (const pk of body.providers as string[]) {
if (
d.batches.some(
(b) =>
now - b.emailedAt < day &&
b.providers.some(
(x) => x.provider === pk && x.status === "pending"
)
(b) => now - b.emailedAt < day && b.providers.some((x) => x.provider === pk && x.status === "pending"),
)
) {
skipped.push(pk);
@ -387,8 +424,13 @@ const server = Bun.serve({
}
}
if (!requested.length)
return Response.json({ batchId: null, requested: [], skipped });
if (!requested.length) {
return Response.json({
batchId: null,
requested: [],
skipped,
});
}
const batchId = randomUUID();
const exp = now + day;
@ -397,7 +439,9 @@ const server = Bun.serve({
return {
provider: k,
providerName: info?.name ?? k,
envVars: (info?.envVars ?? []).map((n) => ({ name: n })),
envVars: (info?.envVars ?? []).map((n) => ({
name: n,
})),
helpUrl: info?.helpUrl ?? "",
status: "pending" as const,
};
@ -412,13 +456,25 @@ const server = Bun.serve({
const signedUrl = `${KEY_SERVER_HOST}/key/${batchId}?sig=${signHmac(batchId, exp)}&exp=${exp}`;
// Send email FIRST — only persist batch if email succeeds
if (!(await sendEmail(batch, signedUrl)))
return Response.json({ error: "email send failed" }, { status: 502 });
if (!(await sendEmail(batch, signedUrl))) {
return Response.json(
{
error: "email send failed",
},
{
status: 502,
},
);
}
d.batches.push(batch);
save(d);
console.log(`[key-server] Batch ${batchId}: ${requested.join(", ")}`);
return Response.json({ batchId, requested, skipped });
return Response.json({
batchId,
requested,
skipped,
});
}
// Routes under /key/:id
@ -428,77 +484,126 @@ const server = Bun.serve({
// DELETE /key/:provider (authed, manual invalidation)
if (req.method === "DELETE") {
if (!isAuthed(req))
return Response.json({ error: "unauthorized" }, { status: 401 });
if (!SAFE_PROVIDER_RE.test(id))
return Response.json({ error: "invalid provider name" }, { status: 400 });
if (!isAuthed(req)) {
return Response.json(
{
error: "unauthorized",
},
{
status: 401,
},
);
}
if (!SAFE_PROVIDER_RE.test(id)) {
return Response.json(
{
error: "invalid provider name",
},
{
status: 400,
},
);
}
const cfg = join(CONFIG_DIR, `${id}.json`);
if (existsSync(cfg)) {
unlinkSync(cfg);
console.log(`[key-server] Deleted ${id} config`);
return Response.json({ status: "deleted", provider: id });
return Response.json({
status: "deleted",
provider: id,
});
}
return Response.json(
{ status: "not_found", provider: id },
{ status: 404 }
{
status: "not_found",
provider: id,
},
{
status: 404,
},
);
}
// GET/POST /key/:batchId (signed URL)
if (!UUID_RE.test(id))
return Response.json({ error: "not found" }, { status: 404 });
if (!UUID_RE.test(id)) {
return Response.json(
{
error: "not found",
},
{
status: 404,
},
);
}
const sig = url.searchParams.get("sig") ?? "";
const exp = url.searchParams.get("exp") ?? "";
if (!verifyHmac(id, sig, exp))
return new Response("Invalid or expired link", { status: 403 });
if (!verifyHmac(id, sig, exp)) {
return new Response("Invalid or expired link", {
status: 403,
});
}
const d = load();
const batch = d.batches.find((b) => b.batchId === id);
if (!batch) return new Response("Batch not found", { status: 404 });
if (!batch) {
return new Response("Batch not found", {
status: 404,
});
}
// GET — render form (idempotent)
if (req.method === "GET")
if (req.method === "GET") {
return new Response(formPage(batch), {
headers: HTML_HEADERS,
});
}
// POST — submit keys (rate-limited)
if (req.method === "POST") {
// Use actual connection IP instead of spoofable x-forwarded-for header
const ip = server.requestIP(req)?.address ?? "unknown";
let retry = rateCheck(ip, rateMaps.ip, 10, 15 * 60_000);
if (retry !== null)
if (retry !== null) {
return new Response("Too many requests", {
status: 429,
headers: { "Retry-After": String(retry) },
headers: {
"Retry-After": String(retry),
},
});
}
retry = rateCheck(id, rateMaps.batch, 5, 3600_000);
if (retry !== null)
if (retry !== null) {
return new Response("Too many requests for this batch", {
status: 429,
headers: { "Retry-After": String(retry) },
headers: {
"Retry-After": String(retry),
},
});
}
const fd = await req.formData();
let submitted = 0;
for (const pr of batch.providers) {
if (pr.status === "fulfilled") continue;
if (pr.status === "fulfilled") {
continue;
}
const vals: Record<string, string> = {};
let filled = 0;
for (const v of pr.envVars) {
const val = (
(fd.get(`${pr.provider}__${v.name}`) as string) ?? ""
).trim();
const val = ((fd.get(`${pr.provider}__${v.name}`) as string) ?? "").trim();
if (val) {
if (!validKeyVal(val))
if (!validKeyVal(val)) {
return new Response(
formPage(batch, {
text: `Invalid characters in ${v.name}. Do not include shell metacharacters.`,
error: true,
}),
{ headers: HTML_HEADERS }
{
headers: HTML_HEADERS,
},
);
}
vals[v.name] = val;
filled++;
}
@ -516,33 +621,64 @@ const server = Bun.serve({
? `${submitted} provider key${submitted !== 1 ? "s" : ""} saved successfully.`
: "No complete submissions. Please fill in all fields for at least one provider.";
return new Response(
formPage(batch, { text, error: submitted === 0 }),
{ headers: HTML_HEADERS }
formPage(batch, {
text,
error: submitted === 0,
}),
{
headers: HTML_HEADERS,
},
);
}
}
// GET /status?provider=... (authed)
if (req.method === "GET" && path === "/status") {
if (!isAuthed(req))
return Response.json({ error: "unauthorized" }, { status: 401 });
const provider = url.searchParams.get("provider");
if (!provider)
if (!isAuthed(req)) {
return Response.json(
{ error: "provider param required" },
{ status: 400 }
{
error: "unauthorized",
},
{
status: 401,
},
);
if (!SAFE_PROVIDER_RE.test(provider))
return Response.json({ error: "invalid provider name" }, { status: 400 });
}
const provider = url.searchParams.get("provider");
if (!provider) {
return Response.json(
{
error: "provider param required",
},
{
status: 400,
},
);
}
if (!SAFE_PROVIDER_RE.test(provider)) {
return Response.json(
{
error: "invalid provider name",
},
{
status: 400,
},
);
}
return Response.json({
provider,
status: existsSync(join(CONFIG_DIR, `${provider}.json`))
? "fulfilled"
: "pending",
status: existsSync(join(CONFIG_DIR, `${provider}.json`)) ? "fulfilled" : "pending",
});
}
return Response.json({ error: "not found" }, { status: 404 });
return Response.json(
{
error: "not found",
},
{
status: 404,
},
);
},
});

View file

@ -17,18 +17,15 @@
* (log files at .docs/).
*/
import { timingSafeEqual } from "crypto";
import { realpathSync, existsSync } from "fs";
import { resolve, dirname } from "path";
import { timingSafeEqual } from "node:crypto";
import { existsSync, realpathSync } from "node:fs";
import { dirname, resolve } from "node:path";
const PORT = 8080;
const TRIGGER_SECRET = process.env.TRIGGER_SECRET ?? "";
const TARGET_SCRIPT = process.env.TARGET_SCRIPT ?? "";
const MAX_CONCURRENT = parseInt(process.env.MAX_CONCURRENT ?? "1", 10);
const RUN_TIMEOUT_MS = parseInt(
process.env.RUN_TIMEOUT_MS ?? String(75 * 60 * 1000),
10
);
const MAX_CONCURRENT = Number.parseInt(process.env.MAX_CONCURRENT ?? "1", 10);
const RUN_TIMEOUT_MS = Number.parseInt(process.env.RUN_TIMEOUT_MS ?? String(75 * 60 * 1000), 10);
if (!TRIGGER_SECRET) {
console.error("ERROR: TRIGGER_SECRET env var is required");
@ -43,13 +40,13 @@ if (!TARGET_SCRIPT) {
// Validate TARGET_SCRIPT against an allowlist of directories and file extensions.
// This prevents an attacker who can control the env var from executing arbitrary scripts.
const SKILL_DIR = realpathSync(dirname(new URL(import.meta.url).pathname));
const ALLOWED_SCRIPT_DIRS = [SKILL_DIR];
const ALLOWED_SCRIPT_DIRS = [
SKILL_DIR,
];
function validateTargetScript(scriptPath: string): string {
if (!scriptPath.endsWith(".sh")) {
console.error(
`ERROR: TARGET_SCRIPT must be a .sh file, got: ${scriptPath}`
);
console.error(`ERROR: TARGET_SCRIPT must be a .sh file, got: ${scriptPath}`);
process.exit(1);
}
const resolved = resolve(scriptPath);
@ -58,12 +55,10 @@ function validateTargetScript(scriptPath: string): string {
process.exit(1);
}
const real = realpathSync(resolved);
const inAllowedDir = ALLOWED_SCRIPT_DIRS.some((dir) =>
real.startsWith(dir + "/")
);
const inAllowedDir = ALLOWED_SCRIPT_DIRS.some((dir) => real.startsWith(dir + "/"));
if (!inAllowedDir) {
console.error(
`ERROR: TARGET_SCRIPT must be inside an allowed directory (${ALLOWED_SCRIPT_DIRS.join(", ")}), got: ${real}`
`ERROR: TARGET_SCRIPT must be inside an allowed directory (${ALLOWED_SCRIPT_DIRS.join(", ")}), got: ${real}`,
);
process.exit(1);
}
@ -87,7 +82,9 @@ let nextRunId = 1;
function isAuthed(req: Request): boolean {
const given = req.headers.get("Authorization") ?? "";
const expected = `Bearer ${TRIGGER_SECRET}`;
if (given.length !== expected.length) return false;
if (given.length !== expected.length) {
return false;
}
return timingSafeEqual(Buffer.from(given), Buffer.from(expected));
}
@ -125,7 +122,7 @@ function reapAndEnforce() {
// Check if process is still alive
if (!isAlive(pid)) {
console.log(
`[trigger] Reaping dead run #${id} (pid=${pid}, reason=${run.reason}, age=${Math.round(elapsed / 1000)}s)`
`[trigger] Reaping dead run #${id} (pid=${pid}, reason=${run.reason}, age=${Math.round(elapsed / 1000)}s)`,
);
runs.delete(id);
continue;
@ -134,7 +131,7 @@ function reapAndEnforce() {
// Kill if exceeded timeout
if (elapsed > RUN_TIMEOUT_MS) {
console.log(
`[trigger] Killing stale run #${id} (pid=${pid}, reason=${run.reason}, age=${Math.round(elapsed / 1000)}s, timeout=${Math.round(RUN_TIMEOUT_MS / 1000)}s)`
`[trigger] Killing stale run #${id} (pid=${pid}, reason=${run.reason}, age=${Math.round(elapsed / 1000)}s, timeout=${Math.round(RUN_TIMEOUT_MS / 1000)}s)`,
);
try {
run.proc.kill(9);
@ -145,25 +142,23 @@ function reapAndEnforce() {
}
function gracefulShutdown(signal: string) {
if (shuttingDown) return;
if (shuttingDown) {
return;
}
shuttingDown = true;
console.log(`[trigger] Received ${signal}, shutting down gracefully...`);
console.log(
`[trigger] Waiting for ${runs.size} running script(s) to finish...`
);
console.log(`[trigger] Waiting for ${runs.size} running script(s) to finish...`);
server.stop();
if (runs.size === 0) {
console.log(`[trigger] No running scripts, exiting immediately`);
console.log("[trigger] No running scripts, exiting immediately");
process.exit(0);
}
const HARD_TIMEOUT_MS = 15 * 60 * 1000;
const forceKillTimer = setTimeout(() => {
console.error(
`[trigger] Hard timeout reached (${HARD_TIMEOUT_MS / 1000}s), force killing remaining processes`
);
console.error(`[trigger] Hard timeout reached (${HARD_TIMEOUT_MS / 1000}s), force killing remaining processes`);
for (const [, run] of runs) {
try {
run.proc.kill(9);
@ -175,12 +170,12 @@ function gracefulShutdown(signal: string) {
Promise.all(Array.from(runs.values()).map((r) => r.proc.exited))
.then(() => {
console.log(`[trigger] All scripts finished, exiting`);
console.log("[trigger] All scripts finished, exiting");
clearTimeout(forceKillTimer);
process.exit(0);
})
.catch((e) => {
console.error(`[trigger] Error waiting for scripts:`, e);
console.error("[trigger] Error waiting for scripts:", e);
clearTimeout(forceKillTimer);
process.exit(1);
});
@ -198,34 +193,40 @@ function startFireAndForgetRun(reason: string, issue: string): Response {
const startedAt = Date.now();
console.log(
`[trigger] Run #${id} starting (reason=${reason}${issue ? `, issue=#${issue}` : ""}, concurrent=${runs.size + 1}/${MAX_CONCURRENT})`
`[trigger] Run #${id} starting (reason=${reason}${issue ? `, issue=#${issue}` : ""}, concurrent=${runs.size + 1}/${MAX_CONCURRENT})`,
);
const proc = Bun.spawn(["bash", VALIDATED_TARGET_SCRIPT], {
cwd:
process.env.REPO_ROOT ||
VALIDATED_TARGET_SCRIPT.substring(
0,
VALIDATED_TARGET_SCRIPT.lastIndexOf("/")
) ||
".",
stdout: "inherit",
stderr: "inherit",
env: {
...process.env,
SPAWN_ISSUE: issue,
SPAWN_REASON: reason,
const proc = Bun.spawn(
[
"bash",
VALIDATED_TARGET_SCRIPT,
],
{
cwd:
process.env.REPO_ROOT || VALIDATED_TARGET_SCRIPT.substring(0, VALIDATED_TARGET_SCRIPT.lastIndexOf("/")) || ".",
stdout: "inherit",
stderr: "inherit",
env: {
...process.env,
SPAWN_ISSUE: issue,
SPAWN_REASON: reason,
},
},
});
);
runs.set(id, { proc, startedAt, reason, issue });
runs.set(id, {
proc,
startedAt,
reason,
issue,
});
// Clean up run entry when process exits
proc.exited
.then((exitCode) => {
const elapsed = Math.round((Date.now() - startedAt) / 1000);
console.log(
`[trigger] Run #${id} finished (exit=${exitCode}, duration=${elapsed}s, remaining=${runs.size - 1}/${MAX_CONCURRENT})`
`[trigger] Run #${id} finished (exit=${exitCode}, duration=${elapsed}s, remaining=${runs.size - 1}/${MAX_CONCURRENT})`,
);
runs.delete(id);
})
@ -243,14 +244,16 @@ function startFireAndForgetRun(reason: string, issue: string): Response {
max: MAX_CONCURRENT,
},
{
headers: { "X-Run-Id": String(id) },
}
headers: {
"X-Run-Id": String(id),
},
},
);
}
const server = Bun.serve({
port: PORT,
async fetch(req, server) {
async fetch(req, _server) {
const url = new URL(req.url);
if (req.method === "GET" && url.pathname === "/health") {
@ -276,13 +279,24 @@ const server = Bun.serve({
if (req.method === "POST" && url.pathname === "/trigger") {
if (shuttingDown) {
return Response.json(
{ error: "server is shutting down" },
{ status: 503 }
{
error: "server is shutting down",
},
{
status: 503,
},
);
}
if (!isAuthed(req)) {
return Response.json({ error: "unauthorized" }, { status: 401 });
return Response.json(
{
error: "unauthorized",
},
{
status: 401,
},
);
}
// Reap dead processes and kill timed-out runs before checking capacity
@ -290,9 +304,7 @@ const server = Bun.serve({
if (runs.size >= MAX_CONCURRENT) {
const now = Date.now();
const oldest = Array.from(runs.values()).reduce((a, b) =>
a.startedAt < b.startedAt ? a : b
);
const oldest = Array.from(runs.values()).reduce((a, b) => (a.startedAt < b.startedAt ? a : b));
return Response.json(
{
error: "max concurrent runs reached",
@ -302,15 +314,22 @@ const server = Bun.serve({
oldestAgeSec: Math.round((now - oldest.startedAt) / 1000),
timeoutSec: Math.round(RUN_TIMEOUT_MS / 1000),
},
{ status: 429 }
{
status: 429,
},
);
}
const reason = url.searchParams.get("reason") ?? "manual";
if (!VALID_REASONS.has(reason)) {
return Response.json(
{ error: "invalid reason", allowed: Array.from(VALID_REASONS) },
{ status: 400 }
{
error: "invalid reason",
allowed: Array.from(VALID_REASONS),
},
{
status: 400,
},
);
}
const issue = url.searchParams.get("issue") ?? "";
@ -320,8 +339,12 @@ const server = Bun.serve({
// Digits-only regex is the primary defense; length cap is defense-in-depth.
if (issue && (!/^\d+$/.test(issue) || issue.length > 10)) {
return Response.json(
{ error: "issue must be a positive integer (max 10 digits)" },
{ status: 400 }
{
error: "issue must be a positive integer (max 10 digits)",
},
{
status: 400,
},
);
}
@ -335,7 +358,9 @@ const server = Bun.serve({
issue,
running: runs.size,
},
{ status: 409 }
{
status: 409,
},
);
}
}
@ -351,7 +376,9 @@ const server = Bun.serve({
reason,
running: runs.size,
},
{ status: 409 }
{
status: 409,
},
);
}
}
@ -360,20 +387,27 @@ const server = Bun.serve({
return startFireAndForgetRun(reason, issue);
}
return Response.json({ error: "not found" }, { status: 404 });
return Response.json(
{
error: "not found",
},
{
status: 404,
},
);
},
});
// Proactively reap stale runs every 60 seconds instead of only on requests
const reapInterval = setInterval(() => {
if (runs.size > 0) reapAndEnforce();
if (runs.size > 0) {
reapAndEnforce();
}
}, 60_000);
reapInterval.unref?.();
console.log(`[trigger] Listening on port ${server.port}`);
console.log(`[trigger] TARGET_SCRIPT=${VALIDATED_TARGET_SCRIPT}`);
console.log(`[trigger] MAX_CONCURRENT=${MAX_CONCURRENT}`);
console.log(
`[trigger] RUN_TIMEOUT_MS=${RUN_TIMEOUT_MS} (${Math.round(RUN_TIMEOUT_MS / 1000 / 60)}min)`
);
console.log(`[trigger] Fire-and-forget mode — /trigger returns immediately, output goes to console`);
console.log(`[trigger] RUN_TIMEOUT_MS=${RUN_TIMEOUT_MS} (${Math.round(RUN_TIMEOUT_MS / 1000 / 60)}min)`);
console.log("[trigger] Fire-and-forget mode — /trigger returns immediately, output goes to console");

View file

@ -1,12 +1,13 @@
// SPA helpers — pure functions for parsing Claude Code stream events,
// Slack formatting, state management, and file download/cleanup.
import { mkdirSync, readFileSync, writeFileSync, existsSync, rmSync, readdirSync, statSync } from "node:fs";
import { dirname } from "node:path";
import * as v from "valibot";
import type { Result } from "@openrouter/spawn-shared";
import { isString, toRecord, Ok, Err } from "@openrouter/spawn-shared";
import { existsSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";
import { Err, isString, Ok, toRecord } from "@openrouter/spawn-shared";
import { slackifyMarkdown } from "slackify-markdown";
import * as v from "valibot";
// #region State

View file

@ -1,23 +1,24 @@
// SPA (Spawn's Personal Agent) — Slack bot entry point.
// Pipes Slack threads into Claude Code sessions and streams responses back.
import type { SectionBlock, ContextBlock, KnownBlock } from "@slack/bolt";
import type { ContextBlock, KnownBlock, SectionBlock } from "@slack/bolt";
import type { State, ToolCall } from "./helpers";
import { isString, toRecord } from "@openrouter/spawn-shared";
import { App } from "@slack/bolt";
import * as v from "valibot";
import { toRecord, isString } from "@openrouter/spawn-shared";
import type { State, ToolCall } from "./helpers";
import {
ResultSchema,
loadState,
saveState,
findMapping,
addMapping,
parseStreamEvent,
stripMention,
downloadSlackFile,
runCleanupIfDue,
formatToolStats,
findMapping,
formatToolHistory,
formatToolStats,
loadState,
parseStreamEvent,
ResultSchema,
runCleanupIfDue,
saveState,
stripMention,
} from "./helpers";
type SlackClient = InstanceType<typeof App>["client"];

View file

@ -1,18 +1,19 @@
import { describe, it, expect, mock, afterEach } from "bun:test";
import {
parseStreamEvent,
stripMention,
markdownToSlack,
loadState,
saveState,
downloadSlackFile,
extractToolHint,
formatToolStats,
formatToolHistory,
} from "./helpers";
import type { ToolCall } from "./helpers";
import { afterEach, describe, expect, it, mock } from "bun:test";
import { toRecord } from "@openrouter/spawn-shared";
import streamEvents from "../../../fixtures/claude-code/stream-events.json";
import {
downloadSlackFile,
extractToolHint,
formatToolHistory,
formatToolStats,
loadState,
markdownToSlack,
parseStreamEvent,
saveState,
stripMention,
} from "./helpers";
// Helper: extract a fixture event by index and cast to Record<string, unknown>
function fixture(index: number): Record<string, unknown> {

View file

@ -1,5 +1,5 @@
import { readFileSync, writeFileSync, existsSync } from "fs";
import { resolve } from "path";
import { existsSync, readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";
// ── Types ───────────────────────────────────────────────────────────
@ -59,20 +59,24 @@ const manifest = JSON.parse(readFileSync(MANIFEST_PATH, "utf-8"));
const agents: Record<string, AgentEntry> = manifest.agents;
const clouds: Record<string, CloudEntry> = manifest.clouds;
const agentSources: Record<string, SourceEntry> = existsSync(
AGENT_SOURCES_PATH
)
const agentSources: Record<string, SourceEntry> = existsSync(AGENT_SOURCES_PATH)
? JSON.parse(readFileSync(AGENT_SOURCES_PATH, "utf-8"))
: {};
const cloudSources: Record<string, SourceEntry> = existsSync(
CLOUD_SOURCES_PATH
)
const cloudSources: Record<string, SourceEntry> = existsSync(CLOUD_SOURCES_PATH)
? JSON.parse(readFileSync(CLOUD_SOURCES_PATH, "utf-8"))
: {};
const agentIds = onlyAgent ? [onlyAgent] : Object.keys(agents);
const cloudIds = onlyCloud ? [onlyCloud] : Object.keys(clouds);
const agentIds = onlyAgent
? [
onlyAgent,
]
: Object.keys(agents);
const cloudIds = onlyCloud
? [
onlyCloud,
]
: Object.keys(clouds);
const today = new Date().toISOString().slice(0, 10); // YYYY-MM-DD
const EXT_MAP: Record<string, string> = {
@ -103,9 +107,15 @@ const AGENT_METADATA_FIELDS = [
async function validateSources(
label: string,
ids: string[],
entries: Record<string, { icon?: string; [k: string]: unknown }>,
entries: Record<
string,
{
icon?: string;
[k: string]: unknown;
}
>,
sources: Record<string, SourceEntry>,
assetDir: string
assetDir: string,
) {
console.log(`── Validating ${label} source URLs ──`);
for (const id of ids) {
@ -120,20 +130,17 @@ async function validateSources(
continue;
}
try {
const res = await fetch(src.url, { method: "HEAD" });
const res = await fetch(src.url, {
method: "HEAD",
});
if (!res.ok) {
console.log(
`${id}: BROKEN source URL (HTTP ${res.status}) → ${src.url}`
);
console.log(`${id}: BROKEN source URL (HTTP ${res.status}) → ${src.url}`);
hasErrors = true;
} else {
const contentType =
res.headers.get("content-type")?.split(";")[0] ?? "";
const contentType = res.headers.get("content-type")?.split(";")[0] ?? "";
const isImage = contentType.startsWith("image/");
if (!isImage) {
console.log(
`${id}: source URL returns ${contentType}, not an image → ${src.url}`
);
console.log(`${id}: source URL returns ${contentType}, not an image → ${src.url}`);
} else {
console.log(`${id}: OK (${contentType})`);
}
@ -150,9 +157,15 @@ async function validateSources(
async function refreshIconsFor(
label: string,
ids: string[],
entries: Record<string, { icon?: string; [k: string]: unknown }>,
entries: Record<
string,
{
icon?: string;
[k: string]: unknown;
}
>,
sources: Record<string, SourceEntry>,
assetDir: string
assetDir: string,
) {
console.log(`── Refreshing ${label} icons ──`);
for (const id of ids) {
@ -167,24 +180,19 @@ async function refreshIconsFor(
console.log(`${id}: icon fetch failed (HTTP ${res.status})`);
continue;
}
const contentType =
res.headers.get("content-type")?.split(";")[0] ?? "";
const contentType = res.headers.get("content-type")?.split(";")[0] ?? "";
const ext = EXT_MAP[contentType] ?? src.ext;
const outPath = resolve(ROOT, `${assetDir}/${id}.${ext}`);
const rawUrl = `https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/${assetDir}/${id}.${ext}`;
if (dryRun) {
console.log(
` [dry-run] ${id}: would download ${src.url}${outPath}`
);
console.log(` [dry-run] ${id}: would download ${src.url}${outPath}`);
} else {
const buf = Buffer.from(await res.arrayBuffer());
writeFileSync(outPath, buf);
entries[id].icon = rawUrl;
sources[id].ext = ext;
console.log(
`${id}: icon refreshed (${buf.length} bytes, .${ext})`
);
console.log(`${id}: icon refreshed (${buf.length} bytes, .${ext})`);
}
} catch (err) {
console.log(`${id}: icon fetch error: ${err}`);
@ -218,7 +226,10 @@ async function refreshAgentStats() {
"--jq",
"{stargazers_count, license: .license.spdx_id, language}",
],
{ stdout: "pipe", stderr: "pipe" }
{
stdout: "pipe",
stderr: "pipe",
},
);
const out = await new Response(proc.stdout).text();
const exitCode = await proc.exited;
@ -231,22 +242,22 @@ async function refreshAgentStats() {
const oldStars = agent.github_stars;
if (dryRun) {
console.log(
` [dry-run] ${id}: stars ${oldStars ?? "?"}${data.stargazers_count}`
);
if (data.license && data.license !== agent.license)
console.log(
` [dry-run] ${id}: license ${agent.license ?? "?"}${data.license}`
);
if (data.language && data.language !== agent.language)
console.log(
` [dry-run] ${id}: language ${agent.language ?? "?"}${data.language}`
);
console.log(` [dry-run] ${id}: stars ${oldStars ?? "?"}${data.stargazers_count}`);
if (data.license && data.license !== agent.license) {
console.log(` [dry-run] ${id}: license ${agent.license ?? "?"}${data.license}`);
}
if (data.language && data.language !== agent.language) {
console.log(` [dry-run] ${id}: language ${agent.language ?? "?"}${data.language}`);
}
} else {
agent.github_stars = data.stargazers_count;
agent.stars_updated = today;
if (data.license) agent.license = data.license;
if (data.language) agent.language = data.language;
if (data.license) {
agent.license = data.license;
}
if (data.language) {
agent.language = data.language;
}
const delta =
oldStars != null
? ` (${data.stargazers_count - oldStars >= 0 ? "+" : ""}${data.stargazers_count - oldStars})`
@ -289,31 +300,29 @@ function validateCloudIcons() {
// ── Main ────────────────────────────────────────────────────────────
async function main() {
const scope = cloudsOnly
? "clouds"
: agentsOnly
? "agents"
: "agents + clouds";
const mode = validateOnly
? "validate"
: dryRun
? "dry-run"
: "update";
console.log(`${mode === "validate" ? "Validating" : "Updating"} metadata for ${scope}${mode === "dry-run" ? " [dry-run]" : ""}...\n`);
const scope = cloudsOnly ? "clouds" : agentsOnly ? "agents" : "agents + clouds";
const mode = validateOnly ? "validate" : dryRun ? "dry-run" : "update";
console.log(
`${mode === "validate" ? "Validating" : "Updating"} metadata for ${scope}${mode === "dry-run" ? " [dry-run]" : ""}...\n`,
);
if (validateOnly) {
// Validate-only: HEAD-check all source URLs, report broken ones
if (!cloudsOnly)
if (!cloudsOnly) {
await validateSources("agent", agentIds, agents, agentSources, "assets/agents");
if (!agentsOnly)
}
if (!agentsOnly) {
await validateSources("cloud", cloudIds, clouds, cloudSources, "assets/clouds");
if (!cloudsOnly) validateAgentMetadata();
if (!agentsOnly) validateCloudIcons();
}
if (!cloudsOnly) {
validateAgentMetadata();
}
if (!agentsOnly) {
validateCloudIcons();
}
if (hasErrors) {
console.log(
"\n✗ Validation failed — fix broken source URLs in .sources.json files"
);
console.log("\n✗ Validation failed — fix broken source URLs in .sources.json files");
process.exit(1);
} else {
console.log("\n✓ All source URLs valid");
@ -323,24 +332,12 @@ async function main() {
// Agent icons
if (!cloudsOnly && !statsOnly) {
await refreshIconsFor(
"agent",
agentIds,
agents,
agentSources,
"assets/agents"
);
await refreshIconsFor("agent", agentIds, agents, agentSources, "assets/agents");
}
// Cloud icons
if (!agentsOnly && !statsOnly) {
await refreshIconsFor(
"cloud",
cloudIds,
clouds,
cloudSources,
"assets/clouds"
);
await refreshIconsFor("cloud", cloudIds, clouds, cloudSources, "assets/clouds");
}
// Agent GitHub stats
@ -349,25 +346,17 @@ async function main() {
}
// Validation
if (!cloudsOnly) validateAgentMetadata();
if (!agentsOnly) validateCloudIcons();
if (!cloudsOnly) {
validateAgentMetadata();
}
if (!agentsOnly) {
validateCloudIcons();
}
if (!dryRun) {
writeFileSync(
MANIFEST_PATH,
JSON.stringify(manifest, null, 2) + "\n",
"utf-8"
);
writeFileSync(
AGENT_SOURCES_PATH,
JSON.stringify(agentSources, null, 2) + "\n",
"utf-8"
);
writeFileSync(
CLOUD_SOURCES_PATH,
JSON.stringify(cloudSources, null, 2) + "\n",
"utf-8"
);
writeFileSync(MANIFEST_PATH, JSON.stringify(manifest, null, 2) + "\n", "utf-8");
writeFileSync(AGENT_SOURCES_PATH, JSON.stringify(agentSources, null, 2) + "\n", "utf-8");
writeFileSync(CLOUD_SOURCES_PATH, JSON.stringify(cloudSources, null, 2) + "\n", "utf-8");
console.log("\n✓ manifest.json and .sources.json files updated");
}
}

View file

@ -75,6 +75,30 @@
}
},
"assist": {
"enabled": false
"actions": {
"source": {
"organizeImports": {
"level": "on",
"options": {
"groups": [
{
"type": true
},
":BLANK_LINE:",
{
"type": false
},
[":NODE:"],
":BLANK_LINE:",
["!@openrouter/**"],
":BLANK_LINE:",
["@openrouter/**"],
":BLANK_LINE:",
":ALIAS:"
]
}
}
}
}
}
}

View file

@ -2,6 +2,6 @@
"agent": "claude-code",
"recorded_at": "2026-02-24T00:00:00Z",
"fixtures": {
"stream-events": {"format": "stream-json", "type": "synthetic", "recorded_at": "2026-02-24T00:00:00Z"}
"stream-events": { "format": "stream-json", "type": "synthetic", "recorded_at": "2026-02-24T00:00:00Z" }
}
}

View file

@ -5,9 +5,7 @@
"id": "msg_01ABC",
"type": "message",
"role": "assistant",
"content": [
{"type": "text", "text": "I'll look at the issue and check the repository structure."}
],
"content": [{ "type": "text", "text": "I'll look at the issue and check the repository structure." }],
"model": "claude-sonnet-4-20250514",
"stop_reason": "end_turn"
}
@ -19,7 +17,12 @@
"type": "message",
"role": "assistant",
"content": [
{"type": "tool_use", "id": "toolu_01ABC", "name": "Bash", "input": {"command": "gh issue list --repo OpenRouterTeam/spawn --state open --limit 5"}}
{
"type": "tool_use",
"id": "toolu_01ABC",
"name": "Bash",
"input": { "command": "gh issue list --repo OpenRouterTeam/spawn --state open --limit 5" }
}
],
"model": "claude-sonnet-4-20250514",
"stop_reason": "tool_use"
@ -32,7 +35,12 @@
"type": "message",
"role": "user",
"content": [
{"type": "tool_result", "tool_use_id": "toolu_01ABC", "content": "#1234 [Bug]: Fly.io deploy fails on arm64\n#1235 [CLI]: Add --json flag to list command", "is_error": false}
{
"type": "tool_result",
"tool_use_id": "toolu_01ABC",
"content": "#1234 [Bug]: Fly.io deploy fails on arm64\n#1235 [CLI]: Add --json flag to list command",
"is_error": false
}
]
}
},
@ -42,9 +50,7 @@
"id": "msg_01GHI",
"type": "message",
"role": "assistant",
"content": [
{"type": "tool_use", "id": "toolu_01DEF", "name": "Glob", "input": {"pattern": "**/*.ts"}}
],
"content": [{ "type": "tool_use", "id": "toolu_01DEF", "name": "Glob", "input": { "pattern": "**/*.ts" } }],
"model": "claude-sonnet-4-20250514",
"stop_reason": "tool_use"
}
@ -56,7 +62,12 @@
"type": "message",
"role": "user",
"content": [
{"type": "tool_result", "tool_use_id": "toolu_01DEF", "content": "packages/cli/src/index.ts\npackages/cli/src/commands.ts\npackages/cli/src/manifest.ts", "is_error": false}
{
"type": "tool_result",
"tool_use_id": "toolu_01DEF",
"content": "packages/cli/src/index.ts\npackages/cli/src/commands.ts\npackages/cli/src/manifest.ts",
"is_error": false
}
]
}
},
@ -67,7 +78,12 @@
"type": "message",
"role": "assistant",
"content": [
{"type": "tool_use", "id": "toolu_01GHI", "name": "Read", "input": {"file_path": "/home/user/spawn/packages/cli/src/index.ts"}}
{
"type": "tool_use",
"id": "toolu_01GHI",
"name": "Read",
"input": { "file_path": "/home/user/spawn/packages/cli/src/index.ts" }
}
],
"model": "claude-sonnet-4-20250514",
"stop_reason": "tool_use"
@ -80,7 +96,12 @@
"type": "message",
"role": "user",
"content": [
{"type": "tool_result", "tool_use_id": "toolu_01GHI", "content": "Command execution failed: Permission denied", "is_error": true}
{
"type": "tool_result",
"tool_use_id": "toolu_01GHI",
"content": "Command execution failed: Permission denied",
"is_error": true
}
]
}
},
@ -91,7 +112,10 @@
"type": "message",
"role": "assistant",
"content": [
{"type": "text", "text": "I've reviewed the open issues. Here's a summary:\n\n1. **#1234** - Fly.io deploy fails on arm64 architecture\n2. **#1235** - Request to add `--json` flag to the list command\n\nWould you like me to create a new issue or work on one of these?"}
{
"type": "text",
"text": "I've reviewed the open issues. Here's a summary:\n\n1. **#1234** - Fly.io deploy fails on arm64 architecture\n2. **#1235** - Request to add `--json` flag to the list command\n\nWould you like me to create a new issue or work on one of these?"
}
],
"model": "claude-sonnet-4-20250514",
"stop_reason": "end_turn"

View file

@ -2,12 +2,12 @@
"cloud": "digitalocean",
"recorded_at": "2026-02-11T03:07:38Z",
"fixtures": {
"account": {"endpoint": "/account", "recorded_at": "2026-02-11T03:07:30Z"},
"ssh-keys": {"endpoint": "/account/keys", "recorded_at": "2026-02-11T03:07:30Z"},
"droplets": {"endpoint": "/droplets", "recorded_at": "2026-02-11T03:07:31Z"},
"sizes": {"endpoint": "/sizes", "recorded_at": "2026-02-11T03:07:31Z"},
"regions": {"endpoint": "/regions", "recorded_at": "2026-02-11T03:07:32Z"},
"create-server": {"endpoint": "POST /droplets", "type": "live", "recorded_at": "2026-02-11T03:07:34Z"},
"delete-server": {"endpoint": "DELETE /droplets/{id}", "type": "live", "recorded_at": "2026-02-11T03:07:38Z"}
"account": { "endpoint": "/account", "recorded_at": "2026-02-11T03:07:30Z" },
"ssh-keys": { "endpoint": "/account/keys", "recorded_at": "2026-02-11T03:07:30Z" },
"droplets": { "endpoint": "/droplets", "recorded_at": "2026-02-11T03:07:31Z" },
"sizes": { "endpoint": "/sizes", "recorded_at": "2026-02-11T03:07:31Z" },
"regions": { "endpoint": "/regions", "recorded_at": "2026-02-11T03:07:32Z" },
"create-server": { "endpoint": "POST /droplets", "type": "live", "recorded_at": "2026-02-11T03:07:34Z" },
"delete-server": { "endpoint": "DELETE /droplets/{id}", "type": "live", "recorded_at": "2026-02-11T03:07:38Z" }
}
}

View file

@ -12,9 +12,7 @@
"type": "local"
}
],
"features": [
"droplet_agent"
],
"features": ["droplet_agent"],
"id": 550842125,
"image": {
"created_at": "2025-08-08T15:11:27Z",
@ -59,14 +57,7 @@
"next_backup_window": null,
"region": {
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "New York 3",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -248,20 +239,7 @@
"networking_throughput": 2000,
"price_hourly": 0.00595,
"price_monthly": 4,
"regions": [
"ams3",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo2",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo2", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-1vcpu-512mb-10gb",
"transfer": 0.5,
"vcpus": 1

View file

@ -6,14 +6,7 @@
"regions": [
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "New York 1",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -178,27 +171,14 @@
},
{
"available": false,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "image_transfer"],
"name": "San Francisco 1",
"sizes": [],
"slug": "sfo1"
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "New York 2",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -341,27 +321,14 @@
},
{
"available": false,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "image_transfer"],
"name": "Amsterdam 2",
"sizes": [],
"slug": "ams2"
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "Singapore 1",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -524,14 +491,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "London 1",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -702,14 +662,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "New York 3",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -876,14 +829,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "Amsterdam 3",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -1046,14 +992,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "Frankfurt 1",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -1218,14 +1157,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "Toronto 1",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -1393,14 +1325,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "San Francisco 2",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -1543,14 +1468,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "Bangalore 1",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -1718,14 +1636,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "San Francisco 3",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -1896,14 +1807,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "Sydney 1",
"sizes": [
"s-1vcpu-512mb-10gb",
@ -2072,14 +1976,7 @@
},
{
"available": true,
"features": [
"backups",
"ipv6",
"metadata",
"install_agent",
"storage",
"image_transfer"
],
"features": ["backups", "ipv6", "metadata", "install_agent", "storage", "image_transfer"],
"name": "Atlanta 1",
"sizes": [
"s-1vcpu-1gb-amd",

View file

@ -21,20 +21,7 @@
"networking_throughput": 2000,
"price_hourly": 0.00595,
"price_monthly": 4,
"regions": [
"ams3",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo2",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo2", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-1vcpu-512mb-10gb",
"transfer": 0.5,
"vcpus": 1
@ -56,20 +43,7 @@
"networking_throughput": 2000,
"price_hourly": 0.00893,
"price_monthly": 6,
"regions": [
"ams3",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo2",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo2", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-1vcpu-1gb",
"transfer": 1.0,
"vcpus": 1
@ -91,20 +65,7 @@
"networking_throughput": 2000,
"price_hourly": 0.01042,
"price_monthly": 7,
"regions": [
"ams3",
"atl1",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "atl1", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-1vcpu-1gb-intel",
"transfer": 1.0,
"vcpus": 1
@ -162,20 +123,7 @@
"networking_throughput": 2000,
"price_hourly": 0.01786,
"price_monthly": 12,
"regions": [
"ams3",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo2",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo2", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-1vcpu-2gb",
"transfer": 2.0,
"vcpus": 1
@ -197,20 +145,7 @@
"networking_throughput": 2000,
"price_hourly": 0.02083,
"price_monthly": 14,
"regions": [
"ams3",
"atl1",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "atl1", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-1vcpu-2gb-intel",
"transfer": 2.0,
"vcpus": 1
@ -268,20 +203,7 @@
"networking_throughput": 2000,
"price_hourly": 0.02679,
"price_monthly": 18,
"regions": [
"ams3",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo2",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo2", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-2vcpu-2gb",
"transfer": 3.0,
"vcpus": 2
@ -303,20 +225,7 @@
"networking_throughput": 2000,
"price_hourly": 0.03125,
"price_monthly": 21,
"regions": [
"ams3",
"atl1",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "atl1", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-2vcpu-2gb-intel",
"transfer": 3.0,
"vcpus": 2
@ -374,20 +283,7 @@
"networking_throughput": 2000,
"price_hourly": 0.03571,
"price_monthly": 24,
"regions": [
"ams3",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo2",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo2", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-2vcpu-4gb",
"transfer": 4.0,
"vcpus": 2
@ -409,20 +305,7 @@
"networking_throughput": 2000,
"price_hourly": 0.04167,
"price_monthly": 28,
"regions": [
"ams3",
"atl1",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "atl1", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-2vcpu-4gb-intel",
"transfer": 4.0,
"vcpus": 2
@ -516,20 +399,7 @@
"networking_throughput": 2000,
"price_hourly": 0.07143,
"price_monthly": 48,
"regions": [
"ams3",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo2",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo2", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-4vcpu-8gb",
"transfer": 5.0,
"vcpus": 4
@ -551,20 +421,7 @@
"networking_throughput": 2000,
"price_hourly": 0.08333,
"price_monthly": 56,
"regions": [
"ams3",
"atl1",
"blr1",
"fra1",
"lon1",
"nyc1",
"nyc2",
"nyc3",
"sfo3",
"sgp1",
"syd1",
"tor1"
],
"regions": ["ams3", "atl1", "blr1", "fra1", "lon1", "nyc1", "nyc2", "nyc3", "sfo3", "sgp1", "syd1", "tor1"],
"slug": "s-4vcpu-8gb-intel",
"transfer": 5.0,
"vcpus": 4

View file

@ -2,12 +2,12 @@
"cloud": "hetzner",
"recorded_at": "2026-02-11T03:07:29Z",
"fixtures": {
"server-types": {"endpoint": "/server_types?per_page=50", "recorded_at": "2026-02-11T03:07:22Z"},
"datacenters": {"endpoint": "/datacenters", "recorded_at": "2026-02-14T00:00:00Z"},
"locations": {"endpoint": "/locations", "recorded_at": "2026-02-11T03:07:23Z"},
"ssh-keys": {"endpoint": "/ssh_keys", "recorded_at": "2026-02-11T03:07:24Z"},
"servers": {"endpoint": "/servers", "recorded_at": "2026-02-11T03:07:25Z"},
"create-server": {"endpoint": "POST /servers", "type": "live", "recorded_at": "2026-02-11T03:07:26Z"},
"delete-server": {"endpoint": "DELETE /servers/{id}", "type": "live", "recorded_at": "2026-02-11T03:07:29Z"}
"server-types": { "endpoint": "/server_types?per_page=50", "recorded_at": "2026-02-11T03:07:22Z" },
"datacenters": { "endpoint": "/datacenters", "recorded_at": "2026-02-14T00:00:00Z" },
"locations": { "endpoint": "/locations", "recorded_at": "2026-02-11T03:07:23Z" },
"ssh-keys": { "endpoint": "/ssh_keys", "recorded_at": "2026-02-11T03:07:24Z" },
"servers": { "endpoint": "/servers", "recorded_at": "2026-02-11T03:07:25Z" },
"create-server": { "endpoint": "POST /servers", "type": "live", "recorded_at": "2026-02-11T03:07:26Z" },
"delete-server": { "endpoint": "DELETE /servers/{id}", "type": "live", "recorded_at": "2026-02-11T03:07:29Z" }
}
}

View file

@ -56,113 +56,15 @@
"name": "nbg1-dc3",
"server_types": {
"available": [
27,
28,
29,
30,
31,
32,
45,
93,
94,
96,
97,
98,
99,
100,
101,
109,
110,
111,
112,
113,
114,
115,
116,
117
27, 28, 29, 30, 31, 32, 45, 93, 94, 96, 97, 98, 99, 100, 101, 109, 110, 111, 112, 113, 114, 115, 116, 117
],
"available_for_migration": [
27,
28,
29,
30,
31,
32,
45,
93,
94,
96,
97,
98,
99,
100,
101,
109,
110,
111,
112,
113,
114,
115,
116,
117
27, 28, 29, 30, 31, 32, 45, 93, 94, 96, 97, 98, 99, 100, 101, 109, 110, 111, 112, 113, 114, 115, 116, 117
],
"supported": [
1,
3,
5,
7,
9,
11,
12,
13,
14,
15,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
93,
94,
95,
96,
97,
98,
99,
100,
101,
104,
105,
106,
107,
109,
110,
111,
112,
113,
114,
115,
116,
117
1, 3, 5, 7, 9, 11, 12, 13, 14, 15, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 39, 40, 41,
42, 43, 44, 45, 93, 94, 95, 96, 97, 98, 99, 100, 101, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115,
116, 117
]
}
},

View file

@ -12,9 +12,15 @@
"city": "Falkenstein"
},
"server_types": {
"available": [1, 3, 5, 7, 9, 11, 22, 23, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 120],
"available": [
1, 3, 5, 7, 9, 11, 22, 23, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 85, 86, 87, 88, 89, 90, 91, 92,
93, 94, 95, 96, 97, 120
],
"available_for_migration": [1, 3, 5, 7, 9, 11, 22, 23, 33, 34, 35, 36],
"supported": [1, 3, 5, 7, 9, 11, 22, 23, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97]
"supported": [
1, 3, 5, 7, 9, 11, 22, 23, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 85, 86, 87, 88, 89, 90, 91, 92,
93, 94, 95, 96, 97
]
}
}
]

View file

@ -29,113 +29,15 @@
"name": "nbg1-dc3",
"server_types": {
"available": [
27,
28,
29,
30,
31,
32,
45,
93,
94,
96,
97,
98,
99,
100,
101,
109,
110,
111,
112,
113,
114,
115,
116,
117
27, 28, 29, 30, 31, 32, 45, 93, 94, 96, 97, 98, 99, 100, 101, 109, 110, 111, 112, 113, 114, 115, 116, 117
],
"available_for_migration": [
27,
28,
29,
30,
31,
32,
45,
93,
94,
96,
97,
98,
99,
100,
101,
109,
110,
111,
112,
113,
114,
115,
116,
117
27, 28, 29, 30, 31, 32, 45, 93, 94, 96, 97, 98, 99, 100, 101, 109, 110, 111, 112, 113, 114, 115, 116, 117
],
"supported": [
1,
3,
5,
7,
9,
11,
12,
13,
14,
15,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
93,
94,
95,
96,
97,
98,
99,
100,
101,
104,
105,
106,
107,
109,
110,
111,
112,
113,
114,
115,
116,
117
1, 3, 5, 7, 9, 11, 12, 13, 14, 15, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 39, 40,
41, 42, 43, 44, 45, 93, 94, 95, 96, 97, 98, 99, 100, 101, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114,
115, 116, 117
]
}
},
@ -307,113 +209,15 @@
"name": "nbg1-dc3",
"server_types": {
"available": [
27,
28,
29,
30,
31,
32,
45,
93,
94,
96,
97,
98,
99,
100,
101,
109,
110,
111,
112,
113,
114,
115,
116,
117
27, 28, 29, 30, 31, 32, 45, 93, 94, 96, 97, 98, 99, 100, 101, 109, 110, 111, 112, 113, 114, 115, 116, 117
],
"available_for_migration": [
27,
28,
29,
30,
31,
32,
45,
93,
94,
96,
97,
98,
99,
100,
101,
109,
110,
111,
112,
113,
114,
115,
116,
117
27, 28, 29, 30, 31, 32, 45, 93, 94, 96, 97, 98, 99, 100, 101, 109, 110, 111, 112, 113, 114, 115, 116, 117
],
"supported": [
1,
3,
5,
7,
9,
11,
12,
13,
14,
15,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
93,
94,
95,
96,
97,
98,
99,
100,
101,
104,
105,
106,
107,
109,
110,
111,
112,
113,
114,
115,
116,
117
1, 3, 5, 7, 9, 11, 12, 13, 14, 15, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 39, 40,
41, 42, 43, 44, 45, 93, 94, 95, 96, 97, 98, 99, 100, 101, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114,
115, 116, 117
]
}
},
@ -585,113 +389,15 @@
"name": "nbg1-dc3",
"server_types": {
"available": [
27,
28,
29,
30,
31,
32,
45,
93,
94,
96,
97,
98,
99,
100,
101,
109,
110,
111,
112,
113,
114,
115,
116,
117
27, 28, 29, 30, 31, 32, 45, 93, 94, 96, 97, 98, 99, 100, 101, 109, 110, 111, 112, 113, 114, 115, 116, 117
],
"available_for_migration": [
27,
28,
29,
30,
31,
32,
45,
93,
94,
96,
97,
98,
99,
100,
101,
109,
110,
111,
112,
113,
114,
115,
116,
117
27, 28, 29, 30, 31, 32, 45, 93, 94, 96, 97, 98, 99, 100, 101, 109, 110, 111, 112, 113, 114, 115, 116, 117
],
"supported": [
1,
3,
5,
7,
9,
11,
12,
13,
14,
15,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
93,
94,
95,
96,
97,
98,
99,
100,
101,
104,
105,
106,
107,
109,
110,
111,
112,
113,
114,
115,
116,
117
1, 3, 5, 7, 9, 11, 12, 13, 14, 15, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 34, 35, 36, 37, 38, 39, 40,
41, 42, 43, 44, 45, 93, 94, 95, 96, 97, 98, 99, 100, 101, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114,
115, 116, 117
]
}
},

View file

@ -1,10 +1,6 @@
language js(typescript)
`$value as $type` as $expr where {
!$expr <: `$_ as const`,
register_diagnostic(
span = $expr,
message = "Type assertions (`as`) are banned. Use schema validation (parseJsonWith), type guards, or `satisfies` instead.",
severity = "error"
)
! $expr <: `$_ as const`,
register_diagnostic(span=$expr, message="Type assertions (`as`) are banned. Use schema validation (parseJsonWith), type guards, or `satisfies` instead.", severity="error")
}

View file

@ -4,9 +4,5 @@ or {
`typeof $val === "string"`,
`typeof $val === "number"`
} as $expr where {
register_diagnostic(
span = $expr,
message = "Use `isString()` or `isNumber()` from `shared/type-guards` instead of raw `typeof` checks.",
severity = "error"
)
register_diagnostic(span=$expr, message="Use `isString()` or `isNumber()` from `shared/type-guards` instead of raw `typeof` checks.", severity="error")
}

View file

@ -28,11 +28,7 @@
}
},
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/agents/claude.png",
"featured_cloud": [
"gcp",
"aws",
"digitalocean"
],
"featured_cloud": ["gcp", "aws", "digitalocean"],
"creator": "Anthropic",
"repo": "anthropics/claude-code",
"license": "Proprietary",
@ -44,11 +40,7 @@
"runtime": "node",
"category": "cli",
"tagline": "Anthropic's AI coding agent — plan, build, and ship code across your entire codebase",
"tags": [
"coding",
"terminal",
"agentic"
]
"tags": ["coding", "terminal", "agentic"]
},
"openclaw": {
"name": "OpenClaw",
@ -69,11 +61,7 @@
}
},
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/agents/openclaw.png",
"featured_cloud": [
"gcp",
"aws",
"digitalocean"
],
"featured_cloud": ["gcp", "aws", "digitalocean"],
"creator": "OpenClaw",
"repo": "openclaw/openclaw",
"license": "MIT",
@ -85,11 +73,7 @@
"runtime": "bun",
"category": "tui",
"tagline": "Your personal AI — any channel, any model, from the terminal",
"tags": [
"coding",
"tui",
"gateway"
]
"tags": ["coding", "tui", "gateway"]
},
"zeroclaw": {
"name": "ZeroClaw",
@ -115,11 +99,7 @@
},
"notes": "Rust-based agent framework built by Harvard/MIT/Sundai.Club communities. Natively supports OpenRouter via OPENROUTER_API_KEY + ZEROCLAW_PROVIDER=openrouter. Requires compilation from source (~5-10 min).",
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/agents/zeroclaw.png",
"featured_cloud": [
"hetzner",
"gcp",
"aws"
],
"featured_cloud": ["hetzner", "gcp", "aws"],
"creator": "Sundai.Club",
"repo": "zeroclaw-labs/zeroclaw",
"license": "Apache-2.0",
@ -131,12 +111,7 @@
"runtime": "binary",
"category": "cli",
"tagline": "Fast, small, fully autonomous AI infrastructure — deploy anywhere, swap anything",
"tags": [
"coding",
"terminal",
"rust",
"autonomous"
]
"tags": ["coding", "terminal", "rust", "autonomous"]
},
"codex": {
"name": "Codex CLI",
@ -151,11 +126,7 @@
},
"notes": "Works with OpenRouter via OPENAI_BASE_URL override pointing to openrouter.ai/api/v1",
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/agents/codex.png",
"featured_cloud": [
"gcp",
"aws",
"digitalocean"
],
"featured_cloud": ["gcp", "aws", "digitalocean"],
"creator": "OpenAI",
"repo": "openai/codex",
"license": "Apache-2.0",
@ -167,11 +138,7 @@
"runtime": "binary",
"category": "cli",
"tagline": "OpenAI's lightweight coding agent for the terminal",
"tags": [
"coding",
"terminal",
"openai"
]
"tags": ["coding", "terminal", "openai"]
},
"opencode": {
"name": "OpenCode",
@ -184,11 +151,7 @@
},
"notes": "Natively supports OpenRouter via OPENROUTER_API_KEY env var. Go-based TUI using Bubble Tea.",
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/agents/opencode.png",
"featured_cloud": [
"daytona",
"gcp",
"aws"
],
"featured_cloud": ["daytona", "gcp", "aws"],
"creator": "SST",
"repo": "sst/opencode",
"license": "MIT",
@ -200,11 +163,7 @@
"runtime": "go",
"category": "tui",
"tagline": "The open-source AI coding agent",
"tags": [
"coding",
"tui",
"go"
]
"tags": ["coding", "tui", "go"]
},
"kilocode": {
"name": "Kilo Code",
@ -219,11 +178,7 @@
},
"notes": "Natively supports OpenRouter as a provider via KILO_PROVIDER_TYPE=openrouter. CLI installable via npm as @kilocode/cli, invocable as 'kilocode' or 'kilo'.",
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/agents/kilocode.png",
"featured_cloud": [
"gcp",
"aws",
"digitalocean"
],
"featured_cloud": ["gcp", "aws", "digitalocean"],
"creator": "Kilo-Org",
"repo": "Kilo-Org/kilocode",
"license": "MIT",
@ -235,12 +190,7 @@
"runtime": "node",
"category": "cli",
"tagline": "All-in-one AI coding platform — 100+ providers, one CLI",
"tags": [
"coding",
"terminal",
"agentic",
"engineering"
]
"tags": ["coding", "terminal", "agentic", "engineering"]
},
"hermes": {
"name": "Hermes Agent",
@ -255,11 +205,7 @@
},
"notes": "Natively supports OpenRouter via OPENROUTER_API_KEY. Also works via OPENAI_BASE_URL + OPENAI_API_KEY for OpenAI-compatible mode. Installs Python 3.11 via uv.",
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/agents/hermes.png",
"featured_cloud": [
"sprite",
"hetzner",
"gcp"
],
"featured_cloud": ["sprite", "hetzner", "gcp"],
"creator": "Nous Research",
"repo": "NousResearch/hermes-agent",
"license": "MIT",
@ -271,12 +217,7 @@
"runtime": "python",
"category": "cli",
"tagline": "Persistent AI agent with memory, tools, and multi-platform messaging",
"tags": [
"agent",
"messaging",
"memory",
"tools"
]
"tags": ["agent", "messaging", "memory", "tools"]
}
},
"clouds": {

View file

@ -1,5 +1,9 @@
{
"private": true,
"type": "module",
"workspaces": ["packages/*", ".claude/skills/setup-spa", ".claude/scripts"]
"workspaces": [
"packages/*",
".claude/skills/setup-spa",
".claude/scripts"
]
}

View file

@ -1,6 +1,6 @@
{
"name": "@openrouter/spawn",
"version": "0.14.2",
"version": "0.14.3",
"type": "module",
"bin": {
"spawn": "cli.js"

View file

@ -1,7 +1,6 @@
import { describe, it, expect, beforeEach, afterEach } from "bun:test";
import { unlinkSync, existsSync, readFileSync } from "node:fs";
import { BUNDLES, DEFAULT_BUNDLE, loadCredsFromConfig, saveCredsToConfig, getAwsConfigPath } from "../aws/aws";
import { afterEach, beforeEach, describe, expect, it } from "bun:test";
import { existsSync, readFileSync, unlinkSync } from "node:fs";
import { BUNDLES, DEFAULT_BUNDLE, getAwsConfigPath, loadCredsFromConfig, saveCredsToConfig } from "../aws/aws";
// ─── Credential caching tests ────────────────────────────────────────────────

View file

@ -1,7 +1,8 @@
import { describe, it, expect, beforeEach } from "bun:test";
import { mockClackPrompts } from "./test-helpers";
import type { Manifest } from "../manifest";
import { beforeEach, describe, expect, it } from "bun:test";
import { mockClackPrompts } from "./test-helpers";
/**
* Tests for checkEntity output messages (commands/shared.ts).
*

View file

@ -1,7 +1,8 @@
import { describe, it, expect, beforeEach } from "bun:test";
import { checkEntity } from "../commands";
import type { Manifest } from "../manifest";
import { beforeEach, describe, expect, it } from "bun:test";
import { checkEntity } from "../commands";
/**
* Tests for checkEntity (commands/shared.ts).
*

View file

@ -1,10 +1,11 @@
import { describe, it, expect, beforeEach, afterEach } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
import { mockClackPrompts } from "./test-helpers";
import type { SpawnRecord } from "../history.js";
import { clearHistory, loadHistory, saveSpawnRecord, filterHistory, getHistoryPath } from "../history.js";
import { afterEach, beforeEach, describe, expect, it } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { clearHistory, filterHistory, getHistoryPath, loadHistory, saveSpawnRecord } from "../history.js";
import { mockClackPrompts } from "./test-helpers";
/**
* Tests for clearHistory (history.ts) and cmdListClear (commands/list.ts).

View file

@ -1,4 +1,4 @@
import { describe, it, expect, afterEach } from "bun:test";
import { afterEach, describe, expect, it } from "bun:test";
import { hasCloudCredentials } from "../commands";
describe("hasCloudCredentials", () => {

View file

@ -1,5 +1,5 @@
import { describe, it, expect } from "bun:test";
import { getPackagesForTier, needsNode, needsBun, NODE_INSTALL_CMD } from "../shared/cloud-init.js";
import { describe, expect, it } from "bun:test";
import { getPackagesForTier, NODE_INSTALL_CMD, needsBun, needsNode } from "../shared/cloud-init.js";
describe("getPackagesForTier", () => {
const MINIMAL_PACKAGES = [

View file

@ -1,7 +1,7 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { loadManifest } from "../manifest";
import { isString } from "../shared/type-guards";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for cmdInteractive() in commands/interactive.ts.

View file

@ -1,8 +1,9 @@
import type { spyOn } from "bun:test";
import { describe, it, expect, beforeEach, afterEach, mock } from "bun:test";
import type { Manifest } from "../manifest";
import { afterEach, beforeEach, describe, expect, it, mock } from "bun:test";
import { loadManifest } from "../manifest";
import { createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { createConsoleMocks, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for cmdMatrix, cmdAgents, and cmdClouds listing command output.

View file

@ -1,10 +1,11 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import type { SpawnRecord } from "../history";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for cmdLast — the feature added in PR #1171 that reruns the most recent spawn.
*

View file

@ -1,10 +1,11 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import type { SpawnRecord } from "../history";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Integration tests for cmdList through the real exported function.
*

View file

@ -1,10 +1,10 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { mkdirSync, rmSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { join } from "node:path";
import { loadManifest } from "../manifest";
import { isString } from "../shared/type-guards";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for the --name duplicate detection feature (issue #1864).

View file

@ -1,10 +1,10 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { existsSync, readFileSync, writeFileSync, mkdirSync, rmSync } from "node:fs";
import { join } from "node:path";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { join } from "node:path";
import { loadManifest } from "../manifest";
import { isString } from "../shared/type-guards";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for the cmdRun happy-path pipeline: successful download, history

View file

@ -1,6 +1,6 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { loadManifest } from "../manifest";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for cmdCloudInfo and related cloud validation paths in commands/info.ts.

View file

@ -1,6 +1,6 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { loadManifest } from "../manifest";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for display/output commands: cmdAgentInfo (happy path) and cmdHelp.

View file

@ -1,7 +1,7 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { loadManifest } from "../manifest";
import { isString } from "../shared/type-guards";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for commands/ error/validation paths that call process.exit(1).

View file

@ -1,16 +1,16 @@
import { describe, it, expect } from "bun:test";
import { describe, expect, it } from "bun:test";
import {
parseAuthEnvVars,
calculateColumnWidth,
formatRelativeTime,
getErrorMessage,
getImplementedAgents,
getImplementedClouds,
getMissingClouds,
getErrorMessage,
getStatusDescription,
calculateColumnWidth,
getTerminalWidth,
formatRelativeTime,
parseAuthEnvVars,
} from "../commands";
import { createMockManifest, createEmptyManifest } from "./test-helpers";
import { createEmptyManifest, createMockManifest } from "./test-helpers";
/**
* Tests for exported utility functions in commands/ that lacked

View file

@ -1,6 +1,6 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { loadManifest } from "../manifest";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for the display-name suggestion branches in validateEntity

View file

@ -1,7 +1,7 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { loadManifest } from "../manifest";
import { isString } from "../shared/type-guards";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for cmdRun display-name resolution and validateImplementation

View file

@ -1,7 +1,7 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { loadManifest } from "../manifest";
import { isString } from "../shared/type-guards";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for detectAndFixSwappedArgs and resolveAndLog logic in commands/run.ts.

View file

@ -1,7 +1,8 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { isString } from "../shared/type-guards";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import pkg from "../../package.json" with { type: "json" };
import { isString } from "../shared/type-guards";
import { createConsoleMocks, mockClackPrompts, restoreMocks } from "./test-helpers";
const VERSION = pkg.version;
/**

View file

@ -1,4 +1,4 @@
import { describe, it, expect, beforeEach, afterEach } from "bun:test";
import { afterEach, beforeEach, describe, expect, it } from "bun:test";
import { credentialHints } from "../commands";
/**

View file

@ -1,5 +1,5 @@
import { describe, it, expect, afterEach } from "bun:test";
import { KNOWN_FLAGS, findUnknownFlag } from "../flags";
import { afterEach, describe, expect, it } from "bun:test";
import { findUnknownFlag, KNOWN_FLAGS } from "../flags";
describe("--custom flag", () => {
describe("flag registration", () => {

View file

@ -1,7 +1,7 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { createMockManifest, createConsoleMocks, restoreMocks, mockClackPrompts } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { loadManifest } from "../manifest";
import { isString } from "../shared/type-guards";
import { createConsoleMocks, createMockManifest, mockClackPrompts, restoreMocks } from "./test-helpers";
/**
* Tests for the download failure pipeline through real code paths in commands/run.ts.

View file

@ -1,8 +1,8 @@
import { describe, it, expect } from "bun:test";
import { describe, expect, it } from "bun:test";
import {
findClosestKeyByNameOrKey,
levenshtein,
findClosestMatch,
levenshtein,
resolveAgentKey,
resolveCloudKey,
} from "../commands";

View file

@ -4,7 +4,7 @@
* gateway recovers from crashes without manual intervention.
*/
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { mockClackPrompts } from "./test-helpers";
// ── Mock @clack/prompts (must be before importing agent-setup) ──────────
@ -12,6 +12,7 @@ const clack = mockClackPrompts();
// ── Import the function under test ──────────────────────────────────────
const { startGateway } = await import("../shared/agent-setup");
import type { CloudRunner } from "../shared/agent-setup";
// ── Helpers ─────────────────────────────────────────────────────────────

View file

@ -0,0 +1,485 @@
/**
 * history-spawn-id.test.ts — Tests for unique spawn ID behavior.
 *
 * Verifies that:
 * - Every saved record gets a unique id
 * - saveVmConnection matches by spawnId (not heuristic)
 * - saveLaunchCmd matches by spawnId (not heuristic)
 * - removeRecord / markRecordDeleted match by id
 * - Concurrent spawns on the same cloud don't cross-contaminate
 * - Backward compat: records without id still work via heuristic
 */
import type { SpawnRecord } from "../history.js";
import { afterEach, beforeEach, describe, expect, it } from "bun:test";
import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import {
  generateSpawnId,
  getActiveServers,
  getConnectionPath,
  getHistoryPath,
  loadHistory,
  markRecordDeleted,
  removeRecord,
  saveLaunchCmd,
  saveSpawnRecord,
  saveVmConnection,
} from "../history.js";

describe("history spawn IDs", () => {
  let testDir: string;
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    // Isolate each test in a fresh SPAWN_HOME so history/connection files never collide.
    testDir = join(homedir(), `.spawn-test-${Date.now()}-${Math.random()}`);
    mkdirSync(testDir, { recursive: true });
    originalEnv = { ...process.env };
    process.env.SPAWN_HOME = testDir;
  });

  afterEach(() => {
    process.env = originalEnv;
    if (existsSync(testDir)) {
      rmSync(testDir, { recursive: true, force: true });
    }
  });

  // ── generateSpawnId ──────────────────────────────────────────────────
  describe("generateSpawnId", () => {
    it("returns a valid UUID string", () => {
      const id = generateSpawnId();
      expect(id).toMatch(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/);
    });

    it("returns unique values on each call", () => {
      const ids = new Set<string>();
      for (let i = 0; i < 100; i++) {
        ids.add(generateSpawnId());
      }
      expect(ids.size).toBe(100);
    });
  });

  // ── saveSpawnRecord auto-generates id ────────────────────────────────
  describe("saveSpawnRecord id generation", () => {
    it("auto-generates id when not provided", () => {
      saveSpawnRecord({
        id: "",
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      const history = loadHistory();
      expect(history).toHaveLength(1);
      expect(history[0].id).toBeDefined();
      expect(typeof history[0].id).toBe("string");
      expect(history[0].id.length).toBeGreaterThan(0);
    });

    it("preserves id when explicitly provided", () => {
      const customId = "custom-id-123";
      saveSpawnRecord({
        id: customId,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      const history = loadHistory();
      expect(history[0].id).toBe(customId);
    });

    it("generates different ids for consecutive saves", () => {
      saveSpawnRecord({
        id: "",
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveSpawnRecord({
        id: "",
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:01:00.000Z",
      });
      const history = loadHistory();
      expect(history).toHaveLength(2);
      expect(history[0].id).not.toBe(history[1].id);
    });
  });

  // ── saveVmConnection matches by spawnId ──────────────────────────────
  describe("saveVmConnection with spawnId", () => {
    it("attaches connection to the correct record by spawnId", () => {
      const id1 = generateSpawnId();
      const id2 = generateSpawnId();
      // Save two records for the same cloud
      saveSpawnRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveSpawnRecord({
        id: id2,
        agent: "codex",
        cloud: "gcp",
        timestamp: "2026-01-01T00:01:00.000Z",
      });
      // Attach connection to the FIRST record by id
      saveVmConnection("1.2.3.4", "root", "srv-1", "my-server", "gcp", undefined, undefined, id1);
      const history = loadHistory();
      expect(history[0].connection?.ip).toBe("1.2.3.4");
      expect(history[0].connection?.server_name).toBe("my-server");
      // Second record should NOT have a connection
      expect(history[1].connection).toBeUndefined();
    });

    it("does not cross-contaminate concurrent spawns on the same cloud", () => {
      const id1 = generateSpawnId();
      const id2 = generateSpawnId();
      saveSpawnRecord({
        id: id1,
        agent: "claude",
        cloud: "hetzner",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveSpawnRecord({
        id: id2,
        agent: "codex",
        cloud: "hetzner",
        timestamp: "2026-01-01T00:01:00.000Z",
      });
      // Each connection targets its own record
      saveVmConnection("10.0.0.1", "root", "srv-a", "server-a", "hetzner", undefined, undefined, id1);
      saveVmConnection("10.0.0.2", "root", "srv-b", "server-b", "hetzner", undefined, undefined, id2);
      const history = loadHistory();
      expect(history[0].connection?.ip).toBe("10.0.0.1");
      expect(history[0].connection?.server_name).toBe("server-a");
      expect(history[1].connection?.ip).toBe("10.0.0.2");
      expect(history[1].connection?.server_name).toBe("server-b");
    });

    it("writes spawn_id to last-connection.json", () => {
      const id = generateSpawnId();
      saveSpawnRecord({
        id,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveVmConnection("1.2.3.4", "root", "", "srv", "gcp", undefined, undefined, id);
      const connFile = JSON.parse(readFileSync(getConnectionPath(), "utf-8"));
      expect(connFile.spawn_id).toBe(id);
    });

    it("falls back to heuristic when spawnId is not provided", () => {
      saveSpawnRecord({
        id: generateSpawnId(),
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      // No spawnId — should match the most recent gcp record without connection
      saveVmConnection("5.6.7.8", "user", "", "fallback-srv", "gcp");
      const history = loadHistory();
      expect(history[0].connection?.ip).toBe("5.6.7.8");
    });
  });

  // ── saveLaunchCmd matches by spawnId ──────────────────────────────────
  describe("saveLaunchCmd with spawnId", () => {
    it("updates the correct record by spawnId", () => {
      const id1 = generateSpawnId();
      const id2 = generateSpawnId();
      saveSpawnRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveSpawnRecord({
        id: id2,
        agent: "codex",
        cloud: "gcp",
        timestamp: "2026-01-01T00:01:00.000Z",
      });
      // Attach connections to both
      saveVmConnection("1.1.1.1", "root", "", "srv1", "gcp", undefined, undefined, id1);
      saveVmConnection("2.2.2.2", "root", "", "srv2", "gcp", undefined, undefined, id2);
      // Update launch command for the FIRST record only
      saveLaunchCmd("claude --start", id1);
      const history = loadHistory();
      expect(history[0].connection?.launch_cmd).toBe("claude --start");
      expect(history[1].connection?.launch_cmd).toBeUndefined();
    });

    it("falls back to most recent record with connection when no spawnId", () => {
      const id = generateSpawnId();
      saveSpawnRecord({
        id,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveVmConnection("1.1.1.1", "root", "", "srv", "gcp", undefined, undefined, id);
      saveLaunchCmd("fallback-cmd");
      const history = loadHistory();
      expect(history[0].connection?.launch_cmd).toBe("fallback-cmd");
    });
  });

  // ── removeRecord matches by id ────────────────────────────────────────
  describe("removeRecord with id", () => {
    it("removes the correct record by id", () => {
      const id1 = generateSpawnId();
      const id2 = generateSpawnId();
      saveSpawnRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveSpawnRecord({
        id: id2,
        agent: "codex",
        cloud: "gcp",
        timestamp: "2026-01-01T00:01:00.000Z",
      });
      const result = removeRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      expect(result).toBe(true);
      const history = loadHistory();
      expect(history).toHaveLength(1);
      expect(history[0].id).toBe(id2);
    });

    it("does not remove wrong record with same agent/cloud/timestamp", () => {
      const id1 = generateSpawnId();
      const id2 = generateSpawnId();
      const ts = "2026-01-01T00:00:00.000Z";
      // Two records with same agent/cloud/timestamp but different ids
      saveSpawnRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: ts,
      });
      saveSpawnRecord({
        id: id2,
        agent: "claude",
        cloud: "gcp",
        timestamp: ts,
      });
      // Remove by id1 — should only remove the first one
      removeRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: ts,
      });
      const history = loadHistory();
      expect(history).toHaveLength(1);
      expect(history[0].id).toBe(id2);
    });

    it("falls back to timestamp+agent+cloud for records without id", () => {
      // Write a legacy record without id directly
      const legacy: SpawnRecord[] = [
        {
          id: "",
          agent: "claude",
          cloud: "gcp",
          timestamp: "2026-01-01T00:00:00.000Z",
        },
        {
          id: "",
          agent: "codex",
          cloud: "hetzner",
          timestamp: "2026-01-02T00:00:00.000Z",
        },
      ];
      writeFileSync(getHistoryPath(), JSON.stringify(legacy, null, 2) + "\n");
      const result = removeRecord({
        id: "",
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      expect(result).toBe(true);
      const history = loadHistory();
      expect(history).toHaveLength(1);
      expect(history[0].agent).toBe("codex");
    });
  });

  // ── markRecordDeleted matches by id ───────────────────────────────────
  describe("markRecordDeleted with id", () => {
    it("marks the correct record as deleted by id", () => {
      const id1 = generateSpawnId();
      const id2 = generateSpawnId();
      saveSpawnRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveSpawnRecord({
        id: id2,
        agent: "codex",
        cloud: "gcp",
        timestamp: "2026-01-01T00:01:00.000Z",
      });
      // Attach connections to both
      saveVmConnection("1.1.1.1", "root", "srv1", "server1", "gcp", undefined, undefined, id1);
      saveVmConnection("2.2.2.2", "root", "srv2", "server2", "gcp", undefined, undefined, id2);
      // Mark only the first as deleted
      const result = markRecordDeleted({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      expect(result).toBe(true);
      const history = loadHistory();
      expect(history[0].connection?.deleted).toBe(true);
      expect(history[0].connection?.deleted_at).toBeDefined();
      expect(history[1].connection?.deleted).toBeUndefined();
    });

    it("returns false when record has no connection", () => {
      const id = generateSpawnId();
      saveSpawnRecord({
        id,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      const result = markRecordDeleted({
        id,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      expect(result).toBe(false);
    });
  });

  // ── mergeLastConnection uses spawn_id ─────────────────────────────────
  describe("mergeLastConnection via getActiveServers", () => {
    it("merges connection to correct record using spawn_id in last-connection.json", () => {
      const id1 = generateSpawnId();
      const id2 = generateSpawnId();
      saveSpawnRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      saveSpawnRecord({
        id: id2,
        agent: "codex",
        cloud: "gcp",
        timestamp: "2026-01-01T00:01:00.000Z",
      });
      // Manually write last-connection.json with spawn_id targeting the second record
      const connData = {
        ip: "9.9.9.9",
        user: "root",
        server_name: "targeted-srv",
        cloud: "gcp",
        spawn_id: id2,
      };
      writeFileSync(getConnectionPath(), JSON.stringify(connData) + "\n");
      // getActiveServers calls mergeLastConnection internally, triggering the merge
      getActiveServers();
      const history = loadHistory();
      // The first record should NOT have the connection
      expect(history[0].connection).toBeUndefined();
      // The second record should have it
      expect(history[1].connection?.ip).toBe("9.9.9.9");
      expect(history[1].connection?.server_name).toBe("targeted-srv");
    });

    it("falls back to heuristic when last-connection.json has no spawn_id", () => {
      const id1 = generateSpawnId();
      saveSpawnRecord({
        id: id1,
        agent: "claude",
        cloud: "gcp",
        timestamp: "2026-01-01T00:00:00.000Z",
      });
      // Write last-connection.json WITHOUT spawn_id
      const connData = {
        ip: "8.8.8.8",
        user: "root",
        cloud: "gcp",
      };
      writeFileSync(getConnectionPath(), JSON.stringify(connData) + "\n");
      getActiveServers();
      const history = loadHistory();
      expect(history[0].connection?.ip).toBe("8.8.8.8");
    });
  });
});

View file

@ -1,9 +1,10 @@
import { describe, it, expect, beforeEach, afterEach } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync, readFileSync, readdirSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
import type { SpawnRecord } from "../history.js";
import { loadHistory, saveSpawnRecord, filterHistory } from "../history.js";
import { afterEach, beforeEach, describe, expect, it } from "bun:test";
import { existsSync, mkdirSync, readdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { filterHistory, loadHistory, saveSpawnRecord } from "../history.js";
/**
* Tests for history trimming and boundary behavior.

View file

@ -1,9 +1,10 @@
import { describe, it, expect, beforeEach, afterEach } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync, readFileSync } from "node:fs";
import type { SpawnRecord } from "../history.js";
import { afterEach, beforeEach, describe, expect, it } from "bun:test";
import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import type { SpawnRecord } from "../history.js";
import { getSpawnDir, getHistoryPath, loadHistory, saveSpawnRecord, filterHistory } from "../history.js";
import { filterHistory, getHistoryPath, getSpawnDir, loadHistory, saveSpawnRecord } from "../history.js";
describe("history", () => {
let testDir: string;

View file

@ -1,8 +1,9 @@
import { describe, it, expect } from "bun:test";
import { readFileSync, readdirSync, existsSync } from "node:fs";
import type { Manifest } from "../manifest";
import { describe, expect, it } from "bun:test";
import { existsSync, readdirSync, readFileSync } from "node:fs";
import { join, resolve } from "node:path";
import * as v from "valibot";
import type { Manifest } from "../manifest";
/**
* Icon integrity tests.

View file

@ -1,9 +1,10 @@
import { describe, it, expect, beforeEach, afterEach, mock } from "bun:test";
import { existsSync, writeFileSync, mkdirSync, rmSync, utimesSync } from "node:fs";
import { join } from "node:path";
import type { Manifest, AgentDef, CloudDef } from "../manifest";
import { loadManifest, agentKeys, cloudKeys, matrixStatus, countImplemented, isValidManifest } from "../manifest";
import type { AgentDef, CloudDef, Manifest } from "../manifest";
import type { TestEnvironment } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock } from "bun:test";
import { existsSync, mkdirSync, rmSync, utimesSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { agentKeys, cloudKeys, countImplemented, isValidManifest, loadManifest, matrixStatus } from "../manifest";
import { createMockManifest, setupTestEnvironment, teardownTestEnvironment } from "./test-helpers";
/**

View file

@ -1,8 +1,9 @@
import { describe, it, expect } from "bun:test";
import { readFileSync, existsSync } from "node:fs";
import { join, resolve } from "node:path";
import type { Manifest } from "../manifest";
import { describe, expect, it } from "bun:test";
import { existsSync, readFileSync } from "node:fs";
import { join, resolve } from "node:path";
/**
* Manifest integrity tests.
*

View file

@ -1,7 +1,8 @@
import { describe, it, expect } from "bun:test";
import type { Manifest } from "../manifest";
import { describe, expect, it } from "bun:test";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import type { Manifest } from "../manifest";
/**
* Manifest type contract validation tests.

View file

@ -1,12 +1,13 @@
import { describe, it, expect, beforeEach, afterEach, mock } from "bun:test";
import type { Manifest } from "../manifest";
import { loadManifest, agentKeys, cloudKeys, matrixStatus, countImplemented } from "../manifest";
import { writeFileSync, mkdirSync } from "node:fs";
import { join } from "node:path";
import type { TestEnvironment } from "./test-helpers";
import { afterEach, beforeEach, describe, expect, it, mock } from "bun:test";
import { mkdirSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { agentKeys, cloudKeys, countImplemented, loadManifest, matrixStatus } from "../manifest";
import {
createMockManifest,
createEmptyManifest,
createMockManifest,
mockSuccessfulFetch,
setupTestEnvironment,
teardownTestEnvironment,

View file

@ -1,4 +1,4 @@
import { describe, it, expect } from "bun:test";
import { describe, expect, it } from "bun:test";
import { OAUTH_CODE_REGEX } from "../shared/oauth-constants";
describe("OAUTH_CODE_REGEX", () => {

View file

@ -10,7 +10,7 @@
* bleed into with-retry-result.test.ts which tests the real wrapSshCall.
*/
import { describe, it, expect, beforeEach, mock, spyOn } from "bun:test";
import { beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { isNumber } from "../shared/type-guards.js";
// ── Mock only oauth (needed to avoid interactive prompts) ─────────────
@ -26,8 +26,9 @@ mock.module("../shared/oauth", () => ({
// ── Import the real module under test ─────────────────────────────────────
const { runOrchestration } = await import("../shared/orchestrate");
import type { CloudOrchestrator } from "../shared/orchestrate";
import type { AgentConfig } from "../shared/agents";
import type { CloudOrchestrator } from "../shared/orchestrate";
// ── Helpers ───────────────────────────────────────────────────────────────
@ -398,7 +399,10 @@ describe("runOrchestration", () => {
await runOrchestrationSafe(cloud, agent, "testagent");
expect(saveLaunchCmd).toHaveBeenCalledWith("my-agent --start");
expect(saveLaunchCmd).toHaveBeenCalledTimes(1);
const args = saveLaunchCmd.mock.calls[0];
expect(args[0]).toBe("my-agent --start");
expect(typeof args[1]).toBe("string"); // spawnId
stderrSpy.mockRestore();
exitSpy.mockRestore();
});

View file

@ -1,6 +1,6 @@
import { describe, it, expect } from "bun:test";
import { describe, expect, it } from "bun:test";
import * as v from "valibot";
import { parseJsonWith, parseJsonRaw } from "../shared/parse";
import { parseJsonRaw, parseJsonWith } from "../shared/parse";
describe("parseJsonWith", () => {
const NumberSchema = v.object({

View file

@ -1,8 +1,9 @@
import { describe, it, expect, beforeEach, afterEach, mock } from "bun:test";
import { mockClackPrompts } from "./test-helpers";
import { preflightCredentialCheck } from "../commands";
import type { Manifest } from "../manifest";
import { afterEach, beforeEach, describe, expect, it, mock } from "bun:test";
import { preflightCredentialCheck } from "../commands";
import { mockClackPrompts } from "./test-helpers";
const mockIsCancel = mock(() => false);
const clackMocks = mockClackPrompts({
isCancel: mockIsCancel,

View file

@ -23,9 +23,9 @@
* - Subprocesses (execSync, spawnSync) inherit the sandboxed environment
*/
import { mkdirSync, readdirSync, rmSync, mkdtempSync } from "node:fs";
import { join } from "node:path";
import { mkdirSync, mkdtempSync, readdirSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
// ── Stray test file cleanup ──────────────────────────────────────────────────
//

View file

@ -1,4 +1,4 @@
import { describe, it, expect } from "bun:test";
import { describe, expect, it } from "bun:test";
import { validatePromptFilePath, validatePromptFileStats } from "../security.js";
describe("validatePromptFilePath", () => {

View file

@ -1,7 +1,8 @@
import { describe, it, expect, beforeEach, afterEach, mock } from "bun:test";
import { mockClackPrompts } from "./test-helpers";
import type { Manifest } from "../manifest";
import { afterEach, beforeEach, describe, expect, it, mock } from "bun:test";
import { mockClackPrompts } from "./test-helpers";
/**
* Tests for critical-path functions in the `spawn <agent> <cloud>` run flow:
*

View file

@ -1,4 +1,4 @@
import { describe, it, expect } from "bun:test";
import { describe, expect, it } from "bun:test";
import {
getScriptFailureGuidance as _getScriptFailureGuidance,
getSignalGuidance as _getSignalGuidance,

View file

@ -3,8 +3,8 @@
* These functions prevent command injection via corrupted history files
*/
import { describe, it, expect } from "bun:test";
import { validateConnectionIP, validateUsername, validateServerIdentifier, validateLaunchCmd } from "../security.js";
import { describe, expect, it } from "bun:test";
import { validateConnectionIP, validateLaunchCmd, validateServerIdentifier, validateUsername } from "../security.js";
describe("validateConnectionIP", () => {
describe("valid inputs", () => {

View file

@ -1,5 +1,5 @@
import { describe, it, expect } from "bun:test";
import { validateIdentifier, validateScriptContent, validatePrompt } from "../security";
import { describe, expect, it } from "bun:test";
import { validateIdentifier, validatePrompt, validateScriptContent } from "../security";
/**
* Edge case tests for security validation functions.

View file

@ -1,5 +1,5 @@
import { describe, it, expect } from "bun:test";
import { validateIdentifier, validateScriptContent, validatePrompt } from "../security";
import { describe, expect, it } from "bun:test";
import { validateIdentifier, validatePrompt, validateScriptContent } from "../security";
/**
* Tests for security validation with encoding edge cases and

View file

@ -1,5 +1,5 @@
import { describe, it, expect } from "bun:test";
import { validateIdentifier, validateScriptContent, validatePrompt } from "../security.js";
import { describe, expect, it } from "bun:test";
import { validateIdentifier, validatePrompt, validateScriptContent } from "../security.js";
describe("Security Validation", () => {
describe("validateIdentifier", () => {

View file

@ -5,8 +5,8 @@
* to mock ssh-keygen invocations no real subprocess calls.
*/
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { mkdirSync, writeFileSync, rmSync, existsSync } from "node:fs";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { mockClackPrompts } from "./test-helpers";

View file

@ -1,9 +1,10 @@
import { spyOn, mock } from "bun:test";
import { existsSync, mkdirSync, rmSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
import type { Manifest } from "../manifest";
import { mock, spyOn } from "bun:test";
import { existsSync, mkdirSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
// ── Mock Data ──────────────────────────────────────────────────────────────────
export const createMockManifest = (): Manifest => ({

View file

@ -1,4 +1,4 @@
import { describe, it, expect } from "bun:test";
import { describe, expect, it } from "bun:test";
const { validateServerName, validateRegionName, validateModelId, toKebabCase, sanitizeTermValue, jsonEscape } =
await import("../shared/ui.js");

View file

@ -1,5 +1,5 @@
import { describe, it, expect } from "bun:test";
import { KNOWN_FLAGS, findUnknownFlag, expandEqualsFlags } from "../flags";
import { describe, expect, it } from "bun:test";
import { expandEqualsFlags, findUnknownFlag, KNOWN_FLAGS } from "../flags";
/**
* Tests for unknown flag detection and flag expansion in CLI argument parsing.

View file

@ -1,4 +1,4 @@
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from "bun:test";
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
import fs from "node:fs";
import path from "node:path";

View file

@ -1,4 +1,4 @@
import { describe, it, expect, mock, spyOn } from "bun:test";
import { describe, expect, it, mock, spyOn } from "bun:test";
// Suppress log output during tests
spyOn(process.stderr, "write").mockImplementation(() => true);

View file

@ -1,7 +1,7 @@
// aws/agents.ts — AWS Lightsail agent configs (thin wrapper over shared)
import { runServer, uploadFile } from "./aws";
import { createCloudAgents } from "../shared/agent-setup";
import { runServer, uploadFile } from "./aws";
export const { agents, resolveAgent } = createCloudAgents({
runServer,

View file

@ -1,39 +1,39 @@
// aws/aws.ts — Core AWS Lightsail provider: auth, provisioning, SSH execution
import { existsSync, mkdirSync, readFileSync } from "node:fs";
import type { CloudInitTier } from "../shared/agents";
import { createHash, createHmac } from "node:crypto";
import { existsSync, mkdirSync, readFileSync } from "node:fs";
import * as v from "valibot";
import { saveVmConnection } from "../history.js";
import { getPackagesForTier, NODE_INSTALL_CMD, needsBun, needsNode } from "../shared/cloud-init";
import { parseJsonWith } from "../shared/parse";
import {
logInfo,
logWarn,
logError,
logStep,
logStepInline,
logStepDone,
prompt,
selectFromList,
getSpawnCloudConfigPath,
validateServerName,
validateRegionName,
toKebabCase,
defaultSpawnName,
sanitizeTermValue,
jsonEscape,
} from "../shared/ui";
import type { CloudInitTier } from "../shared/agents";
import { getPackagesForTier, needsNode, needsBun, NODE_INSTALL_CMD } from "../shared/cloud-init";
import {
killWithTimeout,
SSH_BASE_OPTS,
SSH_INTERACTIVE_OPTS,
sleep,
waitForSsh as sharedWaitForSsh,
killWithTimeout,
sleep,
spawnInteractive,
} from "../shared/ssh";
import { ensureSshKeys, getSshKeyOpts } from "../shared/ssh-keys";
import * as v from "valibot";
import { parseJsonWith } from "../shared/parse";
import { saveVmConnection } from "../history.js";
import {
defaultSpawnName,
getSpawnCloudConfigPath,
jsonEscape,
logError,
logInfo,
logStep,
logStepDone,
logStepInline,
logWarn,
prompt,
sanitizeTermValue,
selectFromList,
toKebabCase,
validateRegionName,
validateServerName,
} from "../shared/ui";
const DASHBOARD_URL = "https://lightsail.aws.amazon.com/";
@ -956,7 +956,16 @@ export async function waitForInstance(maxAttempts = 60): Promise<void> {
logInfo(`Instance running: IP=${instanceIp}`);
// Save connection info
saveVmConnection(instanceIp, SSH_USER, "", instanceName, "aws");
saveVmConnection(
instanceIp,
SSH_USER,
"",
instanceName,
"aws",
undefined,
undefined,
process.env.SPAWN_ID || undefined,
);
return;
}

View file

@ -1,25 +1,27 @@
#!/usr/bin/env bun
// aws/main.ts — Orchestrator: deploys an agent on AWS Lightsail
import {
ensureAwsCli,
authenticate,
promptRegion,
promptBundle,
ensureSshKey,
promptSpawnName,
createInstance,
waitForInstance,
waitForCloudInit,
getServerName,
runServer,
uploadFile,
interactiveSession,
} from "./aws";
import { agents, resolveAgent } from "./agents";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { saveLaunchCmd } from "../history.js";
import { runOrchestration } from "../shared/orchestrate";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { agents, resolveAgent } from "./agents";
import {
authenticate,
createInstance,
ensureAwsCli,
ensureSshKey,
getServerName,
interactiveSession,
promptBundle,
promptRegion,
promptSpawnName,
runServer,
uploadFile,
waitForCloudInit,
waitForInstance,
} from "./aws";
async function main() {
const agentName = process.argv[2];
@ -49,7 +51,8 @@ async function main() {
async promptSize() {
// Bundle selection handled during authenticate()
},
async createServer(name: string) {
async createServer(name: string, spawnId?: string) {
process.env.SPAWN_ID = spawnId || "";
await createInstance(name, agent.cloudInitTier);
},
getServerName,
@ -58,7 +61,7 @@ async function main() {
await waitForCloudInit();
},
interactiveSession,
saveLaunchCmd,
saveLaunchCmd: (cmd: string, sid?: string) => saveLaunchCmd(cmd, sid),
};
await runOrchestration(cloud, agent, agentName);

View file

@ -1,9 +1,10 @@
import type { VMConnection } from "../history.js";
import type { Manifest } from "../manifest.js";
import * as p from "@clack/prompts";
import pc from "picocolors";
import type { Manifest } from "../manifest.js";
import type { VMConnection } from "../history.js";
import { getHistoryPath } from "../history.js";
import { validateConnectionIP, validateUsername, validateServerIdentifier, validateLaunchCmd } from "../security.js";
import { validateConnectionIP, validateLaunchCmd, validateServerIdentifier, validateUsername } from "../security.js";
import { SSH_INTERACTIVE_OPTS, spawnInteractive } from "../shared/ssh.js";
import { ensureSshKeys, getSshKeyOpts } from "../shared/ssh-keys.js";
import { getErrorMessage } from "./shared.js";

View file

@ -1,23 +1,24 @@
import type { SpawnRecord } from "../history.js";
import type { Manifest } from "../manifest.js";
import * as p from "@clack/prompts";
import pc from "picocolors";
import type { Manifest } from "../manifest.js";
import { loadManifest } from "../manifest.js";
import type { SpawnRecord } from "../history.js";
import { getActiveServers, markRecordDeleted, getHistoryPath } from "../history.js";
import { validateServerIdentifier, validateMetadataValue } from "../security.js";
import { destroyServer as hetznerDestroyServer, ensureHcloudToken } from "../hetzner/hetzner.js";
import { authenticate as awsAuthenticate, destroyServer as awsDestroyServer, ensureAwsCli } from "../aws/aws.js";
import { destroyServer as daytonaDestroyServer, ensureDaytonaToken } from "../daytona/daytona.js";
import { destroyServer as doDestroyServer, ensureDoToken } from "../digitalocean/digitalocean.js";
import {
authenticate as gcpAuthenticate,
destroyInstance as gcpDestroyInstance,
ensureGcloudCli as gcpEnsureGcloudCli,
authenticate as gcpAuthenticate,
resolveProject as gcpResolveProject,
} from "../gcp/gcp.js";
import { destroyServer as awsDestroyServer, ensureAwsCli, authenticate as awsAuthenticate } from "../aws/aws.js";
import { destroyServer as daytonaDestroyServer, ensureDaytonaToken } from "../daytona/daytona.js";
import { destroyServer as spriteDestroyServer, ensureSpriteCli, ensureSpriteAuthenticated } from "../sprite/sprite.js";
import { ensureHcloudToken, destroyServer as hetznerDestroyServer } from "../hetzner/hetzner.js";
import { getActiveServers, getHistoryPath, markRecordDeleted } from "../history.js";
import { loadManifest } from "../manifest.js";
import { validateMetadataValue, validateServerIdentifier } from "../security.js";
import { ensureSpriteAuthenticated, ensureSpriteCli, destroyServer as spriteDestroyServer } from "../sprite/sprite.js";
import { activeServerPicker, resolveListFilters } from "./list.js";
import { getErrorMessage, isInteractiveTTY } from "./shared.js";
import { resolveListFilters, activeServerPicker } from "./list.js";
/**
* Ensure credentials are available for a record's cloud provider.

View file

@ -1,5 +1,5 @@
import pc from "picocolors";
import { SPAWN_CDN, REPO } from "../manifest.js";
import { REPO, SPAWN_CDN } from "../manifest.js";
function getHelpUsageSection(): string {
return `${pc.bold("USAGE")}

View file

@ -1,74 +1,67 @@
// Barrel re-export — keeps all existing `import { ... } from "./commands.js"` working.
// shared.ts — helpers, entity resolution, fuzzy matching, credentials
export {
getErrorMessage,
loadManifestWithSpinner,
getImplementedClouds,
levenshtein,
findClosestMatch,
findClosestKeyByNameOrKey,
resolveAgentKey,
resolveCloudKey,
checkEntity,
hasCloudCli,
prioritizeCloudsByCredentials,
buildAgentPickerHints,
formatCredStatusLine,
preflightCredentialCheck,
credentialHints,
isInteractiveTTY,
resolveDisplayName,
buildRetryCommand,
getStatusDescription,
getImplementedAgents,
parseAuthEnvVars,
hasCloudCredentials,
} from "./shared.js";
// interactive.ts — cmdInteractive, cmdAgentInteractive
export { cmdInteractive, cmdAgentInteractive } from "./interactive.js";
// run.ts — cmdRun, cmdRunHeadless, script failure guidance
export type { HeadlessOptions } from "./run.js";
export {
cmdRun,
cmdRunHeadless,
getSignalGuidance,
getScriptFailureGuidance,
isRetryableExitCode,
} from "./run.js";
// list.ts — cmdList, cmdLast, cmdListClear, history display
export {
formatRelativeTime,
buildRecordLabel,
buildRecordSubtitle,
cmdListClear,
cmdList,
cmdLast,
} from "./list.js";
// delete.ts — cmdDelete
export { cmdDelete } from "./delete.js";
// info.ts — cmdMatrix, cmdAgents, cmdClouds, cmdAgentInfo, cmdCloudInfo
export {
getTerminalWidth,
calculateColumnWidth,
getMissingClouds,
cmdMatrix,
cmdAgents,
cmdClouds,
cmdAgentInfo,
cmdCloudInfo,
} from "./info.js";
// update.ts — cmdUpdate
export { cmdUpdate } from "./update.js";
// help.ts — cmdHelp
export { cmdHelp } from "./help.js";
// info.ts — cmdMatrix, cmdAgents, cmdClouds, cmdAgentInfo, cmdCloudInfo
export {
calculateColumnWidth,
cmdAgentInfo,
cmdAgents,
cmdCloudInfo,
cmdClouds,
cmdMatrix,
getMissingClouds,
getTerminalWidth,
} from "./info.js";
// interactive.ts — cmdInteractive, cmdAgentInteractive
export { cmdAgentInteractive, cmdInteractive } from "./interactive.js";
// list.ts — cmdList, cmdLast, cmdListClear, history display
export {
buildRecordLabel,
buildRecordSubtitle,
cmdLast,
cmdList,
cmdListClear,
formatRelativeTime,
} from "./list.js";
// pick.ts — cmdPick
export { cmdPick } from "./pick.js";
export {
cmdRun,
cmdRunHeadless,
getScriptFailureGuidance,
getSignalGuidance,
isRetryableExitCode,
} from "./run.js";
// shared.ts — helpers, entity resolution, fuzzy matching, credentials
export {
buildAgentPickerHints,
buildRetryCommand,
checkEntity,
credentialHints,
findClosestKeyByNameOrKey,
findClosestMatch,
formatCredStatusLine,
getErrorMessage,
getImplementedAgents,
getImplementedClouds,
getStatusDescription,
hasCloudCli,
hasCloudCredentials,
isInteractiveTTY,
levenshtein,
loadManifestWithSpinner,
parseAuthEnvVars,
preflightCredentialCheck,
prioritizeCloudsByCredentials,
resolveAgentKey,
resolveCloudKey,
resolveDisplayName,
} from "./shared.js";
// update.ts — cmdUpdate
export { cmdUpdate } from "./update.js";

View file

@ -1,19 +1,20 @@
import pc from "picocolors";
import type { Manifest } from "../manifest.js";
import { agentKeys, cloudKeys, matrixStatus, countImplemented } from "../manifest.js";
import pc from "picocolors";
import { agentKeys, cloudKeys, countImplemented, matrixStatus } from "../manifest.js";
import {
NAME_COLUMN_WIDTH,
loadManifestWithSpinner,
getImplementedClouds,
getImplementedAgents,
getImplementedClouds,
groupByType,
hasCloudCredentials,
loadManifestWithSpinner,
NAME_COLUMN_WIDTH,
parseAuthEnvVars,
printGroupedList,
printInfoHeader,
printQuickStart,
prioritizeCloudsByCredentials,
validateAndGetEntity,
printInfoHeader,
groupByType,
printGroupedList,
printQuickStart,
} from "./shared.js";
// ── Matrix display ───────────────────────────────────────────────────────────

View file

@ -1,21 +1,22 @@
import type { Manifest } from "../manifest.js";
import * as p from "@clack/prompts";
import pc from "picocolors";
import type { Manifest } from "../manifest.js";
import { agentKeys } from "../manifest.js";
import { execScript, showDryRunPreview } from "./run.js";
import {
VERSION,
buildAgentPickerHints,
findClosestKeyByNameOrKey,
getAuthHint,
getImplementedClouds,
handleCancel,
loadManifestWithSpinner,
mapToSelectOptions,
getImplementedClouds,
findClosestKeyByNameOrKey,
resolveAgentKey,
buildAgentPickerHints,
prioritizeCloudsByCredentials,
preflightCredentialCheck,
getAuthHint,
prioritizeCloudsByCredentials,
resolveAgentKey,
VERSION,
} from "./shared.js";
import { execScript, showDryRunPreview } from "./run.js";
// Prompt user to select an agent with hints and type-ahead filtering
async function selectAgent(manifest: Manifest): Promise<string> {

View file

@ -1,22 +1,23 @@
import type { SpawnRecord } from "../history.js";
import type { Manifest } from "../manifest.js";
import * as p from "@clack/prompts";
import pc from "picocolors";
import type { Manifest } from "../manifest.js";
import { loadManifest, agentKeys, cloudKeys } from "../manifest.js";
import type { SpawnRecord } from "../history.js";
import { filterHistory, clearHistory, removeRecord, getActiveServers } from "../history.js";
import {
handleCancel,
getErrorMessage,
resolveAgentKey,
resolveCloudKey,
findClosestKeyByNameOrKey,
isInteractiveTTY,
resolveDisplayName,
buildRetryCommand,
} from "./shared.js";
import { cmdRun } from "./run.js";
import { clearHistory, filterHistory, getActiveServers, removeRecord } from "../history.js";
import { agentKeys, cloudKeys, loadManifest } from "../manifest.js";
import { cmdConnect, cmdEnterAgent } from "./connect.js";
import { confirmAndDelete } from "./delete.js";
import { cmdRun } from "./run.js";
import {
buildRetryCommand,
findClosestKeyByNameOrKey,
getErrorMessage,
handleCancel,
isInteractiveTTY,
resolveAgentKey,
resolveCloudKey,
resolveDisplayName,
} from "./shared.js";
// ── Formatting helpers ───────────────────────────────────────────────────────
@ -306,6 +307,12 @@ export async function handleRecordAction(selected: SpawnRecord, manifest: Manife
});
}
options.push({
value: "remove",
label: "Remove from history",
hint: "remove this entry only",
});
const action = await p.select({
message: "What would you like to do?",
options,
@ -344,6 +351,16 @@ export async function handleRecordAction(selected: SpawnRecord, manifest: Manife
return;
}
if (action === "remove") {
const removed = removeRecord(selected);
if (removed) {
p.log.success("Removed from history.");
} else {
p.log.warn("Could not find record in history.");
}
return;
}
// Rerun (create new spawn). Clear any pre-set name so the user is prompted for
// a fresh one — this prevents cmdRun's duplicate-detection from immediately
// routing them back here in an infinite loop.

View file

@ -1,39 +1,40 @@
import * as p from "@clack/prompts";
import pc from "picocolors";
import type { Manifest } from "../manifest.js";
import { spawn, spawnSync } from "node:child_process";
import * as fs from "node:fs";
import * as path from "node:path";
import { spawn, spawnSync } from "node:child_process";
import type { Manifest } from "../manifest.js";
import { loadManifest, SPAWN_CDN, RAW_BASE, REPO } from "../manifest.js";
import {
validateIdentifier,
validateScriptContent,
validatePrompt,
validateConnectionIP,
validateUsername,
validateServerIdentifier,
} from "../security.js";
import { saveSpawnRecord, getActiveServers } from "../history.js";
import * as p from "@clack/prompts";
import pc from "picocolors";
import { buildDashboardHint, EXIT_CODE_GUIDANCE, SIGNAL_GUIDANCE } from "../guidance-data.js";
import { toKebabCase, prepareStdinForHandoff } from "../shared/ui.js";
import { generateSpawnId, getActiveServers, saveSpawnRecord } from "../history.js";
import { loadManifest, RAW_BASE, REPO, SPAWN_CDN } from "../manifest.js";
import {
FETCH_TIMEOUT,
getErrorMessage,
loadManifestWithSpinner,
resolveAgentKey,
resolveCloudKey,
validateRunSecurity,
validateEntities,
getAuthHint,
preflightCredentialCheck,
collectMissingCredentials,
parseAuthEnvVars,
credentialHints,
formatCredStatusLine,
buildRetryCommand,
} from "./shared.js";
validateConnectionIP,
validateIdentifier,
validatePrompt,
validateScriptContent,
validateServerIdentifier,
validateUsername,
} from "../security.js";
import { prepareStdinForHandoff, toKebabCase } from "../shared/ui.js";
import { promptSpawnName } from "./interactive.js";
import { handleRecordAction } from "./list.js";
import {
buildRetryCommand,
collectMissingCredentials,
credentialHints,
FETCH_TIMEOUT,
formatCredStatusLine,
getAuthHint,
getErrorMessage,
loadManifestWithSpinner,
parseAuthEnvVars,
preflightCredentialCheck,
resolveAgentKey,
resolveCloudKey,
validateEntities,
validateRunSecurity,
} from "./shared.js";
// ── Dry-run helpers ──────────────────────────────────────────────────────────
@ -603,9 +604,11 @@ export async function execScript(
return; // Exit early - cannot proceed without script content
}
// Record the spawn before execution (so it's logged even if the script fails midway)
// Generate a unique spawn ID and record the spawn before execution
const spawnId = generateSpawnId();
try {
saveSpawnRecord({
id: spawnId,
agent,
cloud,
timestamp: new Date().toISOString(),
@ -628,6 +631,9 @@ export async function execScript(
}
}
// Pass spawn ID to the bash script so connection data can be linked back
process.env.SPAWN_ID = spawnId;
const lastErr = runBashScript(scriptContent, prompt, dashboardUrl, debug, spawnName);
if (lastErr) {
reportScriptFailure(lastErr, cloud, agent, authHint, prompt, dashboardUrl, spawnName);

View file

@ -1,13 +1,14 @@
import "../unicode-detect.js"; // Must be first: configures TERM before clack reads it
import type { Manifest } from "../manifest.js";
import * as fs from "node:fs";
import * as p from "@clack/prompts";
import pc from "picocolors";
import * as v from "valibot";
import { isString } from "../shared/type-guards.js";
import * as fs from "node:fs";
import type { Manifest } from "../manifest.js";
import { loadManifest, agentKeys, cloudKeys, matrixStatus, isStaleCache } from "../manifest.js";
import pkg from "../../package.json" with { type: "json" };
import { agentKeys, cloudKeys, isStaleCache, loadManifest, matrixStatus } from "../manifest.js";
import { validateIdentifier, validatePrompt } from "../security.js";
import { isString } from "../shared/type-guards.js";
import { getSpawnCloudConfigPath } from "../shared/ui.js";
// ── Constants ────────────────────────────────────────────────────────────────

View file

@ -1,9 +1,9 @@
import { execFileSync } from "node:child_process";
import * as p from "@clack/prompts";
import pc from "picocolors";
import { execFileSync } from "node:child_process";
import { RAW_BASE, SPAWN_CDN, VERSION_URL } from "../manifest.js";
import { parseJsonWith } from "../shared/parse.js";
import { SPAWN_CDN, VERSION_URL, RAW_BASE } from "../manifest.js";
import { VERSION, PkgVersionSchema, getErrorMessage } from "./shared.js";
import { getErrorMessage, PkgVersionSchema, VERSION } from "./shared.js";
const INSTALL_URL = `${SPAWN_CDN}/cli/install.sh`;
const INSTALL_CMD = `curl --proto '=https' -fsSL ${INSTALL_URL} | bash`;

View file

@ -1,7 +1,7 @@
// daytona/agents.ts — Daytona agent configs (thin wrapper over shared)
import { runServer, uploadFile } from "./daytona";
import { createCloudAgents } from "../shared/agent-setup";
import { runServer, uploadFile } from "./daytona";
export const { agents, resolveAgent } = createCloudAgents({
runServer,

View file

@ -1,30 +1,30 @@
// daytona/daytona.ts — Core Daytona provider: API, SSH, provisioning, execution
import { mkdirSync, readFileSync } from "node:fs";
import type { CloudInitTier } from "../shared/agents";
import { mkdirSync, readFileSync } from "node:fs";
import { saveVmConnection } from "../history.js";
import { getPackagesForTier, NODE_INSTALL_CMD, needsBun, needsNode } from "../shared/cloud-init";
import { parseJsonObj } from "../shared/parse";
import { killWithTimeout, sleep, spawnInteractive } from "../shared/ssh";
import { isString } from "../shared/type-guards";
import {
logInfo,
logWarn,
logError,
logStep,
logStepInline,
logStepDone,
prompt,
jsonEscape,
getSpawnCloudConfigPath,
loadApiToken,
validateServerName,
toKebabCase,
defaultSpawnName,
getSpawnCloudConfigPath,
jsonEscape,
loadApiToken,
logError,
logInfo,
logStep,
logStepDone,
logStepInline,
logWarn,
prompt,
sanitizeTermValue,
selectFromList,
toKebabCase,
validateServerName,
} from "../shared/ui";
import type { CloudInitTier } from "../shared/agents";
import { getPackagesForTier, needsNode, needsBun, NODE_INSTALL_CMD } from "../shared/cloud-init";
import { parseJsonObj } from "../shared/parse";
import { isString } from "../shared/type-guards";
import { saveVmConnection } from "../history.js";
import { sleep, spawnInteractive, killWithTimeout } from "../shared/ssh";
const DAYTONA_API_BASE = "https://app.daytona.io/api";
const DAYTONA_DASHBOARD_URL = "https://app.daytona.io/";
@ -354,7 +354,16 @@ export async function createServer(name: string, sandboxSize?: SandboxSize): Pro
// Set up SSH access
await setupSshAccess();
saveVmConnection("daytona-sandbox", "daytona", sandboxId, name, "daytona");
saveVmConnection(
"daytona-sandbox",
"daytona",
sandboxId,
name,
"daytona",
undefined,
undefined,
process.env.SPAWN_ID || undefined,
);
}
// ─── Execution ───────────────────────────────────────────────────────────────

View file

@ -1,22 +1,24 @@
#!/usr/bin/env bun
// daytona/main.ts — Orchestrator: deploys an agent on Daytona
import {
ensureDaytonaToken,
promptSpawnName,
promptSandboxSize,
getServerName,
createServer as createDaytonaServer,
waitForCloudInit,
runServer,
uploadFile,
interactiveSession,
} from "./daytona";
import type { CloudOrchestrator } from "../shared/orchestrate";
import type { SandboxSize } from "./daytona";
import { agents, resolveAgent } from "./agents";
import { saveLaunchCmd } from "../history.js";
import { runOrchestration } from "../shared/orchestrate";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { agents, resolveAgent } from "./agents";
import {
createServer as createDaytonaServer,
ensureDaytonaToken,
getServerName,
interactiveSession,
promptSandboxSize,
promptSpawnName,
runServer,
uploadFile,
waitForCloudInit,
} from "./daytona";
async function main() {
const agentName = process.argv[2];
@ -44,7 +46,8 @@ async function main() {
async promptSize() {
sandboxSize = await promptSandboxSize();
},
async createServer(name: string) {
async createServer(name: string, spawnId?: string) {
process.env.SPAWN_ID = spawnId || "";
await createDaytonaServer(name, sandboxSize);
},
getServerName,
@ -52,7 +55,7 @@ async function main() {
await waitForCloudInit(agent.cloudInitTier);
},
interactiveSession,
saveLaunchCmd,
saveLaunchCmd: (cmd: string, sid?: string) => saveLaunchCmd(cmd, sid),
};
await runOrchestration(cloud, agent, agentName);

View file

@ -1,7 +1,7 @@
// digitalocean/agents.ts — DigitalOcean agent configs (thin wrapper over shared)
import { runServer, uploadFile } from "./digitalocean";
import { createCloudAgents } from "../shared/agent-setup";
import { runServer, uploadFile } from "./digitalocean";
export const { agents, resolveAgent } = createCloudAgents({
runServer,

View file

@ -1,39 +1,39 @@
// digitalocean/digitalocean.ts — Core DigitalOcean provider: API, auth, SSH, provisioning
import { mkdirSync, readFileSync } from "node:fs";
import {
logInfo,
logWarn,
logError,
logStep,
logStepInline,
logStepDone,
prompt,
openBrowser,
getSpawnCloudConfigPath,
validateServerName,
validateRegionName,
toKebabCase,
defaultSpawnName,
sanitizeTermValue,
selectFromList,
loadApiToken,
} from "../shared/ui";
import type { CloudInitTier } from "../shared/agents";
import { getPackagesForTier, needsNode, needsBun, NODE_INSTALL_CMD } from "../shared/cloud-init";
import { mkdirSync, readFileSync } from "node:fs";
import { saveVmConnection } from "../history.js";
import { getPackagesForTier, NODE_INSTALL_CMD, needsBun, needsNode } from "../shared/cloud-init";
import { parseJsonObj } from "../shared/parse";
import { isString, isNumber, toObjectArray } from "../shared/type-guards";
import {
killWithTimeout,
SSH_BASE_OPTS,
SSH_INTERACTIVE_OPTS,
sleep,
waitForSsh as sharedWaitForSsh,
killWithTimeout,
sleep,
spawnInteractive,
} from "../shared/ssh";
import { ensureSshKeys, getSshFingerprint, getSshKeyOpts } from "../shared/ssh-keys";
import { saveVmConnection } from "../history.js";
import { isNumber, isString, toObjectArray } from "../shared/type-guards";
import {
defaultSpawnName,
getSpawnCloudConfigPath,
loadApiToken,
logError,
logInfo,
logStep,
logStepDone,
logStepInline,
logWarn,
openBrowser,
prompt,
sanitizeTermValue,
selectFromList,
toKebabCase,
validateRegionName,
validateServerName,
} from "../shared/ui";
const DO_API_BASE = "https://api.digitalocean.com/v2";
const DO_DASHBOARD_URL = "https://cloud.digitalocean.com/droplets";
@ -836,7 +836,16 @@ export async function createServer(
// Wait for droplet to become active and get IP
await waitForDropletActive(doDropletId);
saveVmConnection(doServerIp, "root", doDropletId, name, "digitalocean");
saveVmConnection(
doServerIp,
"root",
doDropletId,
name,
"digitalocean",
undefined,
undefined,
process.env.SPAWN_ID || undefined,
);
}
async function waitForDropletActive(dropletId: string, maxAttempts = 60): Promise<void> {

View file

@ -1,24 +1,26 @@
#!/usr/bin/env bun
// digitalocean/main.ts — Orchestrator: deploys an agent on DigitalOcean
import {
ensureDoToken,
ensureSshKey,
promptSpawnName,
promptDropletSize,
promptDoRegion,
createServer as createDroplet,
getServerName,
waitForCloudInit,
runServer,
uploadFile,
interactiveSession,
} from "./digitalocean";
import { agents, resolveAgent } from "./agents";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { saveLaunchCmd } from "../history.js";
import { runOrchestration } from "../shared/orchestrate";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { logStep } from "../shared/ui";
import { agents, resolveAgent } from "./agents";
import {
createServer as createDroplet,
ensureDoToken,
ensureSshKey,
getServerName,
interactiveSession,
promptDoRegion,
promptDropletSize,
promptSpawnName,
runServer,
uploadFile,
waitForCloudInit,
} from "./digitalocean";
async function main() {
const agentName = process.argv[2];
@ -53,7 +55,8 @@ async function main() {
dropletSize = await promptDropletSize();
region = await promptDoRegion();
},
async createServer(name: string) {
async createServer(name: string, spawnId?: string) {
process.env.SPAWN_ID = spawnId || "";
await createDroplet(name, agent.cloudInitTier, dropletSize, region, agent.slowInstall ? agentName : undefined);
},
getServerName,
@ -61,7 +64,7 @@ async function main() {
await waitForCloudInit();
},
interactiveSession,
saveLaunchCmd,
saveLaunchCmd: (cmd: string, sid?: string) => saveLaunchCmd(cmd, sid),
};
await runOrchestration(cloud, agent, agentName);

View file

@ -1,7 +1,7 @@
// gcp/agents.ts — GCP Compute Engine agent configs (thin wrapper over shared)
import { runServer, uploadFile } from "./gcp";
import { createCloudAgents } from "../shared/agent-setup";
import { runServer, uploadFile } from "./gcp";
export const { agents, resolveAgent } = createCloudAgents({
runServer,

View file

@ -1,35 +1,35 @@
// gcp/gcp.ts — Core GCP Compute Engine provider: gcloud CLI wrapper, auth, provisioning, SSH
import type { CloudInitTier } from "../shared/agents";
import { existsSync, readFileSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import {
logInfo,
logWarn,
logError,
logStep,
logStepInline,
logStepDone,
prompt,
selectFromList,
validateServerName,
toKebabCase,
defaultSpawnName,
sanitizeTermValue,
} from "../shared/ui";
import type { CloudInitTier } from "../shared/agents";
import { getPackagesForTier, needsNode, needsBun, NODE_INSTALL_CMD } from "../shared/cloud-init";
import { saveVmConnection } from "../history.js";
import { getPackagesForTier, NODE_INSTALL_CMD, needsBun, needsNode } from "../shared/cloud-init";
import {
killWithTimeout,
SSH_BASE_OPTS,
SSH_INTERACTIVE_OPTS,
sleep,
waitForSsh as sharedWaitForSsh,
killWithTimeout,
sleep,
spawnInteractive,
} from "../shared/ssh";
import { ensureSshKeys, getSshKeyOpts } from "../shared/ssh-keys";
import { saveVmConnection } from "../history.js";
import {
defaultSpawnName,
logError,
logInfo,
logStep,
logStepDone,
logStepInline,
logWarn,
prompt,
sanitizeTermValue,
selectFromList,
toKebabCase,
validateServerName,
} from "../shared/ui";
const DASHBOARD_URL = "https://console.cloud.google.com/compute/instances";
@ -760,10 +760,19 @@ export async function createInstance(
logInfo(`Instance created: IP=${gcpServerIp}`);
// Save connection info with zone/project for later deletion
saveVmConnection(gcpServerIp, username, "", name, "gcp", undefined, {
zone,
project: gcpProject,
});
saveVmConnection(
gcpServerIp,
username,
"",
name,
"gcp",
undefined,
{
zone,
project: gcpProject,
},
process.env.SPAWN_ID || undefined,
);
}
// ─── SSH Operations ─────────────────────────────────────────────────────────

View file

@ -1,24 +1,26 @@
#!/usr/bin/env bun
// gcp/main.ts — Orchestrator: deploys an agent on GCP Compute Engine
import {
ensureGcloudCli,
authenticate,
resolveProject,
promptSpawnName,
promptMachineType,
promptZone,
getServerName,
createInstance,
waitForCloudInit,
runServer,
uploadFile,
interactiveSession,
} from "./gcp";
import { agents, resolveAgent } from "./agents";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { saveLaunchCmd } from "../history.js";
import { runOrchestration } from "../shared/orchestrate";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { agents, resolveAgent } from "./agents";
import {
authenticate,
createInstance,
ensureGcloudCli,
getServerName,
interactiveSession,
promptMachineType,
promptSpawnName,
promptZone,
resolveProject,
runServer,
uploadFile,
waitForCloudInit,
} from "./gcp";
async function main() {
const agentName = process.argv[2];
@ -50,7 +52,8 @@ async function main() {
machineType = await promptMachineType();
zone = await promptZone();
},
async createServer(name: string) {
async createServer(name: string, spawnId?: string) {
process.env.SPAWN_ID = spawnId || "";
await createInstance(name, zone, machineType, agent.cloudInitTier);
},
getServerName,
@ -58,7 +61,7 @@ async function main() {
await waitForCloudInit();
},
interactiveSession,
saveLaunchCmd,
saveLaunchCmd: (cmd: string, sid?: string) => saveLaunchCmd(cmd, sid),
};
await runOrchestration(cloud, agent, agentName);

View file

@ -1,7 +1,7 @@
// hetzner/agents.ts — Hetzner Cloud agent configs (thin wrapper over shared)
import { runServer, uploadFile } from "./hetzner";
import { createCloudAgents } from "../shared/agent-setup";
import { runServer, uploadFile } from "./hetzner";
export const { agents, resolveAgent } = createCloudAgents({
runServer,

View file

@ -1,39 +1,39 @@
// hetzner/hetzner.ts — Core Hetzner Cloud provider: API, auth, SSH, provisioning
import { mkdirSync, readFileSync } from "node:fs";
import {
logInfo,
logWarn,
logError,
logStep,
logStepInline,
logStepDone,
prompt,
jsonEscape,
getSpawnCloudConfigPath,
loadApiToken,
validateServerName,
validateRegionName,
toKebabCase,
defaultSpawnName,
sanitizeTermValue,
selectFromList,
} from "../shared/ui";
import type { CloudInitTier } from "../shared/agents";
import { getPackagesForTier, needsNode, needsBun, NODE_INSTALL_CMD } from "../shared/cloud-init";
import { mkdirSync, readFileSync } from "node:fs";
import { saveVmConnection } from "../history.js";
import { getPackagesForTier, NODE_INSTALL_CMD, needsBun, needsNode } from "../shared/cloud-init";
import { parseJsonObj } from "../shared/parse";
import {
killWithTimeout,
SSH_BASE_OPTS,
SSH_INTERACTIVE_OPTS,
sleep,
waitForSsh as sharedWaitForSsh,
killWithTimeout,
sleep,
spawnInteractive,
} from "../shared/ssh";
import { ensureSshKeys, getSshFingerprint, getSshKeyOpts } from "../shared/ssh-keys";
import { parseJsonObj } from "../shared/parse";
import { isString, isNumber, toObjectArray, toRecord } from "../shared/type-guards";
import { saveVmConnection } from "../history.js";
import { isNumber, isString, toObjectArray, toRecord } from "../shared/type-guards";
import {
defaultSpawnName,
getSpawnCloudConfigPath,
jsonEscape,
loadApiToken,
logError,
logInfo,
logStep,
logStepDone,
logStepInline,
logWarn,
prompt,
sanitizeTermValue,
selectFromList,
toKebabCase,
validateRegionName,
validateServerName,
} from "../shared/ui";
const HETZNER_API_BASE = "https://api.hetzner.cloud/v1";
const HETZNER_DASHBOARD_URL = "https://console.hetzner.cloud/";
@ -428,7 +428,16 @@ export async function createServer(
}
logInfo(`Server created: ID=${hetznerServerId}, IP=${hetznerServerIp}`);
saveVmConnection(hetznerServerIp, "root", hetznerServerId, name, "hetzner");
saveVmConnection(
hetznerServerIp,
"root",
hetznerServerId,
name,
"hetzner",
undefined,
undefined,
process.env.SPAWN_ID || undefined,
);
}
// ─── SSH Execution ───────────────────────────────────────────────────────────

View file

@ -1,23 +1,25 @@
#!/usr/bin/env bun
// hetzner/main.ts — Orchestrator: deploys an agent on Hetzner Cloud
import {
ensureHcloudToken,
ensureSshKey,
promptSpawnName,
promptServerType,
promptLocation,
createServer as createHetznerServer,
getServerName,
waitForCloudInit,
runServer,
uploadFile,
interactiveSession,
} from "./hetzner";
import { agents, resolveAgent } from "./agents";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { saveLaunchCmd } from "../history.js";
import { runOrchestration } from "../shared/orchestrate";
import type { CloudOrchestrator } from "../shared/orchestrate";
import { agents, resolveAgent } from "./agents";
import {
createServer as createHetznerServer,
ensureHcloudToken,
ensureSshKey,
getServerName,
interactiveSession,
promptLocation,
promptServerType,
promptSpawnName,
runServer,
uploadFile,
waitForCloudInit,
} from "./hetzner";
async function main() {
const agentName = process.argv[2];
@ -48,7 +50,8 @@ async function main() {
serverType = await promptServerType();
location = await promptLocation();
},
async createServer(name: string) {
async createServer(name: string, spawnId?: string) {
process.env.SPAWN_ID = spawnId || "";
await createHetznerServer(name, serverType, location, agent.cloudInitTier);
},
getServerName,
@ -56,7 +59,7 @@ async function main() {
await waitForCloudInit();
},
interactiveSession,
saveLaunchCmd,
saveLaunchCmd: (cmd: string, sid?: string) => saveLaunchCmd(cmd, sid),
};
await runOrchestration(cloud, agent, agentName);

Some files were not shown because too many files have changed in this diff Show more