mirror of
https://github.com/OpenRouterTeam/spawn.git
synced 2026-04-28 03:49:31 +00:00
Remove Daytona cloud provider from codebase (#2261)
Simplify the cloud matrix by removing Daytona. All Daytona-specific code, scripts, tests, and configuration have been removed. Daytona has been moved to "Previously Considered" in the Cloud Provider Wishlist (#1183) and can be revived on community demand. Closes #2260 Co-authored-by: Claude <claude@anthropic.com> Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
50397f19a3
commit
035e4bf830
26 changed files with 21 additions and 1578 deletions
|
|
@ -17,14 +17,13 @@ Look at `manifest.json` → `matrix` for any `"missing"` entry. To implement it:
|
|||
|
||||
## 2. Add a new cloud provider (HIGH BAR)
|
||||
|
||||
We are currently shipping with **7 curated clouds** (sorted by price):
|
||||
We are currently shipping with **6 curated clouds** (sorted by price):
|
||||
1. **local** — free (no provisioning)
|
||||
2. **hetzner** — ~€3.29/mo (CX22)
|
||||
3. **aws** — $3.50/mo (nano)
|
||||
4. **daytona** — pay-per-second sandboxes
|
||||
5. **digitalocean** — $4/mo (Basic droplet)
|
||||
6. **gcp** — $7.11/mo (e2-micro)
|
||||
7. **sprite** — managed cloud VMs
|
||||
4. **digitalocean** — $4/mo (Basic droplet)
|
||||
5. **gcp** — $7.11/mo (e2-micro)
|
||||
6. **sprite** — managed cloud VMs
|
||||
|
||||
**Do NOT add clouds speculatively.** Every cloud must be manually tested and verified end-to-end before shipping. Adding a cloud that can't be tested is worse than not having it.
|
||||
|
||||
|
|
|
|||
18
README.md
18
README.md
|
|
@ -164,15 +164,15 @@ If an agent fails to install or launch on a cloud:
|
|||
|
||||
## Matrix
|
||||
|
||||
| | [Local Machine](sh/local/) | [Hetzner Cloud](sh/hetzner/) | [AWS Lightsail](sh/aws/) | [Daytona](sh/daytona/) | [DigitalOcean](sh/digitalocean/) | [GCP Compute Engine](sh/gcp/) | [Sprite](sh/sprite/) |
|
||||
|---|---|---|---|---|---|---|---|
|
||||
| [**Claude Code**](https://claude.ai) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**OpenClaw**](https://github.com/openclaw/openclaw) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**ZeroClaw**](https://github.com/zeroclaw-labs/zeroclaw) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**Codex CLI**](https://github.com/openai/codex) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**OpenCode**](https://github.com/sst/opencode) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**Kilo Code**](https://github.com/Kilo-Org/kilocode) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**Hermes Agent**](https://github.com/NousResearch/hermes-agent) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| | [Local Machine](sh/local/) | [Hetzner Cloud](sh/hetzner/) | [AWS Lightsail](sh/aws/) | [DigitalOcean](sh/digitalocean/) | [GCP Compute Engine](sh/gcp/) | [Sprite](sh/sprite/) |
|
||||
|---|---|---|---|---|---|---|
|
||||
| [**Claude Code**](https://claude.ai) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**OpenClaw**](https://github.com/openclaw/openclaw) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**ZeroClaw**](https://github.com/zeroclaw-labs/zeroclaw) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**Codex CLI**](https://github.com/openai/codex) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**OpenCode**](https://github.com/sst/opencode) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**Kilo Code**](https://github.com/Kilo-Org/kilocode) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
| [**Hermes Agent**](https://github.com/NousResearch/hermes-agent) | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ |
|
||||
|
||||
### How it works
|
||||
|
||||
|
|
|
|||
|
|
@ -7,10 +7,6 @@
|
|||
"url": "https://a0.awsstatic.com/libra-css/images/site/touch-icon-ipad-144-smile.png",
|
||||
"ext": "png"
|
||||
},
|
||||
"daytona": {
|
||||
"url": "https://avatars.githubusercontent.com/u/130513197?s=400&v=4",
|
||||
"ext": "png"
|
||||
},
|
||||
"digitalocean": {
|
||||
"url": "https://www.digitalocean.com/_next/static/media/android-chrome-512x512.5f2e6221.png",
|
||||
"ext": "png"
|
||||
|
|
|
|||
Binary file not shown.
|
Before Width: | Height: | Size: 4.3 KiB |
|
|
@ -151,7 +151,7 @@
|
|||
},
|
||||
"notes": "Natively supports OpenRouter via OPENROUTER_API_KEY env var. Go-based TUI using Bubble Tea.",
|
||||
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/agents/opencode.png",
|
||||
"featured_cloud": ["daytona", "gcp", "aws"],
|
||||
"featured_cloud": ["gcp", "aws", "digitalocean"],
|
||||
"creator": "SST",
|
||||
"repo": "sst/opencode",
|
||||
"license": "MIT",
|
||||
|
|
@ -265,24 +265,6 @@
|
|||
"notes": "Uses 'ubuntu' user instead of 'root'. Requires AWS CLI installed and configured.",
|
||||
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/clouds/aws.png"
|
||||
},
|
||||
"daytona": {
|
||||
"name": "Daytona",
|
||||
"description": "Instant sandboxes with pay-per-second pricing",
|
||||
"url": "https://www.daytona.io/",
|
||||
"type": "sandbox",
|
||||
"auth": "DAYTONA_API_KEY",
|
||||
"key_request": false,
|
||||
"provision_method": "daytona create",
|
||||
"exec_method": "daytona exec",
|
||||
"interactive_method": "daytona ssh",
|
||||
"defaults": {
|
||||
"cpu": 2,
|
||||
"memory": 2048,
|
||||
"disk": 5
|
||||
},
|
||||
"notes": "Sub-90ms sandbox creation. True SSH support via daytona ssh. Requires DAYTONA_API_KEY from https://app.daytona.io.",
|
||||
"icon": "https://raw.githubusercontent.com/OpenRouterTeam/spawn/main/assets/clouds/daytona.png"
|
||||
},
|
||||
"digitalocean": {
|
||||
"name": "DigitalOcean",
|
||||
"description": "Developer-friendly Droplets from $4/mo",
|
||||
|
|
@ -347,12 +329,6 @@
|
|||
"aws/codex": "implemented",
|
||||
"aws/opencode": "implemented",
|
||||
"aws/kilocode": "implemented",
|
||||
"daytona/claude": "implemented",
|
||||
"daytona/openclaw": "implemented",
|
||||
"daytona/zeroclaw": "implemented",
|
||||
"daytona/codex": "implemented",
|
||||
"daytona/opencode": "implemented",
|
||||
"daytona/kilocode": "implemented",
|
||||
"digitalocean/claude": "implemented",
|
||||
"digitalocean/openclaw": "implemented",
|
||||
"digitalocean/zeroclaw": "implemented",
|
||||
|
|
@ -374,7 +350,6 @@
|
|||
"local/hermes": "implemented",
|
||||
"hetzner/hermes": "implemented",
|
||||
"aws/hermes": "implemented",
|
||||
"daytona/hermes": "implemented",
|
||||
"digitalocean/hermes": "implemented",
|
||||
"gcp/hermes": "implemented",
|
||||
"sprite/hermes": "implemented"
|
||||
|
|
|
|||
1
packages/cli/.gitignore
vendored
1
packages/cli/.gitignore
vendored
|
|
@ -5,7 +5,6 @@ dist/
|
|||
*.tgz
|
||||
# Cloud provider bundles (built by build-clouds.ts)
|
||||
aws.js
|
||||
daytona.js
|
||||
digitalocean.js
|
||||
gcp.js
|
||||
hetzner.js
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@openrouter/spawn",
|
||||
"version": "0.15.3",
|
||||
"version": "0.15.4",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"spawn": "cli.js"
|
||||
|
|
|
|||
|
|
@ -193,44 +193,6 @@ describe("DigitalOcean --custom prompts", () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe("Daytona --custom prompts", () => {
|
||||
const savedCustom = process.env.SPAWN_CUSTOM;
|
||||
const savedCpu = process.env.DAYTONA_CPU;
|
||||
const savedMemory = process.env.DAYTONA_MEMORY;
|
||||
const savedDisk = process.env.DAYTONA_DISK;
|
||||
|
||||
afterEach(() => {
|
||||
restoreEnv("SPAWN_CUSTOM", savedCustom);
|
||||
restoreEnv("DAYTONA_CPU", savedCpu);
|
||||
restoreEnv("DAYTONA_MEMORY", savedMemory);
|
||||
restoreEnv("DAYTONA_DISK", savedDisk);
|
||||
});
|
||||
|
||||
it("promptSandboxSize should return default without --custom", async () => {
|
||||
delete process.env.DAYTONA_CPU;
|
||||
delete process.env.DAYTONA_MEMORY;
|
||||
delete process.env.DAYTONA_DISK;
|
||||
delete process.env.SPAWN_CUSTOM;
|
||||
const { promptSandboxSize, DEFAULT_SANDBOX_SIZE } = await import("../daytona/daytona");
|
||||
const result = await promptSandboxSize();
|
||||
expect(result.cpu).toBe(DEFAULT_SANDBOX_SIZE.cpu);
|
||||
expect(result.memory).toBe(DEFAULT_SANDBOX_SIZE.memory);
|
||||
expect(result.disk).toBe(DEFAULT_SANDBOX_SIZE.disk);
|
||||
});
|
||||
|
||||
it("promptSandboxSize should respect env vars", async () => {
|
||||
process.env.DAYTONA_CPU = "4";
|
||||
process.env.DAYTONA_MEMORY = "8";
|
||||
process.env.DAYTONA_DISK = "50";
|
||||
process.env.SPAWN_CUSTOM = "1";
|
||||
const { promptSandboxSize } = await import("../daytona/daytona");
|
||||
const result = await promptSandboxSize();
|
||||
expect(result.cpu).toBe(4);
|
||||
expect(result.memory).toBe(8);
|
||||
expect(result.disk).toBe(50);
|
||||
});
|
||||
});
|
||||
|
||||
/** Helper to restore or delete an env var */
|
||||
function restoreEnv(key: string, savedValue: string | undefined): void {
|
||||
if (savedValue !== undefined) {
|
||||
|
|
|
|||
|
|
@ -24,12 +24,10 @@ describe("validateConnectionIP", () => {
|
|||
|
||||
it("should accept special sentinel values", () => {
|
||||
expect(() => validateConnectionIP("sprite-console")).not.toThrow();
|
||||
expect(() => validateConnectionIP("daytona-sandbox")).not.toThrow();
|
||||
expect(() => validateConnectionIP("localhost")).not.toThrow();
|
||||
});
|
||||
|
||||
it("should accept valid hostnames", () => {
|
||||
expect(() => validateConnectionIP("ssh.app.daytona.io")).not.toThrow();
|
||||
expect(() => validateConnectionIP("example.com")).not.toThrow();
|
||||
expect(() => validateConnectionIP("sub.domain.example.com")).not.toThrow();
|
||||
});
|
||||
|
|
|
|||
|
|
@ -68,20 +68,6 @@ export async function cmdConnect(connection: VMConnection): Promise<void> {
|
|||
);
|
||||
}
|
||||
|
||||
// Handle Daytona sandbox connections
|
||||
if (connection.ip === "daytona-sandbox" && connection.server_id) {
|
||||
p.log.step(`Connecting to Daytona sandbox ${pc.bold(connection.server_id)}...`);
|
||||
return runInteractiveCommand(
|
||||
"daytona",
|
||||
[
|
||||
"ssh",
|
||||
connection.server_id,
|
||||
],
|
||||
"Daytona sandbox connection failed",
|
||||
`daytona ssh ${connection.server_id}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Handle SSH connections
|
||||
p.log.step(`Connecting to ${pc.bold(connection.ip)}...`);
|
||||
const sshCmd = `ssh ${connection.user}@${connection.ip}`;
|
||||
|
|
@ -175,24 +161,6 @@ export async function cmdEnterAgent(
|
|||
);
|
||||
}
|
||||
|
||||
// Handle Daytona sandbox connections
|
||||
if (connection.ip === "daytona-sandbox" && connection.server_id) {
|
||||
p.log.step(`Entering ${pc.bold(agentName)} on Daytona sandbox ${pc.bold(connection.server_id)}...`);
|
||||
return runInteractiveCommand(
|
||||
"daytona",
|
||||
[
|
||||
"ssh",
|
||||
connection.server_id,
|
||||
"--",
|
||||
"bash",
|
||||
"-lc",
|
||||
remoteCmd,
|
||||
],
|
||||
`Failed to enter ${agentName}`,
|
||||
`daytona ssh ${connection.server_id} -- bash -lc '${remoteCmd}'`,
|
||||
);
|
||||
}
|
||||
|
||||
// Standard SSH connection with agent launch
|
||||
p.log.step(`Entering ${pc.bold(agentName)} on ${pc.bold(connection.ip)}...`);
|
||||
const escapedRemoteCmd = remoteCmd.replace(/'/g, "'\\''");
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@ import type { Manifest } from "../manifest.js";
|
|||
import * as p from "@clack/prompts";
|
||||
import pc from "picocolors";
|
||||
import { authenticate as awsAuthenticate, destroyServer as awsDestroyServer, ensureAwsCli } from "../aws/aws.js";
|
||||
import { destroyServer as daytonaDestroyServer, ensureDaytonaToken } from "../daytona/daytona.js";
|
||||
import { destroyServer as doDestroyServer, ensureDoToken } from "../digitalocean/digitalocean.js";
|
||||
import {
|
||||
authenticate as gcpAuthenticate,
|
||||
|
|
@ -57,9 +56,6 @@ async function ensureDeleteCredentials(record: SpawnRecord): Promise<void> {
|
|||
await ensureAwsCli();
|
||||
await awsAuthenticate();
|
||||
break;
|
||||
case "daytona":
|
||||
await ensureDaytonaToken();
|
||||
break;
|
||||
case "sprite":
|
||||
await ensureSpriteCli();
|
||||
await ensureSpriteAuthenticated();
|
||||
|
|
@ -163,12 +159,6 @@ async function execDeleteServer(record: SpawnRecord): Promise<boolean> {
|
|||
await awsDestroyServer(id);
|
||||
});
|
||||
|
||||
case "daytona":
|
||||
return tryDelete(async () => {
|
||||
await ensureDaytonaToken();
|
||||
await daytonaDestroyServer(id);
|
||||
});
|
||||
|
||||
case "sprite":
|
||||
return tryDelete(async () => {
|
||||
await ensureSpriteCli();
|
||||
|
|
|
|||
|
|
@ -284,12 +284,7 @@ export async function handleRecordAction(selected: SpawnRecord, manifest: Manife
|
|||
options.push({
|
||||
value: "reconnect",
|
||||
label: "SSH into VM",
|
||||
hint:
|
||||
conn.ip === "sprite-console"
|
||||
? `sprite console -s ${conn.server_name}`
|
||||
: conn.ip === "daytona-sandbox"
|
||||
? `daytona ssh ${conn.server_id}`
|
||||
: `ssh ${conn.user}@${conn.ip}`,
|
||||
hint: conn.ip === "sprite-console" ? `sprite console -s ${conn.server_name}` : `ssh ${conn.user}@${conn.ip}`,
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,9 +0,0 @@
|
|||
// daytona/agents.ts — Daytona agent configs (thin wrapper over shared)
|
||||
|
||||
import { createCloudAgents } from "../shared/agent-setup";
|
||||
import { runServer, uploadFile } from "./daytona";
|
||||
|
||||
export const { agents, resolveAgent } = createCloudAgents({
|
||||
runServer,
|
||||
uploadFile,
|
||||
});
|
||||
|
|
@ -1,670 +0,0 @@
|
|||
// daytona/daytona.ts — Core Daytona provider: API, SSH, provisioning, execution
|
||||
|
||||
import type { CloudInitTier } from "../shared/agents";
|
||||
|
||||
import { mkdirSync, readFileSync } from "node:fs";
|
||||
import { saveVmConnection } from "../history.js";
|
||||
import { getPackagesForTier, NODE_INSTALL_CMD, needsBun, needsNode } from "../shared/cloud-init";
|
||||
import { parseJsonObj } from "../shared/parse";
|
||||
import { killWithTimeout, sleep, spawnInteractive } from "../shared/ssh";
|
||||
import { isString } from "../shared/type-guards";
|
||||
import {
|
||||
defaultSpawnName,
|
||||
getSpawnCloudConfigPath,
|
||||
jsonEscape,
|
||||
loadApiToken,
|
||||
logError,
|
||||
logInfo,
|
||||
logStep,
|
||||
logStepDone,
|
||||
logStepInline,
|
||||
logWarn,
|
||||
prompt,
|
||||
sanitizeTermValue,
|
||||
selectFromList,
|
||||
toKebabCase,
|
||||
validateServerName,
|
||||
} from "../shared/ui";
|
||||
|
||||
const DAYTONA_API_BASE = "https://app.daytona.io/api";
|
||||
const DAYTONA_DASHBOARD_URL = "https://app.daytona.io/";
|
||||
|
||||
// ─── State ───────────────────────────────────────────────────────────────────
|
||||
|
||||
export interface DaytonaState {
|
||||
apiKey: string;
|
||||
sandboxId: string;
|
||||
sshToken: string;
|
||||
sshHost: string;
|
||||
sshPort: string;
|
||||
}
|
||||
|
||||
let _state: DaytonaState = {
|
||||
apiKey: "",
|
||||
sandboxId: "",
|
||||
sshToken: "",
|
||||
sshHost: "",
|
||||
sshPort: "",
|
||||
};
|
||||
|
||||
/** Reset session state — used in tests for isolation. */
|
||||
export function resetDaytonaState(): void {
|
||||
_state = {
|
||||
apiKey: "",
|
||||
sandboxId: "",
|
||||
sshToken: "",
|
||||
sshHost: "",
|
||||
sshPort: "",
|
||||
};
|
||||
}
|
||||
|
||||
// ─── API Client ──────────────────────────────────────────────────────────────
|
||||
|
||||
async function daytonaApi(method: string, endpoint: string, body?: string, maxRetries = 3): Promise<string> {
|
||||
const url = `${DAYTONA_API_BASE}${endpoint}`;
|
||||
|
||||
let interval = 2;
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
try {
|
||||
const headers: Record<string, string> = {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${_state.apiKey}`,
|
||||
};
|
||||
const opts: RequestInit = {
|
||||
method,
|
||||
headers,
|
||||
};
|
||||
if (body && (method === "POST" || method === "PUT" || method === "PATCH")) {
|
||||
opts.body = body;
|
||||
}
|
||||
const resp = await fetch(url, {
|
||||
...opts,
|
||||
signal: AbortSignal.timeout(30_000),
|
||||
});
|
||||
const text = await resp.text();
|
||||
|
||||
if ((resp.status === 429 || resp.status >= 500) && attempt < maxRetries) {
|
||||
logWarn(`API ${resp.status} (attempt ${attempt}/${maxRetries}), retrying in ${interval}s...`);
|
||||
await sleep(interval * 1000);
|
||||
interval = Math.min(interval * 2, 30);
|
||||
continue;
|
||||
}
|
||||
if (!resp.ok) {
|
||||
throw new Error(`Daytona API error ${resp.status}: ${extractApiError(text)}`);
|
||||
}
|
||||
return text;
|
||||
} catch (err) {
|
||||
if (attempt >= maxRetries) {
|
||||
throw err;
|
||||
}
|
||||
logWarn(`API request failed (attempt ${attempt}/${maxRetries}), retrying...`);
|
||||
await sleep(interval * 1000);
|
||||
interval = Math.min(interval * 2, 30);
|
||||
}
|
||||
}
|
||||
throw new Error("daytonaApi: unreachable");
|
||||
}
|
||||
|
||||
function extractApiError(text: string, fallback = "Unknown error"): string {
|
||||
const data = parseJsonObj(text);
|
||||
if (!data) {
|
||||
return fallback;
|
||||
}
|
||||
const msg = data.message || data.error || data.detail;
|
||||
return isString(msg) ? msg : fallback;
|
||||
}
|
||||
|
||||
// ─── Token Management ────────────────────────────────────────────────────────
|
||||
|
||||
async function saveTokenToConfig(token: string): Promise<void> {
|
||||
const configPath = getSpawnCloudConfigPath("daytona");
|
||||
const dir = configPath.replace(/\/[^/]+$/, "");
|
||||
mkdirSync(dir, {
|
||||
recursive: true,
|
||||
mode: 0o700,
|
||||
});
|
||||
const escaped = jsonEscape(token);
|
||||
await Bun.write(configPath, `{\n "api_key": ${escaped},\n "token": ${escaped}\n}\n`, {
|
||||
mode: 0o600,
|
||||
});
|
||||
}
|
||||
|
||||
async function testDaytonaToken(): Promise<boolean> {
|
||||
if (!_state.apiKey) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
await daytonaApi("GET", "/sandbox?page=1&limit=1", undefined, 1);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function ensureDaytonaToken(): Promise<void> {
|
||||
// 1. Env var
|
||||
if (process.env.DAYTONA_API_KEY) {
|
||||
_state.apiKey = process.env.DAYTONA_API_KEY.trim();
|
||||
if (await testDaytonaToken()) {
|
||||
logInfo("Using Daytona API key from environment");
|
||||
await saveTokenToConfig(_state.apiKey);
|
||||
return;
|
||||
}
|
||||
logWarn("DAYTONA_API_KEY from environment is invalid");
|
||||
_state.apiKey = "";
|
||||
}
|
||||
|
||||
// 2. Saved config
|
||||
const saved = loadApiToken("daytona");
|
||||
if (saved) {
|
||||
_state.apiKey = saved;
|
||||
if (await testDaytonaToken()) {
|
||||
logInfo("Using saved Daytona API key");
|
||||
return;
|
||||
}
|
||||
logWarn("Saved Daytona token is invalid or expired");
|
||||
_state.apiKey = "";
|
||||
}
|
||||
|
||||
// 3. Manual token entry
|
||||
logStep("Manual token entry");
|
||||
logWarn("Get your API key from: https://app.daytona.io/dashboard/keys");
|
||||
const token = await prompt("Enter your Daytona API key: ");
|
||||
if (!token) {
|
||||
throw new Error("No token provided");
|
||||
}
|
||||
_state.apiKey = token.trim();
|
||||
if (!(await testDaytonaToken())) {
|
||||
logError("Token is invalid");
|
||||
_state.apiKey = "";
|
||||
throw new Error("Invalid Daytona token");
|
||||
}
|
||||
await saveTokenToConfig(_state.apiKey);
|
||||
logInfo("Using manually entered Daytona API key");
|
||||
}
|
||||
|
||||
// ─── Connection Tracking ─────────────────────────────────────────────────────
|
||||
|
||||
// ─── SSH Helpers ─────────────────────────────────────────────────────────────
|
||||
|
||||
/** Build SSH args common to all SSH operations. */
|
||||
function sshBaseArgs(): string[] {
|
||||
const args = [
|
||||
"ssh",
|
||||
"-o",
|
||||
"StrictHostKeyChecking=no",
|
||||
"-o",
|
||||
"UserKnownHostsFile=/dev/null",
|
||||
"-o",
|
||||
"LogLevel=ERROR",
|
||||
"-o",
|
||||
"ServerAliveInterval=15",
|
||||
"-o",
|
||||
"ServerAliveCountMax=3",
|
||||
"-o",
|
||||
"ConnectTimeout=10",
|
||||
"-o",
|
||||
"PubkeyAuthentication=no",
|
||||
];
|
||||
if (_state.sshPort) {
|
||||
args.push("-o", `Port=${_state.sshPort}`);
|
||||
}
|
||||
return args;
|
||||
}
|
||||
|
||||
// ─── Sandbox Size Options ────────────────────────────────────────────────────
|
||||
|
||||
export interface SandboxSize {
|
||||
id: string;
|
||||
cpu: number;
|
||||
memory: number;
|
||||
disk: number;
|
||||
label: string;
|
||||
}
|
||||
|
||||
export const SANDBOX_SIZES: SandboxSize[] = [
|
||||
{
|
||||
id: "small",
|
||||
cpu: 2,
|
||||
memory: 4,
|
||||
disk: 30,
|
||||
label: "2 vCPU \u00b7 4 GiB RAM \u00b7 30 GiB disk",
|
||||
},
|
||||
{
|
||||
id: "medium",
|
||||
cpu: 4,
|
||||
memory: 8,
|
||||
disk: 50,
|
||||
label: "4 vCPU \u00b7 8 GiB RAM \u00b7 50 GiB disk",
|
||||
},
|
||||
{
|
||||
id: "large",
|
||||
cpu: 8,
|
||||
memory: 16,
|
||||
disk: 100,
|
||||
label: "8 vCPU \u00b7 16 GiB RAM \u00b7 100 GiB disk",
|
||||
},
|
||||
];
|
||||
|
||||
export const DEFAULT_SANDBOX_SIZE = SANDBOX_SIZES[0];
|
||||
|
||||
export async function promptSandboxSize(): Promise<SandboxSize> {
|
||||
if (process.env.DAYTONA_CPU || process.env.DAYTONA_MEMORY) {
|
||||
const cpu = Number.parseInt(process.env.DAYTONA_CPU || "2", 10);
|
||||
const memory = Number.parseInt(process.env.DAYTONA_MEMORY || "4", 10);
|
||||
const disk = Number.parseInt(process.env.DAYTONA_DISK || "30", 10);
|
||||
return {
|
||||
id: "env",
|
||||
cpu,
|
||||
memory,
|
||||
disk,
|
||||
label: `${cpu} vCPU \u00b7 ${memory} GiB RAM \u00b7 ${disk} GiB disk`,
|
||||
};
|
||||
}
|
||||
|
||||
if (process.env.SPAWN_CUSTOM !== "1") {
|
||||
return DEFAULT_SANDBOX_SIZE;
|
||||
}
|
||||
|
||||
if (process.env.SPAWN_NON_INTERACTIVE === "1") {
|
||||
return DEFAULT_SANDBOX_SIZE;
|
||||
}
|
||||
|
||||
process.stderr.write("\n");
|
||||
const items = SANDBOX_SIZES.map((s) => `${s.id}|${s.label}`);
|
||||
const selectedId = await selectFromList(items, "Daytona sandbox size", DEFAULT_SANDBOX_SIZE.id);
|
||||
return SANDBOX_SIZES.find((s) => s.id === selectedId) || DEFAULT_SANDBOX_SIZE;
|
||||
}
|
||||
|
||||
// ─── Provisioning ────────────────────────────────────────────────────────────
|
||||
|
||||
async function setupSshAccess(): Promise<void> {
|
||||
logStep("Setting up SSH access...");
|
||||
|
||||
const sshResp = await daytonaApi("POST", `/sandbox/${_state.sandboxId}/ssh-access?expiresInMinutes=480`);
|
||||
const data = parseJsonObj(sshResp);
|
||||
if (!data) {
|
||||
logError("Failed to parse SSH access response");
|
||||
throw new Error("SSH access parse failure");
|
||||
}
|
||||
|
||||
_state.sshToken = isString(data.token) ? data.token : "";
|
||||
const sshCommand = isString(data.sshCommand) ? data.sshCommand : "";
|
||||
|
||||
if (!_state.sshToken) {
|
||||
logError(`Failed to get SSH access: ${extractApiError(sshResp)}`);
|
||||
throw new Error("SSH access failed");
|
||||
}
|
||||
|
||||
// Parse host from sshCommand (e.g., "ssh -p 2222 TOKEN@HOST" or "ssh TOKEN@HOST")
|
||||
const hostMatch = sshCommand.match(/[^@ ]+$/);
|
||||
_state.sshHost = hostMatch ? hostMatch[0] : "ssh.app.daytona.io";
|
||||
|
||||
// Parse port if present
|
||||
const portMatch = sshCommand.match(/-p\s+(\d+)/);
|
||||
_state.sshPort = portMatch ? portMatch[1] : "";
|
||||
|
||||
logInfo("SSH access ready");
|
||||
}
|
||||
|
||||
export async function createServer(name: string, sandboxSize?: SandboxSize): Promise<void> {
|
||||
const cpu = sandboxSize?.cpu ?? Number.parseInt(process.env.DAYTONA_CPU || "2", 10);
|
||||
const memory = sandboxSize?.memory ?? Number.parseInt(process.env.DAYTONA_MEMORY || "4", 10);
|
||||
const disk = sandboxSize?.disk ?? Number.parseInt(process.env.DAYTONA_DISK || "30", 10);
|
||||
|
||||
logStep(`Creating Daytona sandbox '${name}' (${cpu} vCPU, ${memory} GiB RAM, ${disk} GiB disk)...`);
|
||||
|
||||
const image = process.env.DAYTONA_IMAGE || "daytonaio/sandbox:latest";
|
||||
if (/[^a-zA-Z0-9./:_-]/.test(image)) {
|
||||
logError(`Invalid image name: ${image}`);
|
||||
throw new Error("Invalid image");
|
||||
}
|
||||
const dockerfile = `FROM ${image}`;
|
||||
|
||||
const body = JSON.stringify({
|
||||
name,
|
||||
buildInfo: {
|
||||
dockerfileContent: dockerfile,
|
||||
},
|
||||
cpu,
|
||||
memory,
|
||||
disk,
|
||||
autoStopInterval: 0,
|
||||
autoArchiveInterval: 0,
|
||||
});
|
||||
|
||||
const response = await daytonaApi("POST", "/sandbox", body);
|
||||
const data = parseJsonObj(response);
|
||||
|
||||
_state.sandboxId = isString(data?.id) ? data.id : "";
|
||||
if (!_state.sandboxId) {
|
||||
logError(`Failed to create sandbox: ${extractApiError(response)}`);
|
||||
throw new Error("Sandbox creation failed");
|
||||
}
|
||||
|
||||
logInfo(`Sandbox created: ${_state.sandboxId}`);
|
||||
|
||||
// Wait for sandbox to reach started state
|
||||
logStep("Waiting for sandbox to start...");
|
||||
const maxWait = 120;
|
||||
let waited = 0;
|
||||
while (waited < maxWait) {
|
||||
const statusResp = await daytonaApi("GET", `/sandbox/${_state.sandboxId}`);
|
||||
const statusData = parseJsonObj(statusResp);
|
||||
const state = isString(statusData?.state) ? statusData.state : "";
|
||||
|
||||
if (state === "started" || state === "running") {
|
||||
break;
|
||||
}
|
||||
if (state === "error" || state === "failed") {
|
||||
const reason = isString(statusData?.errorReason) ? statusData.errorReason : "unknown";
|
||||
logError(`Sandbox entered error state: ${reason}`);
|
||||
throw new Error("Sandbox error state");
|
||||
}
|
||||
|
||||
await sleep(3000);
|
||||
waited += 3;
|
||||
}
|
||||
|
||||
if (waited >= maxWait) {
|
||||
logError(`Sandbox did not start within ${maxWait}s`);
|
||||
logWarn(`Check sandbox status at: ${DAYTONA_DASHBOARD_URL}`);
|
||||
throw new Error("Sandbox start timeout");
|
||||
}
|
||||
|
||||
// Set up SSH access
|
||||
await setupSshAccess();
|
||||
|
||||
saveVmConnection(
|
||||
"daytona-sandbox",
|
||||
"daytona",
|
||||
_state.sandboxId,
|
||||
name,
|
||||
"daytona",
|
||||
undefined,
|
||||
undefined,
|
||||
process.env.SPAWN_ID || undefined,
|
||||
);
|
||||
}
|
||||
|
||||
// ─── Execution ───────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Run a command on the remote sandbox via SSH.
|
||||
* Adds a brief sleep after each call to let Daytona's gateway release the connection slot.
|
||||
*/
|
||||
export async function runServer(cmd: string, timeoutSecs?: number): Promise<void> {
|
||||
const fullCmd = `export PATH="$HOME/.local/bin:$HOME/.bun/bin:$PATH" && ${cmd}`;
|
||||
const args = [
|
||||
...sshBaseArgs(),
|
||||
"-o",
|
||||
"BatchMode=yes",
|
||||
`${_state.sshToken}@${_state.sshHost}`,
|
||||
"--",
|
||||
fullCmd,
|
||||
];
|
||||
|
||||
const proc = Bun.spawn(args, {
|
||||
stdio: [
|
||||
"pipe",
|
||||
"inherit",
|
||||
"inherit",
|
||||
],
|
||||
});
|
||||
// Close stdin but keep process alive (Daytona gateway doesn't propagate stdin EOF)
|
||||
try {
|
||||
proc.stdin!.end();
|
||||
} catch {
|
||||
/* already closed */
|
||||
}
|
||||
const timeout = (timeoutSecs || 300) * 1000;
|
||||
const timer = setTimeout(() => killWithTimeout(proc), timeout);
|
||||
try {
|
||||
const exitCode = await proc.exited;
|
||||
// Brief sleep to let gateway release connection slot
|
||||
await sleep(1000);
|
||||
if (exitCode !== 0) {
|
||||
throw new Error(`run_server failed (exit ${exitCode}): ${cmd}`);
|
||||
}
|
||||
} finally {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
}
|
||||
|
||||
/** Run a command and capture stdout. */
|
||||
export async function runServerCapture(cmd: string, timeoutSecs?: number): Promise<string> {
|
||||
const fullCmd = `export PATH="$HOME/.local/bin:$HOME/.bun/bin:$PATH" && ${cmd}`;
|
||||
const args = [
|
||||
...sshBaseArgs(),
|
||||
"-o",
|
||||
"BatchMode=yes",
|
||||
`${_state.sshToken}@${_state.sshHost}`,
|
||||
"--",
|
||||
fullCmd,
|
||||
];
|
||||
|
||||
const proc = Bun.spawn(args, {
|
||||
stdio: [
|
||||
"pipe",
|
||||
"pipe",
|
||||
"pipe",
|
||||
],
|
||||
});
|
||||
try {
|
||||
proc.stdin!.end();
|
||||
} catch {
|
||||
/* already closed */
|
||||
}
|
||||
const timeout = (timeoutSecs || 300) * 1000;
|
||||
const timer = setTimeout(() => killWithTimeout(proc), timeout);
|
||||
try {
|
||||
// Drain both pipes before awaiting exit to prevent pipe buffer deadlock
|
||||
const [stdout] = await Promise.all([
|
||||
new Response(proc.stdout).text(),
|
||||
new Response(proc.stderr).text(),
|
||||
]);
|
||||
const exitCode = await proc.exited;
|
||||
await sleep(1000);
|
||||
if (exitCode !== 0) {
|
||||
throw new Error(`run_server_capture failed (exit ${exitCode})`);
|
||||
}
|
||||
return stdout.trim();
|
||||
} finally {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Upload a file to the remote sandbox via base64-encoded SSH command channel.
|
||||
* Daytona's SSH gateway doesn't support SCP/SFTP.
|
||||
*/
|
||||
export async function uploadFile(localPath: string, remotePath: string): Promise<void> {
|
||||
if (
|
||||
!/^[a-zA-Z0-9/_.~-]+$/.test(remotePath) ||
|
||||
remotePath.includes("..") ||
|
||||
remotePath.split("/").some((s) => s.startsWith("-"))
|
||||
) {
|
||||
logError(`Invalid remote path: ${remotePath}`);
|
||||
throw new Error("Invalid remote path");
|
||||
}
|
||||
|
||||
const content: Buffer = readFileSync(localPath);
|
||||
const b64 = content.toString("base64");
|
||||
|
||||
const args = [
|
||||
...sshBaseArgs(),
|
||||
"-o",
|
||||
"BatchMode=yes",
|
||||
`${_state.sshToken}@${_state.sshHost}`,
|
||||
"--",
|
||||
`base64 -d > '${remotePath}'`,
|
||||
];
|
||||
|
||||
const proc = Bun.spawn(args, {
|
||||
stdio: [
|
||||
"pipe",
|
||||
"ignore",
|
||||
"ignore",
|
||||
],
|
||||
});
|
||||
try {
|
||||
const stdin = proc.stdin;
|
||||
if (stdin) {
|
||||
stdin.write(b64 + "\n");
|
||||
stdin.end();
|
||||
}
|
||||
} catch {
|
||||
/* stdin already closed */
|
||||
}
|
||||
const exitCode = await proc.exited;
|
||||
|
||||
await sleep(1000);
|
||||
|
||||
if (exitCode !== 0) {
|
||||
throw new Error(`upload_file failed for ${remotePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function interactiveSession(cmd: string): Promise<number> {
|
||||
const term = sanitizeTermValue(process.env.TERM || "xterm-256color");
|
||||
// Single-quote escaping prevents shell expansion ($(), ${}, backticks) unlike JSON.stringify double-quoting
|
||||
const shellEscapedCmd = cmd.replace(/'/g, "'\\''");
|
||||
const fullCmd = `export TERM=${term} PATH="$HOME/.local/bin:$HOME/.bun/bin:$PATH" && exec bash -l -c '${shellEscapedCmd}'`;
|
||||
|
||||
// Interactive mode — drop BatchMode so the PTY works
|
||||
const args = [
|
||||
...sshBaseArgs(),
|
||||
"-t", // Force PTY allocation
|
||||
`${_state.sshToken}@${_state.sshHost}`,
|
||||
"--",
|
||||
fullCmd,
|
||||
];
|
||||
|
||||
const exitCode = spawnInteractive(args);
|
||||
|
||||
// Post-session summary
|
||||
process.stderr.write("\n");
|
||||
logWarn(`Session ended. Your sandbox '${_state.sandboxId}' may still be running.`);
|
||||
logWarn("Remember to delete it when you're done to avoid ongoing charges.");
|
||||
logWarn("");
|
||||
logWarn("Manage or delete it in your dashboard:");
|
||||
logWarn(` ${DAYTONA_DASHBOARD_URL}`);
|
||||
logWarn("");
|
||||
logInfo("To delete from CLI:");
|
||||
logInfo(" spawn delete");
|
||||
|
||||
return exitCode;
|
||||
}
|
||||
|
||||
// ─── Cloud Init ──────────────────────────────────────────────────────────────
|
||||
|
||||
async function waitForSsh(maxAttempts = 20): Promise<void> {
|
||||
logStep("Waiting for SSH connectivity...");
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
try {
|
||||
const output = await runServerCapture("echo ok");
|
||||
if (output.includes("ok")) {
|
||||
logStepDone();
|
||||
logInfo("SSH is ready");
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
logStepInline(`SSH not ready yet (${attempt}/${maxAttempts})`);
|
||||
await sleep(5000);
|
||||
}
|
||||
logStepDone();
|
||||
logError(`SSH connectivity failed after ${maxAttempts} attempts`);
|
||||
throw new Error("SSH wait timeout");
|
||||
}
|
||||
|
||||
export async function waitForCloudInit(tier: CloudInitTier = "full"): Promise<void> {
|
||||
await waitForSsh();
|
||||
|
||||
const packages = getPackagesForTier(tier);
|
||||
logStep("Installing base tools in sandbox...");
|
||||
const parts = [
|
||||
"export DEBIAN_FRONTEND=noninteractive",
|
||||
"apt-get update -y",
|
||||
`apt-get install -y --no-install-recommends ${packages.join(" ")}`,
|
||||
];
|
||||
if (needsNode(tier)) {
|
||||
parts.push(NODE_INSTALL_CMD);
|
||||
}
|
||||
if (needsBun(tier)) {
|
||||
parts.push("curl --proto '=https' -fsSL https://bun.sh/install | bash");
|
||||
}
|
||||
parts.push(
|
||||
`echo 'export PATH="\${HOME}/.local/bin:\${HOME}/.bun/bin:\${PATH}"' >> ~/.bashrc`,
|
||||
`echo 'export PATH="\${HOME}/.local/bin:\${HOME}/.bun/bin:\${PATH}"' >> ~/.zshrc`,
|
||||
);
|
||||
|
||||
try {
|
||||
await runServer(parts.join(" && "));
|
||||
} catch {
|
||||
logWarn("Base tools install had errors, continuing...");
|
||||
}
|
||||
logInfo("Base tools installed");
|
||||
}
|
||||
|
||||
// ─── Server Name ─────────────────────────────────────────────────────────────
|
||||
|
||||
export async function getServerName(): Promise<string> {
|
||||
if (process.env.DAYTONA_SANDBOX_NAME) {
|
||||
const name = process.env.DAYTONA_SANDBOX_NAME;
|
||||
if (!validateServerName(name)) {
|
||||
logError(`Invalid DAYTONA_SANDBOX_NAME: '${name}'`);
|
||||
throw new Error("Invalid server name");
|
||||
}
|
||||
logInfo(`Using sandbox name from environment: ${name}`);
|
||||
return name;
|
||||
}
|
||||
|
||||
const kebab = process.env.SPAWN_NAME_KEBAB || (process.env.SPAWN_NAME ? toKebabCase(process.env.SPAWN_NAME) : "");
|
||||
return kebab || defaultSpawnName();
|
||||
}
|
||||
|
||||
export async function promptSpawnName(): Promise<void> {
|
||||
if (process.env.SPAWN_NAME_KEBAB) {
|
||||
return;
|
||||
}
|
||||
|
||||
let kebab: string;
|
||||
if (process.env.SPAWN_NON_INTERACTIVE === "1") {
|
||||
kebab = (process.env.SPAWN_NAME ? toKebabCase(process.env.SPAWN_NAME) : "") || defaultSpawnName();
|
||||
} else {
|
||||
const derived = process.env.SPAWN_NAME ? toKebabCase(process.env.SPAWN_NAME) : "";
|
||||
const fallback = derived || defaultSpawnName();
|
||||
process.stderr.write("\n");
|
||||
const answer = await prompt(`Daytona workspace name [${fallback}]: `);
|
||||
kebab = toKebabCase(answer || fallback) || defaultSpawnName();
|
||||
}
|
||||
|
||||
process.env.SPAWN_NAME_DISPLAY = kebab;
|
||||
process.env.SPAWN_NAME_KEBAB = kebab;
|
||||
logInfo(`Using resource name: ${kebab}`);
|
||||
}
|
||||
|
||||
// ─── Lifecycle ───────────────────────────────────────────────────────────────
|
||||
|
||||
export async function destroyServer(id?: string): Promise<void> {
|
||||
const targetId = id || _state.sandboxId;
|
||||
if (!targetId) {
|
||||
logWarn("No sandbox ID to destroy");
|
||||
return;
|
||||
}
|
||||
|
||||
logStep(`Destroying sandbox ${targetId}...`);
|
||||
try {
|
||||
await daytonaApi("DELETE", `/sandbox/${targetId}`);
|
||||
} catch (err) {
|
||||
logError(`Failed to destroy sandbox ${targetId}`);
|
||||
logError(err instanceof Error ? err.message : "Unknown error");
|
||||
logWarn("The sandbox may still be running and incurring charges.");
|
||||
logWarn(`Delete it manually at: ${DAYTONA_DASHBOARD_URL}`);
|
||||
throw new Error("Sandbox deletion failed");
|
||||
}
|
||||
|
||||
logInfo("Sandbox destroyed");
|
||||
}
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
#!/usr/bin/env bun
|
||||
|
||||
// daytona/main.ts — Orchestrator: deploys an agent on Daytona
|
||||
|
||||
import type { CloudOrchestrator } from "../shared/orchestrate";
|
||||
import type { SandboxSize } from "./daytona";
|
||||
|
||||
import { saveLaunchCmd } from "../history.js";
|
||||
import { runOrchestration } from "../shared/orchestrate";
|
||||
import { agents, resolveAgent } from "./agents";
|
||||
import {
|
||||
createServer as createDaytonaServer,
|
||||
ensureDaytonaToken,
|
||||
getServerName,
|
||||
interactiveSession,
|
||||
promptSandboxSize,
|
||||
promptSpawnName,
|
||||
runServer,
|
||||
uploadFile,
|
||||
waitForCloudInit,
|
||||
} from "./daytona";
|
||||
|
||||
async function main() {
|
||||
const agentName = process.argv[2];
|
||||
if (!agentName) {
|
||||
console.error("Usage: bun run daytona/main.ts <agent>");
|
||||
console.error(`Agents: ${Object.keys(agents).join(", ")}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const agent = resolveAgent(agentName);
|
||||
|
||||
let sandboxSize: SandboxSize | undefined;
|
||||
|
||||
const cloud: CloudOrchestrator = {
|
||||
cloudName: "daytona",
|
||||
cloudLabel: "Daytona",
|
||||
runner: {
|
||||
runServer,
|
||||
uploadFile,
|
||||
},
|
||||
async authenticate() {
|
||||
await promptSpawnName();
|
||||
await ensureDaytonaToken();
|
||||
},
|
||||
async promptSize() {
|
||||
sandboxSize = await promptSandboxSize();
|
||||
},
|
||||
async createServer(name: string, spawnId?: string) {
|
||||
process.env.SPAWN_ID = spawnId || "";
|
||||
await createDaytonaServer(name, sandboxSize);
|
||||
},
|
||||
getServerName,
|
||||
async waitForReady() {
|
||||
await waitForCloudInit(agent.cloudInitTier);
|
||||
},
|
||||
interactiveSession,
|
||||
saveLaunchCmd: (cmd: string, sid?: string) => saveLaunchCmd(cmd, sid),
|
||||
};
|
||||
|
||||
await runOrchestration(cloud, agent, agentName);
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
const msg = err && typeof err === "object" && "message" in err ? String(err.message) : String(err);
|
||||
process.stderr.write(`\x1b[0;31mFatal: ${msg}\x1b[0m\n`);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
@ -15,7 +15,7 @@ const IPV4_PATTERN = /^(\d{1,3}\.){3}\d{1,3}$/;
|
|||
// IPv6 address pattern (simplified - catches most valid IPv6 addresses)
|
||||
const IPV6_PATTERN = /^([0-9a-fA-F]{0,4}:){2,7}[0-9a-fA-F]{0,4}$/;
|
||||
|
||||
// Hostname pattern: valid DNS hostnames (e.g., ssh.app.daytona.io)
|
||||
// Hostname pattern: valid DNS hostnames (e.g., compute.amazonaws.com)
|
||||
// Only allows safe characters: lowercase alphanumeric, hyphens, dots
|
||||
// Must have at least two labels (e.g., "host.domain")
|
||||
const HOSTNAME_PATTERN = /^[a-z0-9]([a-z0-9-]*[a-z0-9])?(\.[a-z0-9]([a-z0-9-]*[a-z0-9])?)+$/;
|
||||
|
|
@ -27,7 +27,6 @@ const USERNAME_PATTERN = /^[a-z_][a-z0-9_-]*\$?$/;
|
|||
// Special connection sentinel values (not actual IPs)
|
||||
const CONNECTION_SENTINELS = [
|
||||
"sprite-console",
|
||||
"daytona-sandbox",
|
||||
"localhost",
|
||||
];
|
||||
|
||||
|
|
@ -171,8 +170,8 @@ export function validateScriptContent(script: string): void {
|
|||
* Allows:
|
||||
* - Valid IPv4 addresses (e.g., "192.168.1.1")
|
||||
* - Valid IPv6 addresses (e.g., "::1", "2001:db8::1")
|
||||
* - Valid hostnames (e.g., "ssh.app.daytona.io")
|
||||
* - Special sentinel values ("sprite-console", "daytona-sandbox", "localhost")
|
||||
* - Valid hostnames (e.g., "compute.amazonaws.com")
|
||||
* - Special sentinel values ("sprite-console", "localhost")
|
||||
*
|
||||
* @param ip - The IP address or sentinel to validate
|
||||
* @throws Error if validation fails
|
||||
|
|
@ -213,7 +212,7 @@ export function validateConnectionIP(ip: string): void {
|
|||
return;
|
||||
}
|
||||
|
||||
// Validate as hostname (e.g., ssh.app.daytona.io)
|
||||
// Validate as hostname (e.g., compute.amazonaws.com)
|
||||
if (HOSTNAME_PATTERN.test(ip)) {
|
||||
return;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,73 +0,0 @@
|
|||
# Daytona
|
||||
|
||||
Daytona sandboxed environments for AI code execution. [Daytona](https://www.daytona.io/)
|
||||
|
||||
> Sub-90ms sandbox creation. True SSH support via `daytona ssh`. Requires `DAYTONA_API_KEY` from https://app.daytona.io.
|
||||
|
||||
## Agents
|
||||
|
||||
#### Claude Code
|
||||
|
||||
```bash
|
||||
bash <(curl -fsSL https://openrouter.ai/labs/spawn/daytona/claude.sh)
|
||||
```
|
||||
|
||||
#### OpenClaw
|
||||
|
||||
```bash
|
||||
bash <(curl -fsSL https://openrouter.ai/labs/spawn/daytona/openclaw.sh)
|
||||
```
|
||||
|
||||
#### ZeroClaw
|
||||
|
||||
```bash
|
||||
bash <(curl -fsSL https://openrouter.ai/labs/spawn/daytona/zeroclaw.sh)
|
||||
```
|
||||
|
||||
#### Codex CLI
|
||||
|
||||
```bash
|
||||
bash <(curl -fsSL https://openrouter.ai/labs/spawn/daytona/codex.sh)
|
||||
```
|
||||
|
||||
#### OpenCode
|
||||
|
||||
```bash
|
||||
bash <(curl -fsSL https://openrouter.ai/labs/spawn/daytona/opencode.sh)
|
||||
```
|
||||
|
||||
#### Kilo Code
|
||||
|
||||
```bash
|
||||
bash <(curl -fsSL https://openrouter.ai/labs/spawn/daytona/kilocode.sh)
|
||||
```
|
||||
|
||||
#### Hermes
|
||||
|
||||
```bash
|
||||
bash <(curl -fsSL https://openrouter.ai/labs/spawn/daytona/hermes.sh)
|
||||
```
|
||||
|
||||
## Non-Interactive Mode
|
||||
|
||||
```bash
|
||||
DAYTONA_SANDBOX_NAME=dev-mk1 \
|
||||
DAYTONA_API_KEY=your-api-key \
|
||||
OPENROUTER_API_KEY=sk-or-v1-xxxxx \
|
||||
bash <(curl -fsSL https://openrouter.ai/labs/spawn/daytona/claude.sh)
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `DAYTONA_API_KEY` | Daytona API key | _(prompted)_ |
|
||||
| `DAYTONA_SANDBOX_NAME` | Sandbox name | _(prompted)_ |
|
||||
| `DAYTONA_CLASS` | Sandbox class (e.g. `small`, `medium`, `large`) | `small` |
|
||||
| `DAYTONA_CPU` | Number of vCPUs (overrides `--class`) | _(unset)_ |
|
||||
| `DAYTONA_MEMORY` | Memory in MB (overrides `--class`) | _(unset)_ |
|
||||
| `DAYTONA_DISK` | Disk size in GB (overrides `--class`) | _(unset)_ |
|
||||
| `OPENROUTER_API_KEY` | OpenRouter API key | _(OAuth or prompted)_ |
|
||||
|
||||
> **Note:** Daytona rejects explicit `--cpu`/`--memory`/`--disk` flags when using snapshots.
|
||||
> Use `DAYTONA_CLASS` instead. If explicit resource flags fail due to snapshot conflict, spawn automatically retries with `--class small`.
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
# Thin shim: ensures bun is available, runs bundled daytona TypeScript (local or from GitHub release)
|
||||
|
||||
_ensure_bun() {
|
||||
if command -v bun &>/dev/null; then return 0; fi
|
||||
printf '\033[0;36mInstalling bun...\033[0m\n' >&2
|
||||
curl -fsSL --proto '=https' --show-error https://bun.sh/install | bash >/dev/null || { printf '\033[0;31mFailed to install bun\033[0m\n' >&2; exit 1; }
|
||||
export PATH="$HOME/.bun/bin:$PATH"
|
||||
command -v bun &>/dev/null || { printf '\033[0;31mbun not found after install\033[0m\n' >&2; exit 1; }
|
||||
}
|
||||
|
||||
_ensure_bun
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)"
|
||||
|
||||
# SPAWN_CLI_DIR override — force local source (used by e2e tests)
|
||||
if [[ -n "${SPAWN_CLI_DIR:-}" && -f "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" claude "$@"
|
||||
fi
|
||||
|
||||
# Local checkout — run from source
|
||||
if [[ -n "$SCRIPT_DIR" && -f "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" claude "$@"
|
||||
fi
|
||||
|
||||
# Remote — download bundled daytona.js from GitHub release
|
||||
DAYTONA_JS=$(mktemp)
|
||||
trap 'rm -f "$DAYTONA_JS"' EXIT
|
||||
curl -fsSL --proto '=https' "https://github.com/OpenRouterTeam/spawn/releases/download/daytona-latest/daytona.js" -o "$DAYTONA_JS" \
|
||||
|| { printf '\033[0;31mFailed to download daytona.js\033[0m\n' >&2; exit 1; }
|
||||
|
||||
exec bun run "$DAYTONA_JS" claude "$@"
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
# Thin shim: ensures bun is available, runs bundled daytona TypeScript (local or from GitHub release)
|
||||
|
||||
_ensure_bun() {
|
||||
if command -v bun &>/dev/null; then return 0; fi
|
||||
printf '\033[0;36mInstalling bun...\033[0m\n' >&2
|
||||
curl -fsSL --proto '=https' --show-error https://bun.sh/install | bash >/dev/null || { printf '\033[0;31mFailed to install bun\033[0m\n' >&2; exit 1; }
|
||||
export PATH="$HOME/.bun/bin:$PATH"
|
||||
command -v bun &>/dev/null || { printf '\033[0;31mbun not found after install\033[0m\n' >&2; exit 1; }
|
||||
}
|
||||
|
||||
_ensure_bun
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)"
|
||||
|
||||
# SPAWN_CLI_DIR override — force local source (used by e2e tests)
|
||||
if [[ -n "${SPAWN_CLI_DIR:-}" && -f "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" codex "$@"
|
||||
fi
|
||||
|
||||
# Local checkout — run from source
|
||||
if [[ -n "$SCRIPT_DIR" && -f "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" codex "$@"
|
||||
fi
|
||||
|
||||
# Remote — download bundled daytona.js from GitHub release
|
||||
DAYTONA_JS=$(mktemp)
|
||||
trap 'rm -f "$DAYTONA_JS"' EXIT
|
||||
curl -fsSL --proto '=https' "https://github.com/OpenRouterTeam/spawn/releases/download/daytona-latest/daytona.js" -o "$DAYTONA_JS" \
|
||||
|| { printf '\033[0;31mFailed to download daytona.js\033[0m\n' >&2; exit 1; }
|
||||
|
||||
exec bun run "$DAYTONA_JS" codex "$@"
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
# Thin shim: ensures bun is available, runs bundled daytona TypeScript (local or from GitHub release)
|
||||
|
||||
_ensure_bun() {
|
||||
if command -v bun &>/dev/null; then return 0; fi
|
||||
printf '\033[0;36mInstalling bun...\033[0m\n' >&2
|
||||
curl -fsSL --proto '=https' --show-error https://bun.sh/install | bash >/dev/null || { printf '\033[0;31mFailed to install bun\033[0m\n' >&2; exit 1; }
|
||||
export PATH="$HOME/.bun/bin:$PATH"
|
||||
command -v bun &>/dev/null || { printf '\033[0;31mbun not found after install\033[0m\n' >&2; exit 1; }
|
||||
}
|
||||
|
||||
_ensure_bun
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)"
|
||||
|
||||
# SPAWN_CLI_DIR override — force local source (used by e2e tests)
|
||||
if [[ -n "${SPAWN_CLI_DIR:-}" && -f "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" hermes "$@"
|
||||
fi
|
||||
|
||||
# Local checkout — run from source
|
||||
if [[ -n "$SCRIPT_DIR" && -f "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" hermes "$@"
|
||||
fi
|
||||
|
||||
# Remote — download bundled daytona.js from GitHub release
|
||||
DAYTONA_JS=$(mktemp)
|
||||
trap 'rm -f "$DAYTONA_JS"' EXIT
|
||||
curl -fsSL --proto '=https' "https://github.com/OpenRouterTeam/spawn/releases/download/daytona-latest/daytona.js" -o "$DAYTONA_JS" \
|
||||
|| { printf '\033[0;31mFailed to download daytona.js\033[0m\n' >&2; exit 1; }
|
||||
|
||||
exec bun run "$DAYTONA_JS" hermes "$@"
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
# Thin shim: ensures bun is available, runs bundled daytona TypeScript (local or from GitHub release)
|
||||
|
||||
_ensure_bun() {
|
||||
if command -v bun &>/dev/null; then return 0; fi
|
||||
printf '\033[0;36mInstalling bun...\033[0m\n' >&2
|
||||
curl -fsSL --proto '=https' --show-error https://bun.sh/install | bash >/dev/null || { printf '\033[0;31mFailed to install bun\033[0m\n' >&2; exit 1; }
|
||||
export PATH="$HOME/.bun/bin:$PATH"
|
||||
command -v bun &>/dev/null || { printf '\033[0;31mbun not found after install\033[0m\n' >&2; exit 1; }
|
||||
}
|
||||
|
||||
_ensure_bun
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)"
|
||||
|
||||
# SPAWN_CLI_DIR override — force local source (used by e2e tests)
|
||||
if [[ -n "${SPAWN_CLI_DIR:-}" && -f "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" kilocode "$@"
|
||||
fi
|
||||
|
||||
# Local checkout — run from source
|
||||
if [[ -n "$SCRIPT_DIR" && -f "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" kilocode "$@"
|
||||
fi
|
||||
|
||||
# Remote — download bundled daytona.js from GitHub release
|
||||
DAYTONA_JS=$(mktemp)
|
||||
trap 'rm -f "$DAYTONA_JS"' EXIT
|
||||
curl -fsSL --proto '=https' "https://github.com/OpenRouterTeam/spawn/releases/download/daytona-latest/daytona.js" -o "$DAYTONA_JS" \
|
||||
|| { printf '\033[0;31mFailed to download daytona.js\033[0m\n' >&2; exit 1; }
|
||||
|
||||
exec bun run "$DAYTONA_JS" kilocode "$@"
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
# Thin shim: ensures bun is available, runs bundled daytona TypeScript (local or from GitHub release)
|
||||
|
||||
_ensure_bun() {
|
||||
if command -v bun &>/dev/null; then return 0; fi
|
||||
printf '\033[0;36mInstalling bun...\033[0m\n' >&2
|
||||
curl -fsSL --proto '=https' --show-error https://bun.sh/install | bash >/dev/null || { printf '\033[0;31mFailed to install bun\033[0m\n' >&2; exit 1; }
|
||||
export PATH="$HOME/.bun/bin:$PATH"
|
||||
command -v bun &>/dev/null || { printf '\033[0;31mbun not found after install\033[0m\n' >&2; exit 1; }
|
||||
}
|
||||
|
||||
_ensure_bun
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)"
|
||||
|
||||
# SPAWN_CLI_DIR override — force local source (used by e2e tests)
|
||||
if [[ -n "${SPAWN_CLI_DIR:-}" && -f "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" openclaw "$@"
|
||||
fi
|
||||
|
||||
# Local checkout — run from source
|
||||
if [[ -n "$SCRIPT_DIR" && -f "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" openclaw "$@"
|
||||
fi
|
||||
|
||||
# Remote — download bundled daytona.js from GitHub release
|
||||
DAYTONA_JS=$(mktemp)
|
||||
trap 'rm -f "$DAYTONA_JS"' EXIT
|
||||
curl -fsSL --proto '=https' "https://github.com/OpenRouterTeam/spawn/releases/download/daytona-latest/daytona.js" -o "$DAYTONA_JS" \
|
||||
|| { printf '\033[0;31mFailed to download daytona.js\033[0m\n' >&2; exit 1; }
|
||||
|
||||
exec bun run "$DAYTONA_JS" openclaw "$@"
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
# Thin shim: ensures bun is available, runs bundled daytona TypeScript (local or from GitHub release)
|
||||
|
||||
_ensure_bun() {
|
||||
if command -v bun &>/dev/null; then return 0; fi
|
||||
printf '\033[0;36mInstalling bun...\033[0m\n' >&2
|
||||
curl -fsSL --proto '=https' --show-error https://bun.sh/install | bash >/dev/null || { printf '\033[0;31mFailed to install bun\033[0m\n' >&2; exit 1; }
|
||||
export PATH="$HOME/.bun/bin:$PATH"
|
||||
command -v bun &>/dev/null || { printf '\033[0;31mbun not found after install\033[0m\n' >&2; exit 1; }
|
||||
}
|
||||
|
||||
_ensure_bun
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)"
|
||||
|
||||
# SPAWN_CLI_DIR override — force local source (used by e2e tests)
|
||||
if [[ -n "${SPAWN_CLI_DIR:-}" && -f "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" opencode "$@"
|
||||
fi
|
||||
|
||||
# Local checkout — run from source
|
||||
if [[ -n "$SCRIPT_DIR" && -f "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" opencode "$@"
|
||||
fi
|
||||
|
||||
# Remote — download bundled daytona.js from GitHub release
|
||||
DAYTONA_JS=$(mktemp)
|
||||
trap 'rm -f "$DAYTONA_JS"' EXIT
|
||||
curl -fsSL --proto '=https' "https://github.com/OpenRouterTeam/spawn/releases/download/daytona-latest/daytona.js" -o "$DAYTONA_JS" \
|
||||
|| { printf '\033[0;31mFailed to download daytona.js\033[0m\n' >&2; exit 1; }
|
||||
|
||||
exec bun run "$DAYTONA_JS" opencode "$@"
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -eo pipefail
|
||||
|
||||
# Thin shim: ensures bun is available, runs bundled daytona TypeScript (local or from GitHub release)
|
||||
|
||||
_ensure_bun() {
|
||||
if command -v bun &>/dev/null; then return 0; fi
|
||||
printf '\033[0;36mInstalling bun...\033[0m\n' >&2
|
||||
curl -fsSL --proto '=https' --show-error https://bun.sh/install | bash >/dev/null || { printf '\033[0;31mFailed to install bun\033[0m\n' >&2; exit 1; }
|
||||
export PATH="$HOME/.bun/bin:$PATH"
|
||||
command -v bun &>/dev/null || { printf '\033[0;31mbun not found after install\033[0m\n' >&2; exit 1; }
|
||||
}
|
||||
|
||||
_ensure_bun
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)"
|
||||
|
||||
# SPAWN_CLI_DIR override — force local source (used by e2e tests)
|
||||
if [[ -n "${SPAWN_CLI_DIR:-}" && -f "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SPAWN_CLI_DIR/packages/cli/src/daytona/main.ts" zeroclaw "$@"
|
||||
fi
|
||||
|
||||
# Local checkout — run from source
|
||||
if [[ -n "$SCRIPT_DIR" && -f "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" ]]; then
|
||||
exec bun run "$SCRIPT_DIR/../../packages/cli/src/daytona/main.ts" zeroclaw "$@"
|
||||
fi
|
||||
|
||||
# Remote — download bundled daytona.js from GitHub release
|
||||
DAYTONA_JS=$(mktemp)
|
||||
trap 'rm -f "$DAYTONA_JS"' EXIT
|
||||
curl -fsSL --proto '=https' "https://github.com/OpenRouterTeam/spawn/releases/download/daytona-latest/daytona.js" -o "$DAYTONA_JS" \
|
||||
|| { printf '\033[0;31mFailed to download daytona.js\033[0m\n' >&2; exit 1; }
|
||||
|
||||
exec bun run "$DAYTONA_JS" zeroclaw "$@"
|
||||
|
|
@ -34,7 +34,7 @@ source "${SCRIPT_DIR}/lib/teardown.sh"
|
|||
# ---------------------------------------------------------------------------
|
||||
# All supported clouds (excluding local — no infra to provision)
|
||||
# ---------------------------------------------------------------------------
|
||||
ALL_CLOUDS="aws hetzner digitalocean gcp daytona sprite"
|
||||
ALL_CLOUDS="aws hetzner digitalocean gcp sprite"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Parse arguments
|
||||
|
|
|
|||
|
|
@ -1,380 +0,0 @@
|
|||
#!/bin/bash
|
||||
# e2e/lib/clouds/daytona.sh — Daytona cloud driver for multi-cloud E2E
|
||||
#
|
||||
# Implements the standard cloud driver interface (_daytona_* prefixed functions).
|
||||
# Sourced by common.sh's load_cloud_driver() which wires these to generic names.
|
||||
#
|
||||
# Depends on: log_step, log_ok, log_err, log_warn, log_info, format_duration,
|
||||
# untrack_app (provided by common.sh)
|
||||
set -eo pipefail
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Constants
|
||||
# ---------------------------------------------------------------------------
|
||||
_DAYTONA_API_BASE="https://app.daytona.io/api"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _daytona_validate_env
|
||||
#
|
||||
# Check that DAYTONA_API_KEY is set and valid (test list endpoint).
|
||||
# Returns 0 on success, 1 on failure.
|
||||
# ---------------------------------------------------------------------------
|
||||
_daytona_validate_env() {
|
||||
if [ -z "${DAYTONA_API_KEY:-}" ]; then
|
||||
log_err "DAYTONA_API_KEY is not set"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Validate the key by hitting the sandbox list endpoint
|
||||
if ! curl -sf \
|
||||
-H "Authorization: Bearer ${DAYTONA_API_KEY}" \
|
||||
"${_DAYTONA_API_BASE}/sandbox?page=1&limit=1" >/dev/null 2>&1; then
|
||||
log_err "DAYTONA_API_KEY is invalid or Daytona API is unreachable"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_ok "Daytona API key validated"
|
||||
return 0
|
||||
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _daytona_headless_env APP AGENT
|
||||
#
|
||||
# Print export lines to stdout for headless provisioning.
|
||||
# These are eval'd by the provisioning harness before invoking the CLI.
|
||||
# ---------------------------------------------------------------------------
|
||||
_daytona_headless_env() {
|
||||
local app="$1"
|
||||
# local agent="$2" # unused but part of the interface
|
||||
|
||||
printf 'export DAYTONA_SANDBOX_NAME="%s"\n' "${app}"
|
||||
printf 'export DAYTONA_SANDBOX_SIZE="%s"\n' "${DAYTONA_SANDBOX_SIZE:-small}"
|
||||
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _daytona_provision_verify APP LOG_DIR
|
||||
#
|
||||
# After provisioning, find the sandbox by name, obtain SSH credentials via
|
||||
# the ssh-access endpoint, and write metadata files for downstream steps.
|
||||
#
|
||||
# Writes:
|
||||
# $LOG_DIR/$APP.ip — sentinel value "token-auth" (no traditional IP)
|
||||
# $LOG_DIR/$APP.meta — JSON with id, sshToken, sshHost, sshPort
|
||||
# ---------------------------------------------------------------------------
|
||||
_daytona_provision_verify() {
|
||||
local app="$1"
|
||||
local log_dir="$2"
|
||||
|
||||
# List sandboxes and find the one matching our app name.
|
||||
# The API may return a JSON array directly or an object with items/sandboxes.
|
||||
local sandboxes_json
|
||||
sandboxes_json=$(curl -sf \
|
||||
-H "Authorization: Bearer ${DAYTONA_API_KEY}" \
|
||||
"${_DAYTONA_API_BASE}/sandbox" 2>/dev/null || true)
|
||||
|
||||
if [ -z "${sandboxes_json}" ]; then
|
||||
log_err "Failed to list Daytona sandboxes"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Extract sandbox ID by matching on name.
|
||||
# Handle both array response and object-with-items response.
|
||||
local sandbox_id
|
||||
sandbox_id=$(printf '%s' "${sandboxes_json}" | jq -r \
|
||||
'(if type == "array" then . else (.items // .sandboxes // []) end)
|
||||
| map(select(.name == "'"${app}"'"))
|
||||
| first
|
||||
| .id // empty' 2>/dev/null || true)
|
||||
|
||||
if [ -z "${sandbox_id}" ]; then
|
||||
log_err "Sandbox '${app}' not found after provisioning"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_ok "Sandbox found: ${sandbox_id}"
|
||||
|
||||
# Request SSH access credentials
|
||||
local ssh_json
|
||||
ssh_json=$(curl -sf -X POST \
|
||||
-H "Authorization: Bearer ${DAYTONA_API_KEY}" \
|
||||
"${_DAYTONA_API_BASE}/sandbox/${sandbox_id}/ssh-access?expiresInMinutes=480" 2>/dev/null || true)
|
||||
|
||||
if [ -z "${ssh_json}" ]; then
|
||||
log_err "Failed to get SSH access for sandbox ${sandbox_id}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
local ssh_token
|
||||
ssh_token=$(printf '%s' "${ssh_json}" | jq -r '.token // empty' 2>/dev/null || true)
|
||||
|
||||
if [ -z "${ssh_token}" ]; then
|
||||
log_err "SSH token not found in ssh-access response"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Parse host and port from sshCommand (e.g., "ssh -p 2222 TOKEN@HOST" or "ssh TOKEN@HOST")
|
||||
local ssh_command
|
||||
ssh_command=$(printf '%s' "${ssh_json}" | jq -r '.sshCommand // empty' 2>/dev/null || true)
|
||||
|
||||
local ssh_host="ssh.app.daytona.io"
|
||||
local ssh_port=""
|
||||
|
||||
if [ -n "${ssh_command}" ]; then
|
||||
# Extract host: last token after @ in the sshCommand
|
||||
local host_part
|
||||
host_part=$(printf '%s' "${ssh_command}" | sed 's/.*@//')
|
||||
if [ -n "${host_part}" ]; then
|
||||
ssh_host="${host_part}"
|
||||
fi
|
||||
|
||||
# Extract port if -p flag is present
|
||||
local port_part
|
||||
port_part=$(printf '%s' "${ssh_command}" | sed -n 's/.*-p[[:space:]]\{1,\}\([0-9]\{1,\}\).*/\1/p')
|
||||
if [ -n "${port_part}" ]; then
|
||||
ssh_port="${port_part}"
|
||||
fi
|
||||
fi
|
||||
|
||||
log_ok "SSH access ready (host: ${ssh_host}${ssh_port:+, port: ${ssh_port}})"
|
||||
|
||||
# Write sentinel IP file (Daytona uses token-based SSH, not traditional IP)
|
||||
printf 'token-auth' > "${log_dir}/${app}.ip"
|
||||
|
||||
# Write metadata file with SSH connection details
|
||||
printf '{"id":"%s","sshToken":"%s","sshHost":"%s","sshPort":"%s"}\n' \
|
||||
"${sandbox_id}" "${ssh_token}" "${ssh_host}" "${ssh_port}" \
|
||||
> "${log_dir}/${app}.meta"
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _daytona_read_meta APP
|
||||
#
|
||||
# Internal helper: read SSH connection details from the .meta file.
|
||||
# Sets _DT_ID, _DT_TOKEN, _DT_HOST, _DT_PORT variables.
|
||||
# Returns 1 if the meta file is missing or unreadable.
|
||||
# ---------------------------------------------------------------------------
|
||||
_daytona_read_meta() {
|
||||
local app="$1"
|
||||
|
||||
local meta_file="${LOG_DIR:-/tmp}/${app}.meta"
|
||||
if [ ! -f "${meta_file}" ]; then
|
||||
log_err "Meta file not found: ${meta_file}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
_DT_ID=$(jq -r '.id // empty' "${meta_file}" 2>/dev/null || true)
|
||||
_DT_TOKEN=$(jq -r '.sshToken // empty' "${meta_file}" 2>/dev/null || true)
|
||||
_DT_HOST=$(jq -r '.sshHost // empty' "${meta_file}" 2>/dev/null || true)
|
||||
_DT_PORT=$(jq -r '.sshPort // empty' "${meta_file}" 2>/dev/null || true)
|
||||
|
||||
if [ -z "${_DT_TOKEN}" ] || [ -z "${_DT_HOST}" ]; then
|
||||
log_err "Incomplete SSH credentials in meta file for ${app}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _daytona_exec APP CMD
|
||||
#
|
||||
# Run CMD on the Daytona sandbox via SSH using token-based authentication.
|
||||
# The token serves as the SSH username; PubkeyAuthentication is disabled.
|
||||
# Returns the exit code of the remote command.
|
||||
# ---------------------------------------------------------------------------
|
||||
_daytona_exec() {
|
||||
local app="$1"
|
||||
local cmd="$2"
|
||||
|
||||
_daytona_read_meta "${app}" || return 1
|
||||
|
||||
local ssh_args=""
|
||||
ssh_args="-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
|
||||
ssh_args="${ssh_args} -o PubkeyAuthentication=no -o ConnectTimeout=10"
|
||||
ssh_args="${ssh_args} -o LogLevel=ERROR"
|
||||
|
||||
if [ -n "${_DT_PORT}" ]; then
|
||||
ssh_args="${ssh_args} -o Port=${_DT_PORT}"
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
ssh ${ssh_args} "${_DT_TOKEN}@${_DT_HOST}" "${cmd}"
|
||||
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _daytona_exec_long APP CMD TIMEOUT
|
||||
#
|
||||
# Same as _daytona_exec but with ServerAliveInterval keep-alives and the
|
||||
# remote command wrapped in `timeout` for long-running operations.
|
||||
# ---------------------------------------------------------------------------
|
||||
_daytona_exec_long() {
|
||||
local app="$1"
|
||||
local cmd="$2"
|
||||
local timeout="${3:-120}"
|
||||
|
||||
_daytona_read_meta "${app}" || return 1
|
||||
|
||||
local alive_count=$((timeout / 15 + 1))
|
||||
|
||||
local ssh_args=""
|
||||
ssh_args="-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
|
||||
ssh_args="${ssh_args} -o PubkeyAuthentication=no -o ConnectTimeout=10"
|
||||
ssh_args="${ssh_args} -o LogLevel=ERROR"
|
||||
ssh_args="${ssh_args} -o ServerAliveInterval=15 -o ServerAliveCountMax=${alive_count}"
|
||||
|
||||
if [ -n "${_DT_PORT}" ]; then
|
||||
ssh_args="${ssh_args} -o Port=${_DT_PORT}"
|
||||
fi
|
||||
|
||||
# Base64-encode the command to avoid shell injection via single-quote breakout
|
||||
local encoded_cmd
|
||||
encoded_cmd=$(printf '%s' "${cmd}" | base64 | tr -d '\n')
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
ssh ${ssh_args} "${_DT_TOKEN}@${_DT_HOST}" "timeout ${timeout} bash -c \"\$(printf '%s' '${encoded_cmd}' | base64 -d)\""
|
||||
}
|
||||
|
||||
# ---------------------------------------------------------------------------
# _daytona_teardown APP
#
# Delete the Daytona sandbox by ID (read from the .meta file) and untrack it.
# If the .meta file cannot be read, fall back to a name-based API lookup.
# Deletion is best-effort and then verified by re-fetching the sandbox state.
# ---------------------------------------------------------------------------
_daytona_teardown() {
  local app="$1"

  log_step "Tearing down ${app}..."

  _daytona_read_meta "${app}" || {
    log_warn "Could not read meta for ${app} — attempting name-based lookup"
    # Fall back to listing sandboxes by name
    local sandboxes_json
    sandboxes_json=$(curl -sf \
      -H "Authorization: Bearer ${DAYTONA_API_KEY}" \
      "${_DAYTONA_API_BASE}/sandbox" 2>/dev/null || true)

    if [ -n "${sandboxes_json}" ]; then
      # Pass the app name via --arg so quotes or other metacharacters in the
      # name cannot break out of (or inject into) the jq program.
      _DT_ID=$(printf '%s' "${sandboxes_json}" | jq -r --arg name "${app}" \
        '(if type == "array" then . else (.items // .sandboxes // []) end)
         | map(select(.name == $name))
         | first
         | .id // empty' 2>/dev/null || true)
    fi

    if [ -z "${_DT_ID:-}" ]; then
      log_err "Cannot find sandbox ID for ${app}"
      untrack_app "${app}"
      return 1
    fi
  }

  # Delete the sandbox via API (best-effort; verified below)
  curl -sf -X DELETE \
    -H "Authorization: Bearer ${DAYTONA_API_KEY}" \
    "${_DAYTONA_API_BASE}/sandbox/${_DT_ID}" >/dev/null 2>&1 || true

  # Brief wait for deletion to propagate
  sleep 2

  # Verify deletion — check if sandbox still exists
  local check_json
  check_json=$(curl -sf \
    -H "Authorization: Bearer ${DAYTONA_API_KEY}" \
    "${_DAYTONA_API_BASE}/sandbox/${_DT_ID}" 2>/dev/null || true)

  if [ -n "${check_json}" ]; then
    local state
    state=$(printf '%s' "${check_json}" | jq -r '.state // empty' 2>/dev/null || true)
    # Treat "deleted"/"destroyed" (or an unparsable state) as success; anything
    # else means the API still reports the sandbox as live.
    if [ -n "${state}" ] && [ "${state}" != "deleted" ] && [ "${state}" != "destroyed" ]; then
      log_warn "Sandbox ${app} (${_DT_ID}) may still exist (state: ${state})"
    else
      log_ok "Sandbox ${app} torn down"
    fi
  else
    log_ok "Sandbox ${app} torn down"
  fi

  untrack_app "${app}"
}
|
||||
|
||||
# ---------------------------------------------------------------------------
# _daytona_cleanup_stale
#
# List all Daytona sandboxes, filter for e2e-* names, and destroy any
# older than 30 minutes (based on the unix timestamp embedded in the name:
# e2e-AGENT-TIMESTAMP). Entries without a parseable 10-digit timestamp are
# skipped with a warning rather than deleted.
# ---------------------------------------------------------------------------
_daytona_cleanup_stale() {
  local now
  now=$(date +%s)
  local max_age=1800 # 30 minutes in seconds

  # Fetch all sandboxes (handle pagination by requesting a large limit)
  local sandboxes_json
  sandboxes_json=$(curl -sf \
    -H "Authorization: Bearer ${DAYTONA_API_KEY}" \
    "${_DAYTONA_API_BASE}/sandbox?page=1&limit=100" 2>/dev/null || true)

  if [ -z "${sandboxes_json}" ]; then
    log_info "Could not list sandboxes or no sandboxes found — skipping cleanup"
    return 0
  fi

  # Extract names and IDs of e2e-* sandboxes as "name:id" pairs, one per line
  local e2e_entries
  e2e_entries=$(printf '%s' "${sandboxes_json}" | jq -r \
    '(if type == "array" then . else (.items // .sandboxes // []) end)
     | map(select(.name // "" | startswith("e2e-")))
     | .[]
     | "\(.name):\(.id)"' 2>/dev/null || true)

  if [ -z "${e2e_entries}" ]; then
    log_ok "No stale e2e sandboxes found"
    return 0
  fi

  local cleaned=0
  local skipped=0

  # Iterate line-by-line (not by unquoted word-splitting) so entries survive
  # unexpected whitespace. The here-doc redirection keeps the loop in the
  # current shell, so the counters persist after `done`.
  while IFS= read -r entry; do
    [ -n "${entry}" ] || continue

    # name = everything before the first ':', id = everything after it
    local sandbox_name="${entry%%:*}"
    local sandbox_id="${entry#*:}"

    # Extract timestamp from name: e2e-AGENT-TIMESTAMP
    # (the last dash-separated segment, via parameter expansion — no forks)
    local ts="${sandbox_name##*-}"

    # Validate it looks like a unix timestamp (all digits, 10 chars)
    if ! printf '%s' "${ts}" | grep -qE '^[0-9]{10}$'; then
      log_warn "Skipping ${sandbox_name} — cannot parse timestamp"
      skipped=$((skipped + 1))
      continue
    fi

    local age=$((now - ts))
    if [ "${age}" -gt "${max_age}" ]; then
      local age_str
      age_str=$(format_duration "${age}")
      log_step "Destroying stale sandbox ${sandbox_name} (age: ${age_str})"

      curl -sf -X DELETE \
        -H "Authorization: Bearer ${DAYTONA_API_KEY}" \
        "${_DAYTONA_API_BASE}/sandbox/${sandbox_id}" >/dev/null 2>&1 || \
        log_warn "Failed to delete sandbox ${sandbox_name} (${sandbox_id})"

      cleaned=$((cleaned + 1))
    else
      skipped=$((skipped + 1))
    fi
  done <<EOF
${e2e_entries}
EOF

  if [ "${cleaned}" -gt 0 ]; then
    log_ok "Cleaned ${cleaned} stale sandbox(es)"
  fi
  if [ "${skipped}" -gt 0 ]; then
    log_info "Skipped ${skipped} recent sandbox(es)"
  fi
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue