spawn/.claude/skills/setup-spa/helpers.ts
L 65a81edc57
fix: add unique spawn IDs to prevent history record corruption (#2235)
* fix: add unique spawn IDs to prevent history record corruption

History records were matched by heuristic ("most recent record for this
cloud without a connection"), which caused saveVmConnection and
saveLaunchCmd to overwrite the wrong record during concurrent or failed
spawns.

Fix: every SpawnRecord now has a unique `id` (UUID). All history
operations (saveVmConnection, saveLaunchCmd, removeRecord,
markRecordDeleted, mergeLastConnection) match by id when available,
falling back to the old heuristic for pre-migration records.

The orchestrator (TS path) now creates the history record AFTER server
creation succeeds, not before — so failed provisions don't leave orphan
entries.

Also adds "Remove from history" option to the spawn ls action picker,
restoring the ability to soft-delete entries without destroying the VM.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

* test: add 18 unit tests for spawn ID history behavior

Tests cover:
- generateSpawnId returns unique UUIDs
- saveSpawnRecord auto-generates id when not provided
- saveVmConnection matches by spawnId (not heuristic)
- saveVmConnection does not cross-contaminate concurrent spawns
- saveVmConnection falls back to heuristic without spawnId
- saveLaunchCmd matches by spawnId (not heuristic)
- saveLaunchCmd falls back without spawnId
- removeRecord matches by id, not by timestamp+agent+cloud
- removeRecord handles duplicate timestamps correctly
- removeRecord falls back for legacy records without id
- markRecordDeleted targets correct record by id
- mergeLastConnection uses spawn_id from last-connection.json
- mergeLastConnection falls back to heuristic without spawn_id

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

* style: enable biome import sorting with grouped imports

Adds organizeImports to biome assist config with groups:
1. Type imports
2. Node built-ins
3. Third-party packages
4. @openrouter/* packages
5. Aliases

Auto-fixed import order and lint issues across all TypeScript files,
including .claude/skills/ and packages/cli/src/.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-05 23:27:03 -08:00

362 lines
9.9 KiB
TypeScript
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

// SPA helpers — pure functions for parsing Claude Code stream events,
// Slack formatting, state management, and file download/cleanup.
import type { Result } from "@openrouter/spawn-shared";
import { existsSync, mkdirSync, readdirSync, readFileSync, rmSync, statSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";
import { Err, isString, Ok, toRecord } from "@openrouter/spawn-shared";
import { slackifyMarkdown } from "slackify-markdown";
import * as v from "valibot";
// #region State
// Path of the persisted Slack-thread → session state file.
// The STATE_PATH env var overrides; otherwise it lives under $HOME (or /root) at ~/.config/spawn/slack-issues.json.
const STATE_PATH = process.env.STATE_PATH ?? `${process.env.HOME ?? "/root"}/.config/spawn/slack-issues.json`;
// One Slack thread ↔ session association.
const MappingSchema = v.object({
channel: v.string(), // Slack channel ID
threadTs: v.string(), // Slack thread timestamp (root message ts)
sessionId: v.string(), // presumably the Claude Code stream session_id (see ResultSchema) — confirm at write site
createdAt: v.string(), // creation time; string format not visible here — TODO confirm at write site
});
// On-disk state shape: a flat list of mappings.
const StateSchema = v.object({
mappings: v.array(MappingSchema),
});
export type Mapping = v.InferOutput<typeof MappingSchema>;
export type State = v.InferOutput<typeof StateSchema>;
/**
 * Load persisted thread↔session state from STATE_PATH.
 *
 * A missing file is not an error — it yields an empty state. Any read,
 * JSON, or schema failure is returned as Err (never thrown).
 */
export function loadState(): Result<State> {
  try {
    if (!existsSync(STATE_PATH)) {
      // First run: nothing persisted yet.
      return Ok({ mappings: [] });
    }
    const contents = readFileSync(STATE_PATH, "utf-8");
    const state = v.parse(StateSchema, JSON.parse(contents));
    return Ok(state);
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    return Err(new Error(`Failed to load state: ${reason}`));
  }
}
/**
 * Persist state to STATE_PATH as pretty-printed JSON (trailing newline),
 * creating parent directories as needed. Failures come back as Err.
 */
export function saveState(s: State): Result<void> {
  try {
    mkdirSync(dirname(STATE_PATH), { recursive: true });
    const serialized = `${JSON.stringify(s, null, 2)}\n`;
    writeFileSync(STATE_PATH, serialized);
    return Ok(undefined);
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    return Err(new Error(`Failed to save state: ${reason}`));
  }
}
/** Look up the mapping for a given Slack channel + thread, or undefined when none exists. */
export function findMapping(s: State, channel: string, threadTs: string): Mapping | undefined {
  for (const mapping of s.mappings) {
    if (mapping.channel === channel && mapping.threadTs === threadTs) {
      return mapping;
    }
  }
  return undefined;
}
/** Append a mapping to the in-memory state and persist it immediately. Returns saveState's result. */
export function addMapping(s: State, mapping: Mapping): Result<void> {
// Mutates the caller's state object in place before writing to disk.
s.mappings.push(mapping);
return saveState(s);
}
// #endregion
// #region Claude Code stream parsing
// Terminal "result" event from the Claude Code stream — carries the session id
// (full event shape documented above parseStreamEvent).
export const ResultSchema = v.object({
type: v.literal("result"),
session_id: v.string(),
});
/** One renderable chunk of Claude output, typed by what produced it. */
export interface SlackSegment {
kind: "text" | "tool_use" | "tool_result";
text: string;
toolName?: string; // set for tool_use
toolHint?: string; // set for tool_use — truncated command/pattern/path
isError?: boolean; // set for tool_result
}
/** Tracked tool call for history and stats. */
export interface ToolCall {
name: string; // tool name, e.g. "Bash"
hint: string; // truncated command/pattern/path; "" when none
errored?: boolean; // set when the tool result reported an error
}
/**
 * Extract a short human-readable hint from a tool_use input block.
 *
 * Checks `command`, `pattern`, then `file_path` (first string wins) and
 * truncates the value to 80 characters with a "..." suffix. Returns ""
 * when the input is absent or carries none of those string fields.
 */
export function extractToolHint(block: Record<string, unknown>): string {
  const input = toRecord(block.input);
  if (!input) {
    return "";
  }
  // First string-valued candidate wins, in priority order.
  let hint = "";
  for (const key of ["command", "pattern", "file_path"]) {
    const candidate = input[key];
    if (isString(candidate)) {
      hint = candidate;
      break;
    }
  }
  if (!hint) {
    return "";
  }
  return hint.length > 80 ? `${hint.slice(0, 80)}...` : hint;
}
/** Render extractToolHint's output as a leading-space backtick span, or "" when there is no hint. */
function formatToolHint(block: Record<string, unknown>): string {
  const hint = extractToolHint(block);
  return hint ? ` \`${hint}\`` : "";
}
/** Format tool counts into a compact stats string in map order: "1× Bash, 4× Read, 5× Grep". */
export function formatToolStats(counts: ReadonlyMap<string, number>): string {
  const parts: string[] = [];
  for (const [toolName, uses] of counts) {
    parts.push(`${uses}× ${toolName}`);
  }
  return parts.join(", ");
}
/** Format the full ordered tool history into a numbered ✓/✗ list for the expandable attachment. */
export function formatToolHistory(history: readonly ToolCall[]): string {
  const lines: string[] = [];
  history.forEach((call, index) => {
    const status = call.errored ? "✗" : "✓";
    // NOTE(review): the hint is appended directly after the name with no separator —
    // looks like the hint is expected to carry its own leading space; confirm at call site.
    lines.push(`${index + 1}. ${status} ${call.name}${call.hint || ""}`);
  });
  return lines.join("\n");
}
/** Parse an assistant-type event into a SlackSegment. */
function parseAssistantEvent(event: Record<string, unknown>): SlackSegment | null {
const msg = toRecord(event.message);
if (!msg) {
return null;
}
const content = Array.isArray(msg.content) ? msg.content : [];
const textParts: string[] = [];
const toolParts: string[] = [];
let firstToolName: string | undefined;
let firstToolHint: string | undefined;
for (const rawBlock of content) {
const block = toRecord(rawBlock);
if (!block) {
continue;
}
if (block.type === "text" && isString(block.text)) {
textParts.push(markdownToSlack(block.text));
}
if (block.type === "tool_use" && isString(block.name)) {
if (!firstToolName) {
firstToolName = block.name;
firstToolHint = extractToolHint(block);
}
toolParts.push(`:hammer_and_wrench: *${block.name}*${formatToolHint(block)}`);
}
}
// Tool use takes priority — it's a distinct event kind
if (toolParts.length > 0) {
return {
kind: "tool_use",
text: toolParts.join("\n"),
toolName: firstToolName,
toolHint: firstToolHint,
};
}
if (textParts.length > 0) {
return {
kind: "text",
text: textParts.join(""),
};
}
return null;
}
/**
 * Parse a user-type event (tool results) into one SlackSegment.
 *
 * Each tool_result block becomes a ":white_check_mark:"/":x:"-prefixed
 * entry with its content clipped to 500 chars in a code fence. Returns
 * null when the event carries no tool_result blocks; isError is set when
 * any result reported is_error.
 */
function parseUserEvent(event: Record<string, unknown>): SlackSegment | null {
  const msg = toRecord(event.message);
  if (!msg) {
    return null;
  }
  const blocks = Array.isArray(msg.content) ? msg.content : [];
  const rendered: string[] = [];
  let anyError = false;
  for (const raw of blocks) {
    const block = toRecord(raw);
    if (!block || block.type !== "tool_result") {
      continue;
    }
    const errored = block.is_error === true;
    anyError = anyError || errored;
    const prefix = errored ? ":x: Error" : ":white_check_mark: Result";
    const body = isString(block.content) ? block.content : "";
    const clipped = body.length > 500 ? `${body.slice(0, 500)}...` : body;
    rendered.push(clipped ? `${prefix}:\n\`\`\`\n${clipped}\n\`\`\`` : `${prefix}: (empty)`);
  }
  if (rendered.length === 0) {
    return null;
  }
  return {
    kind: "tool_result",
    text: rendered.join("\n"),
    isError: anyError || undefined,
  };
}
/**
 * Parse a Claude Code stream-json event into a typed Slack segment.
 *
 * Claude Code emits complete messages (not Anthropic streaming deltas):
 * {"type":"assistant","message":{"content":[{"type":"text","text":"..."}]}}
 * {"type":"assistant","message":{"content":[{"type":"tool_use","name":"Bash","input":{...}}]}}
 * {"type":"user","message":{"content":[{"type":"tool_result","content":"..."}]}}
 * {"type":"result","result":"...","session_id":"..."}
 *
 * Only assistant and user events yield segments; everything else is null.
 */
export function parseStreamEvent(event: Record<string, unknown>): SlackSegment | null {
  switch (event.type) {
    case "assistant":
      return parseAssistantEvent(event);
    case "user":
      return parseUserEvent(event);
    default:
      return null;
  }
}
// #endregion
// #region Text helpers
/** Strip every Slack user-mention token (e.g. "<@U123ABC>") and trim surrounding whitespace. */
export function stripMention(text: string): string {
  const mentionPattern = /<@[A-Z0-9]+>/g;
  return text.replace(mentionPattern, "").trim();
}
/** Convert standard Markdown to Slack mrkdwn. Thin wrapper around the slackify-markdown package. */
export function markdownToSlack(text: string): string {
return slackifyMarkdown(text);
}
// #endregion
// #region File downloads
// Root temp directory for Slack file downloads; one subdirectory per thread (keyed by threadTs).
const DOWNLOADS_DIR = "/tmp/spa-downloads";
/**
 * Download a Slack-hosted file into a thread-scoped temp dir.
 *
 * @param url      Slack file URL to fetch (private URLs require the bearer token).
 * @param filename Original filename; sanitized before use as the on-disk name.
 * @param threadTs Slack thread timestamp — scopes the download directory.
 * @param botToken Slack bot token sent as a Bearer Authorization header.
 * @returns Ok(localPath) on success; Err on HTTP or I/O failure (never throws).
 */
export async function downloadSlackFile(
  url: string,
  filename: string,
  threadTs: string,
  botToken: string,
): Promise<Result<string>> {
  try {
    const resp = await fetch(url, {
      headers: {
        Authorization: `Bearer ${botToken}`,
      },
    });
    if (!resp.ok) {
      // Fix: interpolate the actual filename — the message previously contained
      // the literal garbled placeholder "$(unknown)".
      return Err(new Error(`Failed to download ${filename}: ${resp.status}`));
    }
    const dir = `${DOWNLOADS_DIR}/${threadTs}`;
    mkdirSync(dir, {
      recursive: true,
    });
    // Replace anything outside [a-zA-Z0-9._-] so the name can't traverse
    // directories or contain shell-hostile characters.
    const safeName = filename.replace(/[^a-zA-Z0-9._-]/g, "_");
    const localPath = `${dir}/${safeName}`;
    const buf = await resp.arrayBuffer();
    writeFileSync(localPath, Buffer.from(buf));
    return Ok(localPath);
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    // Fix: same "$(unknown)" placeholder repair as above.
    return Err(new Error(`Error downloading ${filename}: ${msg}`));
  }
}
// #endregion
// #region Cleanup
// Minimum gap between cleanup passes (see runCleanupIfDue).
const CLEANUP_INTERVAL_MS = 60 * 60 * 1000; // 1 hour
// Marker file whose contents are the epoch-ms timestamp of the last cleanup run.
const CLEANUP_TIMESTAMP_PATH = `${DOWNLOADS_DIR}/.last-cleanup`;
/**
 * Remove download directories older than 30 days (by mtime).
 *
 * Best-effort and non-throwing: entries that can't be stat'd are skipped,
 * and a vanished downloads dir is ignored. Logs a summary when anything
 * was removed.
 */
export function cleanupStaleDownloads(): void {
  if (!existsSync(DOWNLOADS_DIR)) {
    return;
  }
  const THIRTY_DAYS_MS = 30 * 24 * 60 * 60 * 1000;
  const staleBefore = Date.now() - THIRTY_DAYS_MS;
  let removedCount = 0;
  try {
    for (const name of readdirSync(DOWNLOADS_DIR)) {
      const fullPath = `${DOWNLOADS_DIR}/${name}`;
      try {
        const info = statSync(fullPath);
        if (info.isDirectory() && info.mtimeMs < staleBefore) {
          rmSync(fullPath, {
            recursive: true,
            force: true,
          });
          removedCount += 1;
        }
      } catch {
        // Entry disappeared or is unreadable — skip it.
      }
    }
  } catch {
    // Directory vanished between existsSync and readdirSync — nothing to do.
  }
  if (removedCount > 0) {
    console.log(`[spa] Cleaned up ${removedCount} stale download dir(s)`);
  }
}
/**
 * Throttled cleanup entry point: runs cleanupStaleDownloads at most once
 * per CLEANUP_INTERVAL_MS, persisting the last-run timestamp to disk so
 * the throttle survives process restarts.
 */
export function runCleanupIfDue(): void {
  try {
    if (existsSync(CLEANUP_TIMESTAMP_PATH)) {
      const raw = readFileSync(CLEANUP_TIMESTAMP_PATH, "utf-8").trim();
      const lastRunMs = Number.parseInt(raw, 10);
      // Note: a garbage/NaN timestamp fails this comparison and falls
      // through to running cleanup, which is the desired recovery.
      if (Date.now() - lastRunMs < CLEANUP_INTERVAL_MS) {
        return;
      }
    }
  } catch {
    // Timestamp file missing or unreadable — treat as "never ran".
  }
  cleanupStaleDownloads();
  try {
    mkdirSync(DOWNLOADS_DIR, {
      recursive: true,
    });
    writeFileSync(CLEANUP_TIMESTAMP_PATH, String(Date.now()));
  } catch {
    // Non-fatal: worst case we attempt cleanup again on the next call.
  }
}
// #endregion