Add persistent disk cache for parsed session data
Some checks are pending
CI / semgrep (push) Waiting to run

Cache normalized turns/calls to ~/.cache/codeburn/session-cache.json so
the CLI skips re-parsing unchanged JSONL files on subsequent runs.
File reconciliation uses dev+ino+mtime+size fingerprinting; cost,
classification, and summaries are recomputed at query time. Atomic
writes via temp+fsync+rename, deep structural validation on load,
per-provider env fingerprinting, and best-effort save so cache failures
never break the CLI. ~6x speedup on warm cache.
This commit is contained in:
iamtoruk 2026-05-16 01:04:13 -07:00
parent d568c8c103
commit bd41fa3962
6 changed files with 1236 additions and 51 deletions

View file

@ -71,6 +71,8 @@ export type SessionLine = string | Buffer
type ReadSessionLinesOptions = {
largeLineAsBuffer?: boolean
largeLineThresholdBytes?: number
startByteOffset?: number
byteOffsetTracker?: { lastCompleteLineOffset: number }
}
export function readSessionLines(
@ -102,9 +104,10 @@ export async function* readSessionLines(
return
}
// Raw Buffers — no encoding. This avoids readline's ConsString trees
// which OOM on V8 when regex-flattening 100 MB+ lines.
const stream = createReadStream(filePath)
const stream = createReadStream(
filePath,
options.startByteOffset !== undefined ? { start: options.startByteOffset } : undefined,
)
const SKIP_HEAD = 2048
const largeLineThreshold = options.largeLineThresholdBytes ?? LARGE_STREAM_LINE_BYTES
const formatLine = (buf: Buffer, lineLen: number, head?: string): SessionLine => {
@ -115,6 +118,8 @@ export async function* readSessionLines(
let len = 0
let skipping = false
let headChecked = false
let chunkBase = options.startByteOffset ?? 0
const tracker = options.byteOffsetTracker
try {
for await (const raw of stream) {
@ -128,6 +133,7 @@ export async function* readSessionLines(
if (nl === -1) {
pos = chunk.length
} else {
if (tracker) tracker.lastCompleteLineOffset = chunkBase + nl + 1
skipping = false
pos = nl + 1
}
@ -140,6 +146,7 @@ export async function* readSessionLines(
len += nl - pos
}
pos = nl + 1
if (tracker) tracker.lastCompleteLineOffset = chunkBase + pos
if (len === 0) {
parts = []
@ -183,6 +190,7 @@ export async function* readSessionLines(
}
}
}
chunkBase += chunk.length
}
if (!skipping && len > 0) {

View file

@ -6,6 +6,19 @@ import { discoverAllSessions, getProvider } from './providers/index.js'
import { flushCodexCache } from './codex-cache.js'
import { flushAntigravityCache } from './providers/antigravity.js'
import { isSqliteBusyError } from './sqlite.js'
import {
type CachedCall,
type CachedFile,
type CachedTurn,
type ProviderSection,
type SessionCache,
cleanupOrphanedTempFiles,
computeEnvFingerprint,
fingerprintFile,
loadCache,
reconcileFile,
saveCache,
} from './session-cache.js'
import type { ParsedProviderCall } from './providers/types.js'
import type {
AssistantMessageContent,
@ -995,6 +1008,7 @@ function parseApiCall(entry: JournalEntry): ParsedApiCall | null {
timestamp: entry.timestamp ?? '',
bashCommands: bashCmds,
deduplicationKey: msg.id ?? `claude:${entry.timestamp}`,
cacheCreationOneHourTokens: cacheCreation.oneHourTokens || undefined,
}
}
@ -1321,31 +1335,115 @@ async function collectJsonlFiles(dirPath: string): Promise<string[]> {
return jsonlFiles
}
async function scanProjectDirs(dirs: Array<{ path: string; name: string }>, seenMsgIds: Set<string>, dateRange?: DateRange): Promise<ProjectSummary[]> {
const projectMap = new Map<string, { project: string; projectPath: string; sessions: SessionSummary[] }>()
async function scanProjectDirs(
dirs: Array<{ path: string; name: string }>,
seenMsgIds: Set<string>,
diskCache: SessionCache,
dateRange?: DateRange,
): Promise<ProjectSummary[]> {
const section = getOrCreateProviderSection(diskCache, 'claude')
const allDiscoveredFiles = new Set<string>()
type FileInfo = { dirName: string; fp: NonNullable<Awaited<ReturnType<typeof fingerprintFile>>> }
const unchangedFiles: Array<{ filePath: string; dirName: string; cached: CachedFile }> = []
const changedFiles: Array<{ filePath: string; info: FileInfo }> = []
for (const { path: dirPath, name: dirName } of dirs) {
const jsonlFiles = await collectJsonlFiles(dirPath)
for (const filePath of jsonlFiles) {
const parsed = await parseSessionFile(filePath, dirName, seenMsgIds, dateRange)
if (parsed && parsed.session.apiCalls > 0) {
const projectPath = parsed.canonicalCwd ?? unsanitizePath(dirName)
const projectKey = parsed.canonicalCwd ? normalizeProjectPathKey(parsed.canonicalCwd) : `slug:${dirName}`
const existing = projectMap.get(projectKey)
if (existing) {
existing.sessions.push(parsed.session)
} else {
projectMap.set(projectKey, { project: dirName, projectPath, sessions: [parsed.session] })
}
allDiscoveredFiles.add(filePath)
const fp = await fingerprintFile(filePath)
if (!fp) continue
const action = reconcileFile(fp, section.files[filePath])
if (action.action === 'unchanged') {
unchangedFiles.push({ filePath, dirName, cached: section.files[filePath]! })
} else {
changedFiles.push({ filePath, info: { dirName, fp } })
}
}
}
// If a slug has both cwd-keyed and slug-keyed entries (mixed sessions where
// some carry a canonical cwd and some don't), fold the slug-keyed sessions
// into the cwd-keyed entry so the canonical projectPath is preserved
// regardless of file iteration order.
// Pre-seed dedup set from cached (unchanged) files
for (const { cached } of unchangedFiles) {
for (const turn of cached.turns) {
for (const call of turn.calls) {
seenMsgIds.add(call.deduplicationKey)
}
}
}
// Parse changed files, update cache
for (const { filePath, info } of changedFiles) {
// Clear stale entry before parse — if parse fails, file is excluded
delete section.files[filePath]
const tracker = { lastCompleteLineOffset: 0 }
const entries = await parseClaudeEntries(filePath, tracker)
if (!entries) continue
const turns = groupIntoTurns(dedupeStreamingMessageIds(entries), seenMsgIds)
section.files[filePath] = {
fingerprint: info.fp,
lastCompleteLineOffset: tracker.lastCompleteLineOffset,
canonicalCwd: extractCanonicalCwd(entries),
mcpInventory: extractMcpInventory(entries),
turns: turns.map(parsedTurnToCachedTurn),
}
}
// Remove deleted files from cache
for (const cachedPath of Object.keys(section.files)) {
if (!allDiscoveredFiles.has(cachedPath)) {
delete section.files[cachedPath]
}
}
// Query-time: derive ProjectSummary[] from all cached turns
const projectMap = new Map<string, { project: string; projectPath: string; sessions: SessionSummary[] }>()
const allFiles = [
...unchangedFiles.map(f => ({ filePath: f.filePath, dirName: f.dirName })),
...changedFiles.map(f => ({ filePath: f.filePath, dirName: f.info.dirName })),
]
for (const { filePath, dirName } of allFiles) {
const cachedFile = section.files[filePath]
if (!cachedFile || cachedFile.turns.length === 0) continue
let classifiedTurns = cachedFile.turns.map(cachedTurnToClassified)
if (dateRange) {
classifiedTurns = classifiedTurns.filter(turn => {
if (turn.assistantCalls.length === 0) return false
const firstCallTs = turn.assistantCalls[0]!.timestamp
if (!firstCallTs) return false
const ts = new Date(firstCallTs)
return ts >= dateRange.start && ts <= dateRange.end
})
}
if (classifiedTurns.length === 0) continue
const sessionId = basename(filePath, '.jsonl')
const projectPath = cachedFile.canonicalCwd ?? unsanitizePath(dirName)
const mcpInv = cachedFile.mcpInventory.length > 0 ? cachedFile.mcpInventory : undefined
const session = buildSessionSummary(sessionId, dirName, classifiedTurns, mcpInv)
if (session.apiCalls > 0) {
const projectKey = cachedFile.canonicalCwd
? normalizeProjectPathKey(cachedFile.canonicalCwd)
: `slug:${dirName}`
const existing = projectMap.get(projectKey)
if (existing) {
existing.sessions.push(session)
} else {
projectMap.set(projectKey, { project: dirName, projectPath, sessions: [session] })
}
}
}
// Fold slug-keyed entries into cwd-keyed entries
const cwdKeyByDirName = new Map<string, string>()
for (const [key, entry] of projectMap) {
if (!key.startsWith('slug:') && !cwdKeyByDirName.has(entry.project)) {
@ -1411,6 +1509,136 @@ function providerCallToTurn(call: ParsedProviderCall): ParsedTurn {
}
}
// ── Cache Conversion ───────────────────────────────────────────────────
// Convert a parsed API call into its cache-serializable form.
// The optional one-hour cache-creation token count is folded into the
// usage object (defaulting to 0) so every cached call has a uniform shape.
function apiCallToCachedCall(call: ParsedApiCall): CachedCall {
  const usage = { ...call.usage, cacheCreationOneHourTokens: call.cacheCreationOneHourTokens ?? 0 }
  return {
    provider: call.provider,
    model: call.model,
    usage,
    speed: call.speed,
    timestamp: call.timestamp,
    tools: call.tools,
    bashCommands: call.bashCommands,
    skills: call.skills,
    deduplicationKey: call.deduplicationKey,
  }
}
// Serialize a parsed turn for the disk cache. The user message is
// truncated to 2000 chars to bound on-disk cache size.
function parsedTurnToCachedTurn(turn: ParsedTurn): CachedTurn {
  const calls = turn.assistantCalls.map(apiCallToCachedCall)
  return {
    timestamp: turn.timestamp,
    sessionId: turn.sessionId,
    userMessage: turn.userMessage.slice(0, 2000),
    calls,
  }
}
// Wrap a single provider call as a one-call cached turn. Non-Claude
// providers carry no one-hour cache-creation bucket, so it is fixed at 0;
// skills are likewise empty for provider calls.
function providerCallToCachedTurn(call: ParsedProviderCall): CachedTurn {
  const usage = {
    inputTokens: call.inputTokens,
    outputTokens: call.outputTokens,
    cacheCreationInputTokens: call.cacheCreationInputTokens,
    cacheReadInputTokens: call.cacheReadInputTokens,
    cachedInputTokens: call.cachedInputTokens,
    reasoningTokens: call.reasoningTokens,
    webSearchRequests: call.webSearchRequests,
    cacheCreationOneHourTokens: 0,
  }
  const cachedCall = {
    provider: call.provider,
    model: call.model,
    usage,
    speed: call.speed,
    timestamp: call.timestamp,
    tools: call.tools,
    bashCommands: call.bashCommands,
    skills: [],
    deduplicationKey: call.deduplicationKey,
    project: call.project,
    projectPath: call.projectPath,
  }
  return {
    timestamp: call.timestamp,
    sessionId: call.sessionId,
    userMessage: call.userMessage.slice(0, 2000),
    calls: [cachedCall],
  }
}
// Rehydrate a cached call back into a ParsedApiCall. Cost is recomputed
// at query time (not cached) so pricing changes apply retroactively.
function cachedCallToApiCall(call: CachedCall): ParsedApiCall {
  const u = call.usage
  // For Claude only outputTokens feeds the cost calc; for other providers
  // reasoning tokens are added to the billed output.
  const billedOutput = call.provider === 'claude'
    ? u.outputTokens
    : u.outputTokens + u.reasoningTokens
  // Strip the one-hour bucket out of the usage object; it travels as a
  // separate optional field on ParsedApiCall (undefined when zero).
  const { cacheCreationOneHourTokens: oneHour, ...usage } = u
  return {
    provider: call.provider,
    model: call.model,
    usage,
    costUSD: calculateCost(
      call.model, u.inputTokens, billedOutput,
      u.cacheCreationInputTokens, u.cacheReadInputTokens,
      u.webSearchRequests, call.speed, oneHour,
    ),
    tools: call.tools,
    mcpTools: extractMcpTools(call.tools),
    skills: call.skills,
    hasAgentSpawn: call.tools.includes('Agent'),
    hasPlanMode: call.tools.includes('EnterPlanMode'),
    speed: call.speed,
    timestamp: call.timestamp,
    bashCommands: call.bashCommands,
    deduplicationKey: call.deduplicationKey,
    cacheCreationOneHourTokens: oneHour || undefined,
  }
}
// Rebuild a ParsedTurn from its cached form and run classification on it.
// Classification is always recomputed at query time rather than cached.
function cachedTurnToClassified(turn: CachedTurn): ClassifiedTurn {
  return classifyTurn({
    userMessage: turn.userMessage,
    assistantCalls: turn.calls.map(cachedCallToApiCall),
    timestamp: turn.timestamp,
    sessionId: turn.sessionId,
  })
}
// ── Cache-Aware Parsing Helpers ────────────────────────────────────────
// Read and compact all JSONL entries from a Claude session file.
// Returns null when the file is empty or contains no parseable entries,
// so callers can exclude it from the cache. `tracker.lastCompleteLineOffset`
// is advanced by readSessionLines as complete lines are consumed.
async function parseClaudeEntries(
  filePath: string,
  tracker: { lastCompleteLineOffset: number },
): Promise<JournalEntry[] | null> {
  const entries: JournalEntry[] = []
  for await (const line of readSessionLines(filePath, undefined, {
    largeLineAsBuffer: true,
    byteOffsetTracker: tracker,
  })) {
    const entry = parseJsonlLine(line)
    if (entry) entries.push(compactEntry(entry))
  }
  // A file with no lines and a file with only unparseable lines both land
  // here with zero entries, so a separate "hasLines" flag is unnecessary.
  return entries.length > 0 ? entries : null
}
function getOrCreateProviderSection(cache: SessionCache, provider: string): ProviderSection {
const envFp = computeEnvFingerprint(provider)
const existing = cache.providers[provider]
if (existing && existing.envFingerprint === envFp) return existing
const section = { envFingerprint: envFp, files: {} }
cache.providers[provider] = section
return section
}
const warnedProviderReadFailures = new Set<string>()
function warnProviderReadFailureOnce(providerName: string, err: unknown): void {
@ -1428,47 +1656,66 @@ async function parseProviderSources(
providerName: string,
sources: Array<{ path: string; project: string }>,
seenKeys: Set<string>,
diskCache: SessionCache,
dateRange?: DateRange,
): Promise<ProjectSummary[]> {
const provider = await getProvider(providerName)
if (!provider) return []
const sessionMap = new Map<string, { project: string; projectPath?: string; turns: ClassifiedTurn[] }>()
const section = getOrCreateProviderSection(diskCache, providerName)
const allDiscoveredFiles = new Set<string>()
try {
for (const source of sources) {
if (dateRange) {
try {
const s = await stat(source.path)
if (s.mtimeMs < dateRange.start.getTime()) continue
} catch { /* fall through; treat unknown stat as "may contain data" */ }
type SourceInfo = { source: { path: string; project: string }; fp: NonNullable<Awaited<ReturnType<typeof fingerprintFile>>> }
const unchangedSources: Array<{ source: { path: string; project: string }; cached: CachedFile }> = []
const changedSources: SourceInfo[] = []
for (const source of sources) {
allDiscoveredFiles.add(source.path)
const fp = await fingerprintFile(source.path)
if (!fp) continue
const action = reconcileFile(fp, section.files[source.path])
if (action.action === 'unchanged') {
unchangedSources.push({ source, cached: section.files[source.path]! })
} else {
changedSources.push({ source, fp })
}
}
// Parser dedup: cross-provider keys + cached file keys.
// Separate from seenKeys so parsing doesn't suppress query-time output.
const parserDedup = new Set(seenKeys)
for (const { cached } of unchangedSources) {
for (const turn of cached.turns) {
for (const call of turn.calls) {
parserDedup.add(call.deduplicationKey)
}
}
}
// Parse changed files, update cache
let didParse = false
try {
for (const { source, fp } of changedSources) {
if (dateRange) {
if (fp.mtimeMs < dateRange.start.getTime()) continue
}
// Clear stale entry before parse — if parse fails, file is excluded
delete section.files[source.path]
const parser = provider.createSessionParser(
{ path: source.path, project: source.project, provider: providerName },
seenKeys,
parserDedup,
)
try {
const turns: CachedTurn[] = []
for await (const call of parser.parse()) {
if (dateRange) {
if (!call.timestamp) continue
const ts = new Date(call.timestamp)
if (ts < dateRange.start || ts > dateRange.end) continue
}
const turn = providerCallToTurn(call)
const classified = classifyTurn(turn)
const project = call.project ?? source.project
const key = `${providerName}:${call.sessionId}:${project}`
const existing = sessionMap.get(key)
if (existing) {
existing.turns.push(classified)
if (!existing.projectPath && call.projectPath) existing.projectPath = call.projectPath
} else {
sessionMap.set(key, { project, projectPath: call.projectPath, turns: [classified] })
}
turns.push(providerCallToCachedTurn(call))
}
section.files[source.path] = { fingerprint: fp, mcpInventory: [], turns }
didParse = true
} catch (err) {
if (isSqliteBusyError(err)) {
warnProviderReadFailureOnce(providerName, err)
@ -1478,13 +1725,57 @@ async function parseProviderSources(
}
}
} finally {
if (providerName === 'codex') await flushCodexCache()
if (providerName === 'antigravity') {
if (didParse && providerName === 'codex') await flushCodexCache()
if (didParse && providerName === 'antigravity') {
const liveIds = new Set(sources.map(s => basename(s.path, '.pb')))
await flushAntigravityCache(liveIds)
}
}
// Remove deleted files from cache
for (const cachedPath of Object.keys(section.files)) {
if (!allDiscoveredFiles.has(cachedPath)) {
delete section.files[cachedPath]
}
}
// Query-time: derive SessionSummary from all cached turns.
// Uses seenKeys (shared across providers) for cross-provider dedup.
const sessionMap = new Map<string, { project: string; projectPath?: string; turns: ClassifiedTurn[] }>()
for (const source of sources) {
const cachedFile = section.files[source.path]
if (!cachedFile) continue
for (const turn of cachedFile.turns) {
const hasDup = turn.calls.some(c => seenKeys.has(c.deduplicationKey))
if (hasDup) continue
for (const c of turn.calls) seenKeys.add(c.deduplicationKey)
if (dateRange) {
const callTs = turn.calls[0]?.timestamp
if (!callTs) continue
const ts = new Date(callTs)
if (ts < dateRange.start || ts > dateRange.end) continue
}
const classified = cachedTurnToClassified(turn)
const project = turn.calls[0]?.project ?? source.project
const key = `${providerName}:${turn.sessionId}:${project}`
const existing = sessionMap.get(key)
if (existing) {
existing.turns.push(classified)
if (!existing.projectPath && turn.calls[0]?.projectPath) {
existing.projectPath = turn.calls[0]!.projectPath
}
} else {
sessionMap.set(key, { project, projectPath: turn.calls[0]?.projectPath, turns: [classified] })
}
}
}
const projectMap = new Map<string, { projectPath?: string; sessions: SessionSummary[] }>()
for (const [key, { project, projectPath, turns }] of sessionMap) {
const sessionId = key.split(':')[1] ?? key
@ -1602,6 +1893,9 @@ export async function parseAllSessions(dateRange?: DateRange, providerFilter?: s
const cached = sessionCache.get(key)
if (cached && Date.now() - cached.ts < CACHE_TTL_MS) return cached.data
const diskCache = await loadCache()
await cleanupOrphanedTempFiles()
const seenMsgIds = new Set<string>()
const seenKeys = new Set<string>()
const allSources = await discoverAllSessions(providerFilter)
@ -1610,7 +1904,7 @@ export async function parseAllSessions(dateRange?: DateRange, providerFilter?: s
const nonClaudeSources = allSources.filter(s => s.provider !== 'claude')
const claudeDirs = claudeSources.map(s => ({ path: s.path, name: s.project }))
const claudeProjects = await scanProjectDirs(claudeDirs, seenMsgIds, dateRange)
const claudeProjects = await scanProjectDirs(claudeDirs, seenMsgIds, diskCache, dateRange)
const providerGroups = new Map<string, Array<{ path: string; project: string }>>()
for (const source of nonClaudeSources) {
@ -1621,10 +1915,12 @@ export async function parseAllSessions(dateRange?: DateRange, providerFilter?: s
const otherProjects: ProjectSummary[] = []
for (const [providerName, sources] of providerGroups) {
const projects = await parseProviderSources(providerName, sources, seenKeys, dateRange)
const projects = await parseProviderSources(providerName, sources, seenKeys, diskCache, dateRange)
otherProjects.push(...projects)
}
try { await saveCache(diskCache) } catch {}
const mergedMap = new Map<string, ProjectSummary>()
for (const p of [...claudeProjects, ...otherProjects]) {
const existing = mergedMap.get(p.project)

319
src/session-cache.ts Normal file
View file

@ -0,0 +1,319 @@
import { readFile, stat, open, rename, unlink, readdir, mkdir } from 'fs/promises'
import { existsSync } from 'fs'
import { createHash, randomBytes } from 'crypto'
import { join } from 'path'
import { homedir } from 'os'
// ── Types ──────────────────────────────────────────────────────────────
export type CachedUsage = {
inputTokens: number
outputTokens: number
cacheCreationInputTokens: number
cacheReadInputTokens: number
cachedInputTokens: number
reasoningTokens: number
webSearchRequests: number
cacheCreationOneHourTokens: number
}
export type CachedCall = {
provider: string
model: string
usage: CachedUsage
speed: 'standard' | 'fast'
timestamp: string
tools: string[]
bashCommands: string[]
skills: string[]
deduplicationKey: string
project?: string
projectPath?: string
}
export type CachedTurn = {
timestamp: string
sessionId: string
userMessage: string
calls: CachedCall[]
}
// Stat-derived identity used to detect file changes between runs.
export type FileFingerprint = {
  dev: number
  ino: number
  mtimeMs: number
  sizeBytes: number
}
// Cached parse results for one session file.
export type CachedFile = {
  fingerprint: FileFingerprint
  // Byte offset just past the last complete (newline-terminated) line
  // read — enables incremental re-reads of append-only files.
  lastCompleteLineOffset?: number
  canonicalCwd?: string
  mcpInventory: string[]
  turns: CachedTurn[]
}
// Per-provider cache bucket; `files` is keyed by session file path.
export type ProviderSection = {
  envFingerprint: string
  files: Record<string, CachedFile>
}
// Top-level persisted cache shape (versioned; mismatches are discarded).
export type SessionCache = {
  version: number
  providers: Record<string, ProviderSection>
}
// ── Constants ──────────────────────────────────────────────────────────
export const CACHE_VERSION = 1
const CACHE_FILE = 'session-cache.json'
const TEMP_FILE_MAX_AGE_MS = 5 * 60 * 1000
// Environment variables that can relocate each provider's session data.
// These feed computeEnvFingerprint: if any value changes between runs,
// the provider's cached files are invalidated wholesale, since discovery
// may now point at a different directory tree.
const PROVIDER_ENV_VARS: Record<string, string[]> = {
  claude: ['CLAUDE_CONFIG_DIRS', 'CLAUDE_CONFIG_DIR'],
  codex: ['CODEX_HOME'],
  droid: ['FACTORY_DIR'],
  cursor: ['XDG_DATA_HOME'],
  'cursor-agent': ['XDG_DATA_HOME'],
  opencode: ['XDG_DATA_HOME'],
  goose: ['XDG_DATA_HOME'],
  crush: ['XDG_DATA_HOME'],
  antigravity: ['CODEBURN_CACHE_DIR'],
  qwen: ['QWEN_DATA_DIR'],
  'ibm-bob': ['XDG_CONFIG_HOME'],
}
// ── Cache Dir ──────────────────────────────────────────────────────────
// Resolve the cache directory, honoring the CODEBURN_CACHE_DIR override
// (tests point this at a temp dir).
function getCacheDir(): string {
  const override = process.env['CODEBURN_CACHE_DIR']
  return override ?? join(homedir(), '.cache', 'codeburn')
}
// Full path of the persisted session-cache JSON file.
function getCachePath(): string {
  return join(getCacheDir(), CACHE_FILE)
}
// ── Env Fingerprint ────────────────────────────────────────────────────
// Hash the provider-relevant environment variables into a short, stable
// fingerprint. Unset variables hash as empty strings, so set-vs-unset is
// distinguished from set-to-empty only by the variable's value.
export function computeEnvFingerprint(provider: string): string {
  const names = PROVIDER_ENV_VARS[provider] ?? []
  const material = names.map(name => `${name}=${process.env[name] ?? ''}`).join('\0')
  return createHash('sha256').update(material).digest('hex').slice(0, 16)
}
// ── Load / Save ────────────────────────────────────────────────────────
// A fresh cache with no provider sections, at the current schema version.
export function emptyCache(): SessionCache {
  return { version: CACHE_VERSION, providers: {} }
}
// Finite-number guard — rejects NaN and ±Infinity as well as non-numbers.
function isNum(v: unknown): v is number {
  return typeof v === 'number' && Number.isFinite(v)
}
// True when v is an array whose elements are all strings (empty allowed).
function isStringArray(v: unknown): v is string[] {
  if (!Array.isArray(v)) return false
  return v.every(item => typeof item === 'string')
}
// Accepts a missing field (undefined) or a string.
function isOptionalString(v: unknown): boolean {
  return v === undefined || typeof v === 'string'
}
// Accepts a missing field (undefined) or a finite number.
function isOptionalNum(v: unknown): boolean {
  return v === undefined || isNum(v)
}
// Structural check for a FileFingerprint record loaded from disk.
function validateFingerprint(fp: unknown): fp is FileFingerprint {
  if (!fp || typeof fp !== 'object') return false
  const f = fp as Record<string, unknown>
  return ['dev', 'ino', 'mtimeMs', 'sizeBytes'].every(key => isNum(f[key]))
}
// Structural check for a CachedUsage record: all eight counters must be
// finite numbers.
function validateUsage(u: unknown): u is CachedUsage {
  if (!u || typeof u !== 'object') return false
  const o = u as Record<string, unknown>
  const counters = [
    'inputTokens', 'outputTokens', 'cacheCreationInputTokens', 'cacheReadInputTokens',
    'cachedInputTokens', 'reasoningTokens', 'webSearchRequests', 'cacheCreationOneHourTokens',
  ]
  return counters.every(key => isNum(o[key]))
}
// Deep structural check for a CachedCall loaded from disk.
function validateCall(c: unknown): c is CachedCall {
  if (!c || typeof c !== 'object') return false
  const o = c as Record<string, unknown>
  for (const key of ['provider', 'model', 'deduplicationKey', 'timestamp']) {
    if (typeof o[key] !== 'string') return false
  }
  if (o['speed'] !== 'standard' && o['speed'] !== 'fast') return false
  for (const key of ['tools', 'bashCommands', 'skills']) {
    if (!isStringArray(o[key])) return false
  }
  if (!isOptionalString(o['project']) || !isOptionalString(o['projectPath'])) return false
  return validateUsage(o['usage'])
}
// Structural check for a CachedTurn, recursing into its calls.
function validateTurn(t: unknown): t is CachedTurn {
  if (!t || typeof t !== 'object') return false
  const o = t as Record<string, unknown>
  const stringsOk = ['timestamp', 'sessionId', 'userMessage'].every(key => typeof o[key] === 'string')
  if (!stringsOk) return false
  const calls = o['calls']
  return Array.isArray(calls) && calls.every(validateCall)
}
// Structural check for a CachedFile, recursing into its turns.
function validateCachedFile(f: unknown): f is CachedFile {
  if (!f || typeof f !== 'object') return false
  const o = f as Record<string, unknown>
  if (!validateFingerprint(o['fingerprint'])) return false
  if (!isOptionalNum(o['lastCompleteLineOffset'])) return false
  if (!isOptionalString(o['canonicalCwd'])) return false
  if (!isStringArray(o['mcpInventory'])) return false
  const turns = o['turns']
  return Array.isArray(turns) && turns.every(validateTurn)
}
// Structural check for a ProviderSection; `files` must be a plain
// (non-array) object whose values are all valid CachedFiles.
function validateProviderSection(s: unknown): s is ProviderSection {
  if (!s || typeof s !== 'object') return false
  const o = s as Record<string, unknown>
  if (typeof o['envFingerprint'] !== 'string') return false
  const files = o['files']
  if (!files || typeof files !== 'object' || Array.isArray(files)) return false
  return Object.values(files as Record<string, unknown>).every(validateCachedFile)
}
// Top-level structural check on a loaded cache. Any version mismatch
// invalidates the whole cache rather than attempting migration.
function validateCache(raw: unknown): raw is SessionCache {
  if (!raw || typeof raw !== 'object') return false
  const o = raw as Record<string, unknown>
  if (o['version'] !== CACHE_VERSION) return false
  const providers = o['providers']
  if (!providers || typeof providers !== 'object' || Array.isArray(providers)) return false
  return Object.values(providers as Record<string, unknown>).every(validateProviderSection)
}
// Load and validate the persisted cache. Any read, parse, or shape
// failure degrades to an empty cache so a corrupt file never breaks the CLI.
export async function loadCache(): Promise<SessionCache> {
  try {
    const raw = await readFile(getCachePath(), 'utf-8')
    const parsed: unknown = JSON.parse(raw)
    return validateCache(parsed) ? parsed : emptyCache()
  } catch {
    return emptyCache()
  }
}
// Atomically persist the cache: serialize, write to a unique temp file,
// fsync, then rename over the final path so readers never observe a
// partial file. Throws on failure; callers treat saving as best-effort.
export async function saveCache(cache: SessionCache): Promise<void> {
  const dir = getCacheDir()
  // mkdir with recursive:true is idempotent — no existsSync pre-check
  // needed (the check was a TOCTOU race against concurrent CLI runs).
  await mkdir(dir, { recursive: true })
  const finalPath = getCachePath()
  const tempPath = `${finalPath}.${randomBytes(8).toString('hex')}.tmp`
  const payload = JSON.stringify(cache)
  try {
    const handle = await open(tempPath, 'w', 0o600)
    try {
      await handle.writeFile(payload, { encoding: 'utf-8' })
      await handle.sync() // flush data before the rename makes it visible
    } finally {
      await handle.close()
    }
    await rename(tempPath, finalPath)
  } catch (err) {
    // Remove the temp file on ANY failure path (write, sync, or rename),
    // not just rename — otherwise a failed write leaks the temp file until
    // the age-based cleanup. Unlink itself is best-effort.
    try { await unlink(tempPath) } catch {}
    throw err
  }
}
// ── File Fingerprinting ────────────────────────────────────────────────
// Stat-based identity for change detection: device+inode pin the physical
// file while mtime+size detect content changes. Returns null when the
// file is missing or unreadable.
export async function fingerprintFile(filePath: string): Promise<FileFingerprint | null> {
  try {
    const { dev, ino, mtimeMs, size } = await stat(filePath)
    return { dev, ino, mtimeMs, sizeBytes: size }
  } catch {
    return null
  }
}
// ── Reconciliation ─────────────────────────────────────────────────────
export type ReconcileAction =
| { action: 'unchanged' }
| { action: 'appended'; readFromOffset: number }
| { action: 'modified' }
| { action: 'new' }
// Decide how to treat a file relative to its cached entry:
//  - unchanged: identical fingerprint — reuse cached turns as-is
//  - appended:  same inode, grew in size, offset known — re-read from offset
//  - modified:  anything else (rewrite, truncation, replacement)
//  - new:       no cached entry at all
export function reconcileFile(
  current: FileFingerprint,
  cached: CachedFile | undefined,
): ReconcileAction {
  if (!cached) return { action: 'new' }
  const prev = cached.fingerprint
  const sameInode = prev.dev === current.dev && prev.ino === current.ino
  if (sameInode && prev.mtimeMs === current.mtimeMs && prev.sizeBytes === current.sizeBytes) {
    return { action: 'unchanged' }
  }
  const grew = current.sizeBytes > prev.sizeBytes
  if (sameInode && grew && cached.lastCompleteLineOffset !== undefined) {
    return { action: 'appended', readFromOffset: cached.lastCompleteLineOffset }
  }
  return { action: 'modified' }
}
// ── Dedup Merge ────────────────────────────────────────────────────────
// When appending incremental data, streaming Claude messages can re-emit
// the same dedup key with updated usage. Merge by key: keep the earliest
// timestamp, take incoming usage/tools/bashCommands/skills (latest wins).
export function mergeCallByDedupKey(
  existing: CachedCall,
  incoming: CachedCall,
): CachedCall {
  // Latest re-emission wins for usage/tools/commands/skills; only the
  // timestamp is pinned to whichever of the two is earliest (ISO-8601
  // strings compare correctly as plain strings).
  const earliest = existing.timestamp < incoming.timestamp
    ? existing.timestamp
    : incoming.timestamp
  return { ...incoming, timestamp: earliest }
}
// ── Temp Cleanup ───────────────────────────────────────────────────────
// Delete stale session-cache.json.*.tmp files older than the max age —
// leftovers from crashed saveCache runs. Everything here is best-effort;
// cleanup can never fail the CLI.
export async function cleanupOrphanedTempFiles(): Promise<void> {
  const dir = getCacheDir()
  if (!existsSync(dir)) return
  const cutoff = Date.now() - TEMP_FILE_MAX_AGE_MS
  const prefix = 'session-cache.json.'
  try {
    for (const name of await readdir(dir)) {
      if (!name.startsWith(prefix) || !name.endsWith('.tmp')) continue
      const fullPath = join(dir, name)
      try {
        const s = await stat(fullPath)
        // Strictly older than the cutoff — a temp file this old cannot
        // belong to an in-flight save.
        if (s.mtimeMs < cutoff) await unlink(fullPath)
      } catch {}
    }
  } catch {}
}

View file

@ -83,6 +83,7 @@ export type ParsedApiCall = {
timestamp: string
bashCommands: string[]
deduplicationKey: string
cacheCreationOneHourTokens?: number
}
export type TaskCategory =

View file

@ -117,4 +117,56 @@ describe('readSessionLines', () => {
await gen.next()
await gen.return(undefined)
})
it('reads from startByteOffset, yielding only lines after the offset', async () => {
const content = 'line1\nline2\nline3\n'
const p = await tmpPath(content)
const offset = Buffer.byteLength('line1\n')
const lines: string[] = []
for await (const line of readSessionLines(p, undefined, { startByteOffset: offset })) {
lines.push(line)
}
expect(lines).toEqual(['line2', 'line3'])
})
it('byteOffsetTracker tracks position after last complete newline', async () => {
const content = 'aaa\nbbb\nccc\n'
const p = await tmpPath(content)
const tracker = { lastCompleteLineOffset: 0 }
const lines: string[] = []
for await (const line of readSessionLines(p, undefined, { byteOffsetTracker: tracker })) {
lines.push(line)
}
expect(lines).toEqual(['aaa', 'bbb', 'ccc'])
expect(tracker.lastCompleteLineOffset).toBe(Buffer.byteLength(content))
})
it('byteOffsetTracker accounts for startByteOffset', async () => {
const content = 'line1\nline2\nline3\n'
const p = await tmpPath(content)
const offset = Buffer.byteLength('line1\n')
const tracker = { lastCompleteLineOffset: 0 }
for await (const _line of readSessionLines(p, undefined, { startByteOffset: offset, byteOffsetTracker: tracker })) {}
expect(tracker.lastCompleteLineOffset).toBe(Buffer.byteLength(content))
})
it('byteOffsetTracker excludes trailing partial line (no final newline)', async () => {
const content = 'line1\nline2\npartial'
const p = await tmpPath(content)
const tracker = { lastCompleteLineOffset: 0 }
for await (const _line of readSessionLines(p, undefined, { byteOffsetTracker: tracker })) {}
expect(tracker.lastCompleteLineOffset).toBe(Buffer.byteLength('line1\nline2\n'))
})
it('byteOffsetTracker updates for skipped lines too', async () => {
const content = 'skip-me\nkeep-me\n'
const p = await tmpPath(content)
const tracker = { lastCompleteLineOffset: 0 }
const lines: string[] = []
for await (const line of readSessionLines(p, head => head.includes('skip-me'), { byteOffsetTracker: tracker })) {
lines.push(line)
}
expect(lines).toEqual(['keep-me'])
expect(tracker.lastCompleteLineOffset).toBe(Buffer.byteLength(content))
})
})

509
tests/session-cache.test.ts Normal file
View file

@ -0,0 +1,509 @@
import { afterEach, beforeEach, describe, expect, it } from 'vitest'
import { readFile, rm, writeFile, mkdir } from 'fs/promises'
import { existsSync } from 'fs'
import { tmpdir } from 'os'
import { join } from 'path'
import {
CACHE_VERSION,
type CachedCall,
type CachedFile,
type CachedTurn,
type FileFingerprint,
type SessionCache,
cleanupOrphanedTempFiles,
computeEnvFingerprint,
emptyCache,
fingerprintFile,
loadCache,
mergeCallByDedupKey,
reconcileFile,
saveCache,
} from '../src/session-cache.js'
const TMP_DIR = join(tmpdir(), `codeburn-scache-test-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`)
beforeEach(() => {
process.env['CODEBURN_CACHE_DIR'] = TMP_DIR
})
afterEach(async () => {
delete process.env['CODEBURN_CACHE_DIR']
if (existsSync(TMP_DIR)) await rm(TMP_DIR, { recursive: true })
})
// Build a baseline CachedCall fixture; any field can be overridden.
function makeCall(overrides: Partial<CachedCall> = {}): CachedCall {
  const defaults: CachedCall = {
    provider: 'claude',
    model: 'claude-sonnet-4-20250514',
    usage: {
      inputTokens: 1000,
      outputTokens: 500,
      cacheCreationInputTokens: 0,
      cacheReadInputTokens: 0,
      cachedInputTokens: 0,
      reasoningTokens: 0,
      webSearchRequests: 0,
      cacheCreationOneHourTokens: 0,
    },
    speed: 'standard',
    timestamp: '2026-05-15T10:00:00Z',
    tools: ['Read', 'Edit'],
    bashCommands: [],
    skills: [],
    deduplicationKey: 'msg-abc123',
  }
  return { ...defaults, ...overrides }
}
// Build a baseline CachedTurn fixture containing one default call.
function makeTurn(overrides: Partial<CachedTurn> = {}): CachedTurn {
  const defaults: CachedTurn = {
    timestamp: '2026-05-15T10:00:00Z',
    sessionId: 'sess-1',
    userMessage: 'fix the bug',
    calls: [makeCall()],
  }
  return { ...defaults, ...overrides }
}
// Build a baseline CachedFile fixture with a fixed fingerprint and one turn.
function makeCachedFile(overrides: Partial<CachedFile> = {}): CachedFile {
  const defaults: CachedFile = {
    fingerprint: { dev: 1, ino: 100, mtimeMs: 1000, sizeBytes: 5000 },
    mcpInventory: [],
    turns: [makeTurn()],
  }
  return { ...defaults, ...overrides }
}
// ── emptyCache ─────────────────────────────────────────────────────────
// emptyCache must produce a schema-versioned cache with no providers.
describe('emptyCache', () => {
  it('returns a valid empty cache', () => {
    const fresh = emptyCache()
    expect(fresh.version).toBe(CACHE_VERSION)
    expect(fresh.providers).toEqual({})
  })
})
// ── loadCache / saveCache ──────────────────────────────────────────────
// Persistence round-trip and corruption-handling tests for the disk cache.
// CODEBURN_CACHE_DIR is pointed at TMP_DIR by the beforeEach hook above.
describe('loadCache / saveCache', () => {
  it('returns empty cache when no file exists', async () => {
    const loaded = await loadCache()
    expect(loaded.version).toBe(CACHE_VERSION)
    expect(loaded.providers).toEqual({})
  })
  it('round-trips a cache through save and load', async () => {
    const original: SessionCache = {
      version: CACHE_VERSION,
      providers: {
        claude: {
          envFingerprint: 'abc123',
          files: { '/path/to/session.jsonl': makeCachedFile() },
        },
      },
    }
    await saveCache(original)
    expect(await loadCache()).toEqual(original)
  })
  it('returns empty cache on version mismatch', async () => {
    // Write a structurally-valid cache with an unknown version directly to disk.
    const stale: SessionCache = { version: 999, providers: { claude: { envFingerprint: 'x', files: {} } } }
    await mkdir(TMP_DIR, { recursive: true })
    await writeFile(join(TMP_DIR, 'session-cache.json'), JSON.stringify(stale))
    const loaded = await loadCache()
    expect(loaded.version).toBe(CACHE_VERSION)
    expect(loaded.providers).toEqual({})
  })
  it('returns empty cache on corrupt JSON', async () => {
    await mkdir(TMP_DIR, { recursive: true })
    await writeFile(join(TMP_DIR, 'session-cache.json'), '{broken')
    const loaded = await loadCache()
    expect(loaded.version).toBe(CACHE_VERSION)
    expect(loaded.providers).toEqual({})
  })
  it('atomic write does not leave partial file on error', async () => {
    await saveCache(emptyCache())
    const raw = await readFile(join(TMP_DIR, 'session-cache.json'), 'utf-8')
    expect(JSON.parse(raw)).toEqual(emptyCache())
  })
})
// ── computeEnvFingerprint ──────────────────────────────────────────────
describe('computeEnvFingerprint', () => {
  it('returns stable hash for same env', () => {
    const a = computeEnvFingerprint('claude')
    const b = computeEnvFingerprint('claude')
    expect(a).toBe(b)
    expect(a).toHaveLength(16)
  })
  it('changes when env var changes', () => {
    const before = computeEnvFingerprint('claude')
    const orig = process.env['CLAUDE_CONFIG_DIR']
    process.env['CLAUDE_CONFIG_DIR'] = '/tmp/different'
    try {
      const after = computeEnvFingerprint('claude')
      expect(before).not.toBe(after)
    } finally {
      // Restore even if the call or the expectation throws, so the mutated
      // env var never leaks into subsequent tests.
      if (orig === undefined) delete process.env['CLAUDE_CONFIG_DIR']
      else process.env['CLAUDE_CONFIG_DIR'] = orig
    }
  })
  it('returns stable hash for unknown provider (no env vars)', () => {
    const a = computeEnvFingerprint('unknown-provider')
    const b = computeEnvFingerprint('unknown-provider')
    expect(a).toBe(b)
  })
})
// ── fingerprintFile ────────────────────────────────────────────────────
describe('fingerprintFile', () => {
  it('returns fingerprint for existing file', async () => {
    await mkdir(TMP_DIR, { recursive: true })
    const filePath = join(TMP_DIR, 'test.jsonl')
    await writeFile(filePath, 'line1\nline2\n')
    const fp = await fingerprintFile(filePath)
    // Throwing gives TypeScript a real narrowing, so the accesses below need
    // none of the original's repeated non-null assertions (`fp!`).
    if (fp === null) throw new Error('expected fingerprint for existing file')
    expect(fp.sizeBytes).toBe(12) // 'line1\nline2\n' is 12 bytes
    expect(fp.dev).toBeGreaterThan(0)
    expect(fp.ino).toBeGreaterThan(0)
    expect(fp.mtimeMs).toBeGreaterThan(0)
  })
  it('returns null for non-existent file', async () => {
    const fp = await fingerprintFile('/no/such/file')
    expect(fp).toBeNull()
  })
})
// ── reconcileFile ──────────────────────────────────────────────────────
describe('reconcileFile', () => {
  // Shorthand fixture builder for the fingerprint scenarios below.
  const fp = (dev: number, ino: number, mtimeMs: number, sizeBytes: number): FileFingerprint => ({
    dev,
    ino,
    mtimeMs,
    sizeBytes,
  })
  it('returns "new" when no cached entry', () => {
    expect(reconcileFile(fp(1, 100, 1000, 5000), undefined)).toEqual({ action: 'new' })
  })
  it('returns "unchanged" when all fields match', () => {
    const current = fp(1, 100, 1000, 5000)
    const cached = makeCachedFile({ fingerprint: { ...current } })
    expect(reconcileFile(current, cached)).toEqual({ action: 'unchanged' })
  })
  it('returns "appended" when ino same, size grew, and has lastCompleteLineOffset', () => {
    const cached = makeCachedFile({
      fingerprint: fp(1, 100, 1000, 5000),
      lastCompleteLineOffset: 4500,
    })
    const result = reconcileFile(fp(1, 100, 2000, 8000), cached)
    expect(result).toEqual({ action: 'appended', readFromOffset: 4500 })
  })
  it('returns "modified" when ino changed', () => {
    const cached = makeCachedFile({ fingerprint: fp(1, 100, 1000, 5000) })
    expect(reconcileFile(fp(1, 200, 2000, 5000), cached)).toEqual({ action: 'modified' })
  })
  it('returns "modified" when size shrank', () => {
    const cached = makeCachedFile({
      fingerprint: fp(1, 100, 1000, 5000),
      lastCompleteLineOffset: 4500,
    })
    expect(reconcileFile(fp(1, 100, 2000, 3000), cached)).toEqual({ action: 'modified' })
  })
  it('returns "modified" when same size but different mtime', () => {
    const cached = makeCachedFile({ fingerprint: fp(1, 100, 1000, 5000) })
    expect(reconcileFile(fp(1, 100, 2000, 5000), cached)).toEqual({ action: 'modified' })
  })
  it('returns "modified" for DB provider (no lastCompleteLineOffset) on any fingerprint change', () => {
    const cached = makeCachedFile({ fingerprint: fp(1, 100, 1000, 5000) })
    expect(reconcileFile(fp(1, 100, 2000, 8000), cached)).toEqual({ action: 'modified' })
  })
  it('returns "modified" when dev changed even if ino same and size grew', () => {
    const cached = makeCachedFile({
      fingerprint: fp(1, 100, 1000, 5000),
      lastCompleteLineOffset: 4500,
    })
    expect(reconcileFile(fp(2, 100, 2000, 8000), cached)).toEqual({ action: 'modified' })
  })
})
// ── mergeCallByDedupKey ────────────────────────────────────────────────
describe('mergeCallByDedupKey', () => {
  it('keeps earlier timestamp', () => {
    const older = makeCall({ timestamp: '2026-05-15T10:00:00Z' })
    const newer = makeCall({ timestamp: '2026-05-15T10:01:00Z' })
    expect(mergeCallByDedupKey(older, newer).timestamp).toBe('2026-05-15T10:00:00Z')
  })
  it('takes incoming usage (latest wins)', () => {
    const baseUsage = makeCall().usage
    const older = makeCall({ usage: { ...baseUsage, outputTokens: 100 } })
    const newer = makeCall({ usage: { ...baseUsage, outputTokens: 999 } })
    expect(mergeCallByDedupKey(older, newer).usage.outputTokens).toBe(999)
  })
  it('takes incoming tools (latest wins)', () => {
    const older = makeCall({ tools: ['Read'] })
    const newer = makeCall({ tools: ['Read', 'Edit', 'Bash'] })
    expect(mergeCallByDedupKey(older, newer).tools).toEqual(['Read', 'Edit', 'Bash'])
  })
})
// ── deep validation (loadCache) ────────────────────────────────────────
// Deep structural validation: a malformed node anywhere in the tree must make
// loadCache discard the entire file and fall back to an empty cache.
describe('loadCache validation', () => {
  // Plants arbitrary JSON at the cache path directly, bypassing saveCache so
  // deliberately invalid shapes can reach the loader.
  async function writeRawCache(data: unknown): Promise<void> {
    await mkdir(TMP_DIR, { recursive: true })
    await writeFile(join(TMP_DIR, 'session-cache.json'), JSON.stringify(data))
  }
  it('rejects providers as array', async () => {
    await writeRawCache({ version: CACHE_VERSION, providers: [] })
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects provider section missing envFingerprint', async () => {
    await writeRawCache({ version: CACHE_VERSION, providers: { claude: { files: {} } } })
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects provider section with files as array', async () => {
    await writeRawCache({ version: CACHE_VERSION, providers: { claude: { envFingerprint: 'x', files: [] } } })
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects file with invalid fingerprint (missing ino)', async () => {
    await writeRawCache({
      version: CACHE_VERSION,
      providers: { claude: { envFingerprint: 'x', files: {
        '/f': { fingerprint: { dev: 1, mtimeMs: 1, sizeBytes: 1 }, mcpInventory: [], turns: [] },
      } } },
    })
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects file with non-numeric fingerprint field', async () => {
    await writeRawCache({
      version: CACHE_VERSION,
      providers: { claude: { envFingerprint: 'x', files: {
        '/f': { fingerprint: { dev: 1, ino: 'bad', mtimeMs: 1, sizeBytes: 1 }, mcpInventory: [], turns: [] },
      } } },
    })
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects turn with missing sessionId', async () => {
    const badTurn = { timestamp: 'x', userMessage: 'y', calls: [] }
    await writeRawCache({
      version: CACHE_VERSION,
      providers: { claude: { envFingerprint: 'x', files: {
        '/f': { fingerprint: { dev: 1, ino: 2, mtimeMs: 3, sizeBytes: 4 }, mcpInventory: [], turns: [badTurn] },
      } } },
    })
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects call with missing usage object', async () => {
    const badCall = { provider: 'claude', model: 'm', deduplicationKey: 'k', timestamp: 't', tools: [], bashCommands: [], skills: [] }
    const turn = { timestamp: 'x', sessionId: 's', userMessage: 'y', calls: [badCall] }
    await writeRawCache({
      version: CACHE_VERSION,
      providers: { claude: { envFingerprint: 'x', files: {
        '/f': { fingerprint: { dev: 1, ino: 2, mtimeMs: 3, sizeBytes: 4 }, mcpInventory: [], turns: [turn] },
      } } },
    })
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects call with NaN in usage', async () => {
    const badUsage = { inputTokens: NaN, outputTokens: 0, cacheCreationInputTokens: 0, cacheReadInputTokens: 0, cachedInputTokens: 0, reasoningTokens: 0, webSearchRequests: 0, cacheCreationOneHourTokens: 0 }
    const call = { provider: 'claude', model: 'm', usage: badUsage, deduplicationKey: 'k', timestamp: 't', tools: [], bashCommands: [], skills: [], speed: 'standard' }
    const turn = { timestamp: 'x', sessionId: 's', userMessage: 'y', calls: [call] }
    await writeRawCache({
      version: CACHE_VERSION,
      providers: { claude: { envFingerprint: 'x', files: {
        '/f': { fingerprint: { dev: 1, ino: 2, mtimeMs: 3, sizeBytes: 4 }, mcpInventory: [], turns: [turn] },
      } } },
    })
    expect((await loadCache()).providers).toEqual({})
  })
  // Minimal call object that should pass validation; tests below perturb one
  // field at a time via wrapCall.
  function validCallJson() {
    return {
      provider: 'claude', model: 'm', deduplicationKey: 'k', timestamp: 't', speed: 'standard',
      tools: ['Read'], bashCommands: ['ls'], skills: [],
      usage: { inputTokens: 1, outputTokens: 1, cacheCreationInputTokens: 0, cacheReadInputTokens: 0, cachedInputTokens: 0, reasoningTokens: 0, webSearchRequests: 0, cacheCreationOneHourTokens: 0 },
    }
  }
  // Full cache document whose single call is validCallJson() merged with the
  // given override — used to probe call-level validation.
  function wrapCall(callOverride: Record<string, unknown>) {
    return {
      version: CACHE_VERSION,
      providers: { claude: { envFingerprint: 'x', files: {
        '/f': { fingerprint: { dev: 1, ino: 2, mtimeMs: 3, sizeBytes: 4 }, mcpInventory: [], turns: [
          { timestamp: 'x', sessionId: 's', userMessage: 'y', calls: [{ ...validCallJson(), ...callOverride }] },
        ] },
      } } },
    }
  }
  // Full cache document with the given override merged into the file entry —
  // used to probe file-level validation.
  function wrapFile(fileOverride: Record<string, unknown>) {
    return {
      version: CACHE_VERSION,
      providers: { claude: { envFingerprint: 'x', files: {
        '/f': { fingerprint: { dev: 1, ino: 2, mtimeMs: 3, sizeBytes: 4 }, mcpInventory: [], turns: [], ...fileOverride },
      } } },
    }
  }
  it('rejects tools containing non-string element', async () => {
    await writeRawCache(wrapCall({ tools: ['Read', 42] }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects bashCommands containing object element', async () => {
    await writeRawCache(wrapCall({ bashCommands: [{}] }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects skills containing null element', async () => {
    await writeRawCache(wrapCall({ skills: [null] }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects invalid speed value', async () => {
    await writeRawCache(wrapCall({ speed: 'turbo' }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects non-string project', async () => {
    await writeRawCache(wrapCall({ project: 123 }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects non-string projectPath', async () => {
    await writeRawCache(wrapCall({ projectPath: true }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects mcpInventory containing non-string element', async () => {
    await writeRawCache(wrapFile({ mcpInventory: ['valid', 99] }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects non-numeric lastCompleteLineOffset', async () => {
    await writeRawCache(wrapFile({ lastCompleteLineOffset: 'bad' }))
    expect((await loadCache()).providers).toEqual({})
  })
  // JSON cannot represent NaN — JSON.stringify(NaN) emits null — so null is the
  // on-disk stand-in for a non-finite offset and must be rejected like any
  // other non-number.
  it('rejects NaN lastCompleteLineOffset', async () => {
    await writeRawCache(wrapFile({ lastCompleteLineOffset: null }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('rejects non-string canonicalCwd', async () => {
    await writeRawCache(wrapFile({ canonicalCwd: 42 }))
    expect((await loadCache()).providers).toEqual({})
  })
  it('accepts optional fields when absent', async () => {
    const cache: SessionCache = {
      version: CACHE_VERSION,
      providers: { claude: { envFingerprint: 'x', files: {
        '/f': { fingerprint: { dev: 1, ino: 2, mtimeMs: 3, sizeBytes: 4 }, mcpInventory: [], turns: [] },
      } } },
    }
    await writeRawCache(cache)
    expect((await loadCache())).toEqual(cache)
  })
  it('accepts a fully valid cache with all fields populated', async () => {
    const cache: SessionCache = {
      version: CACHE_VERSION,
      providers: {
        claude: {
          envFingerprint: 'abc',
          files: { '/f': makeCachedFile() },
        },
      },
    }
    await writeRawCache(cache)
    const loaded = await loadCache()
    expect(loaded).toEqual(cache)
  })
})
// ── cleanupOrphanedTempFiles ───────────────────────────────────────────
describe('cleanupOrphanedTempFiles', () => {
  // Backdate a file's mtime ten minutes so it falls past the 5-minute cutoff.
  async function backdate(path: string): Promise<void> {
    const { utimes } = await import('fs/promises')
    const tenMinutesAgo = new Date(Date.now() - 10 * 60 * 1000)
    await utimes(path, tenMinutesAgo, tenMinutesAgo)
  }
  it('removes .tmp files older than 5 minutes', async () => {
    await mkdir(TMP_DIR, { recursive: true })
    const staleTmp = join(TMP_DIR, 'session-cache.json.abc123.tmp')
    await writeFile(staleTmp, 'stale')
    await backdate(staleTmp)
    await cleanupOrphanedTempFiles()
    expect(existsSync(staleTmp)).toBe(false)
  })
  it('preserves recent .tmp files', async () => {
    await mkdir(TMP_DIR, { recursive: true })
    const freshTmp = join(TMP_DIR, 'session-cache.json.def456.tmp')
    await writeFile(freshTmp, 'recent')
    await cleanupOrphanedTempFiles()
    expect(existsSync(freshTmp)).toBe(true)
  })
  it('ignores .tmp files from other caches', async () => {
    await mkdir(TMP_DIR, { recursive: true })
    const foreignTmp = join(TMP_DIR, 'codex-results.json.abc123.tmp')
    await writeFile(foreignTmp, 'other cache temp')
    await backdate(foreignTmp)
    await cleanupOrphanedTempFiles()
    expect(existsSync(foreignTmp)).toBe(true)
  })
  it('does not fail when cache dir does not exist', async () => {
    // No assertion on purpose: the test passes iff the call does not throw.
    process.env['CODEBURN_CACHE_DIR'] = '/no/such/dir'
    await cleanupOrphanedTempFiles()
  })
})