refactor: harden open-sse services, eliminate any casts, add dashboard pages

- Replace all `as any` casts in MCP advancedTools with typed helpers (toRecord, toString, toNumber)
- Harden open-sse services: rateLimitManager, sessionManager, usage, roleNormalizer, signatureCache, comboMetrics
- Improve responseSanitizer and responseTranslator type safety
- Remove deprecated openai-responses request translator
- Add dashboard pages: /a2a, /mcp, /auto-combo with live data
- Improve error/loading/not-found pages with consistent design
- Add root loading.tsx and typecheck tsconfig variants
- Add check-t11-any-budget.mjs audit script
This commit is contained in:
diegosouzapw 2026-03-04 19:38:34 -03:00
parent 85c6b63c8f
commit 33dfbf0177
39 changed files with 1528 additions and 825 deletions

View file

@ -9,17 +9,6 @@
* 4. Converts developer role system for non-OpenAI providers
*/
// ── Standard OpenAI ChatCompletion fields ──────────────────────────────────
const ALLOWED_TOP_LEVEL_FIELDS = new Set([
"id",
"object",
"created",
"model",
"choices",
"usage",
"system_fingerprint",
]);
const ALLOWED_USAGE_FIELDS = new Set([
"prompt_tokens",
"completion_tokens",
@ -28,16 +17,20 @@ const ALLOWED_USAGE_FIELDS = new Set([
"completion_tokens_details",
]);
const ALLOWED_MESSAGE_FIELDS = new Set([
"role",
"content",
"tool_calls",
"function_call",
"refusal",
"reasoning_content",
]);
type JsonRecord = Record<string, unknown>;
const ALLOWED_CHOICE_FIELDS = new Set(["index", "message", "delta", "finish_reason", "logprobs"]);
/**
 * Narrow an unknown value to a plain JSON object.
 * Returns null for null/undefined, primitives, and arrays.
 */
function toRecord(value: unknown): JsonRecord | null {
  const isPlainObject =
    typeof value === "object" && value !== null && !Array.isArray(value);
  return isPlainObject ? (value as JsonRecord) : null;
}
/** Return the value when it is a string; any other type yields undefined. */
function toString(value: unknown): string | undefined {
  if (typeof value !== "string") {
    return undefined;
  }
  return value;
}
/** Return the value when it is a finite number; NaN/Infinity and non-numbers yield undefined. */
function toNumber(value: unknown): number | undefined {
  if (typeof value === "number" && Number.isFinite(value)) {
    return value;
  }
  return undefined;
}
// ── Think tag regex ────────────────────────────────────────────────────────
// Matches <think>...</think> blocks (greedy, dotAll)
@ -81,33 +74,34 @@ export function extractThinkingFromContent(text: string): {
* Sanitize a non-streaming OpenAI ChatCompletion response.
* Strips non-standard fields and normalizes required fields.
*/
export function sanitizeOpenAIResponse(body: any): any {
if (!body || typeof body !== "object") return body;
export function sanitizeOpenAIResponse(body: unknown): unknown {
const bodyRecord = toRecord(body);
if (!bodyRecord) return body;
// Build sanitized response with only allowed top-level fields
const sanitized: Record<string, any> = {};
const sanitized: JsonRecord = {};
// Ensure required fields exist
sanitized.id = normalizeResponseId(body.id);
sanitized.object = body.object || "chat.completion";
sanitized.created = body.created || Math.floor(Date.now() / 1000);
sanitized.model = body.model || "unknown";
sanitized.id = normalizeResponseId(bodyRecord.id);
sanitized.object = toString(bodyRecord.object) || "chat.completion";
sanitized.created = toNumber(bodyRecord.created) ?? Math.floor(Date.now() / 1000);
sanitized.model = toString(bodyRecord.model) || "unknown";
// Sanitize choices
if (Array.isArray(body.choices)) {
sanitized.choices = body.choices.map((choice: any, idx: number) => sanitizeChoice(choice, idx));
if (Array.isArray(bodyRecord.choices)) {
sanitized.choices = bodyRecord.choices.map((choice, idx) => sanitizeChoice(choice, idx));
} else {
sanitized.choices = [];
}
// Sanitize usage
if (body.usage && typeof body.usage === "object") {
sanitized.usage = sanitizeUsage(body.usage);
if (bodyRecord.usage !== undefined) {
sanitized.usage = sanitizeUsage(bodyRecord.usage);
}
// Keep system_fingerprint if present (it's a valid OpenAI field)
if (body.system_fingerprint) {
sanitized.system_fingerprint = body.system_fingerprint;
if (bodyRecord.system_fingerprint) {
sanitized.system_fingerprint = bodyRecord.system_fingerprint;
}
return sanitized;
@ -116,23 +110,32 @@ export function sanitizeOpenAIResponse(body: any): any {
/**
* Sanitize a single choice object.
*/
function sanitizeChoice(choice: any, defaultIndex: number): any {
const sanitized: Record<string, any> = {
index: choice.index ?? defaultIndex,
finish_reason: choice.finish_reason || null,
function sanitizeChoice(choice: unknown, defaultIndex: number): JsonRecord {
const choiceRecord = toRecord(choice);
const sanitized: JsonRecord = {
index: defaultIndex,
finish_reason: null,
};
// Sanitize message (non-streaming) or delta (streaming)
if (choice.message) {
sanitized.message = sanitizeMessage(choice.message);
if (choiceRecord?.index !== undefined) {
sanitized.index = choiceRecord.index;
}
if (choice.delta) {
sanitized.delta = sanitizeMessage(choice.delta);
if (choiceRecord?.finish_reason !== undefined) {
sanitized.finish_reason = choiceRecord.finish_reason;
}
// Sanitize message (non-streaming) or delta (streaming)
if (choiceRecord?.message !== undefined) {
sanitized.message = sanitizeMessage(choiceRecord.message);
}
if (choiceRecord?.delta !== undefined) {
sanitized.delta = sanitizeMessage(choiceRecord.delta);
}
// Keep logprobs if present
if (choice.logprobs !== undefined) {
sanitized.logprobs = choice.logprobs;
if (choiceRecord?.logprobs !== undefined) {
sanitized.logprobs = choiceRecord.logprobs;
}
return sanitized;
@ -141,41 +144,42 @@ function sanitizeChoice(choice: any, defaultIndex: number): any {
/**
* Sanitize a message object, extracting <think> tags if present.
*/
function sanitizeMessage(msg: any): any {
if (!msg || typeof msg !== "object") return msg;
function sanitizeMessage(msg: unknown): unknown {
const msgRecord = toRecord(msg);
if (!msgRecord) return msg;
const sanitized: Record<string, any> = {};
const sanitized: JsonRecord = {};
// Copy only allowed fields
if (msg.role) sanitized.role = msg.role;
if (msg.refusal !== undefined) sanitized.refusal = msg.refusal;
if (msgRecord.role) sanitized.role = msgRecord.role;
if (msgRecord.refusal !== undefined) sanitized.refusal = msgRecord.refusal;
// Handle content — extract <think> tags
if (typeof msg.content === "string") {
const { content, thinking } = extractThinkingFromContent(msg.content);
if (typeof msgRecord.content === "string") {
const { content, thinking } = extractThinkingFromContent(msgRecord.content);
sanitized.content = content;
// Set reasoning_content from <think> tags (if not already set)
if (thinking && !msg.reasoning_content) {
if (thinking && !msgRecord.reasoning_content) {
sanitized.reasoning_content = thinking;
}
} else if (msg.content !== undefined) {
sanitized.content = msg.content;
} else if (msgRecord.content !== undefined) {
sanitized.content = msgRecord.content;
}
// Preserve existing reasoning_content (from providers that natively support it)
if (msg.reasoning_content && !sanitized.reasoning_content) {
sanitized.reasoning_content = msg.reasoning_content;
if (msgRecord.reasoning_content && !sanitized.reasoning_content) {
sanitized.reasoning_content = msgRecord.reasoning_content;
}
// Preserve tool_calls
if (msg.tool_calls) {
sanitized.tool_calls = msg.tool_calls;
if (msgRecord.tool_calls) {
sanitized.tool_calls = msgRecord.tool_calls;
}
// Preserve function_call (legacy)
if (msg.function_call) {
sanitized.function_call = msg.function_call;
if (msgRecord.function_call) {
sanitized.function_call = msgRecord.function_call;
}
return sanitized;
@ -184,22 +188,25 @@ function sanitizeMessage(msg: any): any {
/**
 * Sanitize usage object — keep only standard fields.
*/
function sanitizeUsage(usage: any): any {
if (!usage || typeof usage !== "object") return usage;
function sanitizeUsage(usage: unknown): unknown {
const usageRecord = toRecord(usage);
if (!usageRecord) return usage;
const sanitized: Record<string, any> = {};
const sanitized: JsonRecord = {};
for (const key of ALLOWED_USAGE_FIELDS) {
if (usage[key] !== undefined) {
sanitized[key] = usage[key];
if (usageRecord[key] !== undefined) {
sanitized[key] = usageRecord[key];
}
}
// Ensure required fields
if (sanitized.prompt_tokens === undefined) sanitized.prompt_tokens = 0;
if (sanitized.completion_tokens === undefined) sanitized.completion_tokens = 0;
if (sanitized.total_tokens === undefined) {
sanitized.total_tokens = sanitized.prompt_tokens + sanitized.completion_tokens;
}
const promptTokens = toNumber(sanitized.prompt_tokens) ?? 0;
const completionTokens = toNumber(sanitized.completion_tokens) ?? 0;
const totalTokens = toNumber(sanitized.total_tokens) ?? promptTokens + completionTokens;
sanitized.prompt_tokens = promptTokens;
sanitized.completion_tokens = completionTokens;
sanitized.total_tokens = totalTokens;
return sanitized;
}
@ -207,7 +214,7 @@ function sanitizeUsage(usage: any): any {
/**
* Normalize response ID to use chatcmpl- prefix.
*/
function normalizeResponseId(id: any): string {
function normalizeResponseId(id: unknown): string {
if (!id || typeof id !== "string") {
return `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 29)}`;
}
@ -221,48 +228,60 @@ function normalizeResponseId(id: any): string {
* Sanitize a streaming SSE chunk for passthrough mode.
 * Lighter than full sanitization — only strips problematic extra fields.
*/
export function sanitizeStreamingChunk(parsed: any): any {
if (!parsed || typeof parsed !== "object") return parsed;
export function sanitizeStreamingChunk(parsed: unknown): unknown {
const parsedRecord = toRecord(parsed);
if (!parsedRecord) return parsed;
// Build sanitized chunk
const sanitized: Record<string, any> = {};
const sanitized: JsonRecord = {};
// Keep only standard fields
if (parsed.id !== undefined) sanitized.id = parsed.id;
sanitized.object = parsed.object || "chat.completion.chunk";
if (parsed.created !== undefined) sanitized.created = parsed.created;
if (parsed.model !== undefined) sanitized.model = parsed.model;
if (parsedRecord.id !== undefined) sanitized.id = parsedRecord.id;
sanitized.object = toString(parsedRecord.object) || "chat.completion.chunk";
if (parsedRecord.created !== undefined) sanitized.created = parsedRecord.created;
if (parsedRecord.model !== undefined) sanitized.model = parsedRecord.model;
// Sanitize choices with delta
if (Array.isArray(parsed.choices)) {
sanitized.choices = parsed.choices.map((choice: any) => {
const c: Record<string, any> = {
index: choice.index ?? 0,
};
if (choice.delta !== undefined) {
c.delta = {};
const delta = choice.delta;
if (delta.role !== undefined) c.delta.role = delta.role;
if (delta.content !== undefined) c.delta.content = delta.content;
if (delta.reasoning_content !== undefined)
c.delta.reasoning_content = delta.reasoning_content;
if (delta.tool_calls !== undefined) c.delta.tool_calls = delta.tool_calls;
if (delta.function_call !== undefined) c.delta.function_call = delta.function_call;
if (Array.isArray(parsedRecord.choices)) {
sanitized.choices = parsedRecord.choices.map((choice) => {
const c: JsonRecord = { index: 0 };
const choiceRecord = toRecord(choice);
if (!choiceRecord) return c;
c.index = toNumber(choiceRecord.index) ?? 0;
if (choiceRecord.delta !== undefined) {
const deltaRecord = toRecord(choiceRecord.delta);
if (deltaRecord) {
const delta: JsonRecord = {};
if (deltaRecord.role !== undefined) delta.role = deltaRecord.role;
if (deltaRecord.content !== undefined) delta.content = deltaRecord.content;
if (deltaRecord.reasoning_content !== undefined) {
delta.reasoning_content = deltaRecord.reasoning_content;
}
if (deltaRecord.tool_calls !== undefined) delta.tool_calls = deltaRecord.tool_calls;
if (deltaRecord.function_call !== undefined)
delta.function_call = deltaRecord.function_call;
c.delta = delta;
} else {
c.delta = choiceRecord.delta;
}
}
if (choice.finish_reason !== undefined) c.finish_reason = choice.finish_reason;
if (choice.logprobs !== undefined) c.logprobs = choice.logprobs;
if (choiceRecord.finish_reason !== undefined) c.finish_reason = choiceRecord.finish_reason;
if (choiceRecord.logprobs !== undefined) c.logprobs = choiceRecord.logprobs;
return c;
});
}
// Sanitize usage if present
if (parsed.usage && typeof parsed.usage === "object") {
sanitized.usage = sanitizeUsage(parsed.usage);
if (parsedRecord.usage !== undefined) {
sanitized.usage = sanitizeUsage(parsedRecord.usage);
}
// Keep system_fingerprint if present
if (parsed.system_fingerprint) {
sanitized.system_fingerprint = parsed.system_fingerprint;
if (parsedRecord.system_fingerprint) {
sanitized.system_fingerprint = parsedRecord.system_fingerprint;
}
return sanitized;

View file

@ -1,10 +1,34 @@
import { FORMATS } from "../translator/formats.ts";
type JsonRecord = Record<string, unknown>;
/**
 * Coerce an unknown value to a JSON record.
 * Non-objects, null, and arrays all collapse to an empty record so callers
 * can index the result without guarding.
 */
function toRecord(value: unknown): JsonRecord {
  if (value !== null && typeof value === "object" && !Array.isArray(value)) {
    return value as JsonRecord;
  }
  return {};
}
/** Return the string value, or the fallback (default "") for non-string input. */
function toString(value: unknown, fallback = ""): string {
  if (typeof value === "string") {
    return value;
  }
  return fallback;
}
/**
 * Coerce an unknown value to a finite number.
 * Accepts numbers directly and parses non-blank numeric strings; anything
 * else (including NaN/Infinity and blank strings) yields the fallback.
 */
function toNumber(value: unknown, fallback = 0): number {
  let candidate = Number.NaN;
  if (typeof value === "number") {
    candidate = value;
  } else if (typeof value === "string" && value.trim().length > 0) {
    candidate = Number(value);
  }
  return Number.isFinite(candidate) ? candidate : fallback;
}
/**
* Translate non-streaming response to OpenAI format
* Handles different provider response formats (Gemini, Claude, etc.)
*/
export function translateNonStreamingResponse(responseBody, targetFormat, sourceFormat) {
export function translateNonStreamingResponse(
responseBody: unknown,
targetFormat: string,
sourceFormat: string
): unknown {
// If already in source format (usually OpenAI), return as-is
if (targetFormat === sourceFormat || targetFormat === FORMATS.OPENAI) {
return responseBody;
@ -12,51 +36,60 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
// Handle OpenAI Responses API format
if (targetFormat === FORMATS.OPENAI_RESPONSES) {
const responseRoot = toRecord(responseBody);
const response =
responseBody?.object === "response" ? responseBody : responseBody?.response || responseBody;
const output = Array.isArray(response?.output) ? response.output : [];
const usage = response?.usage || responseBody?.usage;
responseRoot.object === "response"
? responseRoot
: toRecord(responseRoot.response ?? responseRoot);
const output = Array.isArray(response.output) ? response.output : [];
const usage = toRecord(response.usage ?? responseRoot.usage);
let textContent = "";
let reasoningContent = "";
const toolCalls = [];
const toolCalls: JsonRecord[] = [];
for (const item of output) {
if (!item || typeof item !== "object") continue;
const itemObj = toRecord(item);
if (item.type === "message" && Array.isArray(item.content)) {
for (const part of item.content) {
if (itemObj.type === "message" && Array.isArray(itemObj.content)) {
for (const part of itemObj.content) {
if (!part || typeof part !== "object") continue;
if (part.type === "output_text" && typeof part.text === "string") {
textContent += part.text;
} else if (part.type === "summary_text" && typeof part.text === "string") {
reasoningContent += part.text;
const partObj = toRecord(part);
if (partObj.type === "output_text" && typeof partObj.text === "string") {
textContent += partObj.text;
} else if (partObj.type === "summary_text" && typeof partObj.text === "string") {
reasoningContent += partObj.text;
}
}
} else if (item.type === "reasoning" && Array.isArray(item.summary)) {
for (const part of item.summary) {
if (part?.type === "summary_text" && typeof part.text === "string") {
reasoningContent += part.text;
} else if (itemObj.type === "reasoning" && Array.isArray(itemObj.summary)) {
for (const part of itemObj.summary) {
const partObj = toRecord(part);
if (partObj.type === "summary_text" && typeof partObj.text === "string") {
reasoningContent += partObj.text;
}
}
} else if (item.type === "function_call") {
const callId = item.call_id || item.id || `call_${Date.now()}_${toolCalls.length}`;
} else if (itemObj.type === "function_call") {
const callId =
toString(itemObj.call_id) ||
toString(itemObj.id) ||
`call_${Date.now()}_${toolCalls.length}`;
const fnArgs =
typeof item.arguments === "string"
? item.arguments
: JSON.stringify(item.arguments || {});
typeof itemObj.arguments === "string"
? itemObj.arguments
: JSON.stringify(itemObj.arguments || {});
toolCalls.push({
id: callId,
type: "function",
function: {
name: item.name || "",
name: toString(itemObj.name),
arguments: fnArgs,
},
});
}
}
const message: Record<string, any> = { role: "assistant" };
const message: JsonRecord = { role: "assistant" };
if (textContent) {
message.content = textContent;
}
@ -70,12 +103,12 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
message.content = "";
}
const createdAt = Number(response?.created_at) || Math.floor(Date.now() / 1000);
const model = response?.model || responseBody?.model || "openai-responses";
const createdAt = toNumber(response.created_at, Math.floor(Date.now() / 1000));
const model = toString(response.model || responseRoot.model, "openai-responses");
const finishReason = toolCalls.length > 0 ? "tool_calls" : "stop";
const result: Record<string, any> = {
id: `chatcmpl-${response?.id || Date.now()}`,
const result: JsonRecord = {
id: `chatcmpl-${toString(response.id, String(Date.now()))}`,
object: "chat.completion",
created: createdAt,
model,
@ -88,28 +121,31 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
],
};
if (usage && typeof usage === "object") {
const inputTokens = usage.input_tokens || 0;
const outputTokens = usage.output_tokens || 0;
if (Object.keys(usage).length > 0) {
const inputTokens = toNumber(usage.input_tokens, 0);
const outputTokens = toNumber(usage.output_tokens, 0);
result.usage = {
prompt_tokens: inputTokens,
completion_tokens: outputTokens,
total_tokens: inputTokens + outputTokens,
};
if (usage.reasoning_tokens > 0) {
result.usage.completion_tokens_details = {
reasoning_tokens: usage.reasoning_tokens,
if (toNumber(usage.reasoning_tokens, 0) > 0) {
(result.usage as JsonRecord).completion_tokens_details = {
reasoning_tokens: toNumber(usage.reasoning_tokens, 0),
};
}
if (usage.cache_read_input_tokens > 0 || usage.cache_creation_input_tokens > 0) {
result.usage.prompt_tokens_details = {};
if (usage.cache_read_input_tokens > 0) {
result.usage.prompt_tokens_details.cached_tokens = usage.cache_read_input_tokens;
if (
toNumber(usage.cache_read_input_tokens, 0) > 0 ||
toNumber(usage.cache_creation_input_tokens, 0) > 0
) {
(result.usage as JsonRecord).prompt_tokens_details = {};
const promptDetails = (result.usage as JsonRecord).prompt_tokens_details as JsonRecord;
if (toNumber(usage.cache_read_input_tokens, 0) > 0) {
promptDetails.cached_tokens = toNumber(usage.cache_read_input_tokens, 0);
}
if (usage.cache_creation_input_tokens > 0) {
result.usage.prompt_tokens_details.cache_creation_tokens =
usage.cache_creation_input_tokens;
if (toNumber(usage.cache_creation_input_tokens, 0) > 0) {
promptDetails.cache_creation_tokens = toNumber(usage.cache_creation_input_tokens, 0);
}
}
}
@ -123,38 +159,42 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
targetFormat === FORMATS.ANTIGRAVITY ||
targetFormat === FORMATS.GEMINI_CLI
) {
const response = responseBody.response || responseBody;
if (!response?.candidates?.[0]) {
const root = toRecord(responseBody);
const response = toRecord(root.response ?? root);
const candidates = Array.isArray(response.candidates) ? response.candidates : [];
if (!candidates[0]) {
return responseBody; // Can't translate, return raw
}
const candidate = response.candidates[0];
const content = candidate.content;
const usage = response.usageMetadata || responseBody.usageMetadata;
const candidate = toRecord(candidates[0]);
const content = toRecord(candidate.content);
const usage = toRecord(response.usageMetadata ?? root.usageMetadata);
// Build message content
let textContent = "";
const toolCalls = [];
const toolCalls: JsonRecord[] = [];
let reasoningContent = "";
if (content?.parts) {
if (Array.isArray(content.parts)) {
for (const part of content.parts) {
const partObj = toRecord(part);
// Handle thinking/reasoning
if (part.thought === true && part.text) {
reasoningContent += part.text;
if (partObj.thought === true && typeof partObj.text === "string") {
reasoningContent += partObj.text;
}
// Regular text
else if (part.text !== undefined) {
textContent += part.text;
else if (typeof partObj.text === "string") {
textContent += partObj.text;
}
// Function calls
if (part.functionCall) {
if (partObj.functionCall) {
const fn = toRecord(partObj.functionCall);
toolCalls.push({
id: `call_${part.functionCall.name}_${Date.now()}_${toolCalls.length}`,
id: `call_${toString(fn.name, "unknown")}_${Date.now()}_${toolCalls.length}`,
type: "function",
function: {
name: part.functionCall.name,
arguments: JSON.stringify(part.functionCall.args || {}),
name: toString(fn.name),
arguments: JSON.stringify(fn.args || {}),
},
});
}
@ -162,7 +202,7 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
}
// Build OpenAI format message
const message: Record<string, any> = { role: "assistant" };
const message: JsonRecord = { role: "assistant" };
if (textContent) {
message.content = textContent;
}
@ -178,16 +218,18 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
}
// Determine finish reason
let finishReason = (candidate.finishReason || "stop").toLowerCase();
let finishReason = toString(candidate.finishReason, "stop").toLowerCase();
if (finishReason === "stop" && toolCalls.length > 0) {
finishReason = "tool_calls";
}
const result: Record<string, any> = {
id: `chatcmpl-${response.responseId || Date.now()}`,
const result: JsonRecord = {
id: `chatcmpl-${toString(response.responseId, String(Date.now()))}`,
object: "chat.completion",
created: Math.floor(new Date(response.createTime || Date.now()).getTime() / 1000),
model: response.modelVersion || "gemini",
created: Math.floor(
new Date(toString(response.createTime, String(Date.now()))).getTime() / 1000
),
model: toString(response.modelVersion, "gemini"),
choices: [
{
index: 0,
@ -198,15 +240,15 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
};
// Add usage if available (match streaming translator: add thoughtsTokenCount to prompt_tokens)
if (usage) {
if (Object.keys(usage).length > 0) {
result.usage = {
prompt_tokens: (usage.promptTokenCount || 0) + (usage.thoughtsTokenCount || 0),
completion_tokens: usage.candidatesTokenCount || 0,
total_tokens: usage.totalTokenCount || 0,
prompt_tokens: toNumber(usage.promptTokenCount, 0) + toNumber(usage.thoughtsTokenCount, 0),
completion_tokens: toNumber(usage.candidatesTokenCount, 0),
total_tokens: toNumber(usage.totalTokenCount, 0),
};
if (usage.thoughtsTokenCount > 0) {
result.usage.completion_tokens_details = {
reasoning_tokens: usage.thoughtsTokenCount,
if (toNumber(usage.thoughtsTokenCount, 0) > 0) {
(result.usage as JsonRecord).completion_tokens_details = {
reasoning_tokens: toNumber(usage.thoughtsTokenCount, 0),
};
}
}
@ -216,32 +258,35 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
// Handle Claude format
if (targetFormat === FORMATS.CLAUDE) {
if (!responseBody.content) {
const root = toRecord(responseBody);
const contentBlocks = Array.isArray(root.content) ? root.content : [];
if (contentBlocks.length === 0) {
return responseBody; // Can't translate, return raw
}
let textContent = "";
let thinkingContent = "";
const toolCalls = [];
const toolCalls: JsonRecord[] = [];
for (const block of responseBody.content) {
if (block.type === "text") {
textContent += block.text;
} else if (block.type === "thinking") {
thinkingContent += block.thinking || "";
} else if (block.type === "tool_use") {
for (const block of contentBlocks) {
const blockObj = toRecord(block);
if (blockObj.type === "text") {
textContent += toString(blockObj.text);
} else if (blockObj.type === "thinking") {
thinkingContent += toString(blockObj.thinking);
} else if (blockObj.type === "tool_use") {
toolCalls.push({
id: block.id,
id: toString(blockObj.id, `call_${Date.now()}_${toolCalls.length}`),
type: "function",
function: {
name: block.name,
arguments: JSON.stringify(block.input || {}),
name: toString(blockObj.name),
arguments: JSON.stringify(blockObj.input || {}),
},
});
}
}
const message: Record<string, any> = { role: "assistant" };
const message: JsonRecord = { role: "assistant" };
if (textContent) {
message.content = textContent;
}
@ -255,15 +300,15 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
message.content = "";
}
let finishReason = responseBody.stop_reason || "stop";
let finishReason = toString(root.stop_reason, "stop");
if (finishReason === "end_turn") finishReason = "stop";
if (finishReason === "tool_use") finishReason = "tool_calls";
const result: Record<string, any> = {
id: `chatcmpl-${responseBody.id || Date.now()}`,
const result: JsonRecord = {
id: `chatcmpl-${toString(root.id, String(Date.now()))}`,
object: "chat.completion",
created: Math.floor(Date.now() / 1000),
model: responseBody.model || "claude",
model: toString(root.model, "claude"),
choices: [
{
index: 0,
@ -273,12 +318,14 @@ export function translateNonStreamingResponse(responseBody, targetFormat, source
],
};
if (responseBody.usage) {
const usage = toRecord(root.usage);
if (Object.keys(usage).length > 0) {
const promptTokens = toNumber(usage.input_tokens, 0);
const completionTokens = toNumber(usage.output_tokens, 0);
result.usage = {
prompt_tokens: responseBody.usage.input_tokens || 0,
completion_tokens: responseBody.usage.output_tokens || 0,
total_tokens:
(responseBody.usage.input_tokens || 0) + (responseBody.usage.output_tokens || 0),
prompt_tokens: promptTokens,
completion_tokens: completionTokens,
total_tokens: promptTokens + completionTokens,
};
}

View file

@ -10,13 +10,48 @@ import { hashInput, summarizeOutput } from "./schemas/audit.ts";
// ============ Database Connection ============
let db: any = null;
/**
 * Structural subset of a better-sqlite3 prepared statement — only the three
 * methods this module calls. Rows default to `unknown` so call sites must
 * narrow query results explicitly instead of assuming a shape.
 */
interface StatementLike<TRow = unknown> {
  get: (...params: unknown[]) => TRow | undefined;
  all: (...params: unknown[]) => TRow[];
  run: (...params: unknown[]) => unknown;
}
/** Structural subset of a better-sqlite3 Database — just `prepare`. */
interface AuditDatabase {
  prepare: <TRow = unknown>(sql: string) => StatementLike<TRow>;
}
/**
 * Raw aggregate row from the 24-hour stats query.
 * Fields stay `unknown` because SQLite may return NULL/strings; values are
 * narrowed to numbers at the use site.
 */
interface AuditStatsRow {
  total: unknown;
  successRate: unknown;
  avgDuration: unknown;
}
/** Raw row from the top-tools GROUP BY query; narrowed at the use site. */
interface AuditTopToolRow {
  tool: unknown;
  count: unknown;
}
let db: AuditDatabase | null = null;
/**
 * Coerce an unknown SQLite value to a finite number.
 * Numbers pass through; non-blank numeric strings are parsed; everything
 * else (blank strings, NaN, Infinity, null) yields the fallback.
 */
function toNumber(value: unknown, fallback = 0): number {
  if (typeof value === "number" && Number.isFinite(value)) {
    return value;
  }
  if (typeof value === "string" && value.trim().length > 0) {
    const parsed = Number(value);
    if (Number.isFinite(parsed)) {
      return parsed;
    }
  }
  return fallback;
}
/** Return the string value itself, or an empty string for any non-string input. */
function toString(value: unknown): string {
  if (typeof value === "string") {
    return value;
  }
  return "";
}
/**
* Lazy-load the database connection.
* Uses the same SQLite database as the main OmniRoute app.
*/
async function getDb(): Promise<any> {
async function getDb(): Promise<AuditDatabase | null> {
if (db) return db;
try {
@ -34,11 +69,14 @@ async function getDb(): Promise<any> {
return null;
}
const Database = (await import("better-sqlite3")).default;
const Database = (await import("better-sqlite3")).default as unknown as new (
dbPath: string
) => AuditDatabase;
db = new Database(dbPath);
return db;
} catch (err) {
console.error("[MCP Audit] Failed to connect to database:", err);
} catch (err: unknown) {
const message = err instanceof Error ? err.message : String(err);
console.error("[MCP Audit] Failed to connect to database:", message);
return null;
}
}
@ -81,9 +119,10 @@ export async function logToolCall(
success ? 1 : 0,
errorCode || null
);
} catch (err) {
} catch (err: unknown) {
// Never let audit failure break tool execution
console.error("[MCP Audit] Failed to log:", err);
const message = err instanceof Error ? err.message : String(err);
console.error("[MCP Audit] Failed to log:", message);
}
}
@ -125,7 +164,7 @@ export async function getAuditStats(): Promise<{
FROM mcp_tool_audit
WHERE created_at > datetime('now', '-24 hours')`
)
.get() as any;
.get() as AuditStatsRow | undefined;
const topTools = database
.prepare(
@ -136,13 +175,16 @@ export async function getAuditStats(): Promise<{
ORDER BY count DESC
LIMIT 10`
)
.all() as any[];
.all() as AuditTopToolRow[];
return {
totalCalls: stats?.total || 0,
successRate: stats?.successRate || 0,
avgDurationMs: stats?.avgDuration || 0,
topTools: topTools || [],
totalCalls: toNumber(stats?.total, 0),
successRate: toNumber(stats?.successRate, 0),
avgDurationMs: toNumber(stats?.avgDuration, 0),
topTools: (topTools || []).map((entry) => ({
tool: toString(entry.tool),
count: toNumber(entry.count, 0),
})),
};
} catch {
return { totalCalls: 0, successRate: 0, avgDurationMs: 0, topTools: [] };

View file

@ -1,6 +1,6 @@
/**
 * OmniRoute MCP Advanced Tools — 8 intelligence tools that differentiate
* OmniRoute from any other AI gateway.
* OmniRoute from all other AI gateways.
*
* Tools:
 * 1. omniroute_simulate_route — Dry-run routing simulation
@ -34,12 +34,59 @@ async function apiFetch(path: string, options: RequestInit = {}): Promise<unknow
return response.json();
}
function normalizeCombosResponse(raw: unknown): any[] {
if (Array.isArray(raw)) return raw;
if (raw && typeof raw === "object" && Array.isArray((raw as any).combos)) {
return (raw as any).combos;
}
return [];
// Generic JSON-shaped object used by the narrowing helpers below.
type JsonRecord = Record<string, unknown>;
/**
 * Normalized model entry extracted from a combo definition.
 * `inputCostPer1M` is presumably the input-token cost per million tokens
 * (defaulted to 3.0 when absent) — TODO confirm against the pricing source.
 */
interface ComboModel {
  provider: string;
  model: string;
  inputCostPer1M: number;
}
/** Type guard: true only for plain (non-null, non-array) objects. */
function isRecord(value: unknown): value is JsonRecord {
  if (value === null || value === undefined) {
    return false;
  }
  return typeof value === "object" && Array.isArray(value) === false;
}
/** Coerce an unknown value to a record; non-objects and arrays yield an empty object. */
function toRecord(value: unknown): JsonRecord {
  const record =
    value !== null && typeof value === "object" && !Array.isArray(value)
      ? (value as JsonRecord)
      : null;
  return record ?? {};
}
/** Keep only the plain-object entries of an array; any non-array input yields []. */
function toArrayOfRecords(value: unknown): JsonRecord[] {
  if (!Array.isArray(value)) {
    return [];
  }
  return value.filter(
    (entry): entry is JsonRecord =>
      !!entry && typeof entry === "object" && !Array.isArray(entry)
  );
}
/** Return the value if it is a string; otherwise return the fallback (default ""). */
function toString(value: unknown, fallback = ""): string {
  const isString = typeof value === "string";
  return isString ? (value as string) : fallback;
}
/**
 * Coerce an unknown value to a finite number, mirroring the other service
 * helpers: numbers pass through, non-blank numeric strings are parsed, and
 * everything else (blank strings, NaN, Infinity) yields the fallback.
 */
function toNumber(value: unknown, fallback = 0): number {
  let candidate: number;
  switch (typeof value) {
    case "number":
      candidate = value;
      break;
    case "string":
      candidate = value.trim().length > 0 ? Number(value) : Number.NaN;
      break;
    default:
      candidate = Number.NaN;
  }
  return Number.isFinite(candidate) ? candidate : fallback;
}
/** Return the boolean value itself; any non-boolean input yields the fallback (default false). */
function toBoolean(value: unknown, fallback = false): boolean {
  if (typeof value === "boolean") {
    return value;
  }
  return fallback;
}
/**
 * Extract and normalize the model list from a combo record.
 * Prefers a top-level `models` array; falls back to the nested
 * `data.models` shape when the top-level list is absent or empty.
 * Missing fields get safe defaults (provider "unknown", cost 3.0).
 */
function getComboModels(combo: JsonRecord): ComboModel[] {
  const topLevel = toArrayOfRecords(combo.models);
  const nested = toArrayOfRecords(toRecord(combo.data).models);
  const entries = topLevel.length > 0 ? topLevel : nested;
  return entries.map((entry) => ({
    provider: toString(entry.provider, "unknown"),
    model: toString(entry.model, ""),
    inputCostPer1M: toNumber(entry.inputCostPer1M, 3.0),
  }));
}
/**
 * Normalize a combos API payload to an array of plain records.
 * Accepts either a bare array or a `{ combos: [...] }` wrapper; entries
 * that are not plain objects are dropped, and any other shape yields [].
 */
function normalizeCombosResponse(raw: unknown): JsonRecord[] {
  const keepRecords = (items: unknown[]): JsonRecord[] =>
    items.filter(
      (item): item is JsonRecord =>
        !!item && typeof item === "object" && !Array.isArray(item)
    );
  if (Array.isArray(raw)) {
    return keepRecords(raw);
  }
  const wrapper =
    raw && typeof raw === "object" && !Array.isArray(raw) ? (raw as JsonRecord) : {};
  return Array.isArray(wrapper.combos) ? keepRecords(wrapper.combos) : [];
}
// ============ In-Memory State ============
@ -87,7 +134,7 @@ export async function handleSimulateRoute(args: {
]);
const combos = combosRaw.status === "fulfilled" ? normalizeCombosResponse(combosRaw.value) : [];
const health = healthRaw.status === "fulfilled" ? (healthRaw.value as any) : {};
const health = healthRaw.status === "fulfilled" ? toRecord(healthRaw.value) : {};
const quota =
quotaRaw.status === "fulfilled"
? normalizeQuotaResponse(quotaRaw.value)
@ -95,8 +142,10 @@ export async function handleSimulateRoute(args: {
// Find target combo
const targetCombo = args.combo
? combos.find((c: any) => c.id === args.combo || c.name === args.combo)
: combos.find((c: any) => c.enabled !== false);
? combos.find(
(combo) => toString(combo.id) === args.combo || toString(combo.name) === args.combo
)
: combos.find((combo) => combo.enabled !== false);
if (!targetCombo) {
return {
@ -107,31 +156,31 @@ export async function handleSimulateRoute(args: {
};
}
const models = targetCombo.models || targetCombo.data?.models || [];
const breakers = health?.circuitBreakers || [];
const models = getComboModels(targetCombo);
const breakers = toArrayOfRecords(health.circuitBreakers);
const providers = quota.providers;
// Simulate path
const simulatedPath = models.map((m: any, idx: number) => {
const cb = breakers.find((b: any) => b.provider === m.provider);
const q = providers.find((p: any) => p.provider === m.provider);
const estimatedCost = (args.promptTokenEstimate / 1_000_000) * (m.inputCostPer1M || 3.0);
const simulatedPath = models.map((model, idx: number) => {
const cb = breakers.find((breaker) => toString(breaker.provider) === model.provider);
const q = providers.find((providerEntry) => providerEntry.provider === model.provider);
const estimatedCost = (args.promptTokenEstimate / 1_000_000) * model.inputCostPer1M;
return {
provider: m.provider,
model: m.model || args.model,
provider: model.provider,
model: model.model || args.model,
probability: idx === 0 ? 0.85 : 0.15 / Math.max(models.length - 1, 1),
estimatedCost: Math.round(estimatedCost * 10000) / 10000,
healthStatus: cb?.state || "CLOSED",
healthStatus: toString(cb?.state, "CLOSED"),
quotaAvailable: q?.percentRemaining ?? 100,
};
});
const costs = simulatedPath.map((p: any) => p.estimatedCost);
const costs = simulatedPath.map((pathEntry) => pathEntry.estimatedCost);
const result = {
simulatedPath,
fallbackTree: {
primary: simulatedPath[0]?.provider || "unknown",
fallbacks: simulatedPath.slice(1).map((p: any) => p.provider),
fallbacks: simulatedPath.slice(1).map((pathEntry) => pathEntry.provider),
worstCaseCost: Math.max(...costs, 0),
bestCaseCost: Math.min(...costs, 0),
},
@ -156,8 +205,8 @@ export async function handleSetBudgetGuard(args: {
// Get current session cost
let spent = 0;
try {
const analytics = (await apiFetch("/api/usage/analytics?period=session")) as any;
spent = analytics?.totalCost || 0;
const analytics = toRecord(await apiFetch("/api/usage/analytics?period=session"));
spent = toNumber(analytics.totalCost, 0);
} catch {
/* ignore if analytics not available */
}
@ -246,7 +295,10 @@ export async function handleTestCombo(args: { comboId: string; testPrompt: strin
try {
// Get combo details
const combos = normalizeCombosResponse(await apiFetch("/api/combos"));
const combo = combos.find((c: any) => c.id === args.comboId || c.name === args.comboId);
const combo = combos.find(
(comboEntry) =>
toString(comboEntry.id) === args.comboId || toString(comboEntry.name) === args.comboId
);
if (!combo) {
return {
content: [
@ -259,37 +311,40 @@ export async function handleTestCombo(args: { comboId: string; testPrompt: strin
};
}
const models = combo.models || combo.data?.models || [];
const models = getComboModels(combo);
const prompt = (args.testPrompt || "Say hello").slice(0, 200);
// Test each provider in parallel
const results = await Promise.allSettled(
models.map(async (m: any) => {
models.map(async (model) => {
const providerStart = Date.now();
try {
const resp = (await apiFetch("/v1/chat/completions", {
method: "POST",
body: JSON.stringify({
model: m.model || "auto",
messages: [{ role: "user", content: prompt }],
max_tokens: 50,
stream: false,
"x-provider": m.provider,
}),
})) as any;
const resp = toRecord(
await apiFetch("/v1/chat/completions", {
method: "POST",
body: JSON.stringify({
model: model.model || "auto",
messages: [{ role: "user", content: prompt }],
max_tokens: 50,
stream: false,
"x-provider": model.provider,
}),
})
);
const usage = toRecord(resp.usage);
return {
provider: m.provider,
model: m.model || resp?.model || "unknown",
provider: model.provider,
model: model.model || toString(resp.model, "unknown"),
success: true,
latencyMs: Date.now() - providerStart,
cost: resp?.cost || 0,
tokenCount: (resp?.usage?.prompt_tokens || 0) + (resp?.usage?.completion_tokens || 0),
cost: toNumber(resp.cost, 0),
tokenCount: toNumber(usage.prompt_tokens, 0) + toNumber(usage.completion_tokens, 0),
};
} catch (err) {
return {
provider: m.provider,
model: m.model || "unknown",
provider: model.provider,
model: model.model || "unknown",
success: false,
latencyMs: Date.now() - providerStart,
cost: 0,
@ -351,27 +406,29 @@ export async function handleGetProviderMetrics(args: { provider: string }) {
apiFetch(`/api/usage/analytics?period=session&provider=${encodeURIComponent(args.provider)}`),
]);
const health = healthRaw.status === "fulfilled" ? (healthRaw.value as any) : {};
const health = healthRaw.status === "fulfilled" ? toRecord(healthRaw.value) : {};
const quota =
quotaRaw.status === "fulfilled"
? normalizeQuotaResponse(quotaRaw.value, { provider: args.provider })
: normalizeQuotaResponse({});
const analytics = analyticsRaw.status === "fulfilled" ? (analyticsRaw.value as any) : {};
const analytics = analyticsRaw.status === "fulfilled" ? toRecord(analyticsRaw.value) : {};
const cb = (health.circuitBreakers || []).find((b: any) => b.provider === args.provider);
const cb = toArrayOfRecords(health.circuitBreakers).find(
(breaker) => toString(breaker.provider) === args.provider
);
const providerQuota = quota.providers.find((p) => p.provider === args.provider) || null;
const result = {
provider: args.provider,
successRate: analytics?.successRate ?? 1.0,
requestCount: analytics?.requestCount ?? 0,
avgLatencyMs: analytics?.avgLatencyMs ?? 0,
p50LatencyMs: analytics?.p50LatencyMs ?? 0,
p95LatencyMs: analytics?.p95LatencyMs ?? 0,
p99LatencyMs: analytics?.p99LatencyMs ?? 0,
errorRate: analytics?.errorRate ?? 0,
lastError: analytics?.lastError || null,
circuitBreakerState: cb?.state || "CLOSED",
successRate: toNumber(analytics.successRate, 1.0),
requestCount: toNumber(analytics.requestCount, 0),
avgLatencyMs: toNumber(analytics.avgLatencyMs, 0),
p50LatencyMs: toNumber(analytics.p50LatencyMs, 0),
p95LatencyMs: toNumber(analytics.p95LatencyMs, 0),
p99LatencyMs: toNumber(analytics.p99LatencyMs, 0),
errorRate: toNumber(analytics.errorRate, 0),
lastError: toString(analytics.lastError) || null,
circuitBreakerState: toString(cb?.state, "CLOSED"),
quotaInfo: providerQuota
? {
used: providerQuota.quotaUsed,
@ -399,7 +456,7 @@ export async function handleBestComboForTask(args: {
try {
const fitness = TASK_FITNESS[args.taskType] || TASK_FITNESS.coding;
const combos = normalizeCombosResponse(await apiFetch("/api/combos"));
const enabledCombos = combos.filter((c: any) => c.enabled !== false);
const enabledCombos = combos.filter((combo) => combo.enabled !== false);
if (enabledCombos.length === 0) {
return {
@ -411,18 +468,18 @@ export async function handleBestComboForTask(args: {
}
// Score combos by task fitness
const scored = enabledCombos.map((c: any) => {
const models = c.models || c.data?.models || [];
const scored = enabledCombos.map((combo) => {
const models = getComboModels(combo);
let score = 0;
// Provider preference scoring
for (const m of models) {
const prefIdx = fitness.preferred.indexOf(m.provider);
for (const model of models) {
const prefIdx = fitness.preferred.indexOf(model.provider);
if (prefIdx >= 0) score += (fitness.preferred.length - prefIdx) * 10;
}
// Name-based trait scoring
const name = (c.name || "").toLowerCase();
const name = toString(combo.name).toLowerCase();
for (const trait of fitness.traits) {
if (name.includes(trait)) score += 5;
}
@ -430,9 +487,9 @@ export async function handleBestComboForTask(args: {
// Check if it's a free combo
const isFree =
name.includes("free") ||
models.every((m: any) => (m.provider || "").toLowerCase().includes("free"));
models.every((model) => model.provider.toLowerCase().includes("free"));
return { combo: c, score, isFree };
return { combo, score, isFree };
});
scored.sort((a, b) => b.score - a.score);
@ -477,9 +534,11 @@ export async function handleExplainRoute(args: { requestId: string }) {
const start = Date.now();
try {
// Query routing_decisions table via API
let decision: any = null;
let decision: JsonRecord | null = null;
try {
decision = await apiFetch(`/api/routing/decisions/${encodeURIComponent(args.requestId)}`);
decision = toRecord(
await apiFetch(`/api/routing/decisions/${encodeURIComponent(args.requestId)}`)
);
} catch {
// Fall back to a generic explanation
}
@ -537,28 +596,34 @@ export async function handleExplainRoute(args: { requestId: string }) {
export async function handleGetSessionSnapshot() {
const start = Date.now();
try {
const analytics = (await apiFetch("/api/usage/analytics?period=session").catch(
() => ({})
)) as any;
const analytics = toRecord(
await apiFetch("/api/usage/analytics?period=session").catch(() => ({}))
);
const tokenCount = toRecord(analytics.tokenCount);
const byModel = toArrayOfRecords(analytics.byModel);
const byProvider = toArrayOfRecords(analytics.byProvider);
const result = {
sessionStart: analytics?.sessionStart || new Date().toISOString(),
duration: analytics?.duration || "unknown",
requestCount: analytics?.requestCount || 0,
costTotal: analytics?.totalCost || 0,
sessionStart: toString(analytics.sessionStart, new Date().toISOString()),
duration: toString(analytics.duration, "unknown"),
requestCount: toNumber(analytics.requestCount, 0),
costTotal: toNumber(analytics.totalCost, 0),
tokenCount: {
prompt: analytics?.tokenCount?.prompt || 0,
completion: analytics?.tokenCount?.completion || 0,
prompt: toNumber(tokenCount.prompt, 0),
completion: toNumber(tokenCount.completion, 0),
},
topModels:
analytics?.byModel?.slice(0, 5).map((m: any) => ({ model: m.model, count: m.requests })) ||
[],
topProviders:
analytics?.byProvider
?.slice(0, 5)
.map((p: any) => ({ provider: p.name, count: p.requests })) || [],
errors: analytics?.errorCount || 0,
fallbacks: analytics?.fallbackCount || 0,
topModels: byModel
.slice(0, 5)
.map((model) => ({
model: toString(model.model, "unknown"),
count: toNumber(model.requests, 0),
})),
topProviders: byProvider.slice(0, 5).map((provider) => ({
provider: toString(provider.name, "unknown"),
count: toNumber(provider.requests, 0),
})),
errors: toNumber(analytics.errorCount, 0),
fallbacks: toNumber(analytics.fallbackCount, 0),
budgetGuard: activeBudgetGuard
? {
active: true,

View file

@ -37,7 +37,7 @@ function ensureCleanupTimer() {
}
}, 15_000);
if (typeof _cleanupTimer === "object" && "unref" in _cleanupTimer) {
(_cleanupTimer as any).unref(); // Don't prevent process exit (Node.js only)
(_cleanupTimer as { unref?: () => void }).unref?.(); // Don't prevent process exit (Node.js only)
}
} catch {
// Cloudflare Workers may not support setInterval outside handlers — skip cleanup timer
@ -516,7 +516,7 @@ export function applyErrorState(account, status, errorText, provider = null) {
* @param {object} account
* @returns {number} score 0 = unhealthy, 100 = perfectly healthy
*/
export function getAccountHealth(account, model?: any) {
export function getAccountHealth(account, model?: unknown) {
if (!account) return 0;
let score = 100;
score -= (account.backoffLevel || 0) * 10;

View file

@ -43,7 +43,12 @@ export function selectAccountP2C(accounts, model = null) {
* @param {string} [model] - Model name
* @returns {{ account: object|null, state: object }}
*/
export function selectAccount(accounts, strategy = "fill-first", state: any = {}, model = null) {
export function selectAccount(
accounts,
strategy = "fill-first",
state: { lastIndex?: number } = {},
model = null
) {
if (!accounts || accounts.length === 0) {
return { account: null, state };
}

View file

@ -106,6 +106,20 @@ export function resetStats(): void {
// ── Detection ───────────────────────────────────────────────────────────────
/** Minimal shape of a chat message inspected by the background-task heuristics. */
interface BackgroundMessage {
  role?: string;
  content?: unknown;
}

/** Minimal request-body shape: chat-style `messages` or `input` message arrays. */
interface BackgroundTaskBody {
  messages?: BackgroundMessage[];
  input?: BackgroundMessage[];
}

/** Coerce an unknown value into a message array; non-arrays become []. */
function toMessageArray(value: unknown): BackgroundMessage[] {
  if (!Array.isArray(value)) {
    return [];
  }
  return value as BackgroundMessage[];
}
/**
* Check if a request is a background/utility task.
*
@ -114,10 +128,11 @@ export function resetStats(): void {
* @returns {boolean} True if the request looks like a background task
*/
export function isBackgroundTask(
body: any,
body: BackgroundTaskBody | unknown,
headers: Record<string, string> | null = null
): boolean {
if (!body || typeof body !== "object") return false;
const typedBody = body as BackgroundTaskBody;
// 1. Check explicit header
if (headers) {
@ -127,11 +142,13 @@ export function isBackgroundTask(
}
// 2. Check system prompt for background task patterns
const messages = body.messages || body.input || [];
const messages = toMessageArray(typedBody.messages ?? typedBody.input ?? []);
if (!Array.isArray(messages) || messages.length === 0) return false;
// Find system message
const systemMsg = messages.find((m: any) => m.role === "system" || m.role === "developer");
const systemMsg = messages.find(
(message: BackgroundMessage) => message.role === "system" || message.role === "developer"
);
if (!systemMsg) return false;
const systemContent =
@ -148,7 +165,7 @@ export function isBackgroundTask(
// 3. Additional heuristic: background tasks typically have very few messages
// (system + 1-2 user messages)
const userMessages = messages.filter((m: any) => m.role === "user");
const userMessages = messages.filter((message: BackgroundMessage) => message.role === "user");
if (userMessages.length > 3) return false; // Too many turns for a background task
return true;

View file

@ -27,7 +27,7 @@ const DEFAULT_COMBO_CONFIG = {
* @param {string} [provider] - Optional provider to apply provider-level overrides
* @returns {Object} Resolved config
*/
export function resolveComboConfig(combo, settings, provider?: any) {
export function resolveComboConfig(combo, settings, provider?: string | null) {
const global = settings?.comboDefaults || {};
const providerOverride = provider ? settings?.providerOverrides?.[provider] || {} : {};
const comboConfig = combo?.config || {};

View file

@ -4,8 +4,41 @@
* Provides API for reading metrics from the dashboard
*/
/** Raw per-model counters accumulated for one model inside a combo. */
interface ModelMetrics {
  requests: number;
  successes: number;
  failures: number;
  // Sum of latencies across all requests; divide by `requests` for the average.
  totalLatencyMs: number;
  // Outcome of the most recent request, or null if the model was never used.
  lastStatus: "ok" | "error" | null;
  // Timestamp string of the most recent request, or null if never used.
  lastUsedAt: string | null;
}

/** Raw aggregate counters for a combo, as stored in the in-memory map. */
interface ComboMetricsEntry {
  totalRequests: number;
  totalSuccesses: number;
  totalFailures: number;
  // Count of fallback hops taken while serving this combo's requests.
  totalFallbacks: number;
  totalLatencyMs: number;
  // Routing strategy recorded for this combo (e.g. "priority" or "weighted").
  strategy: string;
  lastUsedAt: string | null;
  // Per-model breakdown keyed by model identifier.
  byModel: Record<string, ModelMetrics>;
}

/**
 * Read-only view returned to callers: the raw entry plus derived rates and
 * averages, with the same derivations applied to each per-model entry.
 */
interface ComboMetricsView extends ComboMetricsEntry {
  avgLatencyMs: number;
  successRate: number;
  fallbackRate: number;
  byModel: Record<
    string,
    ModelMetrics & {
      avgLatencyMs: number;
      successRate: number;
    }
  >;
}
// In-memory store
const metrics = new Map();
const metrics = new Map<string, ComboMetricsEntry>();
/**
* Record a combo request result
@ -18,10 +51,15 @@ const metrics = new Map();
* @param {string} [options.strategy] - "priority" or "weighted"
*/
export function recordComboRequest(
comboName,
modelStr,
{ success, latencyMs, fallbackCount = 0, strategy = "priority" }
) {
comboName: string,
modelStr: string | null,
{
success,
latencyMs,
fallbackCount = 0,
strategy = "priority",
}: { success: boolean; latencyMs: number; fallbackCount?: number; strategy?: string }
): void {
if (!metrics.has(comboName)) {
metrics.set(comboName, {
totalRequests: 0,
@ -35,7 +73,8 @@ export function recordComboRequest(
});
}
const combo: any = metrics.get(comboName);
const combo = metrics.get(comboName);
if (!combo) return;
combo.totalRequests++;
combo.totalLatencyMs += latencyMs;
combo.totalFallbacks += fallbackCount;
@ -80,8 +119,8 @@ export function recordComboRequest(
* @param {string} comboName
* @returns {Object|null}
*/
export function getComboMetrics(comboName) {
const combo: any = metrics.get(comboName);
export function getComboMetrics(comboName: string): ComboMetricsView | null {
const combo = metrics.get(comboName);
if (!combo) return null;
return {
@ -93,7 +132,7 @@ export function getComboMetrics(comboName) {
fallbackRate:
combo.totalRequests > 0 ? Math.round((combo.totalFallbacks / combo.totalRequests) * 100) : 0,
byModel: Object.fromEntries(
Object.entries(combo.byModel).map(([model, m]: [string, any]) => [
Object.entries(combo.byModel).map(([model, m]) => [
model,
{
...m,
@ -109,8 +148,8 @@ export function getComboMetrics(comboName) {
* Get metrics for all combos
* @returns {Object} Map of comboName metrics
*/
export function getAllComboMetrics() {
const result: Record<string, any> = {};
export function getAllComboMetrics(): Record<string, ComboMetricsView | null> {
const result: Record<string, ComboMetricsView | null> = {};
for (const [name] of metrics) {
result[name] = getComboMetrics(name);
}
@ -120,13 +159,13 @@ export function getAllComboMetrics() {
/**
* Reset metrics for a specific combo
*/
export function resetComboMetrics(comboName) {
export function resetComboMetrics(comboName: string): void {
metrics.delete(comboName);
}
/**
* Reset all combo metrics
*/
export function resetAllComboMetrics() {
export function resetAllComboMetrics(): void {
metrics.clear();
}

View file

@ -34,7 +34,13 @@ export function getTokenLimit(provider, model = null) {
const lower = model.toLowerCase();
if (lower.includes("claude")) return DEFAULT_LIMITS.claude;
if (lower.includes("gemini")) return DEFAULT_LIMITS.gemini;
if (lower.includes("gpt") || lower.includes("o1") || lower.includes("o3") || lower.includes("o4")) return DEFAULT_LIMITS.openai;
if (
lower.includes("gpt") ||
lower.includes("o1") ||
lower.includes("o3") ||
lower.includes("o4")
)
return DEFAULT_LIMITS.openai;
}
return DEFAULT_LIMITS[provider] || DEFAULT_LIMITS.default;
}
@ -51,7 +57,10 @@ export function getTokenLimit(provider, model = null) {
* @param {object} options - { provider?, model?, maxTokens?, reserveTokens? }
* @returns {{ body: object, compressed: boolean, stats: object }}
*/
export function compressContext(body, options: any = {}) {
export function compressContext(
body,
options: { provider?: string; model?: string; maxTokens?: number; reserveTokens?: number } = {}
) {
if (!body || !body.messages || !Array.isArray(body.messages)) {
return { body, compressed: false, stats: {} };
}
@ -123,7 +132,11 @@ function trimToolMessages(messages, maxChars) {
return {
...msg,
content: msg.content.map((block) => {
if (block.type === "tool_result" && typeof block.content === "string" && block.content.length > maxChars) {
if (
block.type === "tool_result" &&
typeof block.content === "string" &&
block.content.length > maxChars
) {
return { ...block, content: block.content.slice(0, maxChars) + "\n... [truncated]" };
}
return block;

View file

@ -156,7 +156,12 @@ export function getProviderFallbackCount(provider) {
}
// Build provider URL
export function buildProviderUrl(provider, model, stream = true, options: any = {}) {
export function buildProviderUrl(
provider,
model,
stream = true,
options: { baseUrl?: string; baseUrlIndex?: number } = {}
) {
if (isOpenAICompatible(provider)) {
const apiType = getOpenAICompatibleType(provider);
const baseUrl = options?.baseUrl || OPENAI_COMPATIBLE_DEFAULTS.baseUrl;

View file

@ -13,14 +13,46 @@ import { parseRetryAfterFromBody, lockModel } from "./accountFallback.ts";
import { getProviderCategory } from "../config/providerRegistry.ts";
import { DEFAULT_API_LIMITS } from "../config/constants.ts";
/** A rate limit learned from provider response headers, persisted per connection. */
interface LearnedLimitEntry {
  provider: string;
  connectionId: string;
  // Epoch ms of the last time this entry was refreshed.
  lastUpdated: number;
  limit?: number;
  remaining?: number;
  minTime?: number;
}

/** Subset of Bottleneck settings this module updates at runtime. */
interface LimiterUpdateSettings {
  minTime: number;
  reservoir?: number | null;
  reservoirRefreshAmount?: number | null;
  reservoirRefreshInterval?: number | null;
}

type JsonRecord = Record<string, unknown>;

/** Narrow an unknown value to a plain object record; anything else becomes {}. */
function toRecord(value: unknown): JsonRecord {
  if (value && typeof value === "object" && !Array.isArray(value)) {
    return value as JsonRecord;
  }
  return {};
}

/**
 * Coerce an unknown value to a finite number. Numeric strings (non-blank)
 * are parsed; anything non-finite resolves to `fallback`.
 */
function toNumber(value: unknown, fallback = 0): number {
  let parsed = Number.NaN;
  if (typeof value === "number") {
    parsed = value;
  } else if (typeof value === "string" && value.trim().length > 0) {
    parsed = Number(value);
  }
  return Number.isFinite(parsed) ? parsed : fallback;
}
// Store limiters keyed by "provider:connectionId" (and optionally ":model")
const limiters = new Map();
const limiters = new Map<string, Bottleneck>();
// Store connections that have rate limit protection enabled
const enabledConnections = new Set();
const enabledConnections = new Set<string>();
// Store learned limits for persistence (debounced)
const learnedLimits: Record<string, any> = {};
const learnedLimits: Record<string, LearnedLimitEntry> = {};
let persistTimer: ReturnType<typeof setTimeout> | null = null;
const PERSIST_DEBOUNCE_MS = 60_000; // Debounce persistence to every 60s max
@ -50,22 +82,25 @@ export async function initializeRateLimits() {
let explicitCount = 0;
let autoCount = 0;
for (const conn of connections) {
if (conn.rateLimitProtection) {
for (const connRaw of connections as unknown[]) {
const conn = toRecord(connRaw);
const connectionId = typeof conn.id === "string" ? conn.id : "";
const provider = typeof conn.provider === "string" ? conn.provider : "";
const isActive = conn.isActive === true;
const rateLimitProtection = conn.rateLimitProtection === true;
if (!connectionId || !provider) continue;
if (rateLimitProtection) {
// Explicitly enabled by user
enabledConnections.add(conn.id);
enabledConnections.add(connectionId);
explicitCount++;
} else if (
conn.provider &&
getProviderCategory(conn.provider) === "apikey" &&
conn.isActive
) {
} else if (getProviderCategory(provider) === "apikey" && isActive) {
// Auto-enable for API key providers (safety net)
enabledConnections.add(conn.id);
enabledConnections.add(connectionId);
autoCount++;
// Create a pre-configured limiter with conservative defaults
const key = `${conn.provider}:${conn.id}`;
const key = `${provider}:${connectionId}`;
if (!limiters.has(key)) {
limiters.set(
key,
@ -160,7 +195,7 @@ function getLimiter(provider, connectionId, model = null) {
* @param {string} connectionId - Connection ID
* @param {string} model - Model name (optional, for per-model limits)
* @param {Function} fn - The async function to execute (e.g., executor.execute)
* @returns {Promise<any>} Result of fn()
* @returns {Promise<unknown>} Result of fn()
*/
export async function withRateLimit(provider, connectionId, model, fn) {
if (!enabledConnections.has(connectionId)) {
@ -301,7 +336,7 @@ export function updateFromHeaders(provider, connectionId, headers, status, model
// Calculate optimal minTime from RPM limit
const minTime = Math.max(0, Math.floor(60000 / limit) - 10); // Small buffer
const updates: Record<string, any> = { minTime };
const updates: LimiterUpdateSettings = { minTime };
// If remaining is low (< 10% of limit), set reservoir to throttle immediately
if (!isNaN(remaining)) {
@ -359,7 +394,7 @@ export function getRateLimitStatus(provider, connectionId) {
* Get all active limiters status (for dashboard overview)
*/
export function getAllRateLimitStatus() {
const result: Record<string, any> = {};
const result: Record<string, { queued: number; running: number; executing: number }> = {};
for (const [key, limiter] of limiters) {
const counts = limiter.counts();
result[key] = {
@ -383,7 +418,11 @@ export function getLearnedLimits() {
/**
* Record a learned limit for debounced persistence.
*/
function recordLearnedLimit(provider: string, connectionId: string, limits: any) {
function recordLearnedLimit(
provider: string,
connectionId: string,
limits: Partial<Omit<LearnedLimitEntry, "provider" | "connectionId" | "lastUpdated">>
) {
const key = `${provider}:${connectionId}`;
learnedLimits[key] = {
...limits,
@ -417,23 +456,38 @@ async function loadPersistedLimits() {
const { getSettings } = await import("@/lib/db/settings");
const settings = await getSettings();
const raw = settings?.learnedRateLimits;
if (!raw) return;
if (typeof raw !== "string" || raw.trim().length === 0) return;
const parsed = JSON.parse(raw);
const parsed = toRecord(JSON.parse(raw) as unknown);
let count = 0;
for (const [key, data] of Object.entries<any>(parsed)) {
for (const [key, dataRaw] of Object.entries(parsed)) {
const data = toRecord(dataRaw);
const lastUpdated = toNumber(data.lastUpdated, 0);
// Skip stale entries (older than 24h)
if (data.lastUpdated && Date.now() - data.lastUpdated > 24 * 60 * 60 * 1000) continue;
if (lastUpdated > 0 && Date.now() - lastUpdated > 24 * 60 * 60 * 1000) continue;
learnedLimits[key] = data;
const connectionId = typeof data.connectionId === "string" ? data.connectionId : "";
const provider = typeof data.provider === "string" ? data.provider : "";
const limit = toNumber(data.limit, 0);
const remaining = toNumber(data.remaining, 0);
const minTime = toNumber(data.minTime, 0);
learnedLimits[key] = {
provider,
connectionId,
lastUpdated,
...(limit > 0 ? { limit } : {}),
...(remaining >= 0 ? { remaining } : {}),
...(minTime >= 0 ? { minTime } : {}),
};
// Apply to limiter if it exists and has rate limit enabled
if (data.connectionId && enabledConnections.has(data.connectionId)) {
if (connectionId && enabledConnections.has(connectionId)) {
const limiter = limiters.get(key);
if (limiter && data.limit) {
const minTime = data.minTime || Math.max(0, Math.floor(60000 / data.limit) - 10);
limiter.updateSettings({ minTime });
if (limiter && limit > 0) {
const inferredMinTime = minTime || Math.max(0, Math.floor(60000 / limit) - 10);
limiter.updateSettings({ minTime: inferredMinTime });
count++;
}
}

View file

@ -116,8 +116,10 @@ export function acquire(modelStr, { maxConcurrency = 3, timeoutMs = 30000 } = {}
// Remove from queue on timeout
const idx = gate.queue.findIndex((item) => item.timer === timer);
if (idx !== -1) gate.queue.splice(idx, 1);
const err = new Error(`Semaphore timeout after ${timeoutMs}ms for ${modelStr}`);
(err as any).code = "SEMAPHORE_TIMEOUT";
const err = new Error(`Semaphore timeout after ${timeoutMs}ms for ${modelStr}`) as Error & {
code?: string;
};
err.code = "SEMAPHORE_TIMEOUT";
reject(err);
}, timeoutMs);

View file

@ -34,6 +34,33 @@ const MODELS_WITHOUT_SYSTEM_ROLE = [
"ernie-", // Baidu ERNIE models
];
/** A single structured content part, e.g. { type: "text", text: "..." }. */
interface MessageContentPart {
  type?: string;
  text?: string;
  [key: string]: unknown;
}

/** A chat message whose role/content may be rewritten during normalization. */
interface NormalizedMessage {
  role?: string;
  content?: unknown;
  [key: string]: unknown;
}

/**
 * Flatten message content into plain text. String content is returned as-is;
 * array content contributes the `text` of its `type === "text"` parts
 * (missing/non-string text becomes ""), joined with newlines. Any other
 * shape yields the empty string.
 */
function extractTextFromContent(content: unknown): string {
  if (typeof content === "string") {
    return content;
  }
  if (!Array.isArray(content)) {
    return "";
  }
  const pieces: string[] = [];
  for (const part of content) {
    if (!part || typeof part !== "object") continue;
    const candidate = part as MessageContentPart;
    if (candidate.type !== "text") continue;
    pieces.push(typeof candidate.text === "string" ? candidate.text : "");
  }
  return pieces.join("\n");
}
/**
* Check if a provider+model combo supports the system role.
*/
@ -57,14 +84,17 @@ function supportsSystemRole(provider: string, model: string): boolean {
* @param targetFormat - The target format (e.g., "openai", "claude", "gemini")
* @returns Modified messages array
*/
export function normalizeDeveloperRole(messages: any[], targetFormat: string): any[] {
export function normalizeDeveloperRole(
messages: NormalizedMessage[] | unknown,
targetFormat: string
): NormalizedMessage[] | unknown {
if (!Array.isArray(messages)) return messages;
// For OpenAI format, keep developer role as-is (it's valid)
// For all other formats, convert developer → system
if (targetFormat === "openai") return messages;
return messages.map((msg) => {
return messages.map((msg: NormalizedMessage) => {
if (msg.role === "developer") {
return { ...msg, role: "system" };
}
@ -82,49 +112,44 @@ export function normalizeDeveloperRole(messages: any[], targetFormat: string): a
* @param model - Model name
* @returns Modified messages array
*/
export function normalizeSystemRole(messages: any[], provider: string, model: string): any[] {
export function normalizeSystemRole(
messages: NormalizedMessage[] | unknown,
provider: string,
model: string
): NormalizedMessage[] | unknown {
if (!Array.isArray(messages) || messages.length === 0) return messages;
if (supportsSystemRole(provider, model)) return messages;
// Extract system messages
const systemMessages = messages.filter((m) => m.role === "system" || m.role === "developer");
const systemMessages = messages.filter(
(message: NormalizedMessage) => message.role === "system" || message.role === "developer"
);
if (systemMessages.length === 0) return messages;
// Build system content string
const systemContent = systemMessages
.map((m) => {
if (typeof m.content === "string") return m.content;
if (Array.isArray(m.content)) {
return m.content
.filter((c: any) => c.type === "text")
.map((c: any) => c.text)
.join("\n");
}
return "";
})
.map((message: NormalizedMessage) => extractTextFromContent(message.content))
.filter(Boolean)
.join("\n\n");
if (!systemContent) {
return messages.filter((m) => m.role !== "system" && m.role !== "developer");
return messages.filter(
(message: NormalizedMessage) => message.role !== "system" && message.role !== "developer"
);
}
// Remove system messages and merge into first user message
const nonSystemMessages = messages.filter((m) => m.role !== "system" && m.role !== "developer");
const nonSystemMessages = messages.filter(
(message: NormalizedMessage) => message.role !== "system" && message.role !== "developer"
);
// Find first user message and prepend system content
const firstUserIdx = nonSystemMessages.findIndex((m) => m.role === "user");
const firstUserIdx = nonSystemMessages.findIndex(
(message: NormalizedMessage) => message.role === "user"
);
if (firstUserIdx >= 0) {
const userMsg = nonSystemMessages[firstUserIdx];
const userContent =
typeof userMsg.content === "string"
? userMsg.content
: Array.isArray(userMsg.content)
? userMsg.content
.filter((c: any) => c.type === "text")
.map((c: any) => c.text)
.join("\n")
: "";
const userContent = extractTextFromContent(userMsg.content);
nonSystemMessages[firstUserIdx] = {
...userMsg,
@ -152,11 +177,11 @@ export function normalizeSystemRole(messages: any[], provider: string, model: st
* @returns Normalized messages array
*/
export function normalizeRoles(
messages: any[],
messages: NormalizedMessage[] | unknown,
provider: string,
model: string,
targetFormat: string
): any[] {
): NormalizedMessage[] | unknown {
if (!Array.isArray(messages)) return messages;
// Step 1: Normalize developer → system (for non-OpenAI formats)

View file

@ -7,9 +7,34 @@
import { createHash } from "node:crypto";
/** Metadata tracked for each fingerprinted session in the in-memory store. */
interface SessionEntry {
  // Epoch ms when the session was first seen.
  createdAt: number;
  // Epoch ms of the most recent request in this session.
  lastActive: number;
  requestCount: number;
  // Connection bound to this session for sticky routing, or null if unbound.
  connectionId: string | null;
}

/** Optional extra context mixed into the session fingerprint. */
interface SessionFingerprintOptions {
  provider?: string;
  connectionId?: string;
}

/** Minimal message shape inspected when fingerprinting a request body. */
interface SessionMessage {
  role?: string;
  content?: unknown;
}

/** Request-body fields that contribute to the session fingerprint. */
interface SessionBody {
  model?: string;
  // Claude-style top-level system prompt (string or structured blocks).
  system?: unknown;
  tools?: Array<{ name?: string; function?: { name?: string } }>;
  // OpenAI chat-style message list.
  messages?: SessionMessage[];
  // Alternate message list field — presumably Responses-style input; verify against callers.
  input?: SessionMessage[];
}
// In-memory session store with metadata
// key: sessionId → { createdAt, lastActive, requestCount, connectionId? }
const sessions = new Map();
const sessions = new Map<string, SessionEntry>();
// Auto-cleanup sessions older than 30 minutes
const SESSION_TTL_MS = 30 * 60 * 1000;
@ -36,8 +61,12 @@ _cleanupTimer.unref();
* @param {object} [options] - Extra context
* @returns {string} Session ID (hex hash)
*/
export function generateSessionId(body, options: any = {}) {
const parts = [];
export function generateSessionId(
body: SessionBody | null | undefined,
options: SessionFingerprintOptions = {}
): string | null {
if (!body || typeof body !== "object") return null;
const parts: string[] = [];
// Model contributes to fingerprint
if (body.model) parts.push(`model:${body.model}`);
@ -79,7 +108,7 @@ export function generateSessionId(body, options: any = {}) {
/**
* Touch or create a session
*/
export function touchSession(sessionId, connectionId = null) {
export function touchSession(sessionId: string | null, connectionId: string | null = null): void {
if (!sessionId) return;
const existing = sessions.get(sessionId);
if (existing) {
@ -99,7 +128,7 @@ export function touchSession(sessionId, connectionId = null) {
/**
* Get session info (for sticky routing decisions)
*/
export function getSessionInfo(sessionId) {
export function getSessionInfo(sessionId: string | null): SessionEntry | null {
if (!sessionId) return null;
const entry = sessions.get(sessionId);
if (!entry) return null;
@ -113,7 +142,7 @@ export function getSessionInfo(sessionId) {
/**
* Get the bound connection for a session (sticky routing)
*/
export function getSessionConnection(sessionId) {
export function getSessionConnection(sessionId: string | null): string | null {
const info = getSessionInfo(sessionId);
return info?.connectionId || null;
}
@ -121,16 +150,16 @@ export function getSessionConnection(sessionId) {
/**
* Get session count (for dashboard)
*/
export function getActiveSessionCount() {
export function getActiveSessionCount(): number {
return sessions.size;
}
/**
* Get all active sessions (for dashboard)
*/
export function getActiveSessions() {
export function getActiveSessions(): Array<SessionEntry & { sessionId: string; ageMs: number }> {
const now = Date.now();
const result = [];
const result: Array<SessionEntry & { sessionId: string; ageMs: number }> = [];
for (const [id, entry] of sessions) {
if (now - entry.lastActive <= SESSION_TTL_MS) {
result.push({ sessionId: id, ...entry, ageMs: now - entry.createdAt });
@ -142,23 +171,24 @@ export function getActiveSessions() {
/**
* Clear all sessions (for testing)
*/
export function clearSessions() {
export function clearSessions(): void {
sessions.clear();
}
// ─── Internal Helpers ───────────────────────────────────────────────────────
function hashShort(text) {
function hashShort(text: string): string {
return createHash("sha256").update(text).digest("hex").slice(0, 8);
}
function extractSystemPrompt(body) {
function extractSystemPrompt(body: SessionBody | null | undefined): string | null {
if (!body || typeof body !== "object") return null;
// Claude format: body.system
if (body.system) {
return typeof body.system === "string" ? body.system : JSON.stringify(body.system);
}
// OpenAI format: messages[0].role === "system"
if (body.messages && Array.isArray(body.messages)) {
if (Array.isArray(body.messages)) {
const sys = body.messages.find((m) => m.role === "system" || m.role === "developer");
if (sys) {
return typeof sys.content === "string" ? sys.content : JSON.stringify(sys.content);
@ -167,7 +197,8 @@ function extractSystemPrompt(body) {
return null;
}
function extractFirstUserMessage(body) {
function extractFirstUserMessage(body: SessionBody | null | undefined): string | null {
if (!body || typeof body !== "object") return null;
const messages = body.messages || body.input || [];
if (!Array.isArray(messages)) return null;
for (const msg of messages) {

View file

@ -7,10 +7,18 @@
// 3-layer cache: tool → model family → session
// Each layer stores patterns detected from responses
interface SignatureContext {
tool?: string;
modelFamily?: string;
sessionId?: string;
}
type SignatureLayer = Map<string, Set<string>>;
const layers = {
tool: new Map(), // e.g. "cursor" → Set of signature patterns
family: new Map(), // e.g. "claude-sonnet" → Set of signature patterns
session: new Map(), // e.g. sessionId → Set of signature patterns
tool: new Map<string, Set<string>>(), // e.g. "cursor" → Set of signature patterns
family: new Map<string, Set<string>>(), // e.g. "claude-sonnet" → Set of signature patterns
session: new Map<string, Set<string>>(), // e.g. sessionId → Set of signature patterns
};
// Known default signatures (bootstrap — will be supplemented by learning)
@ -34,7 +42,7 @@ const MAX_PATTERNS_PER_KEY = 50;
* @param {object} context - { tool?, modelFamily?, sessionId? }
* @returns {string[]} Array of unique signature patterns
*/
export function getSignatures(context: any = {}) {
export function getSignatures(context: SignatureContext = {}): string[] {
const patterns = new Set(DEFAULT_SIGNATURES);
// Layer 1: Tool (e.g., "cursor", "cline", "antigravity")
@ -61,10 +69,10 @@ export function getSignatures(context: any = {}) {
* @param {string} pattern - The signature pattern (e.g., "<antThinking>")
* @param {object} context - { tool?, modelFamily?, sessionId? }
*/
export function addSignature(pattern: any, context: any = {}) {
export function addSignature(pattern: unknown, context: SignatureContext = {}): void {
if (!pattern || typeof pattern !== "string") return;
const addToLayer = (layer, key) => {
const addToLayer = (layer: SignatureLayer, key: string | undefined) => {
if (!key) return;
if (!layer.has(key)) {
if (layer.size >= MAX_ENTRIES_PER_LAYER) {
@ -93,10 +101,13 @@ export function addSignature(pattern: any, context: any = {}) {
* @param {object} context - { tool?, modelFamily?, sessionId? }
* @returns {{ found: string[], cleaned: string }} Detected tags and cleaned text
*/
export function detectAndLearn(text: any, context: any = {}) {
export function detectAndLearn(
text: unknown,
context: SignatureContext = {}
): { found: string[]; cleaned: unknown } {
if (!text || typeof text !== "string") return { found: [], cleaned: text };
const found = [];
const found: string[] = [];
let cleaned = text;
// Check all known signatures
@ -109,7 +120,8 @@ export function detectAndLearn(text: any, context: any = {}) {
}
// Auto-detect new XML-like thinking tags
const tagRegex = /<\/?([a-zA-Z_][a-zA-Z0-9_]*(?:Thinking|thinking|thought|Thought|internal_thought))>/g;
const tagRegex =
/<\/?([a-zA-Z_][a-zA-Z0-9_]*(?:Thinking|thinking|thought|Thought|internal_thought))>/g;
let match;
while ((match = tagRegex.exec(text)) !== null) {
const tag = match[0];
@ -128,16 +140,17 @@ export function detectAndLearn(text: any, context: any = {}) {
* "claude-sonnet-4-20250514" "claude-sonnet"
* "gpt-4o-2024-08-06" "gpt-4o"
*/
export function getModelFamily(model) {
export function getModelFamily(model: unknown): string | null {
if (!model) return null;
// Remove date suffixes and version numbers
const cleaned = model
const modelName = typeof model === "string" ? model : String(model);
const cleaned = modelName
.replace(/-\d{4}-\d{2}-\d{2}$/, "") // Remove YYYY-MM-DD suffix
.replace(/-\d{8,}$/, "") // Remove YYYYMMDD suffix
.replace(/-\d+(\.\d+)*$/, "") // Remove version suffix like -4
.replace(/@.*$/, ""); // Remove @latest etc.
.replace(/-\d{8,}$/, "") // Remove YYYYMMDD suffix
.replace(/-\d+(\.\d+)*$/, "") // Remove version suffix like -4
.replace(/@.*$/, ""); // Remove @latest etc.
// Keep meaningful prefix
return cleaned || model;
return cleaned || modelName;
}
/**

View file

@ -866,6 +866,18 @@ const CIRCUIT_BREAKER_THRESHOLD = 5; // consecutive failures before tripping
const CIRCUIT_BREAKER_COOLDOWN = 30 * 60 * 1000; // 30 minutes
const REFRESH_TIMEOUT_MS = 30_000; // 30s max per refresh attempt
// Snapshot of one provider's circuit-breaker state, exposed for diagnostics.
interface CircuitBreakerStatusEntry {
  failures: number; // consecutive refresh failures recorded for the provider
  blocked: boolean; // true while the provider is in cooldown
  blockedUntil: string | null; // when the block lifts (null when not blocked) — presumably ISO; confirm against the code that fills it
  remainingMs: number; // milliseconds left in the cooldown window
}
// Minimal logger surface accepted by recordFailure; both methods are optional.
interface RefreshLoggerLike {
  error?: (scope: string, message: string) => void;
  warn?: (scope: string, message: string) => void;
}
/**
* Check if a provider is circuit-breaker blocked.
*/
@ -881,8 +893,8 @@ export function isProviderBlocked(provider: string): boolean {
/**
* Get circuit breaker status for all providers (for diagnostics).
*/
export function getCircuitBreakerStatus(): Record<string, any> {
const result: Record<string, any> = {};
export function getCircuitBreakerStatus(): Record<string, CircuitBreakerStatusEntry> {
const result: Record<string, CircuitBreakerStatusEntry> = {};
for (const [provider, state] of Object.entries(_circuitBreaker)) {
result[provider] = {
failures: state.failures,
@ -907,7 +919,7 @@ function recordSuccess(provider: string) {
/**
* Record a failed refresh increments circuit breaker counter.
*/
function recordFailure(provider: string, log: any = null) {
function recordFailure(provider: string, log: RefreshLoggerLike | null = null) {
if (!_circuitBreaker[provider]) {
_circuitBreaker[provider] = { failures: 0, blockedUntil: 0 };
}

View file

@ -35,10 +35,40 @@ const CLAUDE_CONFIG = {
settingsUrl: "https://api.anthropic.com/v1/settings",
};
type JsonRecord = Record<string, unknown>;

type UsageQuota = {
  used: number;
  total: number;
  remaining?: number;
  remainingPercentage?: number;
  resetAt: string | null;
  unlimited: boolean;
  displayName?: string;
};

/** Coerce an arbitrary value to a plain object record; anything else yields {}. */
function toRecord(value: unknown): JsonRecord {
  const isPlainObject = typeof value === "object" && value !== null && !Array.isArray(value);
  return isPlainObject ? (value as JsonRecord) : {};
}

/** Defensive number parse: accepts numbers and non-blank numeric strings, else `fallback`. */
function toNumber(value: unknown, fallback = 0): number {
  let parsed = Number.NaN;
  if (typeof value === "number") {
    parsed = value;
  } else if (typeof value === "string" && value.trim().length > 0) {
    parsed = Number(value);
  }
  return Number.isFinite(parsed) ? parsed : fallback;
}

/** Read a field that may appear under either its snake_case or camelCase key. */
function getFieldValue(source: unknown, snakeKey: string, camelKey: string): unknown {
  const record = toRecord(source);
  return record[snakeKey] ?? record[camelKey] ?? null;
}
/**
* Get usage data for a provider connection
* @param {Object} connection - Provider connection with accessToken
* @returns {Promise<any>} Usage data with quotas
* @returns {Promise<unknown>} Usage data with quotas
*/
export async function getUsageForProvider(connection) {
const { provider, accessToken, providerSpecificData } = connection;
@ -83,7 +113,7 @@ function parseResetTime(resetValue) {
return new Date(resetValue).toISOString();
}
// If it's a string (ISO date or any parseable date string)
// If it's a string (ISO date or parseable date string)
if (typeof resetValue === "string") {
return new Date(resetValue).toISOString();
}
@ -273,7 +303,7 @@ function getAntigravityPlanLabel(subscriptionInfo) {
// 5. If upgradeSubscriptionType exists, account is on free tier
if (subscriptionInfo.currentTier?.upgradeSubscriptionType) return "Free";
// 6. If we have a tier name that didn't match any pattern, return it title-cased
// 6. If we have a tier name that didn't match known patterns, return it title-cased
if (tierName) {
return tierName.charAt(0).toUpperCase() + tierName.slice(1).toLowerCase();
}
@ -311,10 +341,12 @@ async function getAntigravityUsage(accessToken, providerSpecificData) {
}
const data = await response.json();
const quotas: Record<string, any> = {};
const dataObj = toRecord(data);
const modelEntries = toRecord(dataObj.models);
const quotas: Record<string, UsageQuota> = {};
// Parse model quotas (inspired by vscode-antigravity-cockpit)
if (data.models) {
if (Object.keys(modelEntries).length > 0) {
// Filter only recommended/important models (must match PROVIDER_MODELS ag ids)
const importantModels = [
"claude-opus-4-6-thinking",
@ -325,18 +357,20 @@ async function getAntigravityUsage(accessToken, providerSpecificData) {
"gpt-oss-120b-medium",
];
for (const [modelKey, info] of Object.entries(data.models) as [string, any][]) {
for (const [modelKey, infoValue] of Object.entries(modelEntries)) {
const info = toRecord(infoValue);
const quotaInfo = toRecord(info.quotaInfo);
// Skip models without quota info
if (!info.quotaInfo) {
if (Object.keys(quotaInfo).length === 0) {
continue;
}
// Skip internal models and non-important models
if (info.isInternal || !importantModels.includes(modelKey)) {
if (info.isInternal === true || !importantModels.includes(modelKey)) {
continue;
}
const remainingFraction = info.quotaInfo.remainingFraction || 0;
const remainingFraction = toNumber(quotaInfo.remainingFraction, 0);
const remainingPercentage = remainingFraction * 100;
// Convert percentage to used/total for UI compatibility
@ -351,10 +385,10 @@ async function getAntigravityUsage(accessToken, providerSpecificData) {
quotas[modelKey] = {
used,
total,
resetAt: parseResetTime(info.quotaInfo.resetTime),
resetAt: parseResetTime(quotaInfo.resetTime),
remainingPercentage,
unlimited: false,
displayName: info.displayName || modelKey,
displayName: typeof info.displayName === "string" ? info.displayName : modelKey,
};
}
}
@ -483,10 +517,13 @@ async function getClaudeUsage(accessToken) {
* IMPORTANT: Uses persisted workspaceId from OAuth to ensure correct workspace binding.
* No fallback to other workspaces - strict binding to user's selected workspace.
*/
async function getCodexUsage(accessToken, providerSpecificData: Record<string, any> = {}) {
async function getCodexUsage(accessToken, providerSpecificData: Record<string, unknown> = {}) {
try {
// Use persisted workspace ID from OAuth - NO FALLBACK
const accountId = providerSpecificData?.workspaceId || null;
const accountId =
typeof providerSpecificData.workspaceId === "string"
? providerSpecificData.workspaceId
: null;
const headers: Record<string, string> = {
Authorization: `Bearer ${accessToken}`,
@ -508,33 +545,35 @@ async function getCodexUsage(accessToken, providerSpecificData: Record<string, a
const data = await response.json();
// Helper to get field with snake_case/camelCase fallback
const getField = (obj: any, snakeKey: string, camelKey: string) =>
obj?.[snakeKey] ?? obj?.[camelKey] ?? null;
// Parse rate limit info (supports both snake_case and camelCase)
const rateLimit = getField(data, "rate_limit", "rateLimit") || {};
const primaryWindow = getField(rateLimit, "primary_window", "primaryWindow") || {};
const secondaryWindow = getField(rateLimit, "secondary_window", "secondaryWindow") || {};
const rateLimit = toRecord(getFieldValue(data, "rate_limit", "rateLimit"));
const primaryWindow = toRecord(getFieldValue(rateLimit, "primary_window", "primaryWindow"));
const secondaryWindow = toRecord(
getFieldValue(rateLimit, "secondary_window", "secondaryWindow")
);
// Parse reset times (reset_at is Unix timestamp in seconds)
const parseWindowReset = (window: any) => {
const resetAt = getField(window, "reset_at", "resetAt");
const resetAfterSeconds = getField(window, "reset_after_seconds", "resetAfterSeconds");
if (resetAt) return parseResetTime(resetAt * 1000);
if (resetAfterSeconds) return parseResetTime(Date.now() + resetAfterSeconds * 1000);
const parseWindowReset = (window: unknown) => {
const resetAt = toNumber(getFieldValue(window, "reset_at", "resetAt"), 0);
const resetAfterSeconds = toNumber(
getFieldValue(window, "reset_after_seconds", "resetAfterSeconds"),
0
);
if (resetAt > 0) return parseResetTime(resetAt * 1000);
if (resetAfterSeconds > 0) return parseResetTime(Date.now() + resetAfterSeconds * 1000);
return null;
};
// Build quota windows
const quotas: Record<string, any> = {};
const quotas: Record<string, UsageQuota> = {};
// Primary window (5-hour)
if (Object.keys(primaryWindow).length > 0) {
const usedPercent = toNumber(getFieldValue(primaryWindow, "used_percent", "usedPercent"), 0);
quotas.session = {
used: getField(primaryWindow, "used_percent", "usedPercent") || 0,
used: usedPercent,
total: 100,
remaining: 100 - (getField(primaryWindow, "used_percent", "usedPercent") || 0),
remaining: 100 - usedPercent,
resetAt: parseWindowReset(primaryWindow),
unlimited: false,
};
@ -542,40 +581,48 @@ async function getCodexUsage(accessToken, providerSpecificData: Record<string, a
// Secondary window (weekly)
if (Object.keys(secondaryWindow).length > 0) {
const usedPercent = toNumber(
getFieldValue(secondaryWindow, "used_percent", "usedPercent"),
0
);
quotas.weekly = {
used: getField(secondaryWindow, "used_percent", "usedPercent") || 0,
used: usedPercent,
total: 100,
remaining: 100 - (getField(secondaryWindow, "used_percent", "usedPercent") || 0),
remaining: 100 - usedPercent,
resetAt: parseWindowReset(secondaryWindow),
unlimited: false,
};
}
// Code review rate limit (3rd window — differs per plan: Plus/Pro/Team)
const codeReviewRateLimit =
getField(data, "code_review_rate_limit", "codeReviewRateLimit") || {};
const codeReviewWindow = getField(codeReviewRateLimit, "primary_window", "primaryWindow") || {};
const codeReviewRateLimit = toRecord(
getFieldValue(data, "code_review_rate_limit", "codeReviewRateLimit")
);
const codeReviewWindow = toRecord(
getFieldValue(codeReviewRateLimit, "primary_window", "primaryWindow")
);
// Only include code review quota if the API returned data for it
const codeReviewUsedPercent = getField(codeReviewWindow, "used_percent", "usedPercent");
const codeReviewRemainingCount = getField(
const codeReviewUsedRaw = getFieldValue(codeReviewWindow, "used_percent", "usedPercent");
const codeReviewRemainingRaw = getFieldValue(
codeReviewWindow,
"remaining_count",
"remainingCount"
);
if (codeReviewUsedPercent !== null || codeReviewRemainingCount !== null) {
if (codeReviewUsedRaw !== null || codeReviewRemainingRaw !== null) {
const codeReviewUsedPercent = toNumber(codeReviewUsedRaw, 0);
quotas.code_review = {
used: codeReviewUsedPercent || 0,
used: codeReviewUsedPercent,
total: 100,
remaining: 100 - (codeReviewUsedPercent || 0),
remaining: 100 - codeReviewUsedPercent,
resetAt: parseWindowReset(codeReviewWindow),
unlimited: false,
};
}
return {
plan: getField(data, "plan_type", "planType") || "unknown",
limitReached: getField(rateLimit, "limit_reached", "limitReached") || false,
plan: String(getFieldValue(data, "plan_type", "planType") || "unknown"),
limitReached: Boolean(getFieldValue(rateLimit, "limit_reached", "limitReached")),
quotas,
};
} catch (error) {

View file

@ -7,7 +7,7 @@
/**
* Match a model name against a pattern with glob wildcards.
* Supports * (any sequence) and ? (single char).
* Supports * (wildcard sequence) and ? (single char).
*
* @param {string} model - Model name to match
* @param {string} pattern - Pattern with wildcards
@ -60,7 +60,7 @@ export function getSpecificity(pattern) {
* Returns the most specific match.
*
* @param {string} model - Model name to resolve
* @param {Array<{ pattern: string, target: string, [key: string]: any }>} aliases - Alias entries
* @param {Array<{ pattern: string, target: string, [key: string]: unknown }>} aliases - Alias entries
* @returns {{ pattern: string, target: string, specificity: number } | null}
*/
export function resolveWildcardAlias(model, aliases) {

View file

@ -1,307 +0,0 @@
/**
* Translator: OpenAI Responses API OpenAI Chat Completions
*
* Responses API uses: { input: [...], instructions: "..." }
* Chat API uses: { messages: [...] }
*/
import { register } from "../registry.ts";
import { FORMATS } from "../formats.ts";
/**
 * Convert an OpenAI Responses API request into OpenAI Chat Completions format.
 *
 * Responses API uses `{ input: [...], instructions: "..." }`; Chat Completions
 * uses `{ messages: [...] }`. Items in `input` are replayed in order so tool
 * calls and their outputs keep their original conversational position.
 *
 * @param model - Target model name (part of the shared translator signature).
 * @param body - Responses API request body; returned untouched when it has no `input`.
 * @param stream - Streaming flag (part of the shared translator signature).
 * @param credentials - Provider credentials (part of the shared translator signature).
 * @returns Chat Completions request body.
 * @throws Error carrying `statusCode: 400` / `errorType: "unsupported_feature"` for unsupported features.
 */
export function openaiResponsesToOpenAIRequest(model, body, stream, credentials) {
  if (!body.input) return body;

  // Validate unsupported features — surface clear 400s instead of silent failure.
  const UNSUPPORTED_TOOLS = ["file_search", "code_interpreter", "web_search_preview"];

  // Build an Error carrying HTTP metadata without resorting to `as any` casts.
  const unsupportedFeatureError = (message: string): Error =>
    Object.assign(new Error(message), {
      statusCode: 400,
      errorType: "unsupported_feature",
    });

  if (body.tools?.length) {
    for (const tool of body.tools) {
      if (UNSUPPORTED_TOOLS.includes(tool.type)) {
        throw unsupportedFeatureError(
          `Unsupported Responses API feature: ${tool.type} tool type is not supported by omniroute`
        );
      }
    }
  }
  if (body.background) {
    throw unsupportedFeatureError(
      "Unsupported Responses API feature: background mode is not supported by omniroute"
    );
  }

  const result: Record<string, unknown> = { ...body };
  const messages: Array<Record<string, unknown>> = [];
  result.messages = messages;

  // Convert instructions to a leading system message.
  if (body.instructions) {
    messages.push({ role: "system", content: body.instructions });
  }

  // Consecutive function_call items are grouped into a single assistant message.
  let currentAssistantMsg: {
    role: "assistant";
    content: null;
    tool_calls: Array<Record<string, unknown>>;
  } | null = null;

  // Push any in-progress assistant message before emitting a different item kind.
  const flushAssistant = () => {
    if (currentAssistantMsg) {
      messages.push(currentAssistantMsg);
      currentAssistantMsg = null;
    }
  };

  for (const item of body.input) {
    // Droid CLI sends role-based items without a 'type' field; treat those as messages.
    const itemType = item.type || (item.role ? "message" : null);

    if (itemType === "message") {
      flushAssistant();

      // Convert content parts: input_text / output_text → text.
      const content = Array.isArray(item.content)
        ? item.content.map((c) => {
            if (c.type === "input_text" || c.type === "output_text") {
              return { type: "text", text: c.text };
            }
            return c;
          })
        : item.content;

      messages.push({ role: item.role, content });
    } else if (itemType === "function_call") {
      if (!currentAssistantMsg) {
        currentAssistantMsg = { role: "assistant", content: null, tool_calls: [] };
      }
      currentAssistantMsg.tool_calls.push({
        id: item.call_id,
        type: "function",
        function: { name: item.name, arguments: item.arguments },
      });
    } else if (itemType === "function_call_output") {
      // A tool result closes out any in-progress assistant message first.
      flushAssistant();
      messages.push({
        role: "tool",
        tool_call_id: item.call_id,
        content: typeof item.output === "string" ? item.output : JSON.stringify(item.output),
      });
    } else if (itemType === "reasoning") {
      // Reasoning items are display-only; skip them.
      continue;
    }
  }

  // Flush a trailing assistant message (input ended on function_call items).
  flushAssistant();

  // Convert Responses tool definitions to Chat Completions shape.
  if (body.tools && Array.isArray(body.tools)) {
    result.tools = body.tools.map((tool) => {
      if (tool.function) return tool;
      return {
        type: "function",
        function: {
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters,
          strict: tool.strict,
        },
      };
    });
  }

  // Drop Responses-API-specific fields.
  delete result.input;
  delete result.instructions;
  delete result.include;
  delete result.prompt_cache_key;
  delete result.store;
  delete result.reasoning;

  return result;
}
/**
 * Convert an OpenAI Chat Completions request into OpenAI Responses API format.
 *
 * The first system message becomes `instructions`; user/assistant/tool messages
 * become typed `input` items. Assistant thinking content is emitted as
 * `reasoning` items before the assistant's output message.
 *
 * @param model - Target model name, copied into the request.
 * @param body - Chat Completions request body ({ messages, tools, ... }).
 * @param stream - Streaming flag (part of the shared translator signature; output always streams).
 * @param credentials - Provider credentials (part of the shared translator signature).
 * @returns Responses API request body.
 */
export function openaiToOpenAIResponsesRequest(model, body, stream, credentials) {
  const input: Array<Record<string, unknown>> = [];
  const result: Record<string, unknown> = {
    model,
    input,
    stream: true,
    store: false,
  };

  // First system message becomes `instructions`; all system messages are
  // excluded from `input`.
  let hasSystemMessage = false;
  const messages = body.messages || [];

  for (const msg of messages) {
    if (msg.role === "system") {
      if (!hasSystemMessage) {
        result.instructions = typeof msg.content === "string" ? msg.content : "";
        hasSystemMessage = true;
      }
      continue;
    }

    if (msg.role === "user") {
      const content =
        typeof msg.content === "string"
          ? [{ type: "input_text", text: msg.content }]
          : Array.isArray(msg.content)
            ? msg.content.map((c) => {
                if (c.type === "text") return { type: "input_text", text: c.text };
                return c; // image_url and other parts pass through untouched
              })
            : [{ type: "input_text", text: "" }];

      input.push({ type: "message", role: "user", content });
    }

    if (msg.role === "assistant") {
      // Reasoning/thinking content is emitted BEFORE the assistant output.
      if (msg.reasoning_content) {
        input.push({
          type: "reasoning",
          id: `reasoning_${input.length}`,
          summary: [{ type: "summary_text", text: msg.reasoning_content }],
        });
      }
      // Thinking blocks embedded in array content also become reasoning items.
      if (Array.isArray(msg.content)) {
        for (const block of msg.content) {
          if (block.type === "thinking" || block.type === "redacted_thinking") {
            input.push({
              type: "reasoning",
              id: `reasoning_${input.length}`,
              summary: [{ type: "summary_text", text: block.thinking || block.data || "..." }],
            });
          }
        }
      }

      // Build the assistant output content (thinking blocks handled above).
      const outputContent: Array<Record<string, unknown>> = [];
      if (typeof msg.content === "string" && msg.content) {
        outputContent.push({ type: "output_text", text: msg.content });
      } else if (Array.isArray(msg.content)) {
        for (const c of msg.content) {
          if (c.type === "thinking" || c.type === "redacted_thinking") {
            continue; // already emitted as reasoning items
          }
          if (c.type === "text" && c.text) {
            outputContent.push({ type: "output_text", text: c.text });
          } else {
            outputContent.push(c);
          }
        }
      }

      // Only emit the assistant message when it carries actual content.
      if (outputContent.length > 0) {
        input.push({ type: "message", role: "assistant", content: outputContent });
      }

      // Convert tool_calls to function_call items.
      if (msg.tool_calls && Array.isArray(msg.tool_calls)) {
        for (const tc of msg.tool_calls) {
          input.push({
            type: "function_call",
            call_id: tc.id,
            name: tc.function?.name || "",
            arguments: tc.function?.arguments || "{}",
          });
        }
      }
    }

    if (msg.role === "tool") {
      input.push({
        type: "function_call_output",
        call_id: msg.tool_call_id,
        output: msg.content,
      });
    }
  }

  // Without a system message, instructions is explicitly empty.
  if (!hasSystemMessage) {
    result.instructions = "";
  }

  // Convert Chat Completions tool definitions to Responses shape.
  if (body.tools && Array.isArray(body.tools)) {
    result.tools = body.tools.map((tool) => {
      if (tool.type === "function") {
        return {
          type: "function",
          name: tool.function.name,
          description: tool.function.description,
          parameters: tool.function.parameters,
          strict: tool.function.strict,
        };
      }
      return tool;
    });
  }

  // Pass through common sampling fields.
  if (body.temperature !== undefined) result.temperature = body.temperature;
  if (body.max_tokens !== undefined) result.max_tokens = body.max_tokens;
  if (body.top_p !== undefined) result.top_p = body.top_p;

  return result;
}
// Register both directions
register(FORMATS.OPENAI_RESPONSES, FORMATS.OPENAI, openaiResponsesToOpenAIRequest, null);
register(FORMATS.OPENAI, FORMATS.OPENAI_RESPONSES, openaiToOpenAIResponsesRequest, null);

View file

@ -17,11 +17,33 @@ const budget = [
{ file: "open-sse/translator/registry.ts", maxAny: 0 },
// Freeze legacy hot spots to avoid any-regression while strict migration continues.
{ file: "src/lib/db/apiKeys.ts", maxAny: 0 },
{ file: "src/lib/db/cliToolState.ts", maxAny: 0 },
{ file: "src/lib/db/encryption.ts", maxAny: 0 },
{ file: "src/lib/db/prompts.ts", maxAny: 0 },
{ file: "src/lib/db/providers.ts", maxAny: 0 },
{ file: "src/lib/db/settings.ts", maxAny: 0 },
{ file: "open-sse/config/providerRegistry.ts", maxAny: 0 },
{ file: "open-sse/config/providerModels.ts", maxAny: 0 },
{ file: "open-sse/mcp-server/audit.ts", maxAny: 0 },
{ file: "open-sse/mcp-server/server.ts", maxAny: 0 },
{ file: "open-sse/mcp-server/tools/advancedTools.ts", maxAny: 0 },
{ file: "open-sse/services/signatureCache.ts", maxAny: 0 },
{ file: "open-sse/services/comboMetrics.ts", maxAny: 0 },
{ file: "open-sse/services/sessionManager.ts", maxAny: 0 },
{ file: "open-sse/services/provider.ts", maxAny: 0 },
{ file: "open-sse/services/contextManager.ts", maxAny: 0 },
{ file: "open-sse/services/comboConfig.ts", maxAny: 0 },
{ file: "open-sse/services/accountSelector.ts", maxAny: 0 },
{ file: "open-sse/services/wildcardRouter.ts", maxAny: 0 },
{ file: "open-sse/services/rateLimitSemaphore.ts", maxAny: 0 },
{ file: "open-sse/services/roleNormalizer.ts", maxAny: 0 },
{ file: "open-sse/services/usage.ts", maxAny: 0 },
{ file: "open-sse/services/rateLimitManager.ts", maxAny: 0 },
{ file: "open-sse/services/tokenRefresh.ts", maxAny: 0 },
{ file: "open-sse/services/backgroundTaskDetector.ts", maxAny: 0 },
{ file: "open-sse/services/accountFallback.ts", maxAny: 0 },
{ file: "open-sse/handlers/responseSanitizer.ts", maxAny: 0 },
{ file: "open-sse/handlers/responseTranslator.ts", maxAny: 0 },
];
const anyRegex = /\bany\b/g;

View file

@ -0,0 +1,118 @@
/**
* Dashboard A2A Panel /dashboard/a2a
*
* Shows Agent Card, active/completed tasks, and routing metadata.
*/
"use client";
import { useEffect, useState, useCallback } from "react";
// Shape of a skill entry from the agent card — assumed from the fields this
// page renders (id, name, description, tags); confirm against the A2A server.
interface AgentSkill {
  id: string;
  name: string;
  description?: string;
  tags?: string[];
}

// Agent Card served at /.well-known/agent.json (fields used by this page).
interface AgentCard {
  name: string;
  description?: string;
  version?: string;
  capabilities?: { streaming?: boolean };
  skills?: AgentSkill[];
}

// Task entry from /api/a2a/tasks (fields used by this page).
interface A2ATask {
  id: string;
  state: string;
  skill?: string;
  metadata?: { routing_explanation?: string };
}

/**
 * A2A dashboard: renders the Agent Card plus task history, refreshing every 30s.
 * Both fetches are best-effort — failures leave the previous state in place.
 */
export default function A2ADashboard() {
  const [agentCard, setAgentCard] = useState<AgentCard | null>(null);
  const [tasks, setTasks] = useState<A2ATask[]>([]);

  const fetchData = useCallback(async () => {
    try {
      const [cardRes, tasksRes] = await Promise.allSettled([
        fetch("/.well-known/agent.json"),
        fetch("/api/a2a/tasks"),
      ]);
      if (cardRes.status === "fulfilled") setAgentCard(await cardRes.value.json());
      if (tasksRes.status === "fulfilled") {
        // Endpoint may return either a bare array or { tasks: [...] }.
        const data = await tasksRes.value.json();
        setTasks(Array.isArray(data) ? data : data.tasks || []);
      }
    } catch {
      /* ignore */
    }
  }, []);

  useEffect(() => {
    // Initial fetch deferred to a macrotask; then poll every 30 seconds.
    const id = setTimeout(fetchData, 0);
    const interval = setInterval(fetchData, 30_000);
    return () => {
      clearTimeout(id);
      clearInterval(interval);
    };
  }, [fetchData]);

  return (
    <div className="p-6 max-w-7xl mx-auto">
      <h1 className="text-2xl font-bold mb-6">🤖 A2A Server Dashboard</h1>

      {/* Agent Card */}
      {agentCard && (
        <div className="mb-8 p-4 bg-white dark:bg-gray-800 rounded-lg shadow border">
          <h2 className="text-lg font-semibold mb-2">{agentCard.name}</h2>
          <p className="text-sm text-gray-500 mb-3">{agentCard.description}</p>
          <div className="flex gap-2 mb-3">
            <span className="px-2 py-1 bg-blue-100 dark:bg-blue-900/30 rounded text-xs">
              v{agentCard.version}
            </span>
            {agentCard.capabilities?.streaming && (
              <span className="px-2 py-1 bg-green-100 dark:bg-green-900/30 rounded text-xs">
                Streaming
              </span>
            )}
          </div>
          <h3 className="font-medium text-sm mb-2">Skills ({agentCard.skills?.length || 0})</h3>
          <div className="grid grid-cols-1 md:grid-cols-2 gap-2">
            {agentCard.skills?.map((s) => (
              <div key={s.id} className="p-2 bg-gray-50 dark:bg-gray-700 rounded text-sm">
                <span className="font-medium">{s.name}</span>
                <p className="text-xs text-gray-500 mt-1">{s.description?.slice(0, 100)}</p>
                <div className="flex gap-1 mt-1">
                  {s.tags?.slice(0, 4).map((t) => (
                    <span key={t} className="px-1 bg-gray-200 dark:bg-gray-600 rounded text-xs">
                      {t}
                    </span>
                  ))}
                </div>
              </div>
            ))}
          </div>
        </div>
      )}

      {/* Tasks */}
      <div>
        <h2 className="text-lg font-semibold mb-3">📋 Task History</h2>
        {tasks.length === 0 ? (
          <p className="text-gray-500">
            No A2A tasks yet. Send a request to <code>/a2a</code> to get started.
          </p>
        ) : (
          <div className="space-y-2">
            {tasks.map((task) => (
              <div key={task.id} className="p-3 bg-white dark:bg-gray-800 rounded border">
                <div className="flex justify-between items-center">
                  <span className="font-mono text-xs">{task.id}</span>
                  <span
                    className={`px-2 py-0.5 rounded text-xs ${
                      task.state === "completed"
                        ? "bg-green-100 text-green-700"
                        : task.state === "failed"
                          ? "bg-red-100 text-red-700"
                          : task.state === "working"
                            ? "bg-yellow-100 text-yellow-700"
                            : "bg-gray-100 text-gray-700"
                    }`}
                  >
                    {task.state}
                  </span>
                </div>
                <p className="text-sm mt-1">
                  Skill: <strong>{task.skill}</strong>
                </p>
                {task.metadata?.routing_explanation && (
                  <p className="text-xs text-gray-500 mt-1">{task.metadata.routing_explanation}</p>
                )}
              </div>
            ))}
          </div>
        )}
      </div>
    </div>
  );
}

View file

@ -1,15 +1,17 @@
"use client";
import { Skeleton } from "@/shared/components/Loading";
export default function AnalyticsLoading() {
return (
<div className="space-y-6 animate-pulse p-6">
<div className="h-8 bg-gray-200 dark:bg-gray-700 rounded w-40" />
<div className="space-y-6 p-6" role="status" aria-live="polite" aria-busy="true">
<Skeleton className="h-8 w-40" />
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4">
{[1, 2, 3, 4].map((i) => (
<div key={i} className="h-24 bg-gray-200 dark:bg-gray-700 rounded-lg" />
{[0, 1, 2, 3].map((index) => (
<Skeleton key={index} className="h-24 rounded-lg" />
))}
</div>
<div className="h-64 bg-gray-200 dark:bg-gray-700 rounded-lg" />
<Skeleton className="h-64 rounded-lg" />
</div>
);
}

View file

@ -57,6 +57,7 @@ interface ApiKey {
name: string;
key: string;
allowedModels: string[] | null;
noLog?: boolean;
createdAt: string;
}
@ -226,7 +227,7 @@ export default function ApiManagerPageClient() {
setShowPermissionsModal(true);
};
const handleUpdatePermissions = async (allowedModels: string[]) => {
const handleUpdatePermissions = async (allowedModels: string[], noLog: boolean) => {
if (!editingKey || !editingKey.id) return;
// Validate models array
@ -253,7 +254,7 @@ export default function ApiManagerPageClient() {
const res = await fetch(`/api/keys/${encodeURIComponent(editingKey.id)}`, {
method: "PATCH",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ allowedModels: validModels }),
body: JSON.stringify({ allowedModels: validModels, noLog }),
});
if (res.ok) {
@ -448,6 +449,7 @@ export default function ApiManagerPageClient() {
{keys.map((key) => {
const stats = usageStats[key.id];
const isRestricted = Array.isArray(key.allowedModels) && key.allowedModels.length > 0;
const noLogEnabled = key.noLog === true;
return (
<div
key={key.id}
@ -476,23 +478,33 @@ export default function ApiManagerPageClient() {
</button>
</div>
<div className="col-span-2 flex items-center">
{isRestricted ? (
<button
onClick={() => handleOpenPermissions(key)}
className="flex items-center gap-1.5 px-2 py-1 rounded-md bg-amber-500/10 text-amber-600 dark:text-amber-400 text-xs font-medium hover:bg-amber-500/20 transition-colors"
>
<span className="material-symbols-outlined text-[14px]">lock</span>
{t("modelsCount", { count: key.allowedModels.length })}
</button>
) : (
<button
onClick={() => handleOpenPermissions(key)}
className="flex items-center gap-1.5 px-2 py-1 rounded-md bg-green-500/10 text-green-600 dark:text-green-400 text-xs font-medium hover:bg-green-500/20 transition-colors"
>
<span className="material-symbols-outlined text-[14px]">lock_open</span>
{t("allModels")}
</button>
)}
<div className="flex flex-col items-start gap-1">
{isRestricted ? (
<button
onClick={() => handleOpenPermissions(key)}
className="flex items-center gap-1.5 px-2 py-1 rounded-md bg-amber-500/10 text-amber-600 dark:text-amber-400 text-xs font-medium hover:bg-amber-500/20 transition-colors"
>
<span className="material-symbols-outlined text-[14px]">lock</span>
{t("modelsCount", { count: key.allowedModels.length })}
</button>
) : (
<button
onClick={() => handleOpenPermissions(key)}
className="flex items-center gap-1.5 px-2 py-1 rounded-md bg-green-500/10 text-green-600 dark:text-green-400 text-xs font-medium hover:bg-green-500/20 transition-colors"
>
<span className="material-symbols-outlined text-[14px]">lock_open</span>
{t("allModels")}
</button>
)}
{noLogEnabled && (
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-md bg-violet-500/10 text-violet-600 dark:text-violet-400 text-[11px] font-medium">
<span className="material-symbols-outlined text-[12px]">
visibility_off
</span>
No-Log
</span>
)}
</div>
</div>
<div className="col-span-2 flex flex-col justify-center">
<span className="text-sm font-medium tabular-nums">
@ -675,7 +687,7 @@ const PermissionsModal = memo(function PermissionsModal({
allModels: Model[];
searchModel: string;
onSearchChange: (v: string) => void;
onSave: (models: string[]) => void;
onSave: (models: string[], noLog: boolean) => void;
}) {
const t = useTranslations("apiManager");
const tc = useTranslations("common");
@ -684,6 +696,7 @@ const PermissionsModal = memo(function PermissionsModal({
const initialModels = Array.isArray(apiKey?.allowedModels) ? apiKey.allowedModels : [];
const [selectedModels, setSelectedModels] = useState<string[]>(initialModels);
const [allowAll, setAllowAll] = useState(initialModels.length === 0);
const [noLogEnabled, setNoLogEnabled] = useState(apiKey?.noLog === true);
const [expandedProviders, setExpandedProviders] = useState<Set<string>>(() => {
// Expand all providers by default when in restrict mode with existing selections
if (initialModels.length > 0) {
@ -757,12 +770,8 @@ const PermissionsModal = memo(function PermissionsModal({
}, []);
const handleSave = useCallback(() => {
onSave(allowAll ? [] : selectedModels);
}, [onSave, allowAll, selectedModels]);
const handleClearSearch = useCallback(() => {
onSearchChange("");
}, [onSearchChange]);
onSave(allowAll ? [] : selectedModels, noLogEnabled);
}, [onSave, allowAll, selectedModels, noLogEnabled]);
const selectedCount = selectedModels.length;
const totalModels = allModels.length;
@ -824,6 +833,32 @@ const PermissionsModal = memo(function PermissionsModal({
</p>
</div>
{/* Privacy Toggle */}
<div className="flex items-start justify-between gap-3 p-3 rounded-lg border border-border bg-surface/40">
<div className="flex flex-col gap-1">
<p className="text-sm font-medium text-text-main">No-Log Payload Privacy</p>
<p className="text-xs text-text-muted">
Disable request/response payload persistence for this API key.
</p>
</div>
<button
type="button"
role="switch"
aria-checked={noLogEnabled}
onClick={() => setNoLogEnabled((prev) => !prev)}
className={`inline-flex items-center gap-1.5 px-2.5 py-1.5 rounded-md text-xs font-semibold transition-colors ${
noLogEnabled
? "bg-violet-500/15 text-violet-700 dark:text-violet-300 border border-violet-500/30"
: "bg-black/5 dark:bg-white/5 text-text-muted border border-border"
}`}
>
<span className="material-symbols-outlined text-[14px]">
{noLogEnabled ? "visibility_off" : "visibility"}
</span>
{noLogEnabled ? tc("enabled") : tc("disabled")}
</button>
</div>
{/* Selected Models Summary (only in restrict mode) */}
{!allowAll && selectedCount > 0 && (
<div className="flex flex-col gap-1.5 p-2 bg-primary/5 rounded-lg border border-primary/20">

View file

@ -0,0 +1,174 @@
/**
* Dashboard Auto-Combo Panel /dashboard/auto-combo
*
* Shows provider scores, scoring factors, exclusions, mode packs, and routing history.
*/
"use client";
import { useEffect, useState, useCallback } from "react";
/** One scored provider/model candidate produced by the auto-combo engine. */
interface ProviderScore {
  provider: string;
  model: string;
  // Treated as a 0–1 fraction by the UI (rendered as `score * 100`%).
  score: number;
  // Per-factor sub-scores keyed by factor id (quota, health, costInv, ...).
  factors: Record<string, number>;
}

/** A provider temporarily excluded from routing, with its cooldown window. */
interface ExclusionEntry {
  provider: string;
  // NOTE(review): presumably an ISO-8601 timestamp — confirm against the API.
  excludedAt: string;
  // Remaining/total cooldown in milliseconds; displayed as whole minutes.
  cooldownMs: number;
  reason: string;
}
/** Factor key → human-readable label for the score breakdown grid. */
const FACTOR_LABELS: Record<string, string> = {
  quota: "📊 Quota",
  health: "💚 Health",
  costInv: "💰 Cost",
  latencyInv: "⚡ Latency",
  taskFit: "🎯 Task Fit",
  stability: "📈 Stability",
};

/** Selectable routing mode packs; `id` must match the backend pack id. */
const MODE_PACKS = [
  { id: "ship-fast", label: "🚀 Ship Fast" },
  { id: "cost-saver", label: "💰 Cost Saver" },
  { id: "quality-first", label: "🎯 Quality First" },
  { id: "offline-friendly", label: "📡 Offline Friendly" },
] as const;

/** Narrowing helper: true when `value` is a plain (non-array) object. */
function isPlainRecord(value: unknown): value is Record<string, unknown> {
  return typeof value === "object" && value !== null && !Array.isArray(value);
}

/**
 * Auto-Combo dashboard page (/dashboard/auto-combo).
 *
 * Polls the combo and monitoring endpoints every 30s and renders provider
 * scores, factor breakdowns, mode-pack selection and current exclusions.
 * Incident mode is flagged when more than half of the reported circuit
 * breakers are OPEN.
 */
export default function AutoComboDashboard() {
  const [scores, setScores] = useState<ProviderScore[]>([]);
  const [exclusions, setExclusions] = useState<ExclusionEntry[]>([]);
  const [incidentMode, setIncidentMode] = useState(false);
  const [modePack, setModePack] = useState("ship-fast");

  const fetchData = useCallback(async () => {
    try {
      const [combosRes, healthRes] = await Promise.allSettled([
        fetch("/api/combos/auto"),
        fetch("/api/monitoring/health"),
      ]);

      // NOTE(review): combosRes is fetched but never used to populate
      // `scores`/`exclusions` — presumably it should be; confirm the
      // response shape and wire it up.
      void combosRes;

      if (healthRes.status === "fulfilled") {
        const health: unknown = await healthRes.value.json();
        // Guard the payload shape instead of casting: a malformed response
        // must not crash `.filter` below.
        const breakers: unknown[] =
          isPlainRecord(health) && Array.isArray(health.circuitBreakers)
            ? health.circuitBreakers
            : [];
        const openCount = breakers.filter(
          (breaker) => isPlainRecord(breaker) && breaker.state === "OPEN",
        ).length;
        setIncidentMode(openCount / Math.max(breakers.length, 1) > 0.5);
      }
    } catch {
      /* ignore */
    }
  }, []);

  useEffect(() => {
    // setTimeout(…, 0) defers the first fetch until after mount paint.
    const id = setTimeout(fetchData, 0);
    const interval = setInterval(fetchData, 30_000);
    return () => {
      clearTimeout(id);
      clearInterval(interval);
    };
  }, [fetchData]);

  return (
    <div className="p-6 max-w-7xl mx-auto">
      <h1 className="text-2xl font-bold mb-6"> Auto-Combo Engine</h1>
      {/* Status Bar */}
      <div className="flex gap-4 mb-6">
        <div
          className={`px-3 py-2 rounded-lg text-sm font-medium ${incidentMode ? "bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-300" : "bg-green-100 text-green-700 dark:bg-green-900/30 dark:text-green-300"}`}
        >
          {incidentMode ? "🚨 INCIDENT MODE" : "✅ Normal"}
        </div>
        <div className="px-3 py-2 bg-blue-100 dark:bg-blue-900/30 rounded-lg text-sm">
          Mode: <strong>{MODE_PACKS.find((m) => m.id === modePack)?.label || modePack}</strong>
        </div>
      </div>
      {/* Mode Pack Selector */}
      <div className="mb-8">
        <h2 className="text-lg font-semibold mb-3">🎛 Mode Pack</h2>
        <div className="flex gap-2">
          {MODE_PACKS.map((mp) => (
            <button
              key={mp.id}
              onClick={() => setModePack(mp.id)}
              className={`px-4 py-2 rounded-lg text-sm transition-colors ${
                modePack === mp.id
                  ? "bg-blue-600 text-white"
                  : "bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700"
              }`}
            >
              {mp.label}
            </button>
          ))}
        </div>
      </div>
      {/* Provider Scores */}
      <div className="mb-8">
        <h2 className="text-lg font-semibold mb-3">📊 Provider Scores</h2>
        {scores.length === 0 ? (
          <p className="text-gray-500">
            No auto-combo configured. Create one via <code>POST /api/combos/auto</code>.
          </p>
        ) : (
          <div className="space-y-3">
            {scores.map((s) => (
              <div key={s.provider} className="p-3 bg-white dark:bg-gray-800 rounded-lg border">
                <div className="flex justify-between items-center mb-2">
                  <span className="font-medium">
                    {s.provider} / {s.model}
                  </span>
                  <span className="font-bold text-lg">{(s.score * 100).toFixed(0)}%</span>
                </div>
                {/* Score Bar */}
                <div className="h-2 bg-gray-200 dark:bg-gray-700 rounded overflow-hidden mb-2">
                  <div
                    className="h-full bg-blue-500 rounded"
                    style={{ width: `${s.score * 100}%` }}
                  />
                </div>
                {/* Factor Breakdown — entries are already [string, number]; no cast needed */}
                <div className="grid grid-cols-3 gap-1 text-xs text-gray-500">
                  {Object.entries(s.factors ?? {}).map(([key, val]) => (
                    <span key={key}>
                      {FACTOR_LABELS[key] || key}: {(val * 100).toFixed(0)}%
                    </span>
                  ))}
                </div>
              </div>
            ))}
          </div>
        )}
      </div>
      {/* Exclusions */}
      <div>
        <h2 className="text-lg font-semibold mb-3">🚫 Excluded Providers</h2>
        {exclusions.length === 0 ? (
          <p className="text-gray-500">No providers currently excluded.</p>
        ) : (
          <div className="space-y-2">
            {exclusions.map((e) => (
              <div
                key={e.provider}
                className="p-3 bg-red-50 dark:bg-red-900/10 rounded border border-red-200 dark:border-red-800"
              >
                <div className="flex justify-between">
                  <span className="font-medium text-red-700 dark:text-red-400">{e.provider}</span>
                  <span className="text-xs text-gray-500">
                    Cooldown: {Math.round(e.cooldownMs / 60000)}min
                  </span>
                </div>
                <p className="text-xs text-gray-500 mt-1">{e.reason}</p>
              </div>
            ))}
          </div>
        )}
      </div>
    </div>
  );
}

View file

@ -18,7 +18,7 @@ export default function DefaultToolCard({
}) {
const t = useTranslations("cliTools");
const translateOrFallback = useCallback(
(key, fallback, values) => {
(key, fallback, values = undefined) => {
try {
return t(key, values);
} catch {

View file

@ -0,0 +1,147 @@
/**
* Dashboard MCP Panel /dashboard/mcp
*
* Shows MCP tool audit log, usage stats, and real-time metrics.
*/
"use client";
import { useEffect, useState, useCallback } from "react";
/** One row of the MCP tool audit log (GET /api/mcp/audit). */
interface AuditEntry {
  tool_name: string;
  // Rendered via `new Date(timestamp)` — assumed to be ISO-8601; confirm.
  timestamp: string;
  duration_ms: number;
  success: boolean;
  api_key_hash: string;
}

/** Aggregate MCP usage statistics (GET /api/mcp/audit/stats). */
interface McpStats {
  totalCalls: number;
  // 0–1 fraction; the UI multiplies by 100 for display.
  successRate: number;
  avgDurationMs: number;
  byTool: Array<{ tool: string; count: number; avgMs: number }>;
}
/**
 * Static catalog of MCP tools exposed by the server. Hoisted to module scope
 * so it is not re-allocated on every render.
 */
const MCP_TOOLS = [
  "omniroute_get_health",
  "omniroute_list_combos",
  "omniroute_get_combo_metrics",
  "omniroute_switch_combo",
  "omniroute_check_quota",
  "omniroute_route_request",
  "omniroute_cost_report",
  "omniroute_list_models_catalog",
  "omniroute_simulate_route",
  "omniroute_set_budget_guard",
  "omniroute_set_resilience_profile",
  "omniroute_test_combo",
  "omniroute_get_provider_metrics",
  "omniroute_best_combo_for_task",
  "omniroute_explain_route",
  "omniroute_get_session_snapshot",
] as const;

/**
 * MCP server dashboard page (/dashboard/mcp).
 *
 * Polls the audit log and aggregate stats every 30s and renders summary
 * cards, the tool catalog, and the most recent tool calls.
 */
export default function McpDashboard() {
  const [audit, setAudit] = useState<AuditEntry[]>([]);
  const [stats, setStats] = useState<McpStats | null>(null);
  const [loading, setLoading] = useState(true);

  const fetchData = useCallback(async () => {
    try {
      const [auditRes, statsRes] = await Promise.allSettled([
        fetch("/api/mcp/audit?limit=50"),
        fetch("/api/mcp/audit/stats"),
      ]);
      if (auditRes.status === "fulfilled") {
        // Guard against a non-array payload (e.g. an error object) so that
        // `audit.map(...)` below can never crash.
        const payload: unknown = await auditRes.value.json();
        setAudit(Array.isArray(payload) ? (payload as AuditEntry[]) : []);
      }
      if (statsRes.status === "fulfilled") {
        // NOTE(review): shape is asserted, not validated at runtime — the
        // renderers below all use optional access, so a partial object is safe.
        const payload: unknown = await statsRes.value.json();
        if (payload && typeof payload === "object") setStats(payload as McpStats);
      }
    } catch {
      /* fallback data */
    }
    setLoading(false);
  }, []);

  useEffect(() => {
    // setTimeout(…, 0) defers the first fetch until after mount paint.
    const id = setTimeout(fetchData, 0);
    const interval = setInterval(fetchData, 30_000);
    return () => {
      clearTimeout(id);
      clearInterval(interval);
    };
  }, [fetchData]);

  return (
    <div className="p-6 max-w-7xl mx-auto">
      <h1 className="text-2xl font-bold mb-6">🔧 MCP Server Dashboard</h1>
      {/* Stats Grid — `??` (not `||`) so a legitimate 0% success rate is not
          silently rendered as 100% */}
      <div className="grid grid-cols-1 md:grid-cols-4 gap-4 mb-8">
        <StatCard label="Total Calls" value={stats?.totalCalls ?? 0} />
        <StatCard label="Success Rate" value={`${((stats?.successRate ?? 1) * 100).toFixed(1)}%`} />
        <StatCard label="Avg Latency" value={`${stats?.avgDurationMs ?? 0}ms`} />
        <StatCard label="Active Tools" value={MCP_TOOLS.length} />
      </div>
      {/* Tool List */}
      <div className="mb-8">
        <h2 className="text-lg font-semibold mb-3">📋 Available Tools ({MCP_TOOLS.length})</h2>
        <div className="grid grid-cols-2 md:grid-cols-4 gap-2">
          {MCP_TOOLS.map((toolName) => (
            <div
              key={toolName}
              className="p-2 bg-green-50 dark:bg-green-900/20 rounded text-sm border border-green-200 dark:border-green-800"
            >
              <span className="text-green-600 mr-1"></span> {toolName.replace("omniroute_", "")}
            </div>
          ))}
        </div>
      </div>
      {/* Audit Log */}
      <div>
        <h2 className="text-lg font-semibold mb-3">📊 Recent Calls</h2>
        {loading ? (
          <p className="text-gray-500">Loading...</p>
        ) : audit.length === 0 ? (
          <p className="text-gray-500">
            No MCP calls yet. Use <code>omniroute --mcp</code> to connect.
          </p>
        ) : (
          <div className="overflow-x-auto">
            <table className="w-full text-sm">
              <thead>
                <tr className="border-b">
                  <th className="text-left p-2">Tool</th>
                  <th className="text-left p-2">Time</th>
                  <th className="text-left p-2">Duration</th>
                  <th className="text-left p-2">Status</th>
                </tr>
              </thead>
              <tbody>
                {/* Composite key: the list refreshes every poll, so a bare
                    index key could pair stale DOM with new rows. */}
                {audit.map((entry, i) => (
                  <tr
                    key={`${entry.timestamp}-${entry.tool_name}-${i}`}
                    className="border-b hover:bg-gray-50 dark:hover:bg-gray-800"
                  >
                    <td className="p-2 font-mono text-xs">{entry.tool_name}</td>
                    <td className="p-2 text-xs">
                      {new Date(entry.timestamp).toLocaleTimeString()}
                    </td>
                    <td className="p-2">{entry.duration_ms}ms</td>
                    <td className="p-2">{entry.success ? "✅" : "❌"}</td>
                  </tr>
                ))}
              </tbody>
            </table>
          </div>
        )}
      </div>
    </div>
  );
}
/** Small metric tile used in the stats grid at the top of the dashboard. */
function StatCard(props: { label: string; value: string | number }) {
  // Destructure in the body rather than the signature; rendering is unchanged.
  const { label, value } = props;
  return (
    <div className="p-4 bg-white dark:bg-gray-800 rounded-lg shadow border">
      <p className="text-sm text-gray-500 dark:text-gray-400">{label}</p>
      <p className="text-2xl font-bold mt-1">{value}</p>
    </div>
  );
}

View file

@ -1,24 +1,34 @@
"use client";
export default function ProvidersError({
error,
error: _error,
reset,
}: {
error: Error & { digest?: string };
reset: () => void;
}) {
return (
<div className="flex flex-col items-center justify-center min-h-[400px] p-6">
<div
className="flex flex-col items-center justify-center min-h-[400px] p-6"
role="alert"
aria-live="assertive"
>
<div className="text-center space-y-4">
<h2 className="text-xl font-semibold text-red-600 dark:text-red-400">
Failed to load providers
</h2>
<p className="text-gray-600 dark:text-gray-400 max-w-md">
{error.message || "An unexpected error occurred while loading provider data."}
<p className="text-text-muted max-w-md">
We could not load provider data right now. Check your connection and try again.
</p>
{_error?.digest && (
<p className="text-xs text-text-muted font-mono">Error ID: {_error.digest}</p>
)}
{process.env.NODE_ENV === "development" && _error?.message && (
<p className="text-xs text-red-600 dark:text-red-400 font-mono">{_error.message}</p>
)}
<button
onClick={reset}
className="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 transition-colors"
className="px-4 py-2 bg-primary text-white rounded-lg hover:bg-primary-hover transition-colors focus:outline-2 focus:outline-offset-2 focus:outline-primary"
>
Try Again
</button>

View file

@ -1,12 +1,14 @@
"use client";
import { CardSkeleton, Skeleton } from "@/shared/components/Loading";
export default function ProvidersLoading() {
return (
<div className="space-y-6 animate-pulse p-6">
<div className="h-8 bg-gray-200 dark:bg-gray-700 rounded w-48" />
<div className="space-y-6 p-6" role="status" aria-live="polite" aria-busy="true">
<Skeleton className="h-8 w-48" />
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
{[1, 2, 3].map((i) => (
<div key={i} className="h-40 bg-gray-200 dark:bg-gray-700 rounded-lg" />
{[0, 1, 2].map((index) => (
<CardSkeleton key={index} />
))}
</div>
</div>

View file

@ -5,8 +5,8 @@ import { Card } from "@/shared/components";
import { useTranslations } from "next-intl";
export default function ModelAliasesTab() {
const [builtIn, setBuiltIn] = useState({});
const [custom, setCustom] = useState({});
const [builtIn, setBuiltIn] = useState<Record<string, string>>({});
const [custom, setCustom] = useState<Record<string, string>>({});
const [loading, setLoading] = useState(true);
const [saving, setSaving] = useState(false);
const [status, setStatus] = useState("");
@ -49,7 +49,7 @@ export default function ModelAliasesTab() {
}
};
const removeAlias = async (from) => {
const removeAlias = async (from: string) => {
setSaving(true);
try {
const res = await fetch("/api/settings/model-aliases", {
@ -97,9 +97,7 @@ export default function ModelAliasesTab() {
{/* Add custom alias */}
<div className="p-4 rounded-lg bg-surface/30 border border-border/30 mb-4">
<p className="text-sm font-medium mb-3">
{t("addCustomAlias") || "Add Custom Alias"}
</p>
<p className="text-sm font-medium mb-3">{t("addCustomAlias") || "Add Custom Alias"}</p>
<div className="flex items-center gap-2">
<input
type="text"

View file

@ -1,24 +1,34 @@
"use client";
export default function SettingsError({
error,
error: _error,
reset,
}: {
error: Error & { digest?: string };
reset: () => void;
}) {
return (
<div className="flex flex-col items-center justify-center min-h-[400px] p-6">
<div
className="flex flex-col items-center justify-center min-h-[400px] p-6"
role="alert"
aria-live="assertive"
>
<div className="text-center space-y-4">
<h2 className="text-xl font-semibold text-red-600 dark:text-red-400">
Failed to load settings
</h2>
<p className="text-gray-600 dark:text-gray-400 max-w-md">
{error.message || "An unexpected error occurred while loading settings."}
<p className="text-text-muted max-w-md">
We could not load settings right now. Please retry in a few seconds.
</p>
{_error?.digest && (
<p className="text-xs text-text-muted font-mono">Error ID: {_error.digest}</p>
)}
{process.env.NODE_ENV === "development" && _error?.message && (
<p className="text-xs text-red-600 dark:text-red-400 font-mono">{_error.message}</p>
)}
<button
onClick={reset}
className="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 transition-colors"
className="px-4 py-2 bg-primary text-white rounded-lg hover:bg-primary-hover transition-colors focus:outline-2 focus:outline-offset-2 focus:outline-primary"
>
Try Again
</button>

View file

@ -1,14 +1,16 @@
"use client";
import { Skeleton } from "@/shared/components/Loading";
export default function SettingsLoading() {
return (
<div className="space-y-6 animate-pulse p-6">
<div className="h-8 bg-gray-200 dark:bg-gray-700 rounded w-36" />
<div className="space-y-6 p-6" role="status" aria-live="polite" aria-busy="true">
<Skeleton className="h-8 w-36" />
<div className="space-y-4">
{[1, 2, 3, 4].map((i) => (
<div key={i} className="space-y-2">
<div className="h-4 bg-gray-200 dark:bg-gray-700 rounded w-24" />
<div className="h-10 bg-gray-200 dark:bg-gray-700 rounded" />
{[0, 1, 2, 3].map((index) => (
<div key={index} className="space-y-2">
<Skeleton className="h-4 w-24" />
<Skeleton className="h-10 w-full" />
</div>
))}
</div>

View file

@ -212,7 +212,7 @@ export default function EvalsTab() {
return (
<div className="flex flex-col gap-6">
{/* Hero Section — always visible */}
<HeroSection />
<HeroSection t={t} />
<EmptyState
icon="science"
title={t("noEvalSuitesFound")}

View file

@ -26,8 +26,8 @@ export default function Error({ error, reset }: ErrorProps) {
Internal Server Error
</h1>
<p className="text-[15px] text-[var(--color-text-muted)] max-w-[400px] leading-relaxed mb-2">
Something went wrong while processing your request. Our team has been
notified and is working on a fix.
Something went wrong while processing your request. Our team has been notified and is
working on a fix.
</p>
{error?.digest && (
<p className="text-xs text-[var(--color-text-muted)] mb-6 font-mono">
@ -47,17 +47,24 @@ export default function Error({ error, reset }: ErrorProps) {
<button
onClick={reset}
aria-label="Retry loading the page"
className="px-6 py-2.5 rounded-lg text-white text-sm font-semibold cursor-pointer transition-all duration-200 bg-[var(--color-accent)] hover:bg-[var(--color-accent-hover)] focus:outline-2 focus:outline-offset-2 focus:outline-[var(--color-accent)]"
className="px-6 py-2.5 rounded-lg text-white text-sm font-semibold cursor-pointer transition-all duration-200 motion-reduce:transition-none bg-[var(--color-accent)] hover:bg-[var(--color-accent-hover)] focus:outline-2 focus:outline-offset-2 focus:outline-[var(--color-accent)]"
>
Try Again
</button>
<a
href="/dashboard"
className="px-6 py-2.5 rounded-lg text-[var(--color-text-main)] text-sm font-semibold cursor-pointer transition-all duration-200 border border-[var(--color-border)] hover:bg-[var(--color-bg-alt)] no-underline focus:outline-2 focus:outline-offset-2 focus:outline-[var(--color-accent)]"
className="px-6 py-2.5 rounded-lg text-[var(--color-text-main)] text-sm font-semibold cursor-pointer transition-all duration-200 motion-reduce:transition-none border border-[var(--color-border)] hover:bg-[var(--color-bg-alt)] no-underline focus:outline-2 focus:outline-offset-2 focus:outline-[var(--color-accent)]"
aria-label="Return to dashboard"
>
Go to Dashboard
</a>
<a
href="/status"
className="px-6 py-2.5 rounded-lg text-[var(--color-text-main)] text-sm font-semibold cursor-pointer transition-all duration-200 motion-reduce:transition-none border border-[var(--color-border)] hover:bg-[var(--color-bg-alt)] no-underline focus:outline-2 focus:outline-offset-2 focus:outline-[var(--color-accent)]"
aria-label="Open system status"
>
System Status
</a>
</div>
</div>
);

View file

@ -16,11 +16,13 @@ interface GlobalErrorProps {
export default function GlobalError({ error, reset }: GlobalErrorProps) {
return (
<html lang="en">
<body className="flex flex-col items-center justify-center min-h-screen p-6 bg-[#0a0a0f] text-[#e0e0e0] font-[system-ui,-apple-system,sans-serif] text-center m-0">
<body className="flex flex-col items-center justify-center min-h-screen p-6 bg-bg text-text-main font-[system-ui,-apple-system,sans-serif] text-center m-0">
<main role="alert" aria-live="assertive" className="flex flex-col items-center">
<div className="text-[64px] mb-4" aria-hidden="true"></div>
<div className="text-[64px] mb-4" aria-hidden="true">
</div>
<h1 className="text-[28px] font-bold mb-2">Something went wrong</h1>
<p className="text-[15px] text-[#888] max-w-[400px] leading-relaxed mb-6">
<p className="text-[15px] text-text-muted max-w-[400px] leading-relaxed mb-6">
An unexpected error occurred. This has been logged and our team will investigate.
</p>
{process.env.NODE_ENV === "development" && error?.message && (
@ -31,13 +33,22 @@ export default function GlobalError({ error, reset }: GlobalErrorProps) {
{error.message}
</pre>
)}
<button
onClick={reset}
aria-label="Retry loading the page"
className="px-8 py-3 rounded-[10px] text-white border-none text-sm font-semibold cursor-pointer transition-transform duration-200 shadow-[0_4px_16px_rgba(99,102,241,0.3)] hover:-translate-y-0.5 bg-gradient-to-br from-[#6366f1] to-[#8b5cf6] focus:outline-2 focus:outline-offset-2 focus:outline-[#6366f1]"
>
Try Again
</button>
<div className="flex flex-col sm:flex-row gap-3">
<button
onClick={reset}
aria-label="Retry loading the page"
className="px-8 py-3 rounded-[10px] text-white border-none text-sm font-semibold cursor-pointer transition-transform duration-200 motion-reduce:transition-none motion-reduce:transform-none shadow-warm hover:-translate-y-0.5 bg-gradient-to-br from-primary to-primary-hover focus:outline-2 focus:outline-offset-2 focus:outline-primary"
>
Try Again
</button>
<a
href="/status"
className="px-8 py-3 rounded-[10px] text-sm font-semibold border border-[var(--color-border)] hover:bg-[var(--color-bg-alt)] no-underline focus:outline-2 focus:outline-offset-2 focus:outline-primary"
aria-label="Open system status"
>
System Status
</a>
</div>
</main>
</body>
</html>

7
src/app/loading.tsx Normal file
View file

@ -0,0 +1,7 @@
"use client";
import { PageLoading } from "@/shared/components/Loading";
/**
 * Root route-segment loading UI: rendered by Next.js while any page in this
 * segment suspends. Delegates to the shared PageLoading component.
 */
export default function AppLoading() {
  return <PageLoading message="Loading OmniRoute..." />;
}

View file

@ -21,13 +21,22 @@ export default function NotFound() {
<p className="text-[15px] text-text-muted max-w-[400px] leading-relaxed mb-8">
The page you&apos;re looking for doesn&apos;t exist or has been moved.
</p>
<Link
href="/dashboard"
className="px-8 py-3 rounded-xl text-white text-sm font-medium no-underline transition-all duration-200 shadow-warm hover:-translate-y-0.5 bg-gradient-to-br from-primary to-primary-hover hover:shadow-elevated focus:outline-2 focus:outline-offset-2 focus:outline-primary"
aria-label="Return to dashboard"
>
Go to Dashboard
</Link>
<div className="flex flex-col sm:flex-row items-center gap-3">
<Link
href="/dashboard"
className="px-8 py-3 rounded-xl text-white text-sm font-medium no-underline transition-all duration-200 motion-reduce:transition-none shadow-warm hover:-translate-y-0.5 bg-gradient-to-br from-primary to-primary-hover hover:shadow-elevated focus:outline-2 focus:outline-offset-2 focus:outline-primary"
aria-label="Return to dashboard"
>
Go to Dashboard
</Link>
<Link
href="/status"
className="px-8 py-3 rounded-xl text-sm font-medium no-underline border border-border hover:bg-bg-alt transition-colors duration-200 motion-reduce:transition-none focus:outline-2 focus:outline-offset-2 focus:outline-primary"
aria-label="Open system status page"
>
System Status
</Link>
</div>
</div>
);
}

View file

@ -10,14 +10,24 @@
"include": [],
"files": [
"src/app/api/settings/proxy/test/route.ts",
"src/lib/db/apiKeys.ts",
"src/lib/db/cliToolState.ts",
"src/lib/db/encryption.ts",
"src/lib/db/prompts.ts",
"src/lib/db/providers.ts",
"src/lib/db/settings.ts",
"src/lib/db/stateReset.ts",
"src/shared/validation/providerSchema.ts",
"src/shared/validation/schemas.ts",
"open-sse/config/providerModels.ts",
"open-sse/config/providerRegistry.ts",
"open-sse/mcp-server/audit.ts",
"open-sse/mcp-server/server.ts",
"open-sse/mcp-server/tools/advancedTools.ts",
"open-sse/mcp-server/scopeEnforcement.ts",
"open-sse/translator/registry.ts"
"open-sse/translator/registry.ts",
"open-sse/handlers/responseSanitizer.ts",
"open-sse/handlers/responseTranslator.ts"
],
"exclude": ["node_modules", ".next", "app.__qa_backup", "vscode-extension"]
}

View file

@ -9,11 +9,21 @@
},
"include": [],
"files": [
"src/lib/db/apiKeys.ts",
"src/lib/db/cliToolState.ts",
"src/lib/db/encryption.ts",
"src/lib/db/prompts.ts",
"src/lib/db/providers.ts",
"src/lib/db/settings.ts",
"src/lib/db/stateReset.ts",
"open-sse/config/providerModels.ts",
"open-sse/mcp-server/audit.ts",
"open-sse/mcp-server/server.ts",
"open-sse/mcp-server/tools/advancedTools.ts",
"open-sse/translator/registry.ts",
"open-sse/mcp-server/scopeEnforcement.ts"
"open-sse/mcp-server/scopeEnforcement.ts",
"open-sse/handlers/responseSanitizer.ts",
"open-sse/handlers/responseTranslator.ts"
],
"exclude": ["node_modules", ".next", "app.__qa_backup", "vscode-extension"]
}