refactor(anthropic-vertex): move SDK runtime to plugin (#71174)

* refactor(anthropic-vertex): move sdk runtime to plugin

* fix(anthropic-vertex): stage provider runtime deps

* fix(anthropic-vertex): reuse stream factory wrapper
This commit is contained in:
Vincent Koc 2026-04-24 11:52:35 -07:00 committed by GitHub
parent 07f33b2909
commit d795000377
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 350 additions and 216 deletions

View file

@ -12,6 +12,7 @@ Docs: https://docs.openclaw.ai
- Plugin hooks: expose first-class run, message, sender, session, and trace correlation fields on message hook contexts and run lifecycle events. Thanks @vincentkoc.
- TUI/dependencies: remove direct `cli-highlight` usage from the OpenClaw TUI code-block renderer, keeping themed code coloring without the extra root dependency. Thanks @vincentkoc.
- Diagnostics/OTEL: export run, model-call, and tool-execution diagnostic lifecycle events as OTEL spans without retaining live span state. Thanks @vincentkoc.
- Providers/Anthropic Vertex: move the Vertex SDK runtime behind the bundled provider plugin so core no longer owns that provider-specific dependency. Thanks @vincentkoc.
- Plugins/activation: expose activation plan reasons and a richer plan API so callers can inspect why a plugin was selected while preserving existing id-list activation behavior. (#70943) Thanks @vincentkoc.
- Plugins/source metadata: expose normalized install-source facts on provider and channel catalogs so onboarding can explain npm pinning, integrity state, and local availability before runtime loads. (#70951) Thanks @vincentkoc.
- Plugins/catalog: pin the official external WeCom channel source to an exact npm release plus dist integrity, with a guard that official external sources stay integrity-pinned. (#70997) Thanks @vincentkoc.

View file

@ -0,0 +1,77 @@
import type { Model } from "@mariozechner/pi-ai";
import { beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
// Mocks created via vi.hoisted so the vi.mock factories below — which vitest
// hoists above the import statements — can reference them safely.
const hoisted = vi.hoisted(() => {
  const streamAnthropicMock = vi.fn(() => Symbol("anthropic-vertex-stream"));
  const anthropicVertexCtorMock = vi.fn();
  return {
    streamAnthropicMock,
    anthropicVertexCtorMock,
  };
});
// Replace only streamAnthropic; keep the rest of pi-ai's real surface intact.
vi.mock("@mariozechner/pi-ai", async () => {
  const original =
    await vi.importActual<typeof import("@mariozechner/pi-ai")>("@mariozechner/pi-ai");
  return {
    ...original,
    streamAnthropic: hoisted.streamAnthropicMock,
  };
});
// Stub the Vertex SDK client so the tests can count constructor invocations
// without touching GCP auth.
vi.mock("@anthropic-ai/vertex-sdk", () => ({
  AnthropicVertex: vi.fn(function MockAnthropicVertex(options: unknown) {
    hoisted.anthropicVertexCtorMock(options);
    return { options };
  }),
}));
let createAnthropicVertexStreamFn: typeof import("./api.js").createAnthropicVertexStreamFn;
let createAnthropicVertexStreamFnForModel: typeof import("./api.js").createAnthropicVertexStreamFnForModel;
/** Build a minimal model stub; the stream factories only read id/api/provider/maxTokens. */
function makeModel(): Model<"anthropic-messages"> {
  const stub = {
    id: "claude-sonnet-4-6",
    api: "anthropic-messages",
    provider: "anthropic-vertex",
    maxTokens: 128000,
  };
  return stub as Model<"anthropic-messages">;
}
describe("Anthropic Vertex API stream factories", () => {
  beforeAll(async () => {
    // Import the module under test after the vi.mock registrations above so
    // it resolves against the mocked pi-ai and Vertex SDK modules.
    ({ createAnthropicVertexStreamFn, createAnthropicVertexStreamFnForModel } =
      await import("./api.js"));
  });
  beforeEach(() => {
    hoisted.streamAnthropicMock.mockClear();
    hoisted.anthropicVertexCtorMock.mockClear();
  });
  it("reuses the runtime stream factory across direct stream calls", async () => {
    const streamFn = createAnthropicVertexStreamFn("vertex-project", "us-east5");
    const model = makeModel();
    await streamFn(model, { messages: [] }, {});
    await streamFn(model, { messages: [] }, {});
    // One client construction for two streams: the wrapper must cache the
    // lazily-imported runtime factory rather than rebuilding it per call.
    expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledTimes(1);
    expect(hoisted.streamAnthropicMock).toHaveBeenCalledTimes(2);
  });
  it("reuses the runtime stream factory across model-derived stream calls", async () => {
    const streamFn = createAnthropicVertexStreamFnForModel(makeModel(), {
      ANTHROPIC_VERTEX_PROJECT_ID: "vertex-project",
      GOOGLE_CLOUD_LOCATION: "us-east5",
    } as NodeJS.ProcessEnv);
    const model = makeModel();
    await streamFn(model, { messages: [] }, {});
    await streamFn(model, { messages: [] }, {});
    expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledTimes(1);
    expect(hoisted.streamAnthropicMock).toHaveBeenCalledTimes(2);
  });
});

View file

@ -1,3 +1,5 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
export {
ANTHROPIC_VERTEX_DEFAULT_MODEL_ID,
buildAnthropicVertexProvider,
@ -40,3 +42,30 @@ export function resolveImplicitAnthropicVertexProvider(params?: { env?: NodeJS.P
return buildAnthropicVertexProvider({ env });
}
/**
 * Public wrapper that defers loading the SDK-backed stream runtime.
 *
 * The runtime module (and with it the Vertex SDK) is imported lazily on the
 * first stream call and cached. Starting the dynamic import eagerly here
 * would leave a floating promise: if the import rejects before the returned
 * StreamFn is first awaited — or the fn is never called at all — Node
 * surfaces it as an unhandled rejection.
 */
export function createAnthropicVertexStreamFn(
  projectId: string | undefined,
  region: string,
  baseURL?: string,
): StreamFn {
  let streamFnPromise: Promise<StreamFn> | undefined;
  return async (model, context, options) => {
    // First call kicks off the import; later calls reuse the cached promise.
    streamFnPromise ??= import("./stream-runtime.js").then((runtime) =>
      runtime.createAnthropicVertexStreamFn(projectId, region, baseURL),
    );
    const streamFn = await streamFnPromise;
    return streamFn(model, context, options);
  };
}
/**
 * Model/env-derived variant of the lazy stream-fn wrapper.
 *
 * Mirrors createAnthropicVertexStreamFn: the runtime import is deferred to
 * the first stream call and cached, so a failed import cannot become an
 * unhandled rejection when the returned StreamFn is never invoked.
 */
export function createAnthropicVertexStreamFnForModel(
  model: { baseUrl?: string },
  env: NodeJS.ProcessEnv = process.env,
): StreamFn {
  let streamFnPromise: Promise<StreamFn> | undefined;
  return async (...args) => {
    // First call kicks off the import; later calls reuse the cached promise.
    streamFnPromise ??= import("./stream-runtime.js").then((runtime) =>
      runtime.createAnthropicVertexStreamFnForModel(model, env),
    );
    const streamFn = await streamFnPromise;
    return streamFn(...args);
  };
}

View file

@ -4,10 +4,18 @@
"private": true,
"description": "OpenClaw Anthropic Vertex provider plugin",
"type": "module",
"dependencies": {
"@anthropic-ai/vertex-sdk": "^0.16.0",
"@mariozechner/pi-agent-core": "0.70.2",
"@mariozechner/pi-ai": "0.70.2"
},
"devDependencies": {
"@openclaw/plugin-sdk": "workspace:*"
},
"openclaw": {
"bundle": {
"stageRuntimeDependencies": true
},
"extensions": [
"./index.ts"
]

View file

@ -1,6 +1,7 @@
import type { Model } from "@mariozechner/pi-ai";
import { beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import { SYSTEM_PROMPT_CACHE_BOUNDARY } from "./system-prompt-cache-boundary.js";
const SYSTEM_PROMPT_CACHE_BOUNDARY = "\n<!-- OPENCLAW_CACHE_BOUNDARY -->\n";
const hoisted = vi.hoisted(() => {
const streamAnthropicMock = vi.fn<(model: unknown, context: unknown, options: unknown) => symbol>(
@ -31,28 +32,8 @@ vi.mock("@anthropic-ai/vertex-sdk", () => ({
}),
}));
vi.mock("../plugin-sdk/anthropic-vertex.js", () => ({
resolveAnthropicVertexProjectId: (env: NodeJS.ProcessEnv = process.env) =>
env.ANTHROPIC_VERTEX_PROJECT_ID || env.GOOGLE_CLOUD_PROJECT || env.GOOGLE_CLOUD_PROJECT_ID,
resolveAnthropicVertexClientRegion: (params?: { baseUrl?: string; env?: NodeJS.ProcessEnv }) => {
const baseUrl = params?.baseUrl?.trim();
if (baseUrl) {
try {
const host = new URL(baseUrl).hostname;
const match = /^([a-z0-9-]+)-aiplatform\.googleapis\.com$/u.exec(host);
if (match?.[1]) {
return match[1];
}
} catch {
// noop; test seam only
}
}
return params?.env?.GOOGLE_CLOUD_LOCATION || params?.env?.CLOUD_ML_REGION || "global";
},
}));
let createAnthropicVertexStreamFn: typeof import("./anthropic-vertex-stream.js").createAnthropicVertexStreamFn;
let createAnthropicVertexStreamFnForModel: typeof import("./anthropic-vertex-stream.js").createAnthropicVertexStreamFnForModel;
let createAnthropicVertexStreamFn: typeof import("./stream-runtime.js").createAnthropicVertexStreamFn;
let createAnthropicVertexStreamFnForModel: typeof import("./stream-runtime.js").createAnthropicVertexStreamFnForModel;
function makeModel(params: { id: string; maxTokens?: number }): Model<"anthropic-messages"> {
return {
@ -121,7 +102,7 @@ function buildExpectedCacheBoundaryPayload(messageText: string) {
describe("createAnthropicVertexStreamFn", () => {
beforeAll(async () => {
({ createAnthropicVertexStreamFn, createAnthropicVertexStreamFnForModel } =
await import("./anthropic-vertex-stream.js"));
await import("./stream-runtime.js"));
});
beforeEach(() => {

View file

@ -0,0 +1,199 @@
import { AnthropicVertex } from "@anthropic-ai/vertex-sdk";
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { streamAnthropic, type AnthropicOptions, type Model } from "@mariozechner/pi-ai";
import {
applyAnthropicPayloadPolicyToParams,
resolveAnthropicPayloadPolicy,
} from "openclaw/plugin-sdk/provider-stream-shared";
import { resolveAnthropicVertexClientRegion, resolveAnthropicVertexProjectId } from "./region.js";
type AnthropicVertexEffort = NonNullable<AnthropicOptions["effort"]>;
type AnthropicVertexAdaptiveEffort = AnthropicVertexEffort | "xhigh";
/** True when the model id names the Opus 4.7 family (dashed or dotted spelling). */
function isClaudeOpus47Model(modelId: string): boolean {
  return ["opus-4-7", "opus-4.7"].some((marker) => modelId.includes(marker));
}
/** True when the model id names the Opus 4.6 family (dashed or dotted spelling). */
function isClaudeOpus46Model(modelId: string): boolean {
  return ["opus-4-6", "opus-4.6"].some((marker) => modelId.includes(marker));
}
/** Models that accept an adaptive effort hint instead of a fixed thinking budget. */
function supportsAdaptiveThinking(modelId: string): boolean {
  if (isClaudeOpus47Model(modelId) || isClaudeOpus46Model(modelId)) {
    return true;
  }
  return modelId.includes("sonnet-4-6") || modelId.includes("sonnet-4.6");
}
/**
 * Translate a reasoning level into the effort value the Anthropic API accepts.
 * Unknown levels fall back to "high".
 */
function mapAnthropicAdaptiveEffort(
  reasoning: string,
  modelId: string,
): AnthropicVertexAdaptiveEffort {
  switch (reasoning) {
    case "minimal":
    case "low":
      return "low";
    case "medium":
      return "medium";
    case "xhigh":
      // Only Opus 4.7 supports "xhigh"; Opus 4.6 caps at "max", others at "high".
      if (isClaudeOpus47Model(modelId)) {
        return "xhigh";
      }
      return isClaudeOpus46Model(modelId) ? "max" : "high";
    default:
      return "high";
  }
}
/**
 * Resolve the effective max-token cap: the requested value clamped to the
 * model's advertised maximum. Non-finite or non-positive candidates are
 * ignored; returns undefined when neither side provides a usable cap.
 */
function resolveAnthropicVertexMaxTokens(params: {
  modelMaxTokens: number | undefined;
  requestedMaxTokens: number | undefined;
}): number | undefined {
  // A cap is usable only when it is a positive finite number; floor to an int.
  const normalize = (value: number | undefined): number | undefined =>
    typeof value === "number" && Number.isFinite(value) && value > 0
      ? Math.floor(value)
      : undefined;
  const modelMax = normalize(params.modelMaxTokens);
  const requested = normalize(params.requestedMaxTokens);
  if (modelMax !== undefined && requested !== undefined) {
    return Math.min(requested, modelMax);
  }
  return requested ?? modelMax;
}
/**
 * Build the onPayload hook: applies the resolved Anthropic payload policy to
 * each outgoing payload, then delegates to any caller-supplied hook and
 * re-applies the policy if that hook returned a replacement payload.
 */
function createAnthropicVertexOnPayload(params: {
  model: { api: string; baseUrl?: string; provider: string };
  cacheRetention: AnthropicOptions["cacheRetention"] | undefined;
  onPayload: AnthropicOptions["onPayload"] | undefined;
}): NonNullable<AnthropicOptions["onPayload"]> {
  // Resolve the cache-control policy once; every payload reuses it.
  const policy = resolveAnthropicPayloadPolicy({
    provider: params.model.provider,
    api: params.model.api,
    baseUrl: params.model.baseUrl,
    cacheRetention: params.cacheRetention,
    enableCacheControl: true,
  });
  // Apply the policy in place to plain-object payloads; anything else passes through.
  const shape = (candidate: unknown): unknown => {
    if (candidate && typeof candidate === "object" && !Array.isArray(candidate)) {
      applyAnthropicPayloadPolicyToParams(candidate as Record<string, unknown>, policy);
    }
    return candidate;
  };
  return async (payload, model) => {
    const shaped = shape(payload);
    const replacement = await params.onPayload?.(shaped, model);
    // Re-shape only when the downstream hook handed back a different payload.
    return replacement === undefined || replacement === shaped ? shaped : shape(replacement);
  };
}
/**
 * Create a StreamFn that routes through pi-ai's `streamAnthropic` with an
 * injected `AnthropicVertex` client. All streaming, message conversion, and
 * event handling is handled by pi-ai — we only supply the GCP-authenticated
 * client and map SimpleStreamOptions → AnthropicOptions.
 */
export function createAnthropicVertexStreamFn(
  projectId: string | undefined,
  region: string,
  baseURL?: string,
): StreamFn {
  // One SDK client per factory call; the returned StreamFn reuses it for
  // every request. Optional fields are only set when provided so the SDK's
  // own defaults apply otherwise.
  const client = new AnthropicVertex({
    region,
    ...(baseURL ? { baseURL } : {}),
    ...(projectId ? { projectId } : {}),
  });
  return (model, context, options) => {
    const transportModel = model as Model<"anthropic-messages"> & {
      api: string;
      baseUrl?: string;
      provider: string;
    };
    // Clamp the requested token budget to the model's advertised maximum.
    const maxTokens = resolveAnthropicVertexMaxTokens({
      modelMaxTokens: transportModel.maxTokens,
      requestedMaxTokens: options?.maxTokens,
    });
    const opts: AnthropicOptions = {
      client: client as unknown as AnthropicOptions["client"],
      temperature: options?.temperature,
      // Omit maxTokens entirely when unresolved rather than passing undefined.
      ...(maxTokens !== undefined ? { maxTokens } : {}),
      signal: options?.signal,
      cacheRetention: options?.cacheRetention,
      sessionId: options?.sessionId,
      headers: options?.headers,
      // Wraps any caller-provided onPayload so the cache-control policy is
      // also applied to whatever payload the caller returns.
      onPayload: createAnthropicVertexOnPayload({
        model: transportModel,
        cacheRetention: options?.cacheRetention,
        onPayload: options?.onPayload,
      }),
      maxRetryDelayMs: options?.maxRetryDelayMs,
      metadata: options?.metadata,
    };
    if (options?.reasoning) {
      if (supportsAdaptiveThinking(model.id)) {
        // Newer models take an effort hint instead of an explicit budget.
        opts.thinkingEnabled = true;
        opts.effort = mapAnthropicAdaptiveEffort(
          options.reasoning,
          model.id,
        ) as AnthropicVertexEffort;
      } else {
        // Older models need a concrete thinking-token budget; fall back to
        // 10000 tokens when the caller mapped none for this reasoning level.
        opts.thinkingEnabled = true;
        const budgets = options.thinkingBudgets;
        opts.thinkingBudgetTokens =
          (budgets && options.reasoning in budgets
            ? budgets[options.reasoning as keyof typeof budgets]
            : undefined) ?? 10000;
      }
    } else {
      opts.thinkingEnabled = false;
    }
    return streamAnthropic(transportModel, context, opts);
  };
}
/**
 * Normalize a user-supplied base URL so it targets the Vertex SDK's `/v1`
 * API root.
 *
 * Returns undefined for missing/blank input. A parseable URL gets `/v1`
 * appended unless its path already ends there; input that is not an absolute
 * URL is passed through trimmed so the SDK can surface its own error.
 */
function resolveAnthropicVertexSdkBaseUrl(baseUrl?: string): string | undefined {
  const trimmed = baseUrl?.trim();
  if (!trimmed) {
    return undefined;
  }
  try {
    const url = new URL(trimmed);
    // Strip trailing slashes so "/v1/" and "/v1" compare equal.
    const normalizedPath = url.pathname.replace(/\/+$/, "");
    if (normalizedPath.endsWith("/v1")) {
      // Already points at the v1 API root; keep the caller's exact string.
      return trimmed;
    }
    url.pathname = normalizedPath ? `${normalizedPath}/v1` : "/v1";
    return url.toString().replace(/\/$/, "");
  } catch {
    // Not an absolute URL; let the SDK validate it downstream.
    return trimmed;
  }
}
/**
 * Build a StreamFn for a model by deriving the GCP project, client region,
 * and SDK base URL from the model's baseUrl and the environment, then
 * delegating to the shared factory.
 */
export function createAnthropicVertexStreamFnForModel(
  model: { baseUrl?: string },
  env: NodeJS.ProcessEnv = process.env,
): StreamFn {
  const projectId = resolveAnthropicVertexProjectId(env);
  const region = resolveAnthropicVertexClientRegion({
    baseUrl: model.baseUrl,
    env,
  });
  const sdkBaseUrl = resolveAnthropicVertexSdkBaseUrl(model.baseUrl);
  return createAnthropicVertexStreamFn(projectId, region, sdkBaseUrl);
}

View file

@ -1581,7 +1581,6 @@
},
"dependencies": {
"@agentclientprotocol/sdk": "0.19.1",
"@anthropic-ai/vertex-sdk": "^0.16.0",
"@clack/prompts": "^1.2.0",
"@lydell/node-pty": "1.2.0-beta.12",
"@mariozechner/pi-agent-core": "0.70.2",

13
pnpm-lock.yaml generated
View file

@ -42,9 +42,6 @@ importers:
'@agentclientprotocol/sdk':
specifier: 0.19.1
version: 0.19.1(zod@4.3.6)
'@anthropic-ai/vertex-sdk':
specifier: ^0.16.0
version: 0.16.0(zod@4.3.6)
'@clack/prompts':
specifier: ^1.2.0
version: 1.2.0
@ -292,6 +289,16 @@ importers:
version: link:../../packages/plugin-sdk
extensions/anthropic-vertex:
dependencies:
'@anthropic-ai/vertex-sdk':
specifier: ^0.16.0
version: 0.16.0(zod@4.3.6)
'@mariozechner/pi-agent-core':
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
'@mariozechner/pi-ai':
specifier: 0.70.2
version: 0.70.2(@modelcontextprotocol/sdk@1.29.0(zod@4.3.6))(ws@8.20.0)(zod@4.3.6)
devDependencies:
'@openclaw/plugin-sdk':
specifier: workspace:*

View file

@ -6,11 +6,6 @@
"class": "core-runtime",
"risk": ["protocol-client"]
},
"@anthropic-ai/vertex-sdk": {
"owner": "provider:anthropic-vertex",
"class": "default-runtime-initially",
"risk": ["provider-sdk"]
},
"@clack/prompts": {
"owner": "core:cli",
"class": "core-runtime",

View file

@ -1,202 +1,40 @@
import { AnthropicVertex } from "@anthropic-ai/vertex-sdk";
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { streamAnthropic, type AnthropicOptions, type Model } from "@mariozechner/pi-ai";
import {
resolveAnthropicVertexClientRegion,
resolveAnthropicVertexProjectId,
} from "../plugin-sdk/anthropic-vertex.js";
import {
applyAnthropicPayloadPolicyToParams,
resolveAnthropicPayloadPolicy,
} from "./anthropic-payload-policy.js";
import { loadBundledPluginPublicSurfaceModuleSync } from "../plugin-sdk/facade-loader.js";
type AnthropicVertexEffort = NonNullable<AnthropicOptions["effort"]>;
type AnthropicVertexAdaptiveEffort = AnthropicVertexEffort | "xhigh";
type AnthropicVertexStreamFacade = {
createAnthropicVertexStreamFn: (
projectId: string | undefined,
region: string,
baseURL?: string,
) => StreamFn;
createAnthropicVertexStreamFnForModel: (
model: { baseUrl?: string },
env?: NodeJS.ProcessEnv,
) => StreamFn;
};
function isClaudeOpus47Model(modelId: string): boolean {
return modelId.includes("opus-4-7") || modelId.includes("opus-4.7");
}
function isClaudeOpus46Model(modelId: string): boolean {
return modelId.includes("opus-4-6") || modelId.includes("opus-4.6");
}
function supportsAdaptiveThinking(modelId: string): boolean {
return (
isClaudeOpus47Model(modelId) ||
isClaudeOpus46Model(modelId) ||
modelId.includes("sonnet-4-6") ||
modelId.includes("sonnet-4.6")
);
}
function mapAnthropicAdaptiveEffort(
reasoning: string,
modelId: string,
): AnthropicVertexAdaptiveEffort {
const effortMap: Record<string, AnthropicVertexAdaptiveEffort> = {
minimal: "low",
low: "low",
medium: "medium",
high: "high",
xhigh: isClaudeOpus47Model(modelId) ? "xhigh" : isClaudeOpus46Model(modelId) ? "max" : "high",
};
return effortMap[reasoning] ?? "high";
}
function resolveAnthropicVertexMaxTokens(params: {
modelMaxTokens: number | undefined;
requestedMaxTokens: number | undefined;
}): number | undefined {
const modelMax =
typeof params.modelMaxTokens === "number" &&
Number.isFinite(params.modelMaxTokens) &&
params.modelMaxTokens > 0
? Math.floor(params.modelMaxTokens)
: undefined;
const requested =
typeof params.requestedMaxTokens === "number" &&
Number.isFinite(params.requestedMaxTokens) &&
params.requestedMaxTokens > 0
? Math.floor(params.requestedMaxTokens)
: undefined;
if (modelMax !== undefined && requested !== undefined) {
return Math.min(requested, modelMax);
}
return requested ?? modelMax;
}
function createAnthropicVertexOnPayload(params: {
model: { api: string; baseUrl?: string; provider: string };
cacheRetention: AnthropicOptions["cacheRetention"] | undefined;
onPayload: AnthropicOptions["onPayload"] | undefined;
}): NonNullable<AnthropicOptions["onPayload"]> {
const policy = resolveAnthropicPayloadPolicy({
provider: params.model.provider,
api: params.model.api,
baseUrl: params.model.baseUrl,
cacheRetention: params.cacheRetention,
enableCacheControl: true,
function loadAnthropicVertexStreamFacade(): AnthropicVertexStreamFacade {
return loadBundledPluginPublicSurfaceModuleSync<AnthropicVertexStreamFacade>({
dirName: "anthropic-vertex",
artifactBasename: "api.js",
});
function applyPolicy(payload: unknown): unknown {
if (payload && typeof payload === "object" && !Array.isArray(payload)) {
applyAnthropicPayloadPolicyToParams(payload as Record<string, unknown>, policy);
}
return payload;
}
return async (payload, model) => {
const shapedPayload = applyPolicy(payload);
const nextPayload = await params.onPayload?.(shapedPayload, model);
if (nextPayload === undefined || nextPayload === shapedPayload) {
return shapedPayload;
}
return applyPolicy(nextPayload);
};
}
/**
* Create a StreamFn that routes through pi-ai's `streamAnthropic` with an
* injected `AnthropicVertex` client. All streaming, message conversion, and
* event handling is handled by pi-ai we only supply the GCP-authenticated
* client and map SimpleStreamOptions AnthropicOptions.
*/
export function createAnthropicVertexStreamFn(
projectId: string | undefined,
region: string,
baseURL?: string,
): StreamFn {
const client = new AnthropicVertex({
return loadAnthropicVertexStreamFacade().createAnthropicVertexStreamFn(
projectId,
region,
...(baseURL ? { baseURL } : {}),
...(projectId ? { projectId } : {}),
});
return (model, context, options) => {
const transportModel = model as Model<"anthropic-messages"> & {
api: string;
baseUrl?: string;
provider: string;
};
const maxTokens = resolveAnthropicVertexMaxTokens({
modelMaxTokens: transportModel.maxTokens,
requestedMaxTokens: options?.maxTokens,
});
const opts: AnthropicOptions = {
client: client as unknown as AnthropicOptions["client"],
temperature: options?.temperature,
...(maxTokens !== undefined ? { maxTokens } : {}),
signal: options?.signal,
cacheRetention: options?.cacheRetention,
sessionId: options?.sessionId,
headers: options?.headers,
onPayload: createAnthropicVertexOnPayload({
model: transportModel,
cacheRetention: options?.cacheRetention,
onPayload: options?.onPayload,
}),
maxRetryDelayMs: options?.maxRetryDelayMs,
metadata: options?.metadata,
};
if (options?.reasoning) {
if (supportsAdaptiveThinking(model.id)) {
opts.thinkingEnabled = true;
opts.effort = mapAnthropicAdaptiveEffort(
options.reasoning,
model.id,
) as AnthropicVertexEffort;
} else {
opts.thinkingEnabled = true;
const budgets = options.thinkingBudgets;
opts.thinkingBudgetTokens =
(budgets && options.reasoning in budgets
? budgets[options.reasoning as keyof typeof budgets]
: undefined) ?? 10000;
}
} else {
opts.thinkingEnabled = false;
}
return streamAnthropic(transportModel, context, opts);
};
}
function resolveAnthropicVertexSdkBaseUrl(baseUrl?: string): string | undefined {
const trimmed = baseUrl?.trim();
if (!trimmed) {
return undefined;
}
try {
const url = new URL(trimmed);
const normalizedPath = url.pathname.replace(/\/+$/, "");
if (!normalizedPath || normalizedPath === "") {
url.pathname = "/v1";
return url.toString().replace(/\/$/, "");
}
if (!normalizedPath.endsWith("/v1")) {
url.pathname = `${normalizedPath}/v1`;
return url.toString().replace(/\/$/, "");
}
return trimmed;
} catch {
return trimmed;
}
baseURL,
);
}
export function createAnthropicVertexStreamFnForModel(
model: { baseUrl?: string },
env: NodeJS.ProcessEnv = process.env,
): StreamFn {
return createAnthropicVertexStreamFn(
resolveAnthropicVertexProjectId(env),
resolveAnthropicVertexClientRegion({
baseUrl: model.baseUrl,
env,
}),
resolveAnthropicVertexSdkBaseUrl(model.baseUrl),
);
return loadAnthropicVertexStreamFacade().createAnthropicVertexStreamFnForModel(model, env);
}