feat: add LLM Gateway provider (#7847)
Some checks failed
containers / build (push) Waiting to run
deploy / deploy (push) Waiting to run
generate / generate (push) Waiting to run
nix-eval / nix-eval (push) Waiting to run
nix-hashes / compute-hash (blacksmith-4vcpu-ubuntu-2404, x86_64-linux) (push) Waiting to run
nix-hashes / compute-hash (blacksmith-4vcpu-ubuntu-2404-arm, aarch64-linux) (push) Waiting to run
nix-hashes / compute-hash (macos-15-intel, x86_64-darwin) (push) Waiting to run
nix-hashes / compute-hash (macos-latest, aarch64-darwin) (push) Waiting to run
nix-hashes / update-hashes (push) Blocked by required conditions
publish / version (push) Waiting to run
publish / build-cli (push) Blocked by required conditions
publish / sign-cli-windows (push) Blocked by required conditions
publish / build-tauri (map[host:blacksmith-4vcpu-ubuntu-2404 target:x86_64-unknown-linux-gnu]) (push) Blocked by required conditions
publish / build-tauri (map[host:blacksmith-4vcpu-windows-2025 target:x86_64-pc-windows-msvc]) (push) Blocked by required conditions
publish / build-tauri (map[host:blacksmith-8vcpu-ubuntu-2404-arm target:aarch64-unknown-linux-gnu]) (push) Blocked by required conditions
publish / build-tauri (map[host:macos-latest target:aarch64-apple-darwin]) (push) Blocked by required conditions
publish / build-tauri (map[host:macos-latest target:x86_64-apple-darwin]) (push) Blocked by required conditions
publish / build-tauri (map[host:windows-2025 target:aarch64-pc-windows-msvc]) (push) Blocked by required conditions
publish / build-electron (map[host:blacksmith-4vcpu-ubuntu-2404 platform_flag:--linux target:aarch64-unknown-linux-gnu]) (push) Blocked by required conditions
publish / build-electron (map[host:blacksmith-4vcpu-ubuntu-2404 platform_flag:--linux target:x86_64-unknown-linux-gnu]) (push) Blocked by required conditions
publish / build-electron (map[host:blacksmith-4vcpu-windows-2025 platform_flag:--win target:x86_64-pc-windows-msvc]) (push) Blocked by required conditions
publish / build-electron (map[host:macos-latest platform_flag:--mac --arm64 target:aarch64-apple-darwin]) (push) Blocked by required conditions
publish / build-electron (map[host:macos-latest platform_flag:--mac --x64 target:x86_64-apple-darwin]) (push) Blocked by required conditions
publish / build-electron (map[host:windows-2025 platform_flag:--win --arm64 target:aarch64-pc-windows-msvc]) (push) Blocked by required conditions
publish / publish (push) Blocked by required conditions
storybook / storybook build (push) Waiting to run
test / unit (linux) (push) Waiting to run
test / unit (windows) (push) Waiting to run
test / e2e (linux) (push) Waiting to run
test / e2e (windows) (push) Waiting to run
typecheck / typecheck (push) Waiting to run
docs-locale-sync / sync-locales (push) Has been cancelled

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Co-authored-by: Aiden Cline <63023139+rekram1-node@users.noreply.github.com>
Co-authored-by: Aiden Cline <aidenpcline@gmail.com>
This commit is contained in:
Ismail Ghallou 2026-04-17 17:29:31 +02:00 committed by GitHub
parent 3a4b49095c
commit 3fe602cda3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 83 additions and 2 deletions

View file

@ -390,6 +390,17 @@ function custom(dep: CustomDep): Record<string, CustomLoader> {
}, },
} }
}), }),
// Provider loader for LLM Gateway. Autoload is disabled (models are fetched
// on demand rather than eagerly), and every request carries attribution
// headers identifying opencode as the source — the same shape as the
// openrouter loader that follows. NOTE(review): header names assumed to
// match what LLM Gateway expects — confirm against their API docs.
llmgateway: () =>
Effect.succeed({
autoload: false,
options: {
headers: {
"HTTP-Referer": "https://opencode.ai/",
"X-Title": "opencode",
"X-Source": "opencode",
},
},
}),
openrouter: () => openrouter: () =>
Effect.succeed({ Effect.succeed({
autoload: false, autoload: false,

View file

@ -807,7 +807,7 @@ export function options(input: {
result["promptCacheKey"] = input.sessionID result["promptCacheKey"] = input.sessionID
} }
if (input.model.api.npm === "@openrouter/ai-sdk-provider") { if (input.model.api.npm === "@openrouter/ai-sdk-provider" || input.model.api.npm === "@llmgateway/ai-sdk-provider") {
result["usage"] = { result["usage"] = {
include: true, include: true,
} }
@ -944,7 +944,7 @@ export function smallOptions(model: Provider.Model) {
} }
return { thinkingConfig: { thinkingBudget: 0 } } return { thinkingConfig: { thinkingBudget: 0 } }
} }
if (model.providerID === "openrouter") { if (model.providerID === "openrouter" || model.providerID === "llmgateway") {
if (model.api.id.includes("google")) { if (model.api.id.includes("google")) {
return { reasoning: { enabled: false } } return { reasoning: { enabled: false } }
} }

View file

@ -62,6 +62,7 @@ delete process.env["AWS_PROFILE"]
delete process.env["AWS_REGION"] delete process.env["AWS_REGION"]
delete process.env["AWS_BEARER_TOKEN_BEDROCK"] delete process.env["AWS_BEARER_TOKEN_BEDROCK"]
delete process.env["OPENROUTER_API_KEY"] delete process.env["OPENROUTER_API_KEY"]
delete process.env["LLM_GATEWAY_API_KEY"]
delete process.env["GROQ_API_KEY"] delete process.env["GROQ_API_KEY"]
delete process.env["MISTRAL_API_KEY"] delete process.env["MISTRAL_API_KEY"]
delete process.env["PERPLEXITY_API_KEY"] delete process.env["PERPLEXITY_API_KEY"]

View file

@ -32,6 +32,7 @@ export const iconNames = [
"perplexity", "perplexity",
"ovhcloud", "ovhcloud",
"openrouter", "openrouter",
"llmgateway",
"opencode", "opencode",
"opencode-go", "opencode-go",
"openai", "openai",

View file

@ -1577,6 +1577,74 @@ OpenCode Zen is a list of tested and verified models provided by the OpenCode te
--- ---
### LLM Gateway
1. Head over to the [LLM Gateway dashboard](https://llmgateway.io/dashboard), click **Create API Key**, and copy the key.
2. Run the `/connect` command and search for LLM Gateway.
```txt
/connect
```
3. Enter the API key for the provider.
```txt
┌ API key
└ enter
```
4. Many LLM Gateway models are preloaded by default; run the `/models` command to select the one you want.
```txt
/models
```
You can also add more models through your opencode config.
```json title="opencode.json" {6}
{
"$schema": "https://opencode.ai/config.json",
"provider": {
"llmgateway": {
"models": {
"somecoolnewmodel": {}
}
}
}
}
```
5. You can also customize these models through your opencode config. Here's an example of setting custom display names for several models:
```json title="opencode.json"
{
"$schema": "https://opencode.ai/config.json",
"provider": {
"llmgateway": {
"models": {
"glm-4.7": {
"name": "GLM 4.7"
},
"gpt-5.2": {
"name": "GPT-5.2"
},
"gemini-2.5-pro": {
"name": "Gemini 2.5 Pro"
},
"claude-3-5-sonnet-20241022": {
"name": "Claude 3.5 Sonnet"
}
}
}
}
}
```
---
### SAP AI Core ### SAP AI Core
SAP AI Core provides access to 40+ models from OpenAI, Anthropic, Google, Amazon, Meta, Mistral, and AI21 through a unified platform. SAP AI Core provides access to 40+ models from OpenAI, Anthropic, Google, Amazon, Meta, Mistral, and AI21 through a unified platform.