Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions bun.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion packages/app/src/hooks/use-providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { base64Decode } from "@opencode-ai/util/encode"
import { useParams } from "@solidjs/router"
import { createMemo } from "solid-js"

export const popularProviders = ["opencode", "anthropic", "github-copilot", "openai", "google", "openrouter", "vercel"]
export const popularProviders = ["opencode", "anthropic", "github-copilot", "openai", "google", "openrouter", "vercel", "llmgateway"]

export function useProviders() {
const globalSync = useGlobalSync()
Expand Down
1 change: 1 addition & 0 deletions packages/opencode/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@
"@clack/prompts": "1.0.0-alpha.1",
"@hono/standard-validator": "0.1.5",
"@hono/zod-validator": "catalog:",
"@llmgateway/ai-sdk-provider": "2.5.1",
"@modelcontextprotocol/sdk": "1.25.2",
"@octokit/graphql": "9.0.2",
"@octokit/rest": "catalog:",
Expand Down
1 change: 1 addition & 0 deletions packages/opencode/src/cli/cmd/auth.ts
Original file line number Diff line number Diff line change
Expand Up @@ -276,6 +276,7 @@ export const AuthLoginCommand = cmd({
google: 4,
openrouter: 5,
vercel: 6,
llmgateway: 7,
}
let provider = await prompts.autocomplete({
message: "Select provider",
Expand Down
39 changes: 37 additions & 2 deletions packages/opencode/src/provider/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,41 @@ export namespace ModelsDev {
const log = Log.create({ service: "models.dev" })
const filepath = path.join(Global.Path.cache, "models.json")

// Built-in provider entries that should always be available in OpenCode's provider directory
// (even if models.dev doesn't list them). Overlaid onto the models.dev payload by
// overlayBuiltins(); entries already present in the remote data take precedence.
const BUILTIN_PROVIDERS: Record<string, Provider> = {
  llmgateway: {
    id: "llmgateway",
    name: "LLM Gateway",
    api: "https://api.llmgateway.io/v1",
    npm: "@llmgateway/ai-sdk-provider",
    // Two accepted spellings of the API-key env var; either one authenticates.
    env: ["LLM_GATEWAY_API_KEY", "LLMGATEWAY_API_KEY"],
    // Keep at least one model so the UI can select a default model for the provider.
    models: {
      "glm-4.7": {
        id: "glm-4.7",
        name: "GLM-4.7",
        // NOTE(review): release_date, context/output limits, and capability flags below
        // are hard-coded metadata — confirm against the upstream model listing.
        release_date: "2025-12-22",
        attachment: false,
        reasoning: true,
        temperature: false,
        tool_call: true,
        interleaved: true,
        modalities: { input: ["text"], output: ["text"] },
        limit: { context: 200000, output: 128000 },
        options: {},
      },
    },
  },
}

/**
 * Merge the built-in provider entries into `providers`, mutating it in place.
 * Existing entries always win — built-ins never override models.dev data.
 * Returns the same object for convenient chaining at the call sites.
 */
function overlayBuiltins(providers: Record<string, Provider>) {
  Object.entries(BUILTIN_PROVIDERS).forEach(([id, builtin]) => {
    providers[id] ??= builtin
  })
  return providers
}

export const Model = z.object({
id: z.string(),
name: z.string(),
Expand Down Expand Up @@ -79,9 +114,9 @@ export namespace ModelsDev {
refresh()
const file = Bun.file(filepath)
const result = await file.json().catch(() => {})
if (result) return result as Record<string, Provider>
if (result) return overlayBuiltins(result as Record<string, Provider>)
const json = await data()
return JSON.parse(json) as Record<string, Provider>
return overlayBuiltins(JSON.parse(json) as Record<string, Provider>)
}

export async function refresh() {
Expand Down
30 changes: 30 additions & 0 deletions packages/opencode/src/provider/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,11 +35,29 @@ import { createGateway } from "@ai-sdk/gateway"
import { createTogetherAI } from "@ai-sdk/togetherai"
import { createPerplexity } from "@ai-sdk/perplexity"
import { createVercel } from "@ai-sdk/vercel"
import { createLLMGateway } from "@llmgateway/ai-sdk-provider"
import { ProviderTransform } from "./transform"

export namespace Provider {
const log = Log.create({ service: "provider" })

// @llmgateway/ai-sdk-provider's return type doesn't currently satisfy `ai`'s Provider interface,
// even on versions that peer with ai@5. OpenCode does use image/embedding models in some flows,
// so we provide those via the OpenAI-compatible provider as a fallback, while keeping LLM Gateway
// for language models.
//
// NOTE(review): spreading two provider instances produces a plain object — if either factory
// returns a callable function (common for AI SDK providers), the merged result loses its call
// signature. Confirm that all call sites go through `.languageModel()` etc. and never invoke
// the provider directly, or this `as SDK` cast hides a runtime TypeError.
const createLLMGatewayAdapter: (options: any) => SDK = (options) => {
  const llmgw: any = createLLMGateway(options)
  const compat: any = createOpenAICompatible(options)
  return {
    // Prefer LLM Gateway for language models (routing/headers), but keep compat defaults.
    // Spread order matters: llmgw's own enumerable properties override compat's.
    ...compat,
    ...llmgw,
    // Ensure required Provider surface exists.
    imageModel: llmgw.imageModel ?? compat.imageModel,
    textEmbeddingModel: llmgw.textEmbeddingModel ?? compat.textEmbeddingModel,
  } as SDK
}

const BUNDLED_PROVIDERS: Record<string, (options: any) => SDK> = {
"@ai-sdk/amazon-bedrock": createAmazonBedrock,
"@ai-sdk/anthropic": createAnthropic,
Expand All @@ -50,6 +68,7 @@ export namespace Provider {
"@ai-sdk/openai": createOpenAI,
"@ai-sdk/openai-compatible": createOpenAICompatible,
"@openrouter/ai-sdk-provider": createOpenRouter,
"@llmgateway/ai-sdk-provider": createLLMGatewayAdapter,
"@ai-sdk/xai": createXai,
"@ai-sdk/mistral": createMistral,
"@ai-sdk/groq": createGroq,
Expand Down Expand Up @@ -311,6 +330,17 @@ export namespace Provider {
},
}
},
// Attribution headers sent with every LLM Gateway request; autoload stays off so the
// provider is only activated once the user explicitly configures/authenticates it.
llmgateway: async () => ({
  autoload: false,
  options: {
    headers: {
      "HTTP-Referer": "https://opencode.ai/",
      "X-Title": "opencode",
    },
  },
}),
vercel: async () => {
return {
autoload: false,
Expand Down
8 changes: 6 additions & 2 deletions packages/opencode/src/provider/transform.ts
Original file line number Diff line number Diff line change
Expand Up @@ -441,7 +441,7 @@ export namespace ProviderTransform {
): Record<string, any> {
const result: Record<string, any> = {}

if (model.api.npm === "@openrouter/ai-sdk-provider") {
if (model.api.npm === "@openrouter/ai-sdk-provider" || model.api.npm === "@llmgateway/ai-sdk-provider") {
result["usage"] = {
include: true,
}
Expand Down Expand Up @@ -506,7 +506,7 @@ export namespace ProviderTransform {
}
return { thinkingConfig: { thinkingBudget: 0 } }
}
if (model.providerID === "openrouter") {
if (model.providerID === "openrouter" || model.providerID === "llmgateway") {
if (model.api.id.includes("google")) {
return { reasoning: { enabled: false } }
}
Expand Down Expand Up @@ -544,6 +544,10 @@ export namespace ProviderTransform {
return {
["openrouter" as string]: options,
}
case "@llmgateway/ai-sdk-provider":
return {
["llmgateway" as string]: options,
}
default:
return {
[model.providerID]: options,
Expand Down
1 change: 1 addition & 0 deletions packages/opencode/test/preload.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ delete process.env["AWS_PROFILE"]
delete process.env["AWS_REGION"]
delete process.env["AWS_BEARER_TOKEN_BEDROCK"]
delete process.env["OPENROUTER_API_KEY"]
delete process.env["LLM_GATEWAY_API_KEY"]
delete process.env["GROQ_API_KEY"]
delete process.env["MISTRAL_API_KEY"]
delete process.env["PERPLEXITY_API_KEY"]
Expand Down
1 change: 1 addition & 0 deletions packages/ui/src/components/provider-icons/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ export const iconNames = [
"perplexity",
"ovhcloud",
"openrouter",
"llmgateway",
"opencode",
"openai",
"ollama-cloud",
Expand Down
68 changes: 68 additions & 0 deletions packages/web/src/content/docs/providers.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -1195,6 +1195,74 @@ OpenCode Zen is a list of tested and verified models provided by the OpenCode te

---

### LLM Gateway

1. Head over to the [LLM Gateway dashboard](https://llmgateway.io/dashboard), click **Create API Key**, and copy the key.

2. Run the `/connect` command and search for LLM Gateway.

```txt
/connect
```

3. Enter the API key for the provider.

```txt
┌ API key
└ enter
```

4. Many LLM Gateway models are preloaded by default. Run the `/models` command to select the one you want.

```txt
/models
```

You can also add additional models through your opencode config.

```json title="opencode.json" {6}
{
"$schema": "https://opencode.ai/config.json",
"provider": {
"llmgateway": {
"models": {
"somecoolnewmodel": {}
}
}
}
}
```

5. You can also customize these models through your opencode config. Here's an example that sets display names for several LLM Gateway models:

```json title="opencode.json"
{
"$schema": "https://opencode.ai/config.json",
"provider": {
"llmgateway": {
"models": {
"glm-4.7": {
"name": "GLM 4.7"
},
"gpt-5.2": {
"name": "GPT-5.2"
},
"google/gemini-2.5-pro": {
"name": "Gemini 2.5 Pro"
},
"anthropic/claude-3-5-sonnet-20241022": {
"name": "Claude 3.5 Sonnet"
}
}
}
}
}
```

---

### SAP AI Core

SAP AI Core provides access to 40+ models from OpenAI, Anthropic, Google, Amazon, Meta, Mistral, and AI21 through a unified platform.
Expand Down
1 change: 1 addition & 0 deletions packages/web/src/content/docs/troubleshooting.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ Examples:

- `openai/gpt-4.1`
- `openrouter/google/gemini-2.5-flash`
- `llmgateway/glm-4.7`
- `opencode/kimi-k2`

To figure out what models you have access to, run `opencode models`
Expand Down
2 changes: 1 addition & 1 deletion packages/web/src/content/docs/zen.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ configured very differently; so you get very different performance and quality.
We tested a select group of models and providers that work well with OpenCode.
:::

So if you are using a model through something like OpenRouter, you can never be
So if you are using a model through something like OpenRouter or LLM Gateway, you can never be
sure if you are getting the best version of the model you want.

To fix this, we did a couple of things:
Expand Down