From 7b15df0dd0e30a23d6c9058c2f099b44f18d9f81 Mon Sep 17 00:00:00 2001 From: Patrick Nikoletich Date: Thu, 29 Jan 2026 08:23:57 -0800 Subject: [PATCH 1/2] Add provider info to docs --- go/README.md | 70 +++++++++++++++++++++++++++++++++++++++++++++++- nodejs/README.md | 62 +++++++++++++++++++++++++++++++++++++++++- 2 files changed, 130 insertions(+), 2 deletions(-) diff --git a/go/README.md b/go/README.md index ac6a5397..b6c85dcf 100644 --- a/go/README.md +++ b/go/README.md @@ -97,10 +97,21 @@ func main() { - `AutoRestart` (\*bool): Auto-restart on crash (default: true). Use `Bool(false)` to disable. - `Env` ([]string): Environment variables for CLI process (default: inherits from current process) +**SessionConfig:** + +- `Model` (string): Model to use ("gpt-5", "claude-sonnet-4.5", etc.). **Required when using custom provider.** +- `SessionID` (string): Custom session ID +- `Tools` ([]Tool): Custom tools exposed to the CLI +- `SystemMessage` (\*SystemMessageConfig): System message configuration +- `Provider` (\*ProviderConfig): Custom API provider configuration (BYOK). See [Custom Providers](#custom-providers) section. +- `Streaming` (bool): Enable streaming delta events +- `InfiniteSessions` (\*InfiniteSessionConfig): Automatic context compaction configuration + **ResumeSessionConfig:** - `Tools` ([]Tool): Tools to expose when resuming -- `Provider` (\*ProviderConfig): Custom model provider configuration +- `Provider` (\*ProviderConfig): Custom API provider configuration (BYOK). See [Custom Providers](#custom-providers) section. +- `Streaming` (bool): Enable streaming delta events ### Session @@ -327,6 +338,63 @@ When enabled, sessions emit compaction events: - `session.compaction_start` - Background compaction started - `session.compaction_complete` - Compaction finished (includes token counts) +## Custom Providers + +The SDK supports custom OpenAI-compatible API providers (BYOK - Bring Your Own Key), including local providers like Ollama. When using a custom provider, you must specify the `Model` explicitly. 
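+
+In addition to the API-key examples below, `ProviderConfig` also supports bearer-token authentication and the `responses` wire format. A minimal sketch (the gateway endpoint and environment variable are hypothetical):
+
+```go
+// Sketch only: bearer-token auth against a hypothetical OpenAI-compatible gateway.
+session, err := client.CreateSession(&copilot.SessionConfig{
+	Model: "gpt-4", // Still required when using a custom provider
+	Provider: &copilot.ProviderConfig{
+		Type:        "openai",
+		BaseURL:     "https://my-gateway.example.com/v1", // hypothetical endpoint
+		BearerToken: os.Getenv("MY_GATEWAY_TOKEN"),       // takes precedence over APIKey
+		WireApi:     "responses",                         // assumes the gateway speaks the responses API
+	},
+})
+```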
+
+**ProviderConfig:**
+
+- `Type` (string): Provider type - "openai", "azure", or "anthropic" (default: "openai")
+- `BaseURL` (string): API endpoint URL (required)
+- `APIKey` (string): API key (optional for local providers like Ollama)
+- `BearerToken` (string): Bearer token for authentication (takes precedence over APIKey)
+- `WireApi` (string): API format for OpenAI/Azure - "completions" or "responses" (default: "completions")
+- `Azure.APIVersion` (string): Azure API version (default: "2024-10-21")
+
+**Example with Ollama:**
+
+```go
+session, err := client.CreateSession(&copilot.SessionConfig{
+	Model: "deepseek-coder-v2:16b", // Required when using custom provider
+	Provider: &copilot.ProviderConfig{
+		Type:    "openai",
+		BaseURL: "http://localhost:11434/v1", // Ollama endpoint
+		// APIKey not required for Ollama
+	},
+})
+```
+
+**Example with custom OpenAI-compatible API:**
+
+```go
+session, err := client.CreateSession(&copilot.SessionConfig{
+	Model: "gpt-4",
+	Provider: &copilot.ProviderConfig{
+		Type:    "openai",
+		BaseURL: "https://my-api.example.com/v1",
+		APIKey:  os.Getenv("MY_API_KEY"),
+	},
+})
+```
+
+**Example with Azure OpenAI:**
+
+```go
+session, err := client.CreateSession(&copilot.SessionConfig{
+	Model: "gpt-4",
+	Provider: &copilot.ProviderConfig{
+		Type:    "azure",
+		BaseURL: "https://my-resource.openai.azure.com",
+		APIKey:  os.Getenv("AZURE_OPENAI_KEY"),
+		Azure: &copilot.AzureProviderOptions{
+			APIVersion: "2024-10-21",
+		},
+	},
+})
+```
+
+> **Note:** When using a custom provider, the `Model` parameter is **required**. The SDK will return an error if no model is specified.
+
 ## Transport Modes
 
 ### stdio (Default)
diff --git a/nodejs/README.md b/nodejs/README.md
index bd4ef15b..4126e68d 100644
--- a/nodejs/README.md
+++ b/nodejs/README.md
@@ -86,10 +86,11 @@ Create a new conversation session.
 **Config:**
 
 - `sessionId?: string` - Custom session ID
-- `model?: string` - Model to use ("gpt-5", "claude-sonnet-4.5", etc.)
+- `model?: string` - Model to use ("gpt-5", "claude-sonnet-4.5", etc.). **Required when using custom provider.**
 - `tools?: Tool[]` - Custom tools exposed to the CLI
 - `systemMessage?: SystemMessageConfig` - System message customization (see below)
 - `infiniteSessions?: InfiniteSessionConfig` - Configure automatic context compaction (see below)
+- `provider?: ProviderConfig` - Custom API provider configuration (BYOK - Bring Your Own Key). See [Custom Providers](#custom-providers) section.
 
 ##### `resumeSession(sessionId: string, config?: ResumeSessionConfig): Promise<Session>`
 
@@ -407,6 +408,65 @@ await session.send({
 });
 ```
 
+### Custom Providers
+
+The SDK supports custom OpenAI-compatible API providers (BYOK - Bring Your Own Key), including local providers like Ollama. When using a custom provider, you must specify the `model` explicitly.
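+
+In addition to the API-key examples below, the provider block also supports bearer-token authentication and the `responses` wire format. A minimal sketch (the gateway endpoint and environment variable are hypothetical):
+
+```typescript
+// Sketch only: bearer-token auth against a hypothetical OpenAI-compatible gateway.
+const session = await client.createSession({
+  model: "gpt-4", // still required when using a custom provider
+  provider: {
+    type: "openai",
+    baseUrl: "https://my-gateway.example.com/v1", // hypothetical endpoint
+    bearerToken: process.env.MY_GATEWAY_TOKEN, // takes precedence over apiKey
+    wireApi: "responses", // assumes the gateway speaks the responses API
+  },
+});
+```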
+ +**ProviderConfig:** + +- `type?: "openai" | "azure" | "anthropic"` - Provider type (default: "openai") +- `baseUrl: string` - API endpoint URL (required) +- `apiKey?: string` - API key (optional for local providers like Ollama) +- `bearerToken?: string` - Bearer token for authentication (takes precedence over apiKey) +- `wireApi?: "completions" | "responses"` - API format for OpenAI/Azure (default: "completions") +- `azure?.apiVersion?: string` - Azure API version (default: "2024-10-21") + +**Example with Ollama:** + +```typescript +const session = await client.createSession({ + model: "deepseek-coder-v2:16b", // Required when using custom provider + provider: { + type: "openai", + baseUrl: "http://localhost:11434/v1", // Ollama endpoint + // apiKey not required for Ollama + }, +}); + +await session.sendAndWait({ prompt: "Hello!" }); +``` + +**Example with custom OpenAI-compatible API:** + +```typescript +const session = await client.createSession({ + model: "gpt-4", + provider: { + type: "openai", + baseUrl: "https://my-api.example.com/v1", + apiKey: process.env.MY_API_KEY, + }, +}); +``` + +**Example with Azure OpenAI:** + +```typescript +const session = await client.createSession({ + model: "gpt-4", + provider: { + type: "azure", + baseUrl: "https://my-resource.openai.azure.com", + apiKey: process.env.AZURE_OPENAI_KEY, + azure: { + apiVersion: "2024-10-21", + }, + }, +}); +``` + +> **Note:** When using a custom provider, the `model` parameter is **required**. The SDK will throw an error if no model is specified. + ## Error Handling ```typescript From 848540969a4cd1f8eda2afa03c5811c57e47fdb1 Mon Sep 17 00:00:00 2001 From: Patrick Nikoletich Date: Thu, 29 Jan 2026 08:44:55 -0800 Subject: [PATCH 2/2] docs: add custom provider documentation across all SDKs - Add Custom Providers section to Node.js, Go, and Python READMEs - Document ProviderConfig options with examples for Ollama, OpenAI-compatible APIs, and Azure OpenAI - Add SessionConfig documentation with provider option - Highlight important notes: - Model is required when using custom providers - Azure endpoints require type 'azure', not 'openai' - Base URL should not include /openai/v1 path --- go/README.md | 9 ++++-- nodejs/README.md | 9 ++++-- python/README.md | 76 ++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 88 insertions(+), 6 deletions(-) diff --git a/go/README.md b/go/README.md index b6c85dcf..15f33881 100644 --- a/go/README.md +++ b/go/README.md @@ -383,8 +383,8 @@ session, err := client.CreateSession(&copilot.SessionConfig{ session, err := client.CreateSession(&copilot.SessionConfig{ Model: "gpt-4", Provider: &copilot.ProviderConfig{ - Type: "azure", - BaseURL: "https://my-resource.openai.azure.com", + Type: "azure", // Must be "azure" for Azure endpoints, NOT "openai" + BaseURL: "https://my-resource.openai.azure.com", // Just the host, no path APIKey: os.Getenv("AZURE_OPENAI_KEY"), Azure: &copilot.AzureProviderOptions{ APIVersion: "2024-10-21", @@ -393,7 +393,10 @@ session, err := client.CreateSession(&copilot.SessionConfig{ }) ``` -> **Note:** When using a custom provider, the `Model` parameter is **required**. The SDK will return an error if no model is specified. +> **Important notes:** +> - When using a custom provider, the `Model` parameter is **required**. The SDK will return an error if no model is specified. +> - For Azure OpenAI endpoints (`*.openai.azure.com`), you **must** use `Type: "azure"`, not `Type: "openai"`. 
+> - The `BaseURL` should be just the host (e.g., `https://my-resource.openai.azure.com`). Do **not** include `/openai/v1` in the URL - the SDK handles path construction automatically.
 
 ## Transport Modes
 
diff --git a/nodejs/README.md b/nodejs/README.md
index 4126e68d..fbd730e5 100644
--- a/nodejs/README.md
+++ b/nodejs/README.md
@@ -455,8 +455,8 @@ const session = await client.createSession({
 const session = await client.createSession({
   model: "gpt-4",
   provider: {
-    type: "azure",
-    baseUrl: "https://my-resource.openai.azure.com",
+    type: "azure", // Must be "azure" for Azure endpoints, NOT "openai"
+    baseUrl: "https://my-resource.openai.azure.com", // Just the host, no path
     apiKey: process.env.AZURE_OPENAI_KEY,
     azure: {
       apiVersion: "2024-10-21",
@@ -465,7 +465,10 @@ const session = await client.createSession({
   },
 });
 ```
 
-> **Note:** When using a custom provider, the `model` parameter is **required**. The SDK will throw an error if no model is specified.
+> **Important notes:**
+> - When using a custom provider, the `model` parameter is **required**. The SDK will throw an error if no model is specified.
+> - For Azure OpenAI endpoints (`*.openai.azure.com`), you **must** use `type: "azure"`, not `type: "openai"`.
+> - The `baseUrl` should be just the host (e.g., `https://my-resource.openai.azure.com`). Do **not** include `/openai/v1` in the URL - the SDK handles path construction automatically.
 
 ## Error Handling
 
diff --git a/python/README.md b/python/README.md
index fefc1e0f..ab324508 100644
--- a/python/README.md
+++ b/python/README.md
@@ -96,6 +96,16 @@ await client.stop()
 - `auto_start` (bool): Auto-start server on first use (default: True)
 - `auto_restart` (bool): Auto-restart on crash (default: True)
 
+**SessionConfig Options (for `create_session`):**
+
+- `model` (str): Model to use ("gpt-5", "claude-sonnet-4.5", etc.). **Required when using custom provider.**
+- `session_id` (str): Custom session ID
+- `tools` (list): Custom tools exposed to the CLI
+- `system_message` (dict): System message configuration
+- `streaming` (bool): Enable streaming delta events
+- `provider` (dict): Custom API provider configuration (BYOK). See [Custom Providers](#custom-providers) section.
+- `infinite_sessions` (dict): Automatic context compaction configuration
+
 ### Tools
 
 Define tools with automatic JSON schema generation using the `@define_tool` decorator and Pydantic models:
@@ -273,6 +283,72 @@ When enabled, sessions emit compaction events:
 - `session.compaction_start` - Background compaction started
 - `session.compaction_complete` - Compaction finished (includes token counts)
 
+## Custom Providers
+
+The SDK supports custom OpenAI-compatible API providers (BYOK - Bring Your Own Key), including local providers like Ollama. When using a custom provider, you must specify the `model` explicitly.
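+
+In addition to the API-key examples below, the provider block also supports bearer-token authentication and the `responses` wire format. A minimal sketch (the gateway endpoint and environment variable are hypothetical):
+
+```python
+import os
+
+# Sketch only: bearer-token auth against a hypothetical OpenAI-compatible gateway.
+session = await client.create_session({
+    "model": "gpt-4",  # still required when using a custom provider
+    "provider": {
+        "type": "openai",
+        "base_url": "https://my-gateway.example.com/v1",  # hypothetical endpoint
+        "bearer_token": os.environ["MY_GATEWAY_TOKEN"],  # takes precedence over api_key
+        "wire_api": "responses",  # assumes the gateway speaks the responses API
+    },
+})
+```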
+ +**ProviderConfig fields:** + +- `type` (str): Provider type - `"openai"`, `"azure"`, or `"anthropic"` (default: `"openai"`) +- `base_url` (str): API endpoint URL (required) +- `api_key` (str): API key (optional for local providers like Ollama) +- `bearer_token` (str): Bearer token for authentication (takes precedence over `api_key`) +- `wire_api` (str): API format for OpenAI/Azure - `"completions"` or `"responses"` (default: `"completions"`) +- `azure` (dict): Azure-specific options with `api_version` (default: `"2024-10-21"`) + +**Example with Ollama:** + +```python +session = await client.create_session({ + "model": "deepseek-coder-v2:16b", # Required when using custom provider + "provider": { + "type": "openai", + "base_url": "http://localhost:11434/v1", # Ollama endpoint + # api_key not required for Ollama + }, +}) + +await session.send({"prompt": "Hello!"}) +``` + +**Example with custom OpenAI-compatible API:** + +```python +import os + +session = await client.create_session({ + "model": "gpt-4", + "provider": { + "type": "openai", + "base_url": "https://my-api.example.com/v1", + "api_key": os.environ["MY_API_KEY"], + }, +}) +``` + +**Example with Azure OpenAI:** + +```python +import os + +session = await client.create_session({ + "model": "gpt-4", + "provider": { + "type": "azure", # Must be "azure" for Azure endpoints, NOT "openai" + "base_url": "https://my-resource.openai.azure.com", # Just the host, no path + "api_key": os.environ["AZURE_OPENAI_KEY"], + "azure": { + "api_version": "2024-10-21", + }, + }, +}) +``` + +> **Important notes:** +> - When using a custom provider, the `model` parameter is **required**. The SDK will throw an error if no model is specified. +> - For Azure OpenAI endpoints (`*.openai.azure.com`), you **must** use `type: "azure"`, not `type: "openai"`. +> - The `base_url` should be just the host (e.g., `https://my-resource.openai.azure.com`). Do **not** include `/openai/v1` in the URL - the SDK handles path construction automatically. + ## Requirements - Python 3.9+