Skip to main content

Overview

Goose supports two approaches to adding custom providers:
  1. Declarative Providers - JSON configuration files for OpenAI/Anthropic/Ollama-compatible APIs
  2. Rust Implementation - Full Provider trait implementation for custom logic
This guide focuses on declarative providers, which cover most use cases.

Declarative Providers

Declarative providers let you add OpenAI-, Anthropic-, or Ollama-compatible APIs without writing Rust code. They’re perfect for:
  • Commercial API providers (Together, Groq, Fireworks, etc.)
  • Self-hosted LLM servers (vLLM, text-generation-inference)
  • OpenRouter and other aggregator services
  • Custom proxy endpoints

Provider Engines

Choose the engine that matches your API format:
  • openai - OpenAI Chat Completions API format
  • anthropic - Anthropic Messages API format
  • ollama - Ollama API format
Most providers use the OpenAI format.

Configuration Structure

{
  "name": "custom_provider_id",
  "engine": "openai",
  "display_name": "My Custom Provider",
  "description": "Description of the provider",
  "api_key_env": "CUSTOM_PROVIDER_API_KEY",
  "base_url": "https://api.example.com",
  "models": [
    {
      "name": "model-name-1",
      "context_limit": 128000
    }
  ],
  "requires_auth": true,
  "supports_streaming": true,
  "timeout_seconds": 600,
  "headers": {
    "X-Custom-Header": "value"
  },
  "base_path": "v1/chat/completions"
}

Configuration Fields

name
string
required
Unique identifier for the provider (when created programmatically, this is auto-generated from display_name)
engine
string
required
API format: openai, anthropic, or ollama
display_name
string
required
Human-readable name shown in UIs
description
string
Optional description of the provider
api_key_env
string
Environment variable name for the API key (required if requires_auth is true)
base_url
string
required
Base URL of the API endpoint
models
array
required
List of available models with their context limits
requires_auth
boolean
default: true
Whether the provider requires authentication
supports_streaming
boolean
default: true
Whether the provider supports streaming responses
timeout_seconds
number
default: 600
Request timeout in seconds
headers
object
Optional custom headers to include in requests
base_path
string
Optional API path (defaults based on engine)

Creating Custom Providers

Method 1: Configuration File

Create a JSON file in ~/.config/goose/custom_providers/:
mkdir -p ~/.config/goose/custom_providers
cat > ~/.config/goose/custom_providers/groq.json <<EOF
{
  "name": "custom_groq",
  "engine": "openai",
  "display_name": "Groq",
  "description": "Fast LLM inference from Groq",
  "api_key_env": "GROQ_API_KEY",
  "base_url": "https://api.groq.com/openai/v1",
  "models": [
    {
      "name": "llama-3.3-70b-versatile",
      "context_limit": 32768
    },
    {
      "name": "mixtral-8x7b-32768",
      "context_limit": 32768
    }
  ],
  "requires_auth": true,
  "supports_streaming": true
}
EOF

# Set the API key
export GROQ_API_KEY="your-api-key"

# Use the provider
goose session start --provider custom_groq --model llama-3.3-70b-versatile

Method 2: Programmatic Creation (Rust)

use goose::config::declarative_providers::{
    create_custom_provider,
    CreateCustomProviderParams,
};
use goose::providers::base::ModelInfo;

let params = CreateCustomProviderParams {
    engine: "openai_compatible".to_string(),
    display_name: "Groq".to_string(),
    api_url: "https://api.groq.com/openai/v1".to_string(),
    api_key: "your-api-key".to_string(),
    models: vec![
        "llama-3.3-70b-versatile".to_string(),
        "mixtral-8x7b-32768".to_string(),
    ],
    supports_streaming: Some(true),
    headers: None,
    requires_auth: true,
    catalog_provider_id: None,
    base_path: None,
};

let config = create_custom_provider(params)?;
The API key is automatically stored in the system keyring.

Example Providers

Together.ai

{
  "name": "custom_together",
  "engine": "openai",
  "display_name": "Together AI",
  "api_key_env": "TOGETHER_API_KEY",
  "base_url": "https://api.together.xyz",
  "models": [
    {
      "name": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
      "context_limit": 8192
    },
    {
      "name": "mistralai/Mixtral-8x7B-Instruct-v0.1",
      "context_limit": 32768
    }
  ],
  "base_path": "v1/chat/completions"
}

Fireworks.ai

{
  "name": "custom_fireworks",
  "engine": "openai",
  "display_name": "Fireworks AI",
  "api_key_env": "FIREWORKS_API_KEY",
  "base_url": "https://api.fireworks.ai",
  "models": [
    {
      "name": "accounts/fireworks/models/llama-v3p3-70b-instruct",
      "context_limit": 131072
    }
  ],
  "base_path": "inference/v1/chat/completions"
}

OpenRouter

{
  "name": "custom_openrouter",
  "engine": "openai",
  "display_name": "OpenRouter",
  "api_key_env": "OPENROUTER_API_KEY",
  "base_url": "https://openrouter.ai/api",
  "models": [
    {
      "name": "anthropic/claude-sonnet-4",
      "context_limit": 200000
    },
    {
      "name": "openai/gpt-4o",
      "context_limit": 128000
    }
  ],
  "base_path": "v1/chat/completions",
  "headers": {
    "HTTP-Referer": "https://github.com/block/goose",
    "X-Title": "Goose Agent"
  }
}

Self-Hosted vLLM

{
  "name": "custom_vllm",
  "engine": "openai",
  "display_name": "Local vLLM",
  "base_url": "http://localhost:8000",
  "models": [
    {
      "name": "meta-llama/Meta-Llama-3-70B-Instruct",
      "context_limit": 8192
    }
  ],
  "requires_auth": false,
  "base_path": "v1/chat/completions"
}

Anthropic-Compatible Provider

For providers using Anthropic’s API format:
{
  "name": "custom_anthropic_compatible",
  "engine": "anthropic",
  "display_name": "Anthropic-Compatible Provider",
  "api_key_env": "CUSTOM_API_KEY",
  "base_url": "https://api.example.com",
  "models": [
    {
      "name": "claude-compatible-model",
      "context_limit": 200000
    }
  ],
  "headers": {
    "anthropic-version": "2023-06-01"
  }
}

Managing Custom Providers

List Providers

# List all providers including custom ones
goose provider list
Or programmatically:
use goose::providers::providers;

let available_providers = providers();
for provider_name in available_providers {
    println!("Available: {}", provider_name);
}

Update Provider

use goose::config::declarative_providers::{
    update_custom_provider,
    UpdateCustomProviderParams,
};

let params = UpdateCustomProviderParams {
    id: "custom_groq".to_string(),
    engine: "openai_compatible".to_string(),
    display_name: "Groq (Updated)".to_string(),
    api_url: "https://api.groq.com/openai/v1".to_string(),
    api_key: "new-api-key".to_string(),
    models: vec!["llama-3.3-70b-versatile".to_string()],
    supports_streaming: Some(true),
    headers: None,
    requires_auth: true,
    catalog_provider_id: None,
    base_path: None,
};

update_custom_provider(params)?;

Delete Provider

rm ~/.config/goose/custom_providers/groq.json
Or programmatically:
use goose::config::declarative_providers::{
    delete_custom_provider,
    custom_providers_dir,
};

delete_custom_provider("custom_groq")?;

Refresh Providers

After adding/removing provider files:
use goose::providers::refresh_custom_providers;

refresh_custom_providers().await?;

Advanced Configuration

Custom Headers

Add provider-specific headers:
{
  "headers": {
    "X-API-Version": "2024-01",
    "X-Custom-Header": "value"
  }
}

Base Path Customization

Override the default API path:
{
  "engine": "openai",
  "base_url": "https://api.example.com",
  "base_path": "custom/v2/chat/completions"
}

No Authentication

For providers that don’t require auth:
{
  "requires_auth": false,
  "api_key_env": ""
}

Multiple Model Configurations

{
  "models": [
    {
      "name": "small-model",
      "context_limit": 4096,
      "input_token_cost": 0.0000005,
      "output_token_cost": 0.0000015
    },
    {
      "name": "large-model",
      "context_limit": 32768,
      "input_token_cost": 0.000003,
      "output_token_cost": 0.000015
    }
  ]
}

Implementing Provider Trait (Advanced)

For full control, implement the Provider trait:
use goose::providers::base::{Provider, ProviderDef, ProviderMetadata};
use async_trait::async_trait;

pub struct MyCustomProvider {
    api_client: ApiClient,
    model: ModelConfig,
}

#[async_trait]
impl Provider for MyCustomProvider {
    fn get_name(&self) -> &str {
        "my_custom"
    }
    
    async fn stream(
        &self,
        model_config: &ModelConfig,
        session_id: &str,
        system: &str,
        messages: &[Message],
        tools: &[Tool],
    ) -> Result<MessageStream, ProviderError> {
        // Custom implementation
        todo!()
    }
    
    fn get_model_config(&self) -> ModelConfig {
        self.model.clone()
    }
}

impl ProviderDef for MyCustomProvider {
    type Provider = Self;
    
    fn metadata() -> ProviderMetadata {
        ProviderMetadata::new(
            "my_custom",
            "My Custom Provider",
            "Custom provider implementation",
            "default-model",
            vec!["model-1"],
            "https://docs.example.com",
            vec![],
        )
    }
    
    fn from_env(
        model: ModelConfig,
        _extensions: Vec<ExtensionConfig>,
    ) -> BoxFuture<'static, Result<Self::Provider>> {
        Box::pin(async move {
            // Initialize from environment
            Ok(MyCustomProvider {
                api_client: /* ... */,
                model,
            })
        })
    }
}
Then register it:
use goose::providers::provider_registry::register_provider;

register_provider::<MyCustomProvider>();

Troubleshooting

Provider Not Found

Check that the JSON file is in the correct location:
ls ~/.config/goose/custom_providers/

Invalid Configuration

Validate your JSON:
jq . ~/.config/goose/custom_providers/your-provider.json

Authentication Errors

Verify the API key environment variable:
echo $YOUR_PROVIDER_API_KEY

Connection Errors

Test the API endpoint:
curl -H "Authorization: Bearer $API_KEY" \
  https://api.example.com/v1/models

See Also

Build docs developers (and LLMs) love