Documentation Index
Fetch the complete documentation index at: https://www.getmaxim.ai/docs/llms.txt
Use this file to discover all available pages before exploring further.
30-Second Setup
1. Install Package
go mod init my-bifrost-app
go get github.com/maximhq/bifrost/core
2. Set Environment Variable
export OPENAI_API_KEY="your-openai-api-key"
3. Create main.go
package main
import (
"context"
"fmt"
"os"
bifrost "github.com/maximhq/bifrost/core"
"github.com/maximhq/bifrost/core/schemas"
)
// MyAccount is a simple account implementation for this quick-start.
// Its methods below tell Bifrost which providers are configured, which
// API keys to use for each, and what per-provider config to apply.
type MyAccount struct{}
// GetConfiguredProviders reports the providers this account can serve.
// Only OpenAI is enabled in this quick-start example.
func (a *MyAccount) GetConfiguredProviders() ([]schemas.ModelProvider, error) {
	configured := []schemas.ModelProvider{schemas.OpenAI}
	return configured, nil
}
// GetKeysForProvider returns the API keys configured for the given
// provider, or an error for providers this account does not support.
// Only OpenAI is supported here; the key is read from the
// OPENAI_API_KEY environment variable.
//
// A switch is used so this matches the multi-provider variant shown
// later in this guide — adding a provider is just another case.
func (a *MyAccount) GetKeysForProvider(provider schemas.ModelProvider) ([]schemas.Key, error) {
	switch provider {
	case schemas.OpenAI:
		return []schemas.Key{{
			Value:  os.Getenv("OPENAI_API_KEY"),
			Models: []string{"gpt-4o-mini"},
			Weight: 1.0,
		}}, nil
	}
	return nil, fmt.Errorf("provider %s not supported", provider)
}
// GetConfigForProvider returns the provider-level configuration for the
// given provider, or an error for providers this account does not support.
func (a *MyAccount) GetConfigForProvider(provider schemas.ModelProvider) (*schemas.ProviderConfig, error) {
	if provider != schemas.OpenAI {
		return nil, fmt.Errorf("provider %s not supported", provider)
	}
	// Default network and concurrency settings; customize these for
	// advanced use cases.
	return &schemas.ProviderConfig{
		NetworkConfig:            schemas.DefaultNetworkConfig,
		ConcurrencyAndBufferSize: schemas.DefaultConcurrencyAndBufferSize,
	}, nil
}
// main initializes Bifrost with the account above, sends one chat
// completion request to OpenAI, and prints the response text.
func main() {
	// Initialize Bifrost with our account implementation.
	client, initErr := bifrost.Init(schemas.BifrostConfig{
		Account: &MyAccount{},
	})
	if initErr != nil {
		panic(initErr)
	}
	defer client.Cleanup()

	// Build the conversation to send. Bug fix: the original referenced
	// an undefined `messages` variable.
	// NOTE(review): message/role type names vary between snippets in this
	// guide (schemas.Message/schemas.User vs schemas.BifrostMessage/
	// schemas.ModelChatMessageRoleUser) — confirm against the installed
	// core/schemas package.
	text := "Hello!"
	messages := []schemas.BifrostMessage{
		{
			Role:    schemas.ModelChatMessageRoleUser,
			Content: schemas.MessageContent{ContentStr: &text},
		},
	}

	// Make a chat completion request.
	response, bifrostErr := client.ChatCompletionRequest(context.Background(), &schemas.BifrostRequest{
		Provider: schemas.OpenAI, // Primary provider
		Model:    "gpt-4o-mini",
		Input: schemas.RequestInput{
			ChatCompletionInput: &messages,
		},
	})
	// Bug fix: the original assigned the error to bifrostErr but then
	// checked an undeclared `err`, which does not compile.
	if bifrostErr != nil {
		panic(bifrostErr)
	}

	// Print the first choice's text content, if present.
	if len(response.Choices) > 0 && response.Choices[0].Message.Content.ContentStr != nil {
		fmt.Println("AI Response:", *response.Choices[0].Message.Content.ContentStr)
	}
}
4. Run Your App: go run main.go
🎉 Success! You should see an AI response in your terminal.
Next Steps (5 minutes each)
Add Multiple Providers
# Add to environment
export ANTHROPIC_API_KEY="your-anthropic-key"
// Update GetConfiguredProviders
// GetConfiguredProviders reports the providers this account can serve:
// OpenAI plus Anthropic.
func (a *MyAccount) GetConfiguredProviders() ([]schemas.ModelProvider, error) {
	configured := []schemas.ModelProvider{
		schemas.OpenAI,
		schemas.Anthropic,
	}
	return configured, nil
}
// Update GetKeysForProvider to handle both providers
// GetKeysForProvider returns the API keys for OpenAI or Anthropic, read
// from the corresponding environment variables; any other provider
// yields an error.
func (a *MyAccount) GetKeysForProvider(provider schemas.ModelProvider) ([]schemas.Key, error) {
	switch provider {
	case schemas.OpenAI:
		openAIKey := schemas.Key{
			Value:  os.Getenv("OPENAI_API_KEY"),
			Models: []string{"gpt-4o-mini"},
			Weight: 1.0,
		}
		return []schemas.Key{openAIKey}, nil
	case schemas.Anthropic:
		anthropicKey := schemas.Key{
			Value:  os.Getenv("ANTHROPIC_API_KEY"),
			Models: []string{"claude-3-sonnet-20240229"},
			Weight: 1.0,
		}
		return []schemas.Key{anthropicKey}, nil
	default:
		return nil, fmt.Errorf("provider %s not supported", provider)
	}
}
// GetConfigForProvider now returns the default config for every provider (the earlier per-provider check is no longer needed)
// GetConfigForProvider returns default network and concurrency settings
// for every configured provider.
func (a *MyAccount) GetConfigForProvider(provider schemas.ModelProvider) (*schemas.ProviderConfig, error) {
	cfg := schemas.ProviderConfig{
		NetworkConfig:            schemas.DefaultNetworkConfig,
		ConcurrencyAndBufferSize: schemas.DefaultConcurrencyAndBufferSize,
	}
	return &cfg, nil
}
Add Automatic Fallbacks
// Request with fallback providers: if the primary provider fails,
// Bifrost automatically retries the same request against each fallback
// in order.
// Bug fix: the original snippet ended with a duplicated `})`, leaving an
// unbalanced closing delimiter.
// NOTE(review): this snippet uses schemas.ChatCompletionRequest while the
// quick-start above uses &schemas.BifrostRequest with an Input field —
// confirm which request shape the installed package expects.
response, err := client.ChatCompletionRequest(context.Background(), schemas.ChatCompletionRequest{
	Provider: schemas.OpenAI, // Primary provider
	Model:    "gpt-4o-mini",
	Messages: []schemas.Message{
		{Role: schemas.User, Content: schemas.Content{ContentStr: bifrost.Ptr("Hello!")}},
	},
	Params: &schemas.ModelParameters{
		MaxTokens: bifrost.Ptr(100),
	},
	Fallbacks: []schemas.Fallback{
		{Provider: schemas.Anthropic, Model: "claude-3-sonnet-20240229"},
	},
})
// Add tools to your request
// NOTE(review): this snippet uses schemas.ModelChatMessageRoleUser and
// schemas.MessageContent, while the fallback snippet above uses
// schemas.User and schemas.Content — confirm which names the installed
// core/schemas package exports.
response, err := client.ChatCompletionRequest(context.Background(), schemas.ChatCompletionRequest{
Provider: schemas.OpenAI,
Model: "gpt-4o-mini",
Messages: []schemas.Message{
{Role: schemas.ModelChatMessageRoleUser, Content: schemas.MessageContent{ContentStr: bifrost.Ptr("Which tool can I use to get the weather?")}},
},
// Each tool is declared with a name, description, and JSON-Schema-style
// parameter definition that the model uses to produce tool calls.
Tools: []schemas.Tool{
{
Type: "function",
Function: schemas.FunctionTool{
Name: "get_weather",
Description: "Get current weather information",
Parameters: map[string]interface{}{
"type": "object",
"properties": map[string]interface{}{
"location": map[string]interface{}{
"type": "string",
"description": "City name",
},
},
},
},
},
},
})
Learn More
| What You Want | Where to Go | Time |
|---|---|---|
| Complete setup guide | 📖 Go Package Usage | 10 min |
| Add all 8+ providers | 🔗 Providers | 5 min |
| Production configuration | 👤 Account Management | 15 min |
| Custom plugins | 🔌 Plugins | 20 min |
| MCP integration | 🛠️ MCP | 15 min |
| Full API reference | 📊 Schemas | Reference |
Prefer HTTP API?
If you want to use Bifrost from Python, Node.js, or other languages, try the
HTTP Transport Quick Start instead.
Why Go Package?
- ✅ Type safety - Compile-time validation
- ✅ Performance - No HTTP overhead
- ✅ Custom logic - Full programmatic control
- ✅ Advanced features - Complete plugin system access