package main

import (
	"context"
	"fmt"
	"os"

	bifrost "github.com/maximhq/bifrost/core"
	"github.com/maximhq/bifrost/core/schemas"
)

// MyAccount implements Bifrost's Account interface: it tells Bifrost which
// providers are available, which API keys to use, and how each provider is
// configured.
type MyAccount struct{}

// GetConfiguredProviders reports the providers this account can route to.
func (a *MyAccount) GetConfiguredProviders() ([]schemas.ModelProvider, error) {
	return []schemas.ModelProvider{schemas.OpenAI}, nil
}
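
// GetKeysForProvider returns the API key(s) for a provider along with the
// models each key may serve. Weight lets Bifrost balance traffic when
// multiple keys are configured for the same provider.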
func (a *MyAccount) GetKeysForProvider(provider schemas.ModelProvider) ([]schemas.Key, error) {
	if provider == schemas.OpenAI {
		return []schemas.Key{{
			Value:  os.Getenv("OPENAI_API_KEY"),
			Models: []string{"gpt-4o-mini"},
			Weight: 1.0,
		}}, nil
	}
	return nil, fmt.Errorf("provider %s not supported", provider)
}
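
// GetConfigForProvider supplies per-provider settings (network behavior,
// concurrency, and buffer sizing); the defaults are fine to start with.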
func (a *MyAccount) GetConfigForProvider(provider schemas.ModelProvider) (*schemas.ProviderConfig, error) {
	if provider == schemas.OpenAI {
		// Return default config (can be customized for advanced use cases)
		return &schemas.ProviderConfig{
			NetworkConfig:            schemas.DefaultNetworkConfig,
			ConcurrencyAndBufferSize: schemas.DefaultConcurrencyAndBufferSize,
		}, nil
	}
	return nil, fmt.Errorf("provider %s not supported", provider)
}
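
// main wires the account into Bifrost, sends one chat completion to OpenAI,
// and prints the model's reply.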
func main() {
	// Initialize Bifrost
	client, initErr := bifrost.Init(schemas.BifrostConfig{
		Account: &MyAccount{},
	})
	if initErr != nil {
		panic(initErr)
	}
	defer client.Cleanup()
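
	// Build the conversation to send. The message types used here
	// (schemas.BifrostMessage, schemas.ModelChatMessageRoleUser, and
	// schemas.MessageContent) are assumed from Bifrost's schemas package;
	// verify the names against the version you are using.
	prompt := "Hello! What can you do?"
	messages := []schemas.BifrostMessage{
		{
			Role: schemas.ModelChatMessageRoleUser,
			Content: schemas.MessageContent{
				ContentStr: &prompt,
			},
		},
	}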

	// Make a chat completion request
	response, bifrostErr := client.ChatCompletionRequest(context.Background(), &schemas.BifrostRequest{
		Provider: schemas.OpenAI, // Primary provider
		Model:    "gpt-4o-mini",
		Input: schemas.RequestInput{
			ChatCompletionInput: &messages,
		},
	})
	if bifrostErr != nil {
		panic(bifrostErr)
	}

	// Print response
	if len(response.Choices) > 0 && response.Choices[0].Message.Content.ContentStr != nil {
		fmt.Println("AI Response:", *response.Choices[0].Message.Content.ContentStr)
	}
}