Learn how to integrate Maxim with LiteLLM for tracing and monitoring
litellm>=1.25.0
maxim-py>=3.5.0
.env
MAXIM_API_KEY=
MAXIM_LOG_REPO_ID=
OPENAI_API_KEY=
import os

import litellm
from maxim import Maxim, Config, LoggerConfig
from maxim.logger.litellm import MaximLiteLLMTracer

# Maxim() reads MAXIM_API_KEY and MAXIM_LOG_REPO_ID from the environment
# (see the .env template above). Config/LoggerConfig allow explicit
# configuration instead — NOTE(review): unused here; confirm against Maxim docs.
logger = Maxim().logger()

# One-line integration: add Maxim tracer to LiteLLM callbacks.
# Every subsequent LiteLLM completion is traced automatically.
litellm.callbacks = [MaximLiteLLMTracer(logger)]
import os from litellm import acompletion response = await acompletion( model='openai/gpt-4o', api_key=os.getenv('OPENAI_API_KEY'), messages=[{'role': 'user', 'content': 'Hello, world!'}], ) print(response.choices[0].message.content)
from maxim.logger.logger import TraceConfig import uuid trace = logger.trace(TraceConfig(id=str(uuid.uuid4()), name='litellm-generation')) trace.event(str(uuid.uuid4()), 'litellm-generation', 'litellm-generation', {}) # Attach trace to LiteLLM call using metadata response = await acompletion( model='openai/gpt-4o', api_key=os.getenv('OPENAI_API_KEY'), messages=[{'role': 'user', 'content': 'What can you do for me!'}], metadata={'maxim': {'trace_id': trace.id, 'span_name': 'litellm-generation'}} ) print(response.choices[0].message.content)
Was this page helpful?