Requirements

"openai>=1.65.4"
"maxim-py>=3.5.0"

Env variables

MAXIM_API_KEY=
MAXIM_LOG_REPO_ID=
OPENAI_API_KEY=
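
The snippets below use OPENAI_API_KEY as a Python variable. A minimal sketch for loading it from the environment (Maxim() itself reads MAXIM_API_KEY and MAXIM_LOG_REPO_ID from env variables):

import os

# OPENAI_API_KEY is passed to the OpenAI client explicitly below;
# Maxim() picks up MAXIM_API_KEY and MAXIM_LOG_REPO_ID on its own.
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]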

Initialize logger

from maxim import Maxim

# Reads MAXIM_API_KEY and MAXIM_LOG_REPO_ID from env variables
logger = Maxim().logger()

Initialize MaximOpenAIClient

from openai import OpenAI
from maxim.logger.openai import MaximOpenAIClient

client = MaximOpenAIClient(client=OpenAI(api_key=OPENAI_API_KEY), logger=logger)

Make LLM calls using MaximOpenAIClient

from openai import OpenAI
from maxim.logger.openai import MaximOpenAIClient

client = MaximOpenAIClient(client=OpenAI(api_key=OPENAI_API_KEY), logger=logger)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Write a haiku about recursion in programming."},
]

# Create a chat completion request
response = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=messages,        
)
# Extract response text and usage
response_text = response.choices[0].message.content
print(response_text)
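
Streaming is requested the same way as with the plain OpenAI client; the sketch below assumes MaximOpenAIClient forwards stream=True to the underlying client unchanged:

# Stream the completion and print tokens as they arrive
# (assumes the wrapper passes stream=True straight through)
stream = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=messages,
    stream=True,
)
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="", flush=True)
print()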

Advanced use cases

Capture multiple LLM calls in one trace

1. Initialize Maxim SDK and OpenAI Client

from openai import OpenAI
from maxim import Maxim
from maxim.logger.openai import MaximOpenAIClient

# Make sure MAXIM_API_KEY and MAXIM_LOG_REPO_ID are set in env variables
logger = Maxim().logger()

# Initialize MaximOpenAIClient
client = MaximOpenAIClient(client=OpenAI(api_key=OPENAI_API_KEY), logger=logger)

2. Create a new trace externally

from uuid import uuid4

trace_id = str(uuid4())

trace = logger.trace({
    "id": trace_id,
    "name": "Trace name"
})

3. Make LLM calls and use this trace id

response = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=messages,
    extra_headers={"x-maxim-trace-id": trace_id}
)

# Extract the response text and token usage
response_text = response.choices[0].message.content
print(response_text)
print(response.usage)

4. Keep adding LLM calls

All LLM calls that send the extra header x-maxim-trace-id: trace_id will be added to the declared trace.
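
For example, a follow-up request that reuses the same header is grouped under the same trace (the prompt here is just an illustration):

follow_up = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[
        {"role": "user", "content": "Explain that haiku in one sentence."},
    ],
    extra_headers={"x-maxim-trace-id": trace_id},
)
print(follow_up.choices[0].message.content)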

Capture multi-turn conversations

1. Initialize Maxim SDK and OpenAI Client

from openai import OpenAI
from maxim import Maxim
from maxim.logger.openai import MaximOpenAIClient

# Make sure MAXIM_API_KEY and MAXIM_LOG_REPO_ID are set in env variables
logger = Maxim().logger()

# Initialize MaximOpenAIClient
client = MaximOpenAIClient(client=OpenAI(api_key=OPENAI_API_KEY), logger=logger)

2. Create a new trace externally and add it to a session

from uuid import uuid4

# use this session id to add multiple traces in one session
session_id = str(uuid4())

trace_id = str(uuid4())

trace = logger.trace({
    "id": trace_id,
    "name": "Trace name",
    "session_id": session_id
})

3. Make LLM calls and use this trace id

response = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=messages,
    extra_headers={"x-maxim-trace-id": trace_id}
)

# Extract the response text and token usage
response_text = response.choices[0].message.content
print(response_text)
print(response.usage)
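
To log the next turn of the conversation, create another trace with the same session_id and point the follow-up call at it. A sketch, assuming messages holds the conversation so far and the message contents are illustrative:

# Second turn: a new trace in the same session
second_trace_id = str(uuid4())

logger.trace({
    "id": second_trace_id,
    "name": "Second turn",
    "session_id": session_id
})

# Append the previous answer and the next user message
messages.append({"role": "assistant", "content": response_text})
messages.append({"role": "user", "content": "Can you make it shorter?"})

response = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=messages,
    extra_headers={"x-maxim-trace-id": second_trace_id}
)
print(response.choices[0].message.content)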