View module source on GitHub

ConfigDict

class ConfigDict(TypedDict)

A class representing the configuration for the Maxim SDK.

Attributes:

| Name | Type | Description |
| --- | --- | --- |
| api_key | Optional[str], optional | The API key for the Maxim instance. Defaults to None. |
| base_url | Optional[str], optional | The base URL for the Maxim instance. Defaults to None. |
| cache | Optional[MaximCache], optional | The cache to use for the Maxim instance. Defaults to None. |
| debug | Optional[bool], optional | Whether to enable debug logging. Defaults to False. |
| raise_exceptions | Optional[bool], optional | Whether to raise exceptions during logging operations. Defaults to False. |
| prompt_management | Optional[bool], optional | Whether to enable prompt management. Defaults to False. |

Config

@deprecated(
    "This class will be removed in a future version. Use ConfigDict, which is a TypedDict."
)

@dataclass
class Config()

A class representing the configuration for the Maxim SDK.

Attributes:

| Name | Type | Description |
| --- | --- | --- |
| api_key | str | The API key for the Maxim instance. |
| base_url | Optional[str], optional | The base URL for the Maxim instance. Defaults to "https://app.getmaxim.ai". |
| cache | Optional[MaximCache], optional | The cache to use for the Maxim instance. Defaults to None. |
| debug | Optional[bool], optional | Whether to enable debug logging. Defaults to False. |
| raise_exceptions | Optional[bool], optional | Whether to raise exceptions during logging operations. Defaults to False. |
| prompt_management | Optional[bool], optional | Whether to enable prompt management. Defaults to False. |

get_config_dict

def get_config_dict(config: Union[Config, ConfigDict]) -> dict[str, Any]

Converts a Config or ConfigDict to a dictionary with default values.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| config | Union[Config, ConfigDict] | The configuration object to convert. |

Returns:

dict[str, Any]: A dictionary containing the configuration parameters with defaults applied.

Maxim

class Maxim()

__init__

def __init__(config: Union[Config, ConfigDict, None] = None)

Initializes a new instance of the Maxim class.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| config | Union[Config, ConfigDict, None], optional | The configuration for the Maxim instance. Defaults to None. |

enable_prompt_management

def enable_prompt_management(cache: Optional[MaximCache] = None) -> "Maxim"

Enables prompt management functionality with optional cache configuration.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| cache | Optional[MaximCache], optional | Custom cache implementation to use. |

Defaults to None (uses existing cache).

Returns:

| Name | Description |
| --- | --- |
| [Maxim](/sdk/python/references/maxim) | The current Maxim instance for method chaining. |

enable_exceptions

def enable_exceptions(val: bool) -> "Maxim"

Enables or disables exception raising during logging operations.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| val | bool | True to enable exception raising, False to disable. |

Returns:

| Name | Description |
| --- | --- |
| [Maxim](/sdk/python/references/maxim) | The current Maxim instance for method chaining. |

get_prompt

def get_prompt(id: str, rule: QueryRule) -> Optional[RunnablePrompt]

Retrieves a prompt based on the provided id and rule.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| id | str | The id of the prompt. |
| rule | QueryRule | The rule to match the prompt against. |

Returns:

| Name | Description |
| --- | --- |
| Optional[[RunnablePrompt](/sdk/python/references/models/prompt)] | The prompt object if found, otherwise None. |

get_prompts

def get_prompts(rule: QueryRule) -> List[RunnablePrompt]

Retrieves all prompts that match the given rule.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| rule | QueryRule | The rule to match the prompts against. |

Returns:

| Name | Description |
| --- | --- |
| List[[RunnablePrompt](/sdk/python/references/models/prompt)] | A list of prompts that match the given rule. |

get_prompt_chain

def get_prompt_chain(id: str,
                     rule: QueryRule) -> Optional[RunnablePromptChain]

Retrieves a prompt chain based on the provided id and rule.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| id | str | The id of the prompt chain. |
| rule | QueryRule | The rule to match the prompt chain against. |

Returns:

| Name | Description |
| --- | --- |
| Optional[[RunnablePromptChain](/sdk/python/references/models/prompt_chain)] | The prompt chain object if found, otherwise None. |

get_folder_by_id

def get_folder_by_id(id: str) -> Optional[Folder]

Retrieves a folder based on the provided id.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| id | str | The id of the folder. |

Returns:

| Name | Description |
| --- | --- |
| Optional[[Folder](/sdk/python/references/models/folder)] | The folder object if found, otherwise None. |

get_folders

def get_folders(rule: QueryRule) -> List[Folder]

Retrieves all folders that match the given rule.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| rule | QueryRule | The rule to match the folders against. |

Returns:

| Name | Description |
| --- | --- |
| List[[Folder](/sdk/python/references/models/folder)] | A list of folders that match the given rule. |

logger

def logger(
        config: Optional[Union[LoggerConfig,
                               LoggerConfigDict]] = None) -> Logger

Creates a logger based on the provided configuration.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| config | Optional[Union[LoggerConfig, LoggerConfigDict]], optional | The configuration for the logger. Defaults to None. |

Returns:

| Name | Description |
| --- | --- |
| [Logger](/sdk/python/references/logger/logger) | The logger object. |

create_test_run

def create_test_run(name: str, in_workspace_id: str) -> TestRunBuilder

Creates a test run builder based on the provided name and workspace id.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| name | str | The name of the test run. |
| in_workspace_id | str | The workspace id to create the test run in. |

Returns:

| Name | Description |
| --- | --- |
| [TestRunBuilder](/sdk/python/references/test_runs/test_run_builder) | The test run builder object. |

chat_completion

def chat_completion(model: str,
                    messages: List[ChatCompletionMessage],
                    tools: Optional[List[Tool]] = None,
                    **kwargs) -> Optional[PromptResponse]

Performs a chat completion request using the specified model and messages.

Arguments:

| Name | Type | Description |
| --- | --- | --- |
| model | str | The model name to use for completion, in the format "provider/model_name", e.g. "openai/gpt-3.5-turbo". |
| messages | List[ChatCompletionMessage] | List of chat messages in the conversation. |
| tools | Optional[List[Tool]], optional | List of tools available to the model. Defaults to None. |
| **kwargs | | Additional model parameters to pass to the completion request. |

Returns:

| Name | Description |
| --- | --- |
| Optional[[PromptResponse](/sdk/python/references/models/prompt)] | The completion response if successful, None otherwise. |

cleanup

def cleanup()

Cleans up the Maxim sync thread.