tooluniverse.llm_clients module¶

class tooluniverse.llm_clients.BaseLLMClient[source]¶

Bases: object

test_api() → None[source]¶
infer(messages: List[Dict[str, str]], temperature: float | None, max_tokens: int | None, return_json: bool, custom_format: Any | None = None, max_retries: int = 5, retry_delay: int = 5) → str | None[source]¶
class tooluniverse.llm_clients.AzureOpenAIClient(model_id: str, api_version: str | None, logger)[source]¶

Bases: BaseLLMClient

DEFAULT_MODEL_LIMITS: Dict[str, Dict[str, int]] = {'embedding-ada': {'context_window': 8192, 'max_output': 8192}, 'gpt-4.1': {'context_window': 1047576, 'max_output': 32768}, 'gpt-4.1-mini': {'context_window': 1047576, 'max_output': 32768}, 'gpt-4.1-nano': {'context_window': 1047576, 'max_output': 32768}, 'gpt-4o': {'context_window': 128000, 'max_output': 16384}, 'gpt-4o-0806': {'context_window': 128000, 'max_output': 16384}, 'gpt-4o-1120': {'context_window': 128000, 'max_output': 16384}, 'gpt-4o-mini-0718': {'context_window': 128000, 'max_output': 16384}, 'o3-mini': {'context_window': 200000, 'max_output': 100000}, 'o3-mini-0131': {'context_window': 200000, 'max_output': 100000}, 'o4-mini': {'context_window': 200000, 'max_output': 100000}, 'o4-mini-0416': {'context_window': 200000, 'max_output': 100000}, 'text-embedding-3-large': {'context_window': 8192, 'max_output': 8192}, 'text-embedding-3-small': {'context_window': 8192, 'max_output': 8192}}[source]¶
__init__(model_id: str, api_version: str | None, logger)[source]¶
test_api() → None[source]¶
infer(messages: List[Dict[str, str]], temperature: float | None, max_tokens: int | None, return_json: bool, custom_format: Any | None = None, max_retries: int = 5, retry_delay: int = 5) → str | None[source]¶
class tooluniverse.llm_clients.GeminiClient(model_name: str, logger)[source]¶

Bases: BaseLLMClient

__init__(model_name: str, logger)[source]¶
test_api() → None[source]¶
infer(messages: List[Dict[str, str]], temperature: float | None, max_tokens: int | None, return_json: bool, custom_format: Any | None = None, max_retries: int = 5, retry_delay: int = 5) → str | None[source]¶