"""
OpenAI-compatible LLM wrapper for LlamaIndex chat models.

This wrapper is used as a fallback/replacement for strict OpenAI model validation paths.
It relies on LlamaIndex `OpenAILike`, which supports arbitrary model names for
OpenAI-compatible endpoints.
"""

from llama_index.llms.openai_like import OpenAILike


class OpenAICompatibleLLM(OpenAILike):
    """Thin wrapper over OpenAILike with chat-friendly defaults.

    Accepts an arbitrary model name for any OpenAI-compatible endpoint and
    forces chat-mode behavior on the underlying ``OpenAILike`` client.
    """

    def __init__(
        self,
        model: str,
        api_base: str,
        api_key: str,
        temperature: float = 0.1,
        timeout: float = 120.0,
        max_tokens: int | None = None,
        reasoning_effort: str | None = None,
        is_function_calling_model: bool = False,
    ):
        # Gather the caller-supplied connection/generation settings together
        # with our fixed chat-friendly defaults, then delegate in one call.
        settings = {
            "model": model,
            "api_base": api_base,
            "api_key": api_key,
            "temperature": temperature,
            "timeout": timeout,
            "max_tokens": max_tokens,
            "reasoning_effort": reasoning_effort,
            # Explicitly avoid "registered model only" assumptions: treat the
            # endpoint as a chat model regardless of the model name given.
            "is_chat_model": True,
            "is_function_calling_model": is_function_calling_model,
            "should_use_structured_outputs": False,
        }
        super().__init__(**settings)
|