# Source: rag-solution/services/rag/llamaindex/helpers/openai_compatible_llm.py
# (36 lines, 1008 B — header retained from the original file listing)
"""
OpenAI-compatible LLM wrapper for LlamaIndex chat models.
This wrapper is used as a fallback/replacement for strict OpenAI model validation paths.
It relies on LlamaIndex `OpenAILike`, which supports arbitrary model names for
OpenAI-compatible endpoints.
"""
from llama_index.llms.openai_like import OpenAILike
class OpenAICompatibleLLM(OpenAILike):
    """
    Thin wrapper over ``OpenAILike`` with chat-friendly defaults.

    ``OpenAILike`` accepts arbitrary model names, so this class serves as a
    fallback/replacement for code paths that would otherwise enforce strict
    OpenAI model-name validation.
    """

    def __init__(
        self,
        model: str,
        api_base: str,
        api_key: str,
        temperature: float = 0.1,
        timeout: float = 120.0,
        is_function_calling_model: bool = False,
        **kwargs,
    ):
        """
        Args:
            model: Model name sent to the OpenAI-compatible endpoint
                (no registered-model validation is performed).
            api_base: Base URL of the OpenAI-compatible API.
            api_key: API key for the endpoint.
            temperature: Sampling temperature (default 0.1).
            timeout: Request timeout in seconds (default 120.0).
            is_function_calling_model: Whether the backing model supports
                function/tool calling (default False).
            **kwargs: Additional keyword arguments forwarded verbatim to
                ``OpenAILike`` (e.g. ``max_tokens``, ``context_window``),
                so callers are not limited to the parameters named above.
        """
        super().__init__(
            model=model,
            api_base=api_base,
            api_key=api_key,
            temperature=temperature,
            timeout=timeout,
            # Explicitly avoid "registered model only" assumptions.
            is_chat_model=True,
            is_function_calling_model=is_function_calling_model,
            **kwargs,
        )