Use OpenAILike for LlamaIndex to allow any OpenAI-compatible model to be used via API, rather than only the models hardcoded in LlamaIndex's default OpenAI class.
This commit is contained in:
35
services/rag/llamaindex/helpers/openai_compatible_llm.py
Normal file
35
services/rag/llamaindex/helpers/openai_compatible_llm.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""
|
||||
OpenAI-compatible LLM wrapper for LlamaIndex chat models.
|
||||
|
||||
This wrapper is used as a fallback/replacement for strict OpenAI model validation paths.
|
||||
It relies on LlamaIndex `OpenAILike`, which supports arbitrary model names for
|
||||
OpenAI-compatible endpoints.
|
||||
"""
|
||||
|
||||
from llama_index.llms.openai_like import OpenAILike
|
||||
|
||||
|
||||
class OpenAICompatibleLLM(OpenAILike):
    """
    Thin wrapper over OpenAILike with chat-friendly defaults.

    Forces ``is_chat_model=True`` so requests go to the chat-completions
    endpoint, while accepting arbitrary model names — unlike LlamaIndex's
    default ``OpenAI`` class, which validates against a fixed model registry.
    """

    def __init__(
        self,
        model: str,
        api_base: str,
        api_key: str,
        temperature: float = 0.1,
        timeout: float = 120.0,
        is_function_calling_model: bool = False,
        **kwargs,
    ):
        """
        Build an OpenAILike client pre-configured for chat use.

        Args:
            model: Model name as understood by the OpenAI-compatible server
                (no registry validation is performed).
            api_base: Base URL of the OpenAI-compatible endpoint.
            api_key: API key sent with each request.
            temperature: Sampling temperature; defaults to 0.1 for
                near-deterministic output.
            timeout: Request timeout in seconds.
            is_function_calling_model: Whether the backing model supports
                OpenAI-style function/tool calling.
            **kwargs: Additional ``OpenAILike`` fields (e.g. ``max_tokens``,
                ``context_window``, ``max_retries``) passed through unchanged,
                so callers are not restricted to the parameters named above.
        """
        super().__init__(
            model=model,
            api_base=api_base,
            api_key=api_key,
            temperature=temperature,
            timeout=timeout,
            # Explicitly avoid "registered model only" assumptions.
            is_chat_model=True,
            is_function_calling_model=is_function_calling_model,
            **kwargs,
        )
|
||||
Reference in New Issue
Block a user