Use OpenAILike for LlamaIndex, which allows usage of any OpenAI-compatible model via API, not just the models hardcoded inside the default OpenAI class in LlamaIndex
This commit is contained in:
@@ -93,18 +93,30 @@ def get_llm_model():
|
||||
return llm
|
||||
|
||||
elif strategy == "openai":
|
||||
from llama_index.llms.openai import OpenAI
|
||||
from llama_index.llms.openai_like import OpenAILike
|
||||
# from helpers.openai_compatible_llm import OpenAICompatibleLLM
|
||||
|
||||
openai_chat_url = os.getenv("OPENAI_CHAT_URL", "https://api.openai.com/v1")
|
||||
openai_chat_key = os.getenv("OPENAI_CHAT_KEY", "dummy_key_for_template")
|
||||
openai_chat_model = os.getenv("OPENAI_CHAT_MODEL", "gpt-3.5-turbo")
|
||||
openai_is_fc_model = (
|
||||
os.getenv("OPENAI_CHAT_IS_FUNCTION_CALLING_MODEL", "false").lower()
|
||||
== "true"
|
||||
)
|
||||
|
||||
# Set the API key in environment for OpenAI
|
||||
os.environ["OPENAI_API_KEY"] = openai_chat_key
|
||||
|
||||
logger.info(f"Initializing OpenAI chat model: {openai_chat_model}")
|
||||
logger.info(
|
||||
f"Initializing OpenAI-compatible chat model: {openai_chat_model} "
|
||||
f"(base={openai_chat_url}, function_calling={openai_is_fc_model})"
|
||||
)
|
||||
|
||||
llm = OpenAI(model=openai_chat_model, api_base=openai_chat_url)
|
||||
llm = OpenAILike(
|
||||
model=openai_chat_model,
|
||||
api_base=openai_chat_url,
|
||||
api_key=openai_chat_key,
|
||||
)
|
||||
|
||||
return llm
|
||||
|
||||
|
||||
Reference in New Issue
Block a user