proper usage of embedding models if defined in .env
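The commit title refers to configuration read from .env. For orientation, here is a hypothetical .env fragment that uses only the variable names appearing in the diff below; the values are placeholders, not taken from the source:

# Hypothetical values; only the variable names come from the diff.
EMBEDDING_STRATEGY=openai            # "openai", "none", or anything else falls back to Ollama
OPENAI_EMBEDDING_API_KEY=sk-...      # required when EMBEDDING_STRATEGY=openai
OPENAI_EMBEDDING_BASE_URL=https://api.openai.com/v1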
@@ -4,10 +4,11 @@ import os
 from typing import Optional

 from dotenv import load_dotenv
-from langchain_qdrant import QdrantVectorStore
 from langchain_core.documents import Document
 from langchain_ollama import OllamaEmbeddings
+from langchain_openai import OpenAIEmbeddings
+from langchain_qdrant import QdrantVectorStore
 from loguru import logger
 from qdrant_client import QdrantClient

 # Load environment variables
@@ -43,7 +44,9 @@ def initialize_vector_store(
     if EMBEDDING_STRATEGY == "openai":
         # Validate required OpenAI embedding variables
         if not OPENAI_EMBEDDING_API_KEY or not OPENAI_EMBEDDING_BASE_URL:
-            raise ValueError("OPENAI_EMBEDDING_API_KEY and OPENAI_EMBEDDING_BASE_URL must be set when using OpenAI embedding strategy")
+            raise ValueError(
+                "OPENAI_EMBEDDING_API_KEY and OPENAI_EMBEDDING_BASE_URL must be set when using OpenAI embedding strategy"
+            )

         # Initialize OpenAI embeddings
         embeddings = OpenAIEmbeddings(
@@ -51,6 +54,10 @@ def initialize_vector_store(
             openai_api_base=OPENAI_EMBEDDING_BASE_URL,
             openai_api_key=OPENAI_EMBEDDING_API_KEY,
         )
+    elif EMBEDDING_STRATEGY == "none":
+        embeddings = None
+
+        logger.warning("Embedding strategy for vector storage is NONE! FYI")
     else: # Default to ollama
         # Initialize Ollama embeddings
         embeddings = OllamaEmbeddings(
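Read together, the hunks above amount to a strategy switch on EMBEDDING_STRATEGY. Below is a minimal sketch of that selection logic, assuming the module-level constants are populated via os.getenv; the helper name select_embeddings and both model names are placeholders, since the actual model settings and function shape are not shown in the diff:

import os

from dotenv import load_dotenv
from langchain_ollama import OllamaEmbeddings
from langchain_openai import OpenAIEmbeddings
from loguru import logger

load_dotenv()

EMBEDDING_STRATEGY = os.getenv("EMBEDDING_STRATEGY", "ollama")
OPENAI_EMBEDDING_API_KEY = os.getenv("OPENAI_EMBEDDING_API_KEY")
OPENAI_EMBEDDING_BASE_URL = os.getenv("OPENAI_EMBEDDING_BASE_URL")


def select_embeddings():
    # Sketch of the strategy switch shown in the diff.
    if EMBEDDING_STRATEGY == "openai":
        # Fail fast if the OpenAI-specific variables are missing.
        if not OPENAI_EMBEDDING_API_KEY or not OPENAI_EMBEDDING_BASE_URL:
            raise ValueError(
                "OPENAI_EMBEDDING_API_KEY and OPENAI_EMBEDDING_BASE_URL must be set "
                "when using OpenAI embedding strategy"
            )
        return OpenAIEmbeddings(
            model="text-embedding-3-small",  # placeholder; the model setting is not in the diff
            openai_api_base=OPENAI_EMBEDDING_BASE_URL,
            openai_api_key=OPENAI_EMBEDDING_API_KEY,
        )
    if EMBEDDING_STRATEGY == "none":
        # Mirrors the diff's "none" branch: no embeddings, just a warning.
        logger.warning("Embedding strategy for vector storage is NONE! FYI")
        return None
    # Default branch: fall back to Ollama.
    return OllamaEmbeddings(model="nomic-embed-text")  # placeholder model name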
@@ -118,7 +125,9 @@ def add_documents_to_vector_store(
         vector_store.add_documents(batch)


-def search_vector_store(vector_store: QdrantVectorStore, query: str, top_k: int = 5) -> list:
+def search_vector_store(
+    vector_store: QdrantVectorStore, query: str, top_k: int = 5
+) -> list:
     """
     Search the vector store for similar documents.

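The last hunk only reflows the signature of search_vector_store; its body is not part of the diff. For context, a helper with this signature is typically a thin wrapper over LangChain's similarity_search. A sketch under that assumption (the body shown here is illustrative, not the project's actual implementation):

from langchain_qdrant import QdrantVectorStore


def search_vector_store(
    vector_store: QdrantVectorStore, query: str, top_k: int = 5
) -> list:
    # Assumed body: delegate to the vector store's similarity search,
    # returning the top_k most similar documents for the query.
    return vector_store.similarity_search(query, k=top_k)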