Working chat with AI agent, including data retrieval

This commit is contained in:
2026-02-04 00:02:53 +03:00
parent 299ee0acb5
commit d354d3dcca
4 changed files with 298 additions and 6 deletions

View File

@@ -113,5 +113,43 @@ def retrieve(query, collection_name, top_k):
click.echo(f"Error: {str(e)}")
@cli.command(
    name="chat",
    help="Start an interactive chat session with the RAG agent",
)
@click.option(
    "--collection-name",
    default="documents_langchain",
    help="Name of the vector store collection",
)
@click.option(
    "--model",
    default=None,
    help="Name of the Ollama model to use for chat",
)
def chat(collection_name, model):
    """Start an interactive chat session with the RAG agent.

    Args:
        collection_name: Vector store collection the agent retrieves
            context from (default: "documents_langchain").
        model: Ollama model name to chat with; ``None`` lets the agent
            fall back to its own default.
    """
    logger.info("Starting chat session with RAG agent")
    try:
        # Import here to avoid circular dependencies and only when needed
        from agent import run_chat_loop

        click.echo("Initializing chat agent...")
        click.echo("Type 'quit' or 'exit' to end the conversation.\n")
        # Run the interactive chat loop; blocks until the user quits.
        run_chat_loop(
            collection_name=collection_name,
            llm_model=model,
        )
        logger.info("Chat session ended")
    except Exception as e:
        # logger.exception keeps the full traceback (logger.error with a
        # pre-formatted f-string discarded it); user-facing echo unchanged.
        logger.exception("Error during chat session: %s", e)
        click.echo(f"Error: {str(e)}")
# Script entry point: dispatch to the click CLI group defined above.
if __name__ == "__main__":
    cli()