Quantcast
Channel: Active questions tagged python - Stack Overflow
Viewing all articles
Browse latest Browse all 16832

LLM RAG with Agent

$
0
0

I was trying the application code in the link.

I am using the following LangChain versions:

langchain 0.0.327, langchain-community 0.0.2, langchain-core 0.1.0

Getting the following error:

Entering new AgentExecutor chain...Traceback (most recent call last):  File "RAGWithAgent.py", line 54, in <module>    result = agent_executor({"input": "hi, im bob"})  File "\lib\site-packages\langchain\chains\base.py", line 310, in __call__    raise e  File "\lib\site-packages\langchain\chains\base.py", line 304, in __call__    self._call(inputs, run_manager=run_manager)  File "\lib\site-packages\langchain\agents\agent.py", line 1146, in _call    next_step_output = self._take_next_step(  File "\lib\site-packages\langchain\agents\agent.py", line 933, in _take_next_step    output = self.agent.plan(  File "\lib\site-packages\langchain\agents\openai_functions_agent\base.py", line 104, in plan    predicted_message = self.llm.predict_messages(  File "\lib\site-packages\langchain\chat_models\base.py", line 650, in predict_messages    return self(messages, stop=_stop, **kwargs)  File "\lib\site-packages\langchain\chat_models\base.py", line 600, in __call__    generation = self.generate(  File "\lib\site-packages\langchain\chat_models\base.py", line 349, in generate    raise e  File "\lib\site-packages\langchain\chat_models\base.py", line 339, in generate    self._generate_with_cache(  File "\lib\site-packages\langchain\chat_models\base.py", line 492, in _generate_with_cache    return self._generate(  File "\lib\site-packages\langchain\chat_models\openai.py", line 357, in _generate    return _generate_from_stream(stream_iter)  File "\lib\site-packages\langchain\chat_models\base.py", line 57, in _generate_from_stream    for chunk in stream:  File "\lib\site-packages\langchain\chat_models\openai.py", line 326, in _stream    for chunk in self.completion_with_retry(  File "\lib\site-packages\langchain\chat_models\openai.py", line 299, in completion_with_retry    return _completion_with_retry(**kwargs)  File "\lib\site-packages\tenacity\__init__.py", line 289, in wrapped_f    return self(f, *args, **kw)  File "\lib\site-packages\tenacity\__init__.py", line 379, in __call__    
do = self.iter(retry_state=retry_state)  File "\lib\site-packages\tenacity\__init__.py", line 314, in iter    return fut.result()  File "D:\Program Files\Python38\lib\concurrent\futures\_base.py", line 432, in result    return self.__get_result()  File "D:\Program Files\Python38\lib\concurrent\futures\_base.py", line 388, in __get_result    raise self._exception  File "\lib\site-packages\tenacity\__init__.py", line 382, in __call__    result = fn(*args, **kwargs)  File "\lib\site-packages\langchain\chat_models\openai.py", line 297, in _completion_with_retry    return self.client.create(**kwargs)  File "\lib\site-packages\openai\api_resources\chat_completion.py", line 25, in create    return super().create(*args, **kwargs)  File "\lib\site-packages\openai\api_resources\abstract\engine_api_resource.py", line 155, in create    response, _, api_key = requestor.request(  File "\lib\site-packages\openai\api_requestor.py", line 299, in request    resp, got_stream = self._interpret_response(result, stream)  File "\lib\site-packages\openai\api_requestor.py", line 710, in _interpret_response    self._interpret_response_line(  File "\lib\site-packages\openai\api_requestor.py", line 775, in _interpret_response_line    raise self.handle_error_response(openai.error.InvalidRequestError: Unrecognized request argument supplied: functionsProcess finished with exit code 1

I used Azure OpenAI instead of OpenAI. FAISS was not working for me, so I used the Chroma vector store instead.

Following is my code:

"""RAG-with-agent example: Azure OpenAI chat model + a Chroma retriever tool.

Loads a text document, splits it into chunks, embeds the chunks with a
sentence-transformers model into a persistent Chroma collection, wraps the
retriever as an agent tool, and runs a conversational retrieval agent.
"""
from langchain.text_splitter import CharacterTextSplitter
from langchain.document_loaders import TextLoader
from langchain.agents.agent_toolkits import create_retriever_tool
from langchain.agents.agent_toolkits import create_conversational_retrieval_agent
from langchain.chat_models import AzureChatOpenAI
from langchain.vectorstores import Chroma
from langchain.embeddings.sentence_transformer import SentenceTransformerEmbeddings
import os

AZURE_OPENAI_API_KEY = ""
os.environ["OPENAI_API_KEY"] = AZURE_OPENAI_API_KEY

# Load and chunk the source document.
loader = TextLoader(r"Toward a Knowledge Graph of Cybersecurity Countermeasures.txt")
documents = loader.load()
text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
chunks = text_splitter.split_documents(documents)

# Create the open-source embedding function.
embedding_function = SentenceTransformerEmbeddings(model_name="all-mpnet-base-v2")

# BUG FIX: the original passed the *string* "__file__" (quoted), so
# os.path.dirname() always returned "".  Use the dunder itself.
current_directory = os.path.dirname(os.path.abspath(__file__))

# Load the chunks into Chroma and persist them to disk.
db = Chroma.from_documents(
    chunks,
    embedding_function,
    collection_name="groups_collection",
    persist_directory=r"\rag_with_agent_chroma_db",
)
retriever = db.as_retriever(search_kwargs={"k": 5})

tool = create_retriever_tool(
    retriever,
    "search_state_of_union",
    "Searches and returns documents regarding the state-of-the-union.",
)
tools = [tool]

llm = AzureChatOpenAI(
    deployment_name='gtp35turbo',
    model_name='gpt-35-turbo',
    openai_api_key=AZURE_OPENAI_API_KEY,
    # BUG FIX: the OpenAI-functions agent sends a `functions` request
    # argument.  Azure API version 2023-03-15-preview predates function
    # calling, which is what raised "Unrecognized request argument
    # supplied: functions".  2023-07-01-preview (or later) supports it.
    openai_api_version='2023-07-01-preview',
    openai_api_base='https://azureft.openai.azure.com/',
    openai_api_type='azure',
    streaming=True,
    verbose=True,
)

agent_executor = create_conversational_retrieval_agent(
    llm,
    tools,
    verbose=True,
    remember_intermediate_steps=True,
    memory_key="chat_history",
)
result = agent_executor({"input": "hi, im bob"})
print(result["output"])

Viewing all articles
Browse latest Browse all 16832

Trending Articles



<script src="https://jsc.adskeeper.com/r/s/rssing.com.1596347.js" async> </script>