
I am trying to provide a custom prompt for doing Q&A in LangChain. I wasn't able to do that with ConversationalRetrievalChain, because it does not allow multiple custom inputs in a custom prompt. Hence I used load_qa_chain, but with load_qa_chain I am unable to use memory.

How can I add memory to load_qa_chain, or how can I implement ConversationalRetrievalChain with a custom prompt that takes multiple inputs? (One untested idea is sketched after the code below.)

import openai
import numpy as np
import pandas as pd
import os
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.llms import OpenAI
from langchain.chains import RetrievalQA, ConversationalRetrievalChain,RetrievalQAWithSourcesChain
from langchain.chains.qa_with_sources import load_qa_with_sources_chain
from langchain.chains.question_answering import load_qa_chain
from langchain.document_loaders import UnstructuredFileLoader
from langchain.prompts import PromptTemplate

from langchain.document_loaders import UnstructuredExcelLoader
loader = UnstructuredFileLoader("../document.pdf", mode="elements")
documents = loader.load()
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
texts = text_splitter.split_documents(documents)
#embeddings = OpenAIEmbeddings()
from langchain.embeddings.sentence_transformer import SentenceTransformerEmbeddings
embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
vectorDB = Chroma.from_documents(texts,embeddings)


prompt_template = """You are a Chat customer support agent.
Address the customer as Dear Mr. or Miss. depending on customer's gender followed by Customer's First Name.
Use the following pieces of context to answer the question at the end.
If you don't know the answer, just say that you don't know, don't try to make up an answer.
Below are the details of the customer:
Customer's Name: {Customer_Name}
Customer's Resident State: {Customer_State}
Customer's Gender: {Customer_Gender}
{context}
Question: {question}
Answer: """

import json

# Load the customer profile from JSON
with open('Customer_profile.json', 'r') as openfile:
    json_object = json.load(openfile)

cName = json_object['Customer_Name']
cState = json_object['Customer_State']
cGen = json_object['Customer_Gender']

PROMPT = PromptTemplate(
    template=prompt_template, input_variables=["context", "question","Customer_Name","Customer_State","Customer_Gender"]
)

chain_type_kwargs = {"prompt": PROMPT}

from langchain.memory import ConversationBufferMemory
memory = ConversationBufferMemory(memory_key="chat_history", output_key='answer',return_messages=True)

#qa = RetrievalQAWithSourcesChain.from_chain_type(OpenAI(temperature=0), retriever=vectorDB.as_retriever(),chain_type="stuff", memory=memory,return_source_documents=True,chain_type_kwargs=chain_type_kwargs)
#qa = RetrievalQAWithSourcesChain.from_chain_type(OpenAI(temperature=0), retriever=vectorDB.as_retriever(),chain_type="stuff", memory=memory,return_source_documents=True)
#qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0), retriever=vectorDB.as_retriever(),chain_type="stuff", memory=memory,return_source_documents=True,chain_type_kwargs=chain_type_kwargs)
#qa = load_qa_with_sources_chain(OpenAI(temperature=0),chain_type="stuff",prompt=PROMPT)
qa = load_qa_chain(OpenAI(temperature=0.1),chain_type="stuff",prompt=PROMPT)

import langchain
langchain.debug=False
query="How's the weather in my place?"
docs = vectorDB.similarity_search(query)



#vectordbkwargs = {"search_distance": 0.9}
result=qa({"input_documents": docs,"question": query,'Customer_Gender':'Male','Customer_State':'Madhya Pradesh','Customer_Name':'Bob'})
#result=qa({"question": query})
print(result['output_text'])
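
One idea I have not tested yet: since ConversationalRetrievalChain rejects the extra prompt variables, the customer fields could be pre-filled with PromptTemplate.partial so that the chain itself only has to supply context and question. A rough sketch, reusing PROMPT, memory, query, and vectorDB from above (partial_prompt is just a name I made up; combine_docs_chain_kwargs is how from_llm forwards the QA prompt):

# Untested sketch: bind the customer fields up front so the remaining
# input variables are only "context" and "question".
partial_prompt = PROMPT.partial(
    Customer_Name=cName,
    Customer_State=cState,
    Customer_Gender=cGen,
)
qa_with_memory = ConversationalRetrievalChain.from_llm(
    OpenAI(temperature=0),
    retriever=vectorDB.as_retriever(),
    memory=memory,
    combine_docs_chain_kwargs={"prompt": partial_prompt},
)
result = qa_with_memory({"question": query})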

Customer_profile.json

{
    "Customer_Name": "Bob",
    "Customer_State": "NY",
    "Customer_Gender": "Male"
}
Jason

1 Answer


Based on my custom PDF, you can use the following logic; you can refer to my notebook for more detail.

# docstring prompt with extra customer fields
prompt_template = """You are a Chat customer support agent.
Address the customer as Dear Mr. or Miss. depending on customer's gender followed by Customer's First Name.
Use the following pieces of context to answer the question at the end.
If you don't know the answer, just say that you don't know, don't try to make up an answer.
Below are the details of the customer:
Customer's Name: {Customer_Name}
Customer's Resident State: {Customer_State}
Customer's Gender: {Customer_Gender}
{context}
Question: {question}
Answer: """
PROMPT = PromptTemplate(
    template=prompt_template, input_variables=["context", "question","Customer_Name","Customer_State","Customer_Gender"]
)
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.chains.question_answering import load_qa_chain

memory = ConversationBufferMemory(memory_key="chat_history", input_key="question")
chain = load_qa_chain(
    OpenAI(temperature=0), chain_type="stuff", memory=memory, prompt=PROMPT
)


query = "Why did Frog send a letter to Toad?"
docs = vectorDB.similarity_search(query=query)

# Build the input dictionary for the chain. Note that the "context" value is
# overwritten with the joined input_documents by the stuff chain, so the
# placeholder string below is effectively ignored.
chain_input = {
    "input_documents": docs,
    "context": "This is contextless",
    "question": query,
    "Customer_Name": "Bob",
    "Customer_State": "NY",
    "Customer_Gender": "Male"
}

result=chain(chain_input, return_only_outputs=True)

output:

Dear Mr. Bob, Frog sent a letter to Toad because he wanted to show Toad that he was glad to be his best friend. He wrote in the letter, "Dear Toad, I am glad that you are my best friend. Your best friend, Frog."

If you are planning to keep a chat history, you can use memory, as wired up above.
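
Note that for the saved history to actually reach the model, the prompt template also needs a {chat_history} variable (the comment below and the LangChain docs on adding memory to a chain with multiple inputs cover this). A minimal sketch of that variant, with the customer fields dropped for brevity (template_with_history and prompt_with_history are names of my own):

from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate
from langchain.chains.question_answering import load_qa_chain

# Prompt with an explicit {chat_history} slot so the memory is rendered
# into the prompt on every turn.
template_with_history = """You are a Chat customer support agent.
Use the following pieces of context to answer the question at the end.

{context}

{chat_history}
Human: {question}
Answer: """

prompt_with_history = PromptTemplate(
    input_variables=["context", "chat_history", "question"],
    template=template_with_history,
)
memory = ConversationBufferMemory(memory_key="chat_history", input_key="question")
chain = load_qa_chain(
    OpenAI(temperature=0), chain_type="stuff", memory=memory, prompt=prompt_with_history
)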

simpleApp
  • This won't work, you need to edit your `prompt_template` to have a `chat_history` input_variable. https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs – BeGreen Aug 22 '23 at 08:35
  • You can refer to the notebook attached to the answer, where you can see the complete example. – simpleApp Aug 22 '23 at 13:11