From 9bf52ba28b3d938384e020b8c34ba6f36237c086 Mon Sep 17 00:00:00 2001
From: rajveer43
Date: Thu, 28 Mar 2024 21:36:10 +0530
Subject: [PATCH 1/2] add genai module

---
 src/genai/conversation.py | 61 +++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 61 insertions(+)
 create mode 100644 src/genai/conversation.py

diff --git a/src/genai/conversation.py b/src/genai/conversation.py
new file mode 100644
index 0000000..63a8492
--- /dev/null
+++ b/src/genai/conversation.py
@@ -0,0 +1,61 @@
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+import os
+import streamlit as st
+from langchain_google_genai import GoogleGenerativeAIEmbeddings
+import google.generativeai as genai
+from langchain.vectorstores import FAISS
+from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain.chains.question_answering import load_qa_chain
+from langchain.prompts import PromptTemplate
+from dotenv import load_dotenv
+
+
+def get_text_chunks(text):
+    # Split the raw text into large overlapping chunks for embedding.
+    text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)
+    chunks = text_splitter.split_text(text)
+    return chunks
+
+
+def get_vector_store(text_chunks):
+    # Embed the chunks and persist a FAISS index locally.
+    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
+    vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
+    vector_store.save_local("faiss_index")
+
+
+def get_conversational_chain():
+    prompt_template = """
+    Answer the question as detailed as possible from the provided context and make sure to provide all the details.
+    If the answer is not in the provided context, just say "answer is not available in the context";
+    do not provide a wrong answer.
+
+    Context:\n{context}\n
+    Question:\n{question}\n
+
+    Answer:
+    """
+
+    model = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
+    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
+    # "stuff" chain: all retrieved documents are placed into a single prompt.
+    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
+
+    return chain
+
+
+def user_input(user_question):
+    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
+
+    # Load the locally saved FAISS index and retrieve the most relevant chunks.
+    new_db = FAISS.load_local("faiss_index", embeddings)
+    docs = new_db.similarity_search(user_question)
+
+    chain = get_conversational_chain()
+
+    response = chain(
+        {"input_documents": docs, "question": user_question},
+        return_only_outputs=True)
+
+    print(response)
+    st.write("Reply: ", response["output_text"])

From dd26845851b0ace2fa37ac1d043f30e5f865c5bf Mon Sep 17 00:00:00 2001
From: rajveer43
Date: Thu, 28 Mar 2024 21:43:37 +0530
Subject: [PATCH 2/2] set env key

---
 src/genai/conversation.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/genai/conversation.py b/src/genai/conversation.py
index 63a8492..c94661f 100644
--- a/src/genai/conversation.py
+++ b/src/genai/conversation.py
@@ -9,6 +9,9 @@
 from langchain.prompts import PromptTemplate
 from dotenv import load_dotenv
 
+load_dotenv()
+genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
+
 
 def get_text_chunks(text):
     # Split the raw text into large overlapping chunks for embedding.
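
Usage sketch (not part of the patches): conversation.py assumes a Streamlit front end, since user_input() renders the reply with st.write(), and a GOOGLE_API_KEY available from the environment or a .env file. A minimal caller could look like the following; the file name app.py, the raw_text placeholder, and the import path are assumptions for illustration only, not code from this PR.

    # app.py -- hypothetical Streamlit entry point illustrating the intended call order.
    import streamlit as st
    from src.genai.conversation import get_text_chunks, get_vector_store, user_input

    # Text extracted elsewhere (e.g. from uploaded documents); extraction is out of scope for this module.
    raw_text = "..."

    # Build the index once: split the text and persist a FAISS index under ./faiss_index.
    get_vector_store(get_text_chunks(raw_text))

    # Answer questions against the saved index; user_input() prints the raw chain response
    # to the console and writes the final answer into the Streamlit page.
    question = st.text_input("Ask a question about the indexed text")
    if question:
        user_input(question)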