From a7e54f6810e97945cb60a44ddc83f8d49b2e13b6 Mon Sep 17 00:00:00 2001
From: asdfcube <36262524+asdfcube@users.noreply.github.com>
Date: Fri, 2 Jun 2023 16:56:10 +0800
Subject: [PATCH] Added config for topK for Pinecone

---
 .env.example       | 10 ++++++----
 .gitignore         |  2 ++
 config/pinecone.ts |  4 +++-
 utils/makechain.ts |  3 ++-
 4 files changed, 13 insertions(+), 6 deletions(-)

diff --git a/.env.example b/.env.example
index 85bc4319d..de1557c73 100644
--- a/.env.example
+++ b/.env.example
@@ -1,8 +1,10 @@
-OPENAI_API_KEY=
+OPENAI_API_KEY=
 
 # Update these with your pinecone details from your dashboard.
 # PINECONE_INDEX_NAME is in the indexes tab under "index name" in blue
 # PINECONE_ENVIRONMENT is in indexes tab under "Environment". Example: "us-east1-gcp"
-PINECONE_API_KEY=
-PINECONE_ENVIRONMENT=
-PINECONE_INDEX_NAME=
+# PINECONE_TOPK is the number of related sources you want for each prompt, defaults to 4
+PINECONE_API_KEY=
+PINECONE_ENVIRONMENT=
+PINECONE_INDEX_NAME=
+PINECONE_TOPK=
diff --git a/.gitignore b/.gitignore
index 1759b24fb..c286d0079 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,3 +38,5 @@ next-env.d.ts
 
 #Notion_db
 /Notion_DB
+
+/docs
diff --git a/config/pinecone.ts b/config/pinecone.ts
index ce2dadaad..bc2977a05 100644
--- a/config/pinecone.ts
+++ b/config/pinecone.ts
@@ -10,4 +10,6 @@ const PINECONE_INDEX_NAME = process.env.PINECONE_INDEX_NAME ?? '';
 
 const PINECONE_NAME_SPACE = 'pdf-test'; //namespace is optional for your vectors
 
-export { PINECONE_INDEX_NAME, PINECONE_NAME_SPACE };
+const PINECONE_TOPK = Number(process.env.PINECONE_TOPK) || 4; // top-K sources per query; falls back to 4 when unset/empty/invalid
+
+export { PINECONE_INDEX_NAME, PINECONE_NAME_SPACE, PINECONE_TOPK };
diff --git a/utils/makechain.ts b/utils/makechain.ts
index 45f6f1dff..e7054e8b2 100644
--- a/utils/makechain.ts
+++ b/utils/makechain.ts
@@ -1,6 +1,7 @@
 import { OpenAI } from 'langchain/llms/openai';
 import { PineconeStore } from 'langchain/vectorstores/pinecone';
 import { ConversationalRetrievalQAChain } from 'langchain/chains';
+import { PINECONE_TOPK } from '@/config/pinecone';
 
 const CONDENSE_PROMPT = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
 
@@ -26,7 +27,7 @@ export const makeChain = (vectorstore: PineconeStore) => {
 
   const chain = ConversationalRetrievalQAChain.fromLLM(
     model,
-    vectorstore.asRetriever(),
+    vectorstore.asRetriever(PINECONE_TOPK),
     {
       qaTemplate: QA_PROMPT,
       questionGeneratorTemplate: CONDENSE_PROMPT,