diff --git a/.gitignore b/.gitignore
index fa5a67b96..17be0a343 100644
--- a/.gitignore
+++ b/.gitignore
@@ -39,4 +39,6 @@
 next-env.d.ts
 #Notion_db
 /Notion_DB
-.yarn/
\ No newline at end of file
+.yarn/
+# LangGraph API
+.langgraph_api
diff --git a/langgraph.json b/langgraph.json
new file mode 100644
index 000000000..1c7c5ff6f
--- /dev/null
+++ b/langgraph.json
@@ -0,0 +1,7 @@
+{
+  "dependencies": ["."],
+  "graphs": {
+    "agent": "./langgraph/graph.ts:graph"
+  },
+  "env": ".env"
+}
diff --git a/langgraph/graph.ts b/langgraph/graph.ts
new file mode 100644
index 000000000..119fbbc7c
--- /dev/null
+++ b/langgraph/graph.ts
@@ -0,0 +1,79 @@
+import { PINECONE_INDEX_NAME, PINECONE_NAME_SPACE } from "@/config/pinecone";
+import { makeChain } from "@/utils/makechain";
+import { pinecone } from "@/utils/pinecone-client";
+import { AIMessage, HumanMessage } from "@langchain/core/messages";
+import { StateGraph } from "@langchain/langgraph";
+import type { Document } from 'langchain/document';
+import { OpenAIEmbeddings } from "langchain/embeddings/openai";
+import { PineconeStore } from "langchain/vectorstores/pinecone";
+import { InputAnnotation, OutputAnnotation } from "./state";
+
+const assistant = async (state: typeof InputAnnotation.State) => {
+  const { question, messages } = state;
+
+  // Append the incoming question to the running history for this turn
+  const currentMessages = [...messages, new HumanMessage(question)];
+
+  console.log('question', question);
+  console.log('history', currentMessages);
+
+  const index = pinecone.Index(PINECONE_INDEX_NAME);
+
+  /* create vectorstore */
+  const vectorStore = await PineconeStore.fromExistingIndex(
+    new OpenAIEmbeddings({}),
+    {
+      pineconeIndex: index,
+      textKey: 'text',
+      namespace: PINECONE_NAME_SPACE, // namespace comes from your config folder
+    },
+  );
+
+  // Use a callback to capture the retrieved source documents from the middle of the chain
+  let resolveWithDocuments: (value: Document[]) => void;
+  const documentPromise = new Promise<Document[]>((resolve) => {
+    resolveWithDocuments = resolve;
+  });
+  const retriever = vectorStore.asRetriever({
+    callbacks: [
+      {
+        handleRetrieverEnd(documents) {
+          resolveWithDocuments(documents);
+        },
+      },
+    ],
+  });
+
+  // create chain
+  const chain = makeChain(retriever);
+
+  // Ask the question using the chat history
+  const response = await chain.invoke({
+    question,
+    chat_history: JSON.stringify(currentMessages),
+  });
+
+  const sourceDocuments = await documentPromise;
+
+  // Return only the new messages; the reducer in state.ts appends them to the history
+  return {
+    messages: [new HumanMessage(question), new AIMessage(response)],
+    sourceDocuments,
+    answer: response,
+  };
+};
+
+export const route = (state: typeof InputAnnotation.State): "__end__" | "assistant" => {
+  if (state.messages.length > 0) {
+    return "__end__";
+  }
+  // Loop back until the assistant has produced a response
+  return "assistant";
+};
+
+const builder = new StateGraph({ input: InputAnnotation, output: OutputAnnotation })
+  .addNode("assistant", assistant)
+  .addEdge("__start__", "assistant")
+  .addConditionalEdges("assistant", route);
+
+export const graph = builder.compile();
diff --git a/langgraph/state.ts b/langgraph/state.ts
new file mode 100644
index 000000000..e75fc7da3
--- /dev/null
+++ b/langgraph/state.ts
@@ -0,0 +1,29 @@
+import { BaseMessage } from "@langchain/core/messages";
+import { Annotation } from "@langchain/langgraph";
+import type { Document } from "langchain/document";
+
+export const InputAnnotation = Annotation.Root({
+  question: Annotation<string>,
+  messages: Annotation<BaseMessage[]>({
+    reducer: (left: BaseMessage[], right: BaseMessage | BaseMessage[]) => {
+      if (Array.isArray(right)) {
+        return left.concat(right);
+      }
+      return left.concat([right]);
+    },
+    default: () => [],
+  }),
+});
+
+export const OutputAnnotation = Annotation.Root({
+  sourceDocuments: Annotation<Document[]>({
+    reducer: (left: Document[], right: Document | Document[]) => {
+      if (Array.isArray(right)) {
+        return right;
+      }
+      return [right];
+    },
+    default: () => [],
+  }),
+  answer: Annotation<string>,
+});
\ No newline at end of file
diff --git a/package.json b/package.json
index 0c3498b71..8ad4f2a27 100644
--- a/package.json
+++ b/package.json
@@ -15,6 +15,8 @@
     "ingest": "tsx -r dotenv/config scripts/ingest-data.ts"
   },
   "dependencies": {
+    "@langchain/core": "^0.3.37",
+    "@langchain/langgraph": "^0.2.44",
     "@microsoft/fetch-event-source": "^2.0.1",
     "@pinecone-database/pinecone": "1.1.0",
     "@radix-ui/react-accordion": "^1.1.1",
diff --git a/pages/api/langgraph.ts b/pages/api/langgraph.ts
new file mode 100644
index 000000000..9f8f3ec44
--- /dev/null
+++ b/pages/api/langgraph.ts
@@ -0,0 +1,48 @@
+import type { NextApiRequest, NextApiResponse } from 'next';
+import { AIMessage, HumanMessage } from '@langchain/core/messages';
+import { graph } from '@/langgraph/graph';
+
+export default async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse,
+) {
+  // Only accept POST requests
+  if (req.method !== 'POST') {
+    res.status(405).json({ error: 'Method not allowed' });
+    return;
+  }
+
+  const { question, history } = req.body;
+
+  console.log('question', question);
+  console.log('history', history);
+
+  if (!question) {
+    return res.status(400).json({ message: 'No question in the request' });
+  }
+
+  // OpenAI recommends replacing newlines with spaces for best results
+  const sanitizedQuestion = question.trim().replaceAll('\n', ' ');
+
+  try {
+    // Flatten [human, ai] string pairs into a single list of chat messages
+    const pastMessages = (history ?? []).flatMap((message: [string, string]) => {
+      return [new HumanMessage(message[0]), new AIMessage(message[1])];
+    });
+
+    // Ask the question using the chat history
+    const response = await graph.invoke({
+      question: sanitizedQuestion,
+      messages: pastMessages,
+    });
+
+    console.log('response', response);
+    res.status(200).json({
+      text: response.answer,
+      sourceDocuments: response.sourceDocuments,
+    });
+  } catch (error: any) {
+    console.log('error', error);
+    res.status(500).json({ error: error.message || 'Something went wrong' });
+  }
+}
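
Testing note: a minimal sketch of how the new route could be smoke-tested once the dev server is running. The /api/langgraph path follows Next.js routing for pages/api/langgraph.ts; the port and the sample payload below are assumptions for illustration, not part of this change.

// smoke-test.ts — assumes `next dev` is serving on localhost:3000 (hypothetical setup)
const res = await fetch('http://localhost:3000/api/langgraph', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    question: 'What does this project do?',
    // history is a list of [human, ai] string pairs from earlier turns
    history: [['Hi', 'Hello! How can I help?']],
  }),
});
console.log(await res.json()); // expected shape: { text, sourceDocuments }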