Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -39,4 +39,6 @@ next-env.d.ts
#Notion_db
/Notion_DB

.yarn/
.yarn/
# LangGraph API
.langgraph_api
7 changes: 7 additions & 0 deletions langgraph.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{
"dependencies": ["."],
"graphs": {
"agent": "./langgraph/graph.ts:graph"
},
"env": ".env"
}
78 changes: 78 additions & 0 deletions langgraph/graph.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import { PINECONE_INDEX_NAME, PINECONE_NAME_SPACE } from "@/config/pinecone";
import { makeChain } from "@/utils/makechain";
import { pinecone } from "@/utils/pinecone-client";
import { AIMessage, HumanMessage } from "@langchain/core/messages";
import { StateGraph } from "@langchain/langgraph";
import type { Document } from 'langchain/document';
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { PineconeStore } from "langchain/vectorstores/pinecone";
import { InputAnnotation, OutputAnnotation } from "./state";


const assistant = async (state: typeof InputAnnotation.State) => {
  const { question, messages } = state;

  // The incoming question becomes the newest human turn. Do NOT mutate
  // `state.messages` (the original pushed onto it in place) — LangGraph state
  // updates must go through the channel reducer.
  const humanTurn = new HumanMessage(question);
  const history = [...messages, humanTurn];

  console.log('question', question);

  console.log('history', history);

  const index = pinecone.Index(PINECONE_INDEX_NAME);

  /* create vectorstore */
  const vectorStore = await PineconeStore.fromExistingIndex(
    new OpenAIEmbeddings({}),
    {
      pineconeIndex: index,
      textKey: 'text',
      namespace: PINECONE_NAME_SPACE, //namespace comes from your config folder
    },
  );

  // Use a callback to get intermediate sources from the middle of the chain;
  // the promise resolves once the retriever finishes.
  let resolveWithDocuments: (value: Document[]) => void;
  const documentPromise = new Promise<Document[]>((resolve) => {
    resolveWithDocuments = resolve;
  });
  const retriever = vectorStore.asRetriever({
    callbacks: [
      {
        handleRetrieverEnd(documents) {
          resolveWithDocuments(documents);
        },
      },
    ],
  });

  // Create the conversational retrieval chain over the Pinecone retriever.
  const chain = makeChain(retriever);

  // Ask the question, providing the chat history including this turn
  // (matches the original behavior, where the question was pushed first).
  const response = await chain.invoke({
    question,
    chat_history: JSON.stringify(history),
  });

  const sourceDocuments = await documentPromise;

  // Return only the NEW messages. The `messages` channel reducer concatenates
  // whatever a node returns onto existing state, so returning the full
  // `[...messages, ...]` history (as the original did) duplicated every prior
  // turn on each invocation.
  return {
    messages: [humanTurn, new AIMessage(response)],
    sourceDocuments,
    answer: response,
  };
};

// Conditional edge after "assistant": terminate once any messages exist in
// state (the assistant node always appends some), otherwise loop back to the
// assistant node.
export const route = (state: typeof InputAnnotation.State): "__end__" | "assistant" =>
  state.messages.length > 0 ? "__end__" : "assistant";

// Wire the graph: a single "assistant" node runs the RAG chain, then `route`
// decides whether to end (the normal case, once messages exist) or loop back.
const builder = new StateGraph({ input: InputAnnotation, output: OutputAnnotation })
.addNode("assistant", assistant)
.addEdge("__start__", "assistant")
.addConditionalEdges("assistant", route);

// Compiled runnable graph — referenced by langgraph.json as the "agent" graph.
export const graph = builder.compile();



28 changes: 28 additions & 0 deletions langgraph/state.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import { BaseMessage } from "@langchain/core/messages";
import { Annotation } from "@langchain/langgraph";
import type { Document } from "langchain/document";

// Input state for the graph: the user's current question plus the running
// conversation history. Messages returned by a node are appended to the
// existing list by the reducer.
export const InputAnnotation = Annotation.Root({
  question: Annotation<string>,
  messages: Annotation<BaseMessage[]>({
    // Array.prototype.concat appends a lone message and flattens an array of
    // messages alike, covering both update shapes in a single call.
    reducer: (left: BaseMessage[], right: BaseMessage | BaseMessage[]) =>
      left.concat(right),
    default: () => [],
  }),
});

// Output state: the final answer plus the documents retrieved to produce it.
// NOTE(review): `Document` is not imported in this file, so it resolves to the
// DOM `Document` type, not langchain's — confirm and import from
// "langchain/document" as graph.ts does.
export const OutputAnnotation = Annotation.Root({
  sourceDocuments: Annotation<Document[]>({
    // Last-write-wins: each update replaces the previous documents rather
    // than accumulating them across runs.
    reducer: (left: Document[], right: Document | Document[]) => {
      if (Array.isArray(right)) {
        return right;
      }
      return [right];
    },
    default: () => [],
  }),
  answer: Annotation<string>
});
2 changes: 2 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
"ingest": "tsx -r dotenv/config scripts/ingest-data.ts"
},
"dependencies": {
"@langchain/core": "^0.3.37",
"@langchain/langgraph": "^0.2.44",
"@microsoft/fetch-event-source": "^2.0.1",
"@pinecone-database/pinecone": "1.1.0",
"@radix-ui/react-accordion": "^1.1.1",
Expand Down
44 changes: 44 additions & 0 deletions pages/api/langgraph.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { AIMessage, HumanMessage } from '@langchain/core/messages';
import { graph } from '@/langgraph/graph';

export default async function handler(
req: NextApiRequest,
res: NextApiResponse,
) {
const { question, history } = req.body;

console.log('question', question);
console.log('history', history);

//only accept post requests
if (req.method !== 'POST') {
res.status(405).json({ error: 'Method not allowed' });
return;
}

if (!question) {
return res.status(400).json({ message: 'No question in the request' });
}
// OpenAI recommends replacing newlines with spaces for best results
const sanitizedQuestion = question.trim().replaceAll('\n', ' ');

try {
const pastMessages = history
.map((message: [string, string]) => {
return [new HumanMessage(message[0]), new AIMessage(message[1])]
})
console.log(pastMessages);

//Ask a question using chat history
const response = await graph.invoke({ question: sanitizedQuestion, messages: pastMessages });

const sourceDocuments = response.sourceDocuments;

console.log('response', response);
res.status(200).json({ text: response.answer, sourceDocuments });
} catch (error: any) {
console.log('error', error);
res.status(500).json({ error: error.message || 'Something went wrong' });
}
}