
qa-chat-pinecone.ts

Sets up a conversational chat system that uses the gpt-3.5-turbo-16k model to answer user queries. It initialises the necessary components (a Pinecone vector store and a conversational retrieval QA chain) and exposes a query method that returns the generated answer to a question.

import { OpenAI } from "langchain/llms/openai";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { BufferMemory } from "langchain/memory";
import { PineconeStore } from "langchain/vectorstores/pinecone";
import { ConversationalRetrievalQAChain } from "langchain/chains";
import { getPineconeClient } from "../utils/pinecone.utils";
import { PromptTemplate } from "langchain/prompts";
import { getConversationPrompt, getQuestionPrompt } from "./prompt";

export class ConversationalRetrievalChat {
  private chain!: ConversationalRetrievalQAChain;
  private vectorStore!: PineconeStore;
  // Keeps the running conversation so follow-up questions have context.
  private memory = new BufferMemory({ memoryKey: "chat_history", inputKey: "question", outputKey: "text" });
  private model = new OpenAI({ modelName: "gpt-3.5-turbo-16k", temperature: 0 });
  private initialised = false;

  constructor(private indexName: string) {}

  // Lazily wires up the vector store and chain; subsequent calls are no-ops.
  public async init() {
    if (this.initialised) {
      console.log("already initialised");
      return;
    }
    const pineconeIndex = await this.getPineconeIndex();
    const embeddings = new OpenAIEmbeddings({ batchSize: 2000 });
    this.vectorStore = await PineconeStore.fromExistingIndex(embeddings, { pineconeIndex });
    this.chain = this.getChain();
    this.initialised = true;
    console.log("initialised");
  }

  private getChain() {
    // Similarity search over the Pinecone index supplies the retrieved context.
    const retriever = this.vectorStore.asRetriever({ searchType: "similarity" });
    return ConversationalRetrievalQAChain.fromLLM(this.model, retriever, {
      returnSourceDocuments: false,
      memory: this.memory,
      // "stuff" inserts all retrieved documents directly into the QA prompt.
      qaChainOptions: { type: "stuff", prompt: this.getPrompt() },
      // Rephrases follow-up questions into standalone questions before retrieval.
      questionGeneratorChainOptions: { llm: this.model, template: getQuestionPrompt() },
    });
  }

  private getPrompt() {
    // The template must contain the {question} and {context} placeholders.
    return new PromptTemplate({ template: getConversationPrompt(), inputVariables: ["question", "context"] });
  }

  private async getPineconeIndex() {
    return (await getPineconeClient()).Index(this.indexName);
  }

  // Answers a question, initialising the chain on first use.
  public async query(question: string): Promise<string> {
    await this.init();
    const { text } = await this.chain.call({ question });
    return text;
  }
}
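
Below is a minimal sketch of the ./prompt module the class imports, plus a usage example. The template wording and the index name "docs-index" are assumptions for illustration; the only hard requirement is that the QA template exposes the {context} and {question} variables, and the question-generator template exposes {chat_history} and {question}.

// prompt.ts — hypothetical sketch of the imported prompt helpers.
export const getConversationPrompt = () =>
  `Use the following context to answer the question at the end.
If you don't know the answer, just say you don't know.

{context}

Question: {question}
Helpful answer:`;

export const getQuestionPrompt = () =>
  `Given the following conversation and a follow-up question, rephrase the
follow-up question to be a standalone question.

Chat history:
{chat_history}

Follow-up question: {question}
Standalone question:`;

// Example usage — assumes OPENAI_API_KEY and Pinecone credentials are set in
// the environment, and that "docs-index" is an existing, populated index.
(async () => {
  const chat = new ConversationalRetrievalChat("docs-index");
  console.log(await chat.query("What does the setup guide say about API keys?"));
})();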