node.js, next.js, langchain, datastax-astra, langchain-js

RAG app fails using DataAPIClient: "Error loading the fetch-h2 client for the DataAPIClient... try setting httpOptions.client to 'fetch'"


error:

<script id="__NEXT_DATA__" type="application/json">{
    "props": {
        "pageProps": {
            "statusCode": 500
        }
    },
    "page": "/_error",
    "query": {
        "__NEXT_PAGE": "/api/chat"
    },
    "buildId": "development",
    "isFallback": false,
    "err": {
        "name": "Error",
        "source": "server",
        "message": "Error loading the fetch-h2 client for the DataAPIClient... try setting httpOptions.client to 'fetch'",
        "stack": "Error: Error loading the fetch-h2 client for the DataAPIClient... try setting httpOptions.client to 'fetch'\n

Above is the error that occurred in my RAG application.

The API route where the error occurs when called:

import { NextRequest, NextResponse } from 'next/server';
import { getVectorStore } from '@/lib/astradb';
import { AIMessage, HumanMessage } from '@langchain/core/messages';
import {
  ChatPromptTemplate,
  MessagesPlaceholder,
  PromptTemplate,
} from '@langchain/core/prompts';
import { ChatOpenAI } from '@langchain/openai';
import { Redis } from '@upstash/redis';
import { Ratelimit } from '@upstash/ratelimit';
import {
  LangChainStream, StreamingTextResponse,
  Message as VercelChatMessage,
} from 'ai';
import { UpstashRedisCache } from '@langchain/community/caches/upstash_redis';
import { createStuffDocumentsChain } from 'langchain/chains/combine_documents';
import { createHistoryAwareRetriever } from 'langchain/chains/history_aware_retriever';
import { createRetrievalChain } from 'langchain/chains/retrieval';
import https from 'https';


const ratelimit = new Ratelimit({
  redis: Redis.fromEnv(),
  limiter: Ratelimit.fixedWindow(8, '30s'),
});

export async function POST(req: NextRequest) {

  console.log('api requested,dddddddddddddddddddddddddddddd')

  try {

    const ip = req.ip ?? 'ip';
    const { success, remaining } = await ratelimit.limit(ip);

    // block the request if unsuccessful
    if (!success) {
      return new Response('Ratelimited!', { status: 429 });
    }

    const body = await req.json();
    const messages = body.messages;

    const chatHistory = messages
      .slice(0, -1)
      .map((m: VercelChatMessage) =>
        m.role === 'user'
          ? new HumanMessage(m.content)
          : new AIMessage(m.content)
      );

    const currentMessageContent = messages[messages.length - 1].content;

    const cache = new UpstashRedisCache({
      client: Redis.fromEnv({
        agent: new https.Agent({ keepAlive: true }),
      }),
    });

    const { stream, handlers } = LangChainStream();

    const chatModel = new ChatOpenAI({
      apiKey: process.env.OPENAI_API_KEY!,
      modelName: 'gpt-3.5-turbo',
      streaming: true,
      callbacks: [handlers],
      verbose: true,
      cache,
    });

    const rephrasingModel = new ChatOpenAI({
      apiKey: process.env.OPENAI_API_KEY!,
      modelName: 'gpt-3.5-turbo',
      verbose: true,
      cache,
    });

    const retriever = (await getVectorStore()).asRetriever();

    const rephrasePrompt = ChatPromptTemplate.fromMessages([
      new MessagesPlaceholder('chat_history'),
      ['user', '{input}'],
      [
        'user',
        'Given the above conversation, generate a search query to look up in order to get information relevant to the current question. ' +
          "Don't leave out any relevant keywords. Only return the query and no other text.",
      ],
    ]);

    const historyAwareRetrieverChain = await createHistoryAwareRetriever({
      llm: rephrasingModel,
      retriever,
      rephrasePrompt,
    });

    const prompt = ChatPromptTemplate.fromMessages([
      [
        'system',
        "You are a chatbot for a Decentralized betting website nameDecentBet. You impersonate the website's owner. " +
          "Answer the user's questions based on the below context. " +
          'Format your messages in markdown format.\n\n' +
          'Context:\n{context}',
      ],
      new MessagesPlaceholder('chat_history'),
      ['user', '{input}'],
    ]);

    const combineDocsChain = await createStuffDocumentsChain({
      llm: chatModel,
      prompt,
      // documentPrompt: PromptTemplate.fromTemplate(
      //   'Page URL: {url}\n\nPage content:\n{page_content}'
      // ),
      documentSeparator: '\n--------\n',
    });

    const retrievalChain = await createRetrievalChain({
      combineDocsChain,
      retriever: historyAwareRetrieverChain,
    });

    retrievalChain.invoke({
      input: currentMessageContent,
      chat_history: chatHistory,
    });

    return new StreamingTextResponse(stream);
  } catch (error) {
    console.error("Error:---",error);
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 });
  }
}

Here is the route code where the error occurs when called. Strangely, the same code works fine in my other application.

Astra DB initialization:

import { DataAPIClient } from '@datastax/astra-db-ts';
import { AstraDBVectorStore } from '@langchain/community/vectorstores/astradb';
import { OpenAIEmbeddings } from '@langchain/openai';
const endpoint = process.env.ASTRA_DB_ENDPOINT || '';
const token = process.env.ASTRA_DB_APPLICATION_TOKEN || '';
const collection = process.env.ASTRA_DB_COLLECTION || '';   



if (!token || !endpoint || !collection) {
  throw new Error(
    'Please set ASTRA_DB_ENDPOINT, ASTRA_DB_APPLICATION_TOKEN, and ASTRA_DB_COLLECTION environment variables.'
  );
}

export async function getVectorStore() {
  return AstraDBVectorStore.fromExistingIndex(
    new OpenAIEmbeddings({ modelName: 'text-embedding-3-small' }),
    {
      token,
      endpoint,
      collection,
      collectionOptions: {
        vector: {
          dimension: 1536,
          metric: 'cosine',
        },
      },
    }
  );
}
const client = new DataAPIClient(token);
const db = client.db(endpoint);


export async function getEmbeddingsCollection() {
  return db.collection(collection);
}

I'm unable to get past this error, but the same code with the same dependencies works fine in my other app (https://github.com/Sandeepreddyr12/My-Portfolio/blob/main/src/app/api/chat/route.ts).

Note: I've rewritten the code following the latest versions of the respective docs, but that hasn't worked; I still can't get past this error.

Stack: Next.js, the ai library for streaming, Upstash for caching, and DataStax Astra DB for embeddings.

Please let me know a workaround to get past this. Thank you.


Solution

  • There are a couple of versions of @datastax/astra-db-ts that cause this error. It was fixed recently, so you should update the dependency to the latest version.

    npm install @datastax/astra-db-ts@latest
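
  • If updating isn't immediately an option, the error message itself points to a workaround: tell the client to use the standard fetch API instead of fetch-h2 via httpOptions. Below is a minimal sketch against the astradb.ts above, assuming an astra-db-ts version whose DataAPIClient constructor accepts an options object with httpOptions (verify against the version you actually have installed, e.g. with npm ls @datastax/astra-db-ts):

    import { DataAPIClient } from '@datastax/astra-db-ts';

    const token = process.env.ASTRA_DB_APPLICATION_TOKEN || '';
    const endpoint = process.env.ASTRA_DB_ENDPOINT || '';

    // Use the built-in fetch implementation instead of loading the
    // optional fetch-h2 client, which is what fails in this setup.
    const client = new DataAPIClient(token, {
      httpOptions: { client: 'fetch' },
    });
    const db = client.db(endpoint);

    Note that AstraDBVectorStore from @langchain/community creates its own astra-db-ts client internally from the token and endpoint you pass it, so upgrading the dependency remains the more complete fix for the getVectorStore() path.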