-
Checked other resources
Commit to Help
Example Code'use strict';
require('dotenv').config()
const { ChatOllama } = require("@langchain/community/chat_models/ollama");
const { RecursiveCharacterTextSplitter } = require("langchain/text_splitter");
const { OllamaEmbeddings } = require("@langchain/community/embeddings/ollama");
const { createStuffDocumentsChain } = require("langchain/chains/combine_documents");
const { ChatPromptTemplate } = require("@langchain/core/prompts");
const { createRetrievalChain } = require("langchain/chains/retrieval");
const { TextLoader } = require("langchain/document_loaders/fs/text");
const { StringOutputParser } = require("@langchain/core/output_parsers");
const { createClient } = require("redis");
const { RedisVectorStore } = require("@langchain/redis");
async function main() {
console.time('---> start');
const client = createClient({
url: "redis://localhost:6379",
});
await client.connect();
const redisAddOptions = {
redisClient: client,
indexName: "docs",
}
const embeddings = new OllamaEmbeddings({
model: "qwen",
maxConcurrency: 5,
});
const vectorStore = new RedisVectorStore(embeddings, redisAddOptions);
await vectorStore.dropIndex(true)
let hasIndex = await vectorStore.checkIndexExists();
console.log("hasIndex", hasIndex);
if (!hasIndex) {
const loader = new TextLoader("docs/a.txt");
const docs = await loader.load();
const splitter = new RecursiveCharacterTextSplitter({
chunkSize: 256,
chunkOverlap: 0,
});
const splitDocs = await splitter.splitDocuments(docs);
await vectorStore.addDocuments(splitDocs);
}
console.timeEnd('---> start');
const chatModel = new ChatOllama({
baseUrl: "http://localhost:11434", // Default value
model: "qwen",
max_token: 80000,
topP: 0.9,
// temperature: 0
});
const prompt =
ChatPromptTemplate.fromTemplate(`You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Use three sentences maximum and keep the answer concise.
Question: {input}
Context: {context}
Answer:`);
const documentChain = await createStuffDocumentsChain({
llm: chatModel,
outputParser: new StringOutputParser(),
prompt,
});
const retriever = vectorStore.asRetriever()
const chain = await createRetrievalChain({
retriever,
documentChain
});
const chainRes = await chain.invoke({ input: "Hot Are You?" });
console.log(chainRes);
await client.disconnect();
}
main() Descriptionerror msg throw new Error(`Expected a Runnable, function or object.\nInstead got an unsupported type.`);
^
Error: Expected a Runnable, function or object.
Instead got an unsupported type. I found this, but I don't know what to do with it! System InfoMac M1 |
Beta Was this translation helpful? Give feedback.
Replies: 2 comments 1 reply
-
Hey there, @lycpan233! Great to see you diving into some adventurous coding again. How's everything on your end? Based on the error message you're encountering, it seems like there might be an issue with the type of argument being passed to a function or method within the LangChain JS framework. The error message "Expected a Runnable, function or object. Instead got an unsupported type." suggests that a function, method, or constructor is receiving an argument of a type it doesn't support. From the code snippet you provided, it appears that you're correctly using the LangChain JS framework's API for creating a
Given the information provided, the best course of action would be to:
If the issue persists after these checks, please provide more information about the specific circumstances under which the error occurs, such as the exact line of code that triggers the error, the versions of the LangChain JS packages you're using, and any relevant configuration details. Regarding the similar issue you found in the LangChain JS repository, it seems to be related to a breaking change introduced in version 0.0.144. The last version that didn't introduce breaking changes in vector stores was LangChain 0.0.143. You can find more details about this issue here. However, it's not clear if this issue is directly related to the error you're encountering. It might be worth trying to downgrade to version 0.0.143 to see if that resolves the issue. I hope this helps! If you have any further questions, feel free to ask. Sources
|
Beta Was this translation helpful? Give feedback.
-
mehh same thing :( @lycpan233 have you solved this? |
Beta Was this translation helpful? Give feedback.
Hey there, @lycpan233! Great to see you diving into some adventurous coding again. How's everything on your end?
Based on the error message you're encountering, it seems like there might be an issue with the type of argument being passed to a function or method within the LangChain JS framework. The error message "Expected a Runnable, function or object. Instead got an unsupported type." suggests that a function, method, or constructor is receiving an argument of a type it doesn't support.
From the code snippet you provided, it appears that you're correctly using the LangChain JS framework's API for creating a
RedisVectorStore
from a set of documents. However, the issue might not be dire…