diff --git a/docs/core_docs/docs/integrations/chat/novita.ipynb b/docs/core_docs/docs/integrations/chat/novita.ipynb
index a65041f95222..12a42b905678 100644
--- a/docs/core_docs/docs/integrations/chat/novita.ipynb
+++ b/docs/core_docs/docs/integrations/chat/novita.ipynb
@@ -35,7 +35,7 @@
     "### Model features\n",
     "| [Tool calling](/docs/how_to/tool_calling) | [Structured output](/docs/how_to/structured_output/) | JSON mode | [Image input](/docs/how_to/multimodal_inputs/) | Audio input | Video input | [Token-level streaming](/docs/how_to/chat_streaming/) | Native async | [Token usage](/docs/how_to/chat_token_usage_tracking/) | [Logprobs](/docs/how_to/logprobs/) |\n",
     "| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: |\n",
-    "| ❌ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | ❌ |"
+    "| ✅ | ✅ | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ | ✅ | ✅ |"
    ]
   },
   {
diff --git a/libs/langchain-community/src/chat_models/novita.ts b/libs/langchain-community/src/chat_models/novita.ts
index 59726db84238..2c76fb0c49fd 100644
--- a/libs/langchain-community/src/chat_models/novita.ts
+++ b/libs/langchain-community/src/chat_models/novita.ts
@@ -11,24 +11,8 @@ import {
 } from "@langchain/openai";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
 
-type NovitaUnsupportedArgs =
-  | "frequencyPenalty"
-  | "presencePenalty"
-  | "logitBias"
-  | "functions";
-
-type NovitaUnsupportedCallOptions = "functions" | "function_call";
-
-export interface ChatNovitaCallOptions
-  extends Omit<ChatOpenAICallOptions, NovitaUnsupportedCallOptions> {
-  response_format: {
-    type: "json_object";
-    schema: Record<string, unknown>;
-  };
-}
-
 export interface ChatNovitaInput
-  extends Omit<OpenAIChatInput, "openAIApiKey" | NovitaUnsupportedArgs>,
+  extends Omit<OpenAIChatInput, "openAIApiKey">,
     BaseChatModelParams {
   /**
    * Novita API key
@@ -45,7 +29,7 @@ export interface ChatNovitaInput
 /**
  * Novita chat model implementation
  */
-export class ChatNovitaAI extends ChatOpenAI<ChatNovitaCallOptions> {
+export class ChatNovitaAI extends ChatOpenAI<ChatOpenAICallOptions> {
   static lc_name() {
     return "ChatNovita";
   }
@@ -65,7 +49,7 @@
 
   constructor(
     fields?: Partial<
-      Omit<OpenAIChatInput, "openAIApiKey" | NovitaUnsupportedArgs>
+      Omit<OpenAIChatInput, "openAIApiKey">
     > &
       BaseChatModelParams & {
         novitaApiKey?: string;
@@ -85,7 +69,7 @@
     super({
       ...fields,
-      model: fields?.model || "gryphe/mythomax-l2-13b",
+      model: fields?.model || "qwen/qwen-2.5-72b-instruct",
       apiKey: novitaApiKey,
       configuration: {
         baseURL: "https://api.novita.ai/v3/openai/",
       },
@@ -133,15 +117,33 @@
     | AsyncIterable<OpenAIClient.Chat.Completions.ChatCompletionChunk>
     | OpenAIClient.Chat.Completions.ChatCompletion
   > {
-    delete request.frequency_penalty;
-    delete request.presence_penalty;
-    delete request.logit_bias;
-    delete request.functions;
+    if (request.response_format) {
+      if (request.response_format.type === "json_object") {
+        request.response_format = {
+          type: "json_object",
+        };
+      } else if ('json_schema' in request.response_format) {
+        const json_schema = request.response_format.json_schema;
+        request.response_format = {
+          type: "json_schema",
+          json_schema,
+        };
+      }
+    }
 
-    if (request.stream === true) {
-      return super.completionWithRetry(request, options);
+    if (!request.model) {
+      request.model = "qwen/qwen-2.5-72b-instruct";
     }
 
-    return super.completionWithRetry(request, options);
+    try {
+      if (request.stream === true) {
+        return super.completionWithRetry(request, options);
+      }
+
+      return super.completionWithRetry(request, options);
+    } catch (error: any) {
+      console.error("Novita API call failed:", error.message || error);
+      throw error;
+    }
   }
 }
diff --git a/libs/langchain-community/src/chat_models/tests/chatnovita.int.test.ts b/libs/langchain-community/src/chat_models/tests/chatnovita.int.test.ts
index 91a134d45c57..91af57210f65 100644
--- a/libs/langchain-community/src/chat_models/tests/chatnovita.int.test.ts
+++ b/libs/langchain-community/src/chat_models/tests/chatnovita.int.test.ts
@@ -7,19 +7,22 @@ import {
   HumanMessagePromptTemplate,
   SystemMessagePromptTemplate,
 } from "@langchain/core/prompts";
+import { formatToOpenAITool } from "@langchain/openai";
+import { StructuredTool } from "@langchain/core/tools";
+import { z } from "zod";
 import { ChatNovitaAI } from "../novita.js";
 
 describe("ChatNovitaAI", () => {
   test("invoke", async () => {
     const chat = new ChatNovitaAI();
-    const message = new HumanMessage("Hello!");
+    const message = new HumanMessage("Hello! Who are you?");
     const res = await chat.invoke([message]);
     expect(res.content.length).toBeGreaterThan(10);
   });
 
   test("generate", async () => {
     const chat = new ChatNovitaAI();
-    const message = new HumanMessage("Hello!");
+    const message = new HumanMessage("Hello! Who are you?");
     const res = await chat.generate([[message]]);
     expect(res.generations[0][0].text.length).toBeGreaterThan(10);
   });
@@ -53,7 +56,6 @@
 
   test("prompt templates", async () => {
     const chat = new ChatNovitaAI();
-    // PaLM doesn't support translation yet
     const systemPrompt = PromptTemplate.fromTemplate(
       "You are a helpful assistant who must always respond like a {job}."
     );
@@ -88,4 +90,54 @@
     ]);
     expect(responseA.generations[0][0].text.length).toBeGreaterThan(10);
   });
-});
+
+  test("JSON mode", async () => {
+    const chat = new ChatNovitaAI().bind({
+      response_format: {
+        type: "json_object"
+      },
+    });
+    const prompt = ChatPromptTemplate.fromMessages([
+      ["system", "You are a helpful assistant who responds in JSON. You must return a JSON object with an 'orderedArray' property containing the numbers in descending order."],
+      ["human", "Please list this output in order of DESC [1, 4, 2, 8]."],
+    ]);
+    const res = await prompt.pipe(chat).invoke({});
+    expect(typeof res.content).toBe("string");
+    expect(JSON.parse(res.content as string)).toMatchObject({
+      orderedArray: expect.any(Array),
+    });
+  });
+
+  test("Tool calls", async () => {
+    class CalculatorTool extends StructuredTool {
+      name = "Calculator";
+
+      schema = z.object({
+        a: z.number(),
+        b: z.number(),
+      });
+
+      description = "A simple calculator tool.";
+
+      constructor() {
+        super();
+      }
+
+      async _call(input: { a: number; b: number }) {
+        return JSON.stringify({ total: input.a + input.b });
+      }
+    }
+    const tool = formatToOpenAITool(new CalculatorTool());
+    const chat = new ChatNovitaAI().bind({
+      tools: [tool],
+    });
+    const prompt = ChatPromptTemplate.fromMessages([
+      ["system", "You are a helpful assistant."],
+      ["human", "What is 1273926 times 27251?"],
+    ]);
+    const res = await prompt.pipe(chat).invoke({});
+    expect(res.tool_calls?.length).toBeGreaterThan(0);
+    expect(res.tool_calls?.[0].args)
+      .toMatchObject({ a: expect.any(Number), b: expect.any(Number) });
+  });
+});
\ No newline at end of file
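A minimal usage sketch of the behaviour this patch enables, mirroring the `bind` pattern used in the new integration tests; the `@langchain/community/chat_models/novita` entrypoint and the `NOVITA_API_KEY` environment variable are assumptions here rather than anything shown in the diff itself.

```typescript
// Sketch only: assumes NOVITA_API_KEY is set and the community entrypoint below is available.
import { ChatNovitaAI } from "@langchain/community/chat_models/novita";
import { HumanMessage } from "@langchain/core/messages";

async function main() {
  // JSON mode, as exercised by the new "JSON mode" test: the patched
  // completionWithRetry forwards response_format { type: "json_object" } to the API.
  const chat = new ChatNovitaAI({ model: "qwen/qwen-2.5-72b-instruct" }).bind({
    response_format: { type: "json_object" },
  });

  const res = await chat.invoke([
    new HumanMessage(
      "Return a JSON object with an 'orderedArray' property sorting [1, 4, 2, 8] in descending order."
    ),
  ]);

  // Content is a JSON string when JSON mode is honored.
  console.log(JSON.parse(res.content as string));
}

main().catch(console.error);
```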