diff --git a/docs/core_docs/docs/how_to/tool_calling_parallel.ipynb b/docs/core_docs/docs/how_to/tool_calling_parallel.ipynb
index 97f188d00549..f9800c95c6f5 100644
--- a/docs/core_docs/docs/how_to/tool_calling_parallel.ipynb
+++ b/docs/core_docs/docs/how_to/tool_calling_parallel.ipynb
@@ -56,7 +56,7 @@
"});\n",
"\n",
"const multiplyTool = tool(async ({ a, b }) => {\n",
- " return a + b;\n",
+ " return a * b;\n",
"}, {\n",
" name: \"multiply\",\n",
" description: \"Multiplies a and b\",\n",
@@ -220,4 +220,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
-}
\ No newline at end of file
+}
diff --git a/docs/core_docs/docs/how_to/tool_results_pass_to_model.ipynb b/docs/core_docs/docs/how_to/tool_results_pass_to_model.ipynb
index 5f9720f11474..81e40b0fd14e 100644
--- a/docs/core_docs/docs/how_to/tool_results_pass_to_model.ipynb
+++ b/docs/core_docs/docs/how_to/tool_results_pass_to_model.ipynb
@@ -18,7 +18,7 @@
":::\n",
"```\n",
"\n",
- "Some models are capable of [**tool calling**](/docs/concepts/tool_calling) - generating arguments that conform to a specific user-provided schema. This guide will demonstrate how to use those tool cals to actually call a function and properly pass the results back to the model.\n",
+ "Some models are capable of [**tool calling**](/docs/concepts/tool_calling) - generating arguments that conform to a specific user-provided schema. This guide will demonstrate how to use those tool calls to actually call a function and properly pass the results back to the model.\n",
"\n",
"![](../../static/img/tool_invocation.png)\n",
"\n",
@@ -367,4 +367,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
-}
\ No newline at end of file
+}
diff --git a/docs/core_docs/docs/integrations/tools/google_calendar.mdx b/docs/core_docs/docs/integrations/tools/google_calendar.mdx
index 14cfa036c75f..c1ba5b19bc12 100644
--- a/docs/core_docs/docs/integrations/tools/google_calendar.mdx
+++ b/docs/core_docs/docs/integrations/tools/google_calendar.mdx
@@ -25,7 +25,7 @@ import IntegrationInstallTooltip from "@mdx_components/integration_install_toolt
```bash npm2yarn
-npm install @langchain/openai @langchain/core
+npm install @langchain/openai @langchain/core @langchain/community @langchain/langgraph
```
{ToolExample}
diff --git a/docs/core_docs/src/theme/VectorStoreTabs.js b/docs/core_docs/src/theme/VectorStoreTabs.js
index a8ff549e70db..53af6a8a08ef 100644
--- a/docs/core_docs/src/theme/VectorStoreTabs.js
+++ b/docs/core_docs/src/theme/VectorStoreTabs.js
@@ -54,7 +54,7 @@ const ${vectorStoreVarName} = new MongoDBAtlasVectorSearch(embeddings, {
{
value: "PGVector",
label: "PGVector",
- text: `import PGVectorStore from "@langchain/community/vectorstores/pgvector";
+ text: `import { PGVectorStore } from "@langchain/community/vectorstores/pgvector";
const ${vectorStoreVarName} = await PGVectorStore.initialize(embeddings, {})`,
dependencies: "@langchain/community",
diff --git a/examples/package.json b/examples/package.json
index 53faf65a96ce..e06c5ebbe717 100644
--- a/examples/package.json
+++ b/examples/package.json
@@ -93,7 +93,7 @@
"ioredis": "^5.3.2",
"js-yaml": "^4.1.0",
"langchain": "workspace:*",
- "langsmith": "^0.2.8",
+ "langsmith": ">=0.2.8 <0.4.0",
"mongodb": "^6.3.0",
"pg": "^8.11.0",
"pickleparser": "^0.2.1",
diff --git a/examples/src/models/chat/integration_anthropic_pdf.ts b/examples/src/models/chat/integration_anthropic_pdf.ts
new file mode 100644
index 000000000000..eef650100b45
--- /dev/null
+++ b/examples/src/models/chat/integration_anthropic_pdf.ts
@@ -0,0 +1,52 @@
+import { ChatAnthropic } from "@langchain/anthropic";
+
+import * as fs from "fs";
+
+export const run = async () => {
+ const llm = new ChatAnthropic({
+    model: "claude-3-5-sonnet-20240620", // Only claude-3-5-sonnet-20240620 and claude-3-5-sonnet-20241022 (as of Jan 2025) support PDF documents as base64
+ });
+
+ // PDF needs to be in Base64.
+ const getLocalFile = async (path: string) => {
+ const localFile = await fs.readFileSync(path);
+ const base64File = localFile.toString("base64");
+ return base64File;
+ };
+
+ // Or remotely
+ const getRemoteFile = async (url: string) => {
+ const response = await fetch(url);
+ const arrayBuffer = await response.arrayBuffer();
+ const base64File = Buffer.from(arrayBuffer).toString("base64");
+ return base64File;
+ };
+
+ const base64 = await getRemoteFile(
+ "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf"
+ );
+
+ const prompt = "Summarise the contents of this PDF";
+
+ const response = await llm.invoke([
+ {
+ role: "user",
+ content: [
+ {
+ type: "text",
+ text: prompt,
+ },
+ {
+ type: "document",
+ source: {
+ media_type: "application/pdf",
+ type: "base64",
+ data: base64,
+ },
+ },
+ ],
+ },
+ ]);
+ console.log(response.content);
+ return response.content;
+};
diff --git a/examples/src/tools/google_calendar.ts b/examples/src/tools/google_calendar.ts
index 6dee71adca24..948af910c161 100644
--- a/examples/src/tools/google_calendar.ts
+++ b/examples/src/tools/google_calendar.ts
@@ -1,5 +1,5 @@
-import { initializeAgentExecutorWithOptions } from "langchain/agents";
-import { OpenAI } from "@langchain/openai";
+import { createReactAgent } from "@langchain/langgraph/prebuilt";
+import { ChatOpenAI } from "@langchain/openai";
import { Calculator } from "@langchain/community/tools/calculator";
import {
GoogleCalendarCreateTool,
@@ -7,9 +7,10 @@ import {
} from "@langchain/community/tools/google_calendar";
export async function run() {
- const model = new OpenAI({
+ const model = new ChatOpenAI({
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
+ model: "gpt-4o-mini",
});
const googleCalendarParams = {
@@ -31,14 +32,16 @@ export async function run() {
new GoogleCalendarViewTool(googleCalendarParams),
];
- const calendarAgent = await initializeAgentExecutorWithOptions(tools, model, {
- agentType: "zero-shot-react-description",
- verbose: true,
+ const calendarAgent = createReactAgent({
+ llm: model,
+ tools,
});
const createInput = `Create a meeting with John Doe next Friday at 4pm - adding to the agenda of it the result of 99 + 99`;
- const createResult = await calendarAgent.invoke({ input: createInput });
+ const createResult = await calendarAgent.invoke({
+ messages: [{ role: "user", content: createInput }],
+ });
// Create Result {
// output: 'A meeting with John Doe on 29th September at 4pm has been created and the result of 99 + 99 has been added to the agenda.'
// }
@@ -46,7 +49,9 @@ export async function run() {
const viewInput = `What meetings do I have this week?`;
- const viewResult = await calendarAgent.invoke({ input: viewInput });
+ const viewResult = await calendarAgent.invoke({
+ messages: [{ role: "user", content: viewInput }],
+ });
// View Result {
// output: "You have no meetings this week between 8am and 8pm."
// }
diff --git a/langchain-core/package.json b/langchain-core/package.json
index d101b1fb9e23..3c7c27962705 100644
--- a/langchain-core/package.json
+++ b/langchain-core/package.json
@@ -1,6 +1,6 @@
{
"name": "@langchain/core",
- "version": "0.3.30",
+ "version": "0.3.32",
"description": "Core LangChain.js abstractions and schemas",
"type": "module",
"engines": {
@@ -38,7 +38,7 @@
"camelcase": "6",
"decamelize": "1.2.0",
"js-tiktoken": "^1.0.12",
- "langsmith": "^0.2.8",
+ "langsmith": ">=0.2.8 <0.4.0",
"mustache": "^4.2.0",
"p-queue": "^6.6.2",
"p-retry": "4",
diff --git a/langchain-core/src/messages/tests/message_utils.test.ts b/langchain-core/src/messages/tests/message_utils.test.ts
index 9788cbafc4b9..a570ee478f60 100644
--- a/langchain-core/src/messages/tests/message_utils.test.ts
+++ b/langchain-core/src/messages/tests/message_utils.test.ts
@@ -4,10 +4,11 @@ import {
mergeMessageRuns,
trimMessages,
} from "../transformers.js";
-import { AIMessage } from "../ai.js";
+import { AIMessage, AIMessageChunk } from "../ai.js";
import { ChatMessage } from "../chat.js";
import { HumanMessage } from "../human.js";
import { SystemMessage } from "../system.js";
+import { ToolMessage } from "../tool.js";
import { BaseMessage } from "../base.js";
import {
getBufferString,
@@ -187,6 +188,7 @@ describe("trimMessages can trim", () => {
defaultMsgSuffixLen;
}
}
+ console.log(count);
return count;
};
@@ -196,6 +198,84 @@ describe("trimMessages can trim", () => {
};
};
+ it("should not mutate messages if no trimming occurs with strategy last", async () => {
+ const trimmer = trimMessages({
+ maxTokens: 128000,
+ strategy: "last",
+ startOn: [HumanMessage],
+ endOn: [AIMessage, ToolMessage],
+ tokenCounter: () => 1,
+ });
+ const messages = [
+ new HumanMessage({
+ content: "Fetch the last 5 emails from Flora Testington's inbox.",
+ additional_kwargs: {},
+ response_metadata: {},
+ }),
+ new AIMessageChunk({
+ id: "chatcmpl-abcdefg",
+ content: "",
+ additional_kwargs: {
+ tool_calls: [
+ {
+ function: {
+ name: "getEmails",
+ arguments: JSON.stringify({
+ inboxName: "flora@foo.org",
+ amount: 5,
+ folder: "Inbox",
+ searchString: null,
+ from: null,
+ subject: null,
+ }),
+ },
+ id: "foobarbaz",
+ index: 0,
+ type: "function",
+ },
+ ],
+ },
+ response_metadata: {
+ usage: {},
+ },
+ tool_calls: [
+ {
+ name: "getEmails",
+ args: {
+ inboxName: "flora@foo.org",
+ amount: 5,
+ folder: "Inbox",
+ searchString: null,
+ from: null,
+ subject: null,
+ },
+ id: "foobarbaz",
+ type: "tool_call",
+ },
+ ],
+ tool_call_chunks: [
+ {
+ name: "getEmails",
+ args: '{"inboxName":"flora@foo.org","amount":5,"folder":"Inbox","searchString":null,"from":null,"subject":null,"cc":[],"bcc":[]}',
+ id: "foobarbaz",
+ index: 0,
+ type: "tool_call_chunk",
+ },
+ ],
+ invalid_tool_calls: [],
+ }),
+ new ToolMessage({
+ content: "a whole bunch of emails!",
+ name: "getEmails",
+ additional_kwargs: {},
+ response_metadata: {},
+ tool_call_id: "foobarbaz",
+ }),
+ ];
+ const trimmedMessages = await trimmer.invoke(messages);
+ expect(trimmedMessages).toEqual(messages);
+ });
+
it("First 30 tokens, not allowing partial messages", async () => {
const { messages, dummyTokenCounter } = messagesAndTokenCounterFactory();
const trimmedMessages = await trimMessages(messages, {
@@ -319,6 +399,7 @@ describe("trimMessages can trim", () => {
it("Last 30 tokens, including system message, allowing partial messages, end on HumanMessage", async () => {
const { messages, dummyTokenCounter } = messagesAndTokenCounterFactory();
+ console.log(messages);
const trimmedMessages = await trimMessages(messages, {
maxTokens: 30,
tokenCounter: dummyTokenCounter,
diff --git a/langchain-core/src/messages/transformers.ts b/langchain-core/src/messages/transformers.ts
index 15389655d59c..c96ecd69ce48 100644
--- a/langchain-core/src/messages/transformers.ts
+++ b/langchain-core/src/messages/transformers.ts
@@ -274,7 +274,7 @@ function _mergeMessageRuns(messages: BaseMessage[]): BaseMessage[] {
}
const merged: BaseMessage[] = [];
for (const msg of messages) {
- const curr = msg; // Create a shallow copy of the message
+ const curr = msg;
const last = merged.pop();
if (!last) {
merged.push(curr);
@@ -861,20 +861,24 @@ async function _lastMaxTokens(
...rest
} = options;
+ // Create a copy of messages to avoid mutation
+ let messagesCopy = [...messages];
+
if (endOn) {
const endOnArr = Array.isArray(endOn) ? endOn : [endOn];
while (
- messages &&
- !_isMessageType(messages[messages.length - 1], endOnArr)
+ messagesCopy.length > 0 &&
+ !_isMessageType(messagesCopy[messagesCopy.length - 1], endOnArr)
) {
- messages.pop();
+ messagesCopy = messagesCopy.slice(0, -1);
}
}
- const swappedSystem = includeSystem && messages[0]._getType() === "system";
+ const swappedSystem =
+ includeSystem && messagesCopy[0]?._getType() === "system";
let reversed_ = swappedSystem
- ? messages.slice(0, 1).concat(messages.slice(1).reverse())
- : messages.reverse();
+ ? messagesCopy.slice(0, 1).concat(messagesCopy.slice(1).reverse())
+ : messagesCopy.reverse();
reversed_ = await _firstMaxTokens(reversed_, {
...rest,
diff --git a/langchain/package.json b/langchain/package.json
index 04e65bbd2fb5..9d89d69c3eb8 100644
--- a/langchain/package.json
+++ b/langchain/package.json
@@ -1,6 +1,6 @@
{
"name": "langchain",
- "version": "0.3.11",
+ "version": "0.3.12",
"description": "Typescript bindings for langchain",
"type": "module",
"engines": {
@@ -530,7 +530,7 @@
"js-tiktoken": "^1.0.12",
"js-yaml": "^4.1.0",
"jsonpointer": "^5.0.1",
- "langsmith": "^0.2.8",
+ "langsmith": ">=0.2.8 <0.4.0",
"openapi-types": "^12.1.3",
"p-retry": "4",
"uuid": "^10.0.0",
diff --git a/langchain/src/chains/openai_moderation.ts b/langchain/src/chains/openai_moderation.ts
index c1be1dbe911f..1823576f1c78 100644
--- a/langchain/src/chains/openai_moderation.ts
+++ b/langchain/src/chains/openai_moderation.ts
@@ -27,7 +27,7 @@ export interface OpenAIModerationChainInput
* OpenAIModerationChainInput interface.
* @example
* ```typescript
- * const moderation = new ChatOpenAIModerationChain({ throwError: true });
+ * const moderation = new OpenAIModerationChain({ throwError: true });
*
* const badString = "Bad naughty words from user";
*
diff --git a/libs/langchain-anthropic/package.json b/libs/langchain-anthropic/package.json
index 1340f6fc4443..6d3aa16b109a 100644
--- a/libs/langchain-anthropic/package.json
+++ b/libs/langchain-anthropic/package.json
@@ -1,6 +1,6 @@
{
"name": "@langchain/anthropic",
- "version": "0.3.11",
+ "version": "0.3.12",
"description": "Anthropic integrations for LangChain.js",
"type": "module",
"engines": {
diff --git a/libs/langchain-anthropic/src/utils/message_inputs.ts b/libs/langchain-anthropic/src/utils/message_inputs.ts
index 0e0be120f864..df44e296901f 100644
--- a/libs/langchain-anthropic/src/utils/message_inputs.ts
+++ b/libs/langchain-anthropic/src/utils/message_inputs.ts
@@ -131,6 +131,13 @@ function _formatContent(content: MessageContent) {
source,
...(cacheControl ? { cache_control: cacheControl } : {}),
};
+ } else if (contentPart.type === "document") {
+ // PDF
+ return {
+ type: "document",
+ source: contentPart.source,
+ ...(cacheControl ? { cache_control: cacheControl } : {}),
+ };
} else if (
textTypes.find((t) => t === contentPart.type) &&
"text" in contentPart
diff --git a/libs/langchain-azure-cosmosdb/package.json b/libs/langchain-azure-cosmosdb/package.json
index d958472cf933..72ca6fb70e2a 100644
--- a/libs/langchain-azure-cosmosdb/package.json
+++ b/libs/langchain-azure-cosmosdb/package.json
@@ -1,6 +1,6 @@
{
"name": "@langchain/azure-cosmosdb",
- "version": "0.2.6",
+ "version": "0.2.7",
"description": "Azure CosmosDB integration for LangChain.js",
"type": "module",
"engines": {
diff --git a/libs/langchain-azure-cosmosdb/src/chat_histories/mongodb.ts b/libs/langchain-azure-cosmosdb/src/chat_histories/mongodb.ts
index 53104c198d71..8a0ba0264160 100644
--- a/libs/langchain-azure-cosmosdb/src/chat_histories/mongodb.ts
+++ b/libs/langchain-azure-cosmosdb/src/chat_histories/mongodb.ts
@@ -20,7 +20,13 @@ export interface AzureCosmosDBMongoChatHistoryDBConfig {
readonly collectionName?: string;
}
+export type ChatSessionMongo = {
+ id: string;
+  context: Record<string, unknown>;
+};
+
const ID_KEY = "sessionId";
+const ID_USER = "userId";
export class AzureCosmosDBMongoChatMessageHistory extends BaseListChatMessageHistory {
lc_namespace = ["langchain", "stores", "message", "azurecosmosdb"];
@@ -33,6 +39,8 @@ export class AzureCosmosDBMongoChatMessageHistory extends BaseListChatMessageHis
   private initPromise?: Promise<void>;
+  private context: Record<string, unknown> = {};
+
private readonly client: MongoClient | undefined;
private database: Db;
@@ -41,11 +49,14 @@ export class AzureCosmosDBMongoChatMessageHistory extends BaseListChatMessageHis
private sessionId: string;
+ private userId: string;
+
   initialize: () => Promise<void>;
constructor(
dbConfig: AzureCosmosDBMongoChatHistoryDBConfig,
- sessionId: string
+ sessionId: string,
+ userId: string
) {
super();
@@ -70,6 +81,7 @@ export class AzureCosmosDBMongoChatMessageHistory extends BaseListChatMessageHis
const collectionName = dbConfig.collectionName ?? "chatHistory";
this.sessionId = sessionId;
+ this.userId = userId ?? "anonymous";
// Deferring initialization to the first call to `initialize`
this.initialize = () => {
@@ -120,6 +132,7 @@ export class AzureCosmosDBMongoChatMessageHistory extends BaseListChatMessageHis
const document = await this.collection.findOne({
[ID_KEY]: this.sessionId,
+ [ID_USER]: this.userId,
});
const messages = document?.messages || [];
return mapStoredMessagesToChatMessages(messages);
@@ -134,10 +147,12 @@ export class AzureCosmosDBMongoChatMessageHistory extends BaseListChatMessageHis
await this.initialize();
const messages = mapChatMessagesToStoredMessages([message]);
+ const context = await this.getContext();
await this.collection.updateOne(
- { [ID_KEY]: this.sessionId },
+ { [ID_KEY]: this.sessionId, [ID_USER]: this.userId },
{
$push: { messages: { $each: messages } } as PushOperator,
+ $set: { context },
},
{ upsert: true }
);
@@ -150,6 +165,66 @@ export class AzureCosmosDBMongoChatMessageHistory extends BaseListChatMessageHis
   async clear(): Promise<void> {
await this.initialize();
- await this.collection.deleteOne({ [ID_KEY]: this.sessionId });
+ await this.collection.deleteOne({
+ [ID_KEY]: this.sessionId,
+ [ID_USER]: this.userId,
+ });
+ }
+
+  async getAllSessions(): Promise<ChatSessionMongo[]> {
+ await this.initialize();
+ const documents = await this.collection
+ .find({
+ [ID_USER]: this.userId,
+ })
+ .toArray();
+
+ const chatSessions: ChatSessionMongo[] = documents.map((doc) => ({
+ id: doc[ID_KEY],
+ user_id: doc[ID_USER],
+ context: doc.context || {},
+ }));
+
+ return chatSessions;
+ }
+
+ async clearAllSessions() {
+ await this.initialize();
+ try {
+ await this.collection.deleteMany({
+ [ID_USER]: this.userId,
+ });
+ } catch (error) {
+ console.error("Error clearing chat history sessions:", error);
+ throw error;
+ }
+ }
+
+  async getContext(): Promise<Record<string, unknown>> {
+ await this.initialize();
+
+ const document = await this.collection.findOne({
+ [ID_KEY]: this.sessionId,
+ [ID_USER]: this.userId,
+ });
+ this.context = document?.context || this.context;
+ return this.context;
+ }
+
+  async setContext(context: Record<string, unknown>): Promise<void> {
+ await this.initialize();
+
+ try {
+ await this.collection.updateOne(
+ { [ID_KEY]: this.sessionId },
+ {
+ $set: { context },
+ },
+ { upsert: true }
+ );
+ } catch (error) {
+ console.error("Error setting chat history context", error);
+ throw error;
+ }
}
}
diff --git a/libs/langchain-azure-cosmosdb/src/tests/chat_histories/mongodb.int.test.ts b/libs/langchain-azure-cosmosdb/src/tests/chat_histories/mongodb.int.test.ts
index 35c4a2cf0311..2825b2cafab4 100644
--- a/libs/langchain-azure-cosmosdb/src/tests/chat_histories/mongodb.int.test.ts
+++ b/libs/langchain-azure-cosmosdb/src/tests/chat_histories/mongodb.int.test.ts
@@ -32,9 +32,11 @@ test("Test Azure Cosmos MongoDB history store", async () => {
};
const sessionId = new ObjectId().toString();
+ const userId = new ObjectId().toString();
const chatHistory = new AzureCosmosDBMongoChatMessageHistory(
dbcfg,
- sessionId
+ sessionId,
+ userId
);
const blankResult = await chatHistory.getMessages();
@@ -70,9 +72,11 @@ test("Test clear Azure Cosmos MongoDB history store", async () => {
};
const sessionId = new ObjectId().toString();
+ const userId = new ObjectId().toString();
const chatHistory = new AzureCosmosDBMongoChatMessageHistory(
dbcfg,
- sessionId
+ sessionId,
+ userId
);
await chatHistory.addUserMessage("Who is the best vocalist?");
@@ -93,3 +97,50 @@ test("Test clear Azure Cosmos MongoDB history store", async () => {
await mongoClient.close();
});
+
+test("Test getAllSessions and clearAllSessions", async () => {
+ expect(process.env.AZURE_COSMOSDB_MONGODB_CONNECTION_STRING).toBeDefined();
+
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+ const mongoClient = new MongoClient(
+ process.env.AZURE_COSMOSDB_MONGODB_CONNECTION_STRING!
+ );
+ const dbcfg: AzureCosmosDBMongoChatHistoryDBConfig = {
+ client: mongoClient,
+ connectionString: process.env.AZURE_COSMOSDB_MONGODB_CONNECTION_STRING,
+ databaseName: "langchain",
+ collectionName: "chathistory",
+ };
+
+ const sessionId1 = new ObjectId().toString();
+ const userId1 = new ObjectId().toString();
+ const sessionId2 = new ObjectId().toString();
+ const userId2 = new ObjectId().toString();
+
+ const chatHistory1 = new AzureCosmosDBMongoChatMessageHistory(
+ dbcfg,
+ sessionId1,
+ userId1
+ );
+ const chatHistory2 = new AzureCosmosDBMongoChatMessageHistory(
+ dbcfg,
+ sessionId2,
+ userId2
+ );
+
+ await chatHistory1.addUserMessage("What is AI?");
+ await chatHistory1.addAIChatMessage("AI stands for Artificial Intelligence.");
+ await chatHistory2.addUserMessage("What is the best programming language?");
+ await chatHistory2.addAIChatMessage("It depends on the use case.");
+
+ const allSessions = await chatHistory1.getAllSessions();
+ expect(allSessions.length).toBe(2);
+ expect(allSessions[0].id).toBe(sessionId1);
+ expect(allSessions[1].id).toBe(sessionId2);
+
+ await chatHistory1.clearAllSessions();
+ const clearedSessions = await chatHistory1.getAllSessions();
+ expect(clearedSessions.length).toBe(0);
+
+ await mongoClient.close();
+});
diff --git a/libs/langchain-community/package.json b/libs/langchain-community/package.json
index aef582d27f8a..af66cf2e2381 100644
--- a/libs/langchain-community/package.json
+++ b/libs/langchain-community/package.json
@@ -1,6 +1,6 @@
{
"name": "@langchain/community",
- "version": "0.3.24",
+ "version": "0.3.26",
"description": "Third-party integrations for LangChain.js",
"type": "module",
"engines": {
@@ -41,7 +41,7 @@
"flat": "^5.0.2",
"js-yaml": "^4.1.0",
"langchain": ">=0.2.3 <0.3.0 || >=0.3.4 <0.4.0",
- "langsmith": "^0.2.8",
+ "langsmith": ">=0.2.8 <0.4.0",
"uuid": "^10.0.0",
"zod": "^3.22.3",
"zod-to-json-schema": "^3.22.5"
@@ -79,7 +79,7 @@
"@gradientai/nodejs-sdk": "^1.2.0",
"@huggingface/inference": "^2.6.4",
"@huggingface/transformers": "^3.2.3",
- "@ibm-cloud/watsonx-ai": "^1.3.0",
+ "@ibm-cloud/watsonx-ai": "^1.4.0",
"@jest/globals": "^29.5.0",
"@lancedb/lancedb": "^0.13.0",
"@langchain/core": "workspace:*",
diff --git a/libs/langchain-community/src/chat_models/ibm.ts b/libs/langchain-community/src/chat_models/ibm.ts
index 992419649fb1..17e80922a8db 100644
--- a/libs/langchain-community/src/chat_models/ibm.ts
+++ b/libs/langchain-community/src/chat_models/ibm.ts
@@ -33,6 +33,7 @@ import {
} from "@langchain/core/outputs";
import { AsyncCaller } from "@langchain/core/utils/async_caller";
import {
+ DeploymentsTextChatParams,
RequestCallbacks,
TextChatMessagesTextChatMessageAssistant,
TextChatParameterTools,
@@ -65,7 +66,13 @@ import {
import { isZodSchema } from "@langchain/core/utils/types";
import { zodToJsonSchema } from "zod-to-json-schema";
import { NewTokenIndices } from "@langchain/core/callbacks/base";
-import { WatsonxAuth, WatsonxParams } from "../types/ibm.js";
+import {
+ Neverify,
+ WatsonxAuth,
+ WatsonxChatBasicOptions,
+ WatsonxDeployedParams,
+ WatsonxParams,
+} from "../types/ibm.js";
import {
_convertToolCallIdToMistralCompatible,
authenticateAndSetInstance,
@@ -80,16 +87,24 @@ export interface WatsonxDeltaStream {
}
export interface WatsonxCallParams
- extends Partial> {
- maxRetries?: number;
- watsonxCallbacks?: RequestCallbacks;
-}
+ extends Partial<
+ Omit
+ > {}
+
+export interface WatsonxCallDeployedParams extends DeploymentsTextChatParams {}
+
export interface WatsonxCallOptionsChat
extends Omit,
- WatsonxCallParams {
+ WatsonxCallParams,
+ WatsonxChatBasicOptions {
promptIndex?: number;
tool_choice?: TextChatParameterTools | string | "auto" | "any";
- watsonxCallbacks?: RequestCallbacks;
+}
+
+export interface WatsonxCallOptionsDeployedChat
+ extends WatsonxCallDeployedParams,
+ WatsonxChatBasicOptions {
+ promptIndex?: number;
}
type ChatWatsonxToolType = BindToolsInput | TextChatParameterTools;
@@ -97,10 +112,18 @@ type ChatWatsonxToolType = BindToolsInput | TextChatParameterTools;
export interface ChatWatsonxInput
extends BaseChatModelParams,
WatsonxParams,
- WatsonxCallParams {
- streaming?: boolean;
-}
+ WatsonxCallParams,
+ Neverify {}
+
+export interface ChatWatsonxDeployedInput
+ extends BaseChatModelParams,
+ WatsonxDeployedParams,
+ Neverify {}
+export type ChatWatsonxConstructor = BaseChatModelParams &
+ Partial &
+ WatsonxDeployedParams &
+ WatsonxCallParams;
function _convertToValidToolId(model: string, tool_call_id: string) {
if (model.startsWith("mistralai"))
return _convertToolCallIdToMistralCompatible(tool_call_id);
@@ -335,10 +358,12 @@ function _convertToolChoiceToWatsonxToolChoice(
}
export class ChatWatsonx<
- CallOptions extends WatsonxCallOptionsChat = WatsonxCallOptionsChat
+ CallOptions extends WatsonxCallOptionsChat =
+ | WatsonxCallOptionsChat
+ | WatsonxCallOptionsDeployedChat
>
extends BaseChatModel
- implements ChatWatsonxInput
+ implements ChatWatsonxConstructor
{
static lc_name() {
return "ChatWatsonx";
@@ -380,8 +405,8 @@ export class ChatWatsonx<
ls_provider: "watsonx",
ls_model_name: this.model,
ls_model_type: "chat",
- ls_temperature: params.temperature ?? undefined,
- ls_max_tokens: params.maxTokens ?? undefined,
+ ls_temperature: params?.temperature ?? undefined,
+ ls_max_tokens: params?.maxTokens ?? undefined,
};
}
@@ -399,6 +424,8 @@ export class ChatWatsonx<
projectId?: string;
+ idOrName?: string;
+
frequencyPenalty?: number;
logprobs?: boolean;
@@ -425,37 +452,44 @@ export class ChatWatsonx<
watsonxCallbacks?: RequestCallbacks;
- constructor(fields: ChatWatsonxInput & WatsonxAuth) {
+ constructor(
+ fields: (ChatWatsonxInput | ChatWatsonxDeployedInput) & WatsonxAuth
+ ) {
super(fields);
if (
- (fields.projectId && fields.spaceId) ||
- (fields.idOrName && fields.projectId) ||
- (fields.spaceId && fields.idOrName)
+ ("projectId" in fields && "spaceId" in fields) ||
+ ("projectId" in fields && "idOrName" in fields) ||
+ ("spaceId" in fields && "idOrName" in fields)
)
throw new Error("Maximum 1 id type can be specified per instance");
- if (!fields.projectId && !fields.spaceId && !fields.idOrName)
+ if (!("projectId" in fields || "spaceId" in fields || "idOrName" in fields))
throw new Error(
"No id specified! At least id of 1 type has to be specified"
);
- this.projectId = fields?.projectId;
- this.spaceId = fields?.spaceId;
- this.temperature = fields?.temperature;
- this.maxRetries = fields?.maxRetries || this.maxRetries;
- this.maxConcurrency = fields?.maxConcurrency;
- this.frequencyPenalty = fields?.frequencyPenalty;
- this.topLogprobs = fields?.topLogprobs;
- this.maxTokens = fields?.maxTokens ?? this.maxTokens;
- this.presencePenalty = fields?.presencePenalty;
- this.topP = fields?.topP;
- this.timeLimit = fields?.timeLimit;
- this.responseFormat = fields?.responseFormat ?? this.responseFormat;
+
+ if ("model" in fields) {
+ this.projectId = fields?.projectId;
+ this.spaceId = fields?.spaceId;
+ this.temperature = fields?.temperature;
+ this.maxRetries = fields?.maxRetries || this.maxRetries;
+ this.maxConcurrency = fields?.maxConcurrency;
+ this.frequencyPenalty = fields?.frequencyPenalty;
+ this.topLogprobs = fields?.topLogprobs;
+ this.maxTokens = fields?.maxTokens ?? this.maxTokens;
+ this.presencePenalty = fields?.presencePenalty;
+ this.topP = fields?.topP;
+ this.timeLimit = fields?.timeLimit;
+ this.responseFormat = fields?.responseFormat ?? this.responseFormat;
+ this.streaming = fields?.streaming ?? this.streaming;
+ this.n = fields?.n ?? this.n;
+ this.model = fields?.model ?? this.model;
+ } else this.idOrName = fields?.idOrName;
+
+ this.watsonxCallbacks = fields?.watsonxCallbacks ?? this.watsonxCallbacks;
this.serviceUrl = fields?.serviceUrl;
- this.streaming = fields?.streaming ?? this.streaming;
- this.n = fields?.n ?? this.n;
- this.model = fields?.model ?? this.model;
this.version = fields?.version ?? this.version;
- this.watsonxCallbacks = fields?.watsonxCallbacks ?? this.watsonxCallbacks;
+
const {
watsonxAIApikey,
watsonxAIAuthType,
@@ -486,6 +520,11 @@ export class ChatWatsonx<
}
invocationParams(options: this["ParsedCallOptions"]) {
+ const { signal, promptIndex, ...rest } = options;
+ if (this.idOrName && Object.keys(rest).length > 0)
+ throw new Error("Options cannot be provided to a deployed model");
+ if (this.idOrName) return undefined;
+
const params = {
maxTokens: options.maxTokens ?? this.maxTokens,
temperature: options?.temperature ?? this.temperature,
@@ -521,10 +560,16 @@ export class ChatWatsonx<
} as CallOptions);
}
- scopeId() {
+ scopeId():
+ | { idOrName: string }
+ | { projectId: string; modelId: string }
+ | { spaceId: string; modelId: string } {
if (this.projectId)
return { projectId: this.projectId, modelId: this.model };
- else return { spaceId: this.spaceId, modelId: this.model };
+ else if (this.spaceId)
+ return { spaceId: this.spaceId, modelId: this.model };
+ else if (this.idOrName) return { idOrName: this.idOrName };
+ else throw new Error("No scope id provided");
}
async completionWithRetry(
@@ -595,23 +640,30 @@ export class ChatWatsonx<
.map(([_, value]) => value);
return { generations, llmOutput: { tokenUsage } };
} else {
- const params = {
- ...this.invocationParams(options),
- ...this.scopeId(),
- };
+ const params = this.invocationParams(options);
+ const scopeId = this.scopeId();
const watsonxCallbacks = this.invocationCallbacks(options);
const watsonxMessages = _convertMessagesToWatsonxMessages(
messages,
this.model
);
const callback = () =>
- this.service.textChat(
- {
- ...params,
- messages: watsonxMessages,
- },
- watsonxCallbacks
- );
+ "idOrName" in scopeId
+ ? this.service.deploymentsTextChat(
+ {
+ ...scopeId,
+ messages: watsonxMessages,
+ },
+ watsonxCallbacks
+ )
+ : this.service.textChat(
+ {
+ ...params,
+ ...scopeId,
+ messages: watsonxMessages,
+ },
+ watsonxCallbacks
+ );
const { result } = await this.completionWithRetry(callback, options);
const generations: ChatGeneration[] = [];
for (const part of result.choices) {
@@ -646,21 +698,33 @@ export class ChatWatsonx<
options: this["ParsedCallOptions"],
_runManager?: CallbackManagerForLLMRun
): AsyncGenerator {
- const params = { ...this.invocationParams(options), ...this.scopeId() };
+ const params = this.invocationParams(options);
+ const scopeId = this.scopeId();
const watsonxMessages = _convertMessagesToWatsonxMessages(
messages,
this.model
);
const watsonxCallbacks = this.invocationCallbacks(options);
const callback = () =>
- this.service.textChatStream(
- {
- ...params,
- messages: watsonxMessages,
- returnObject: true,
- },
- watsonxCallbacks
- );
+ "idOrName" in scopeId
+ ? this.service.deploymentsTextChatStream(
+ {
+ ...scopeId,
+ messages: watsonxMessages,
+ returnObject: true,
+ },
+ watsonxCallbacks
+ )
+ : this.service.textChatStream(
+ {
+ ...params,
+ ...scopeId,
+ messages: watsonxMessages,
+ returnObject: true,
+ },
+ watsonxCallbacks
+ );
+
const stream = await this.completionWithRetry(callback, options);
let defaultRole;
let usage: TextChatUsage | undefined;
@@ -707,7 +771,6 @@ export class ChatWatsonx<
if (message === null || (!delta.content && !delta.tool_calls)) {
continue;
}
-
const generationChunk = new ChatGenerationChunk({
message,
text: delta.content ?? "",
diff --git a/libs/langchain-community/src/chat_models/tests/ibm.int.test.ts b/libs/langchain-community/src/chat_models/tests/ibm.int.test.ts
index ae47345a1add..1cdc836ba9c8 100644
--- a/libs/langchain-community/src/chat_models/tests/ibm.int.test.ts
+++ b/libs/langchain-community/src/chat_models/tests/ibm.int.test.ts
@@ -16,7 +16,7 @@ import { ChatWatsonx } from "../ibm.js";
describe("Tests for chat", () => {
describe("Test ChatWatsonx invoke and generate", () => {
- test("Basic invoke", async () => {
+ test("Basic invoke with projectId", async () => {
const service = new ChatWatsonx({
model: "mistralai/mistral-large",
version: "2024-05-31",
@@ -26,6 +26,37 @@ describe("Tests for chat", () => {
const res = await service.invoke("Print hello world");
expect(res).toBeInstanceOf(AIMessage);
});
+ test("Basic invoke with spaceId", async () => {
+ const service = new ChatWatsonx({
+ model: "mistralai/mistral-large",
+ version: "2024-05-31",
+ serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+ spaceId: process.env.WATSONX_AI_SPACE_ID ?? "testString",
+ });
+ const res = await service.invoke("Print hello world");
+ expect(res).toBeInstanceOf(AIMessage);
+ });
+ test("Basic invoke with idOrName", async () => {
+ const service = new ChatWatsonx({
+ version: "2024-05-31",
+ serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+ idOrName: process.env.WATSONX_AI_ID_OR_NAME ?? "testString",
+ });
+ const res = await service.invoke("Print hello world");
+ expect(res).toBeInstanceOf(AIMessage);
+ });
+ test("Invalid invoke with idOrName and options as second argument", async () => {
+ const service = new ChatWatsonx({
+ version: "2024-05-31",
+ serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+ idOrName: process.env.WATSONX_AI_ID_OR_NAME ?? "testString",
+ });
+ await expect(() =>
+ service.invoke("Print hello world", {
+ maxTokens: 100,
+ })
+ ).rejects.toThrow("Options cannot be provided to a deployed model");
+ });
test("Basic generate", async () => {
const service = new ChatWatsonx({
model: "mistralai/mistral-large",
@@ -710,7 +741,7 @@ describe("Tests for chat", () => {
test("Schema with zod and stream", async () => {
const service = new ChatWatsonx({
- model: "mistralai/mistral-large",
+ model: "meta-llama/llama-3-1-70b-instruct",
version: "2024-05-31",
serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
diff --git a/libs/langchain-community/src/chat_models/tests/ibm.test.ts b/libs/langchain-community/src/chat_models/tests/ibm.test.ts
index f52a689f6755..b35b59d8ccbd 100644
--- a/libs/langchain-community/src/chat_models/tests/ibm.test.ts
+++ b/libs/langchain-community/src/chat_models/tests/ibm.test.ts
@@ -1,7 +1,12 @@
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-explicit-any */
import WatsonxAiMlVml_v1 from "@ibm-cloud/watsonx-ai/dist/watsonx-ai-ml/vml_v1.js";
-import { ChatWatsonx, ChatWatsonxInput, WatsonxCallParams } from "../ibm.js";
+import {
+ ChatWatsonx,
+ ChatWatsonxConstructor,
+ ChatWatsonxInput,
+ WatsonxCallParams,
+} from "../ibm.js";
import { authenticateAndSetInstance } from "../../utils/ibm.js";
const fakeAuthProp = {
@@ -13,7 +18,7 @@ export function getKey(key: K): K {
}
export const testProperties = (
instance: ChatWatsonx,
- testProps: ChatWatsonxInput,
+ testProps: ChatWatsonxConstructor,
notExTestProps?: { [key: string]: any }
) => {
const checkProperty = (
@@ -24,13 +29,19 @@ export const testProperties = (
Object.keys(testProps).forEach((key) => {
const keys = getKey(key);
type Type = Pick;
-
if (typeof testProps[key as keyof T] === "object")
- checkProperty(testProps[key as keyof T], instance[key], existing);
+ checkProperty(
+ testProps[key as keyof T],
+ instance[key as keyof typeof instance],
+ existing
+ );
else {
if (existing)
- expect(instance[key as keyof T]).toBe(testProps[key as keyof T]);
- else if (instance) expect(instance[key as keyof T]).toBeUndefined();
+ expect(instance[key as keyof typeof instance]).toBe(
+ testProps[key as keyof T]
+ );
+ else if (instance)
+ expect(instance[key as keyof typeof instance]).toBeUndefined();
}
});
};
@@ -62,6 +73,40 @@ describe("LLM unit tests", () => {
testProperties(instance, testProps);
});
+ test("Authenticate with projectId", async () => {
+ const testProps = {
+ model: "mistralai/mistral-large",
+ version: "2024-05-31",
+ serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
+ projectId: process.env.WATSONX_AI_PROJECT_ID || "testString",
+ };
+ const instance = new ChatWatsonx({ ...testProps, ...fakeAuthProp });
+
+ testProperties(instance, testProps);
+ });
+
+ test("Authenticate with spaceId", async () => {
+ const testProps = {
+ model: "mistralai/mistral-large",
+ version: "2024-05-31",
+ serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
+ spaceId: process.env.WATSONX_AI_SPACE_ID || "testString",
+ };
+ const instance = new ChatWatsonx({ ...testProps, ...fakeAuthProp });
+
+ testProperties(instance, testProps);
+ });
+
+ test("Authenticate with idOrName", async () => {
+ const testProps = {
+ version: "2024-05-31",
+ serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
+ idOrName: process.env.WATSONX_AI_ID_OR_NAME || "testString",
+ };
+ const instance = new ChatWatsonx({ ...testProps, ...fakeAuthProp });
+ testProperties(instance, testProps);
+ });
+
test("Test methods after init", () => {
const testProps: ChatWatsonxInput = {
model: "mistralai/mistral-large",
diff --git a/libs/langchain-community/src/document_loaders/web/airtable.ts b/libs/langchain-community/src/document_loaders/web/airtable.ts
index be570e7a2759..a4c2367dde64 100644
--- a/libs/langchain-community/src/document_loaders/web/airtable.ts
+++ b/libs/langchain-community/src/document_loaders/web/airtable.ts
@@ -139,7 +139,7 @@ export class AirtableLoader extends BaseDocumentLoader {
private async fetchRecords(
body: Record
): Promise {
- const url = `${AirtableLoader.BASE_URL}/${this.baseId}/${this.tableId}`;
+ const url = `${AirtableLoader.BASE_URL}/${this.baseId}/${this.tableId}/listRecords`;
try {
const response = await fetch(url, {
method: "POST",
diff --git a/libs/langchain-community/src/embeddings/togetherai.ts b/libs/langchain-community/src/embeddings/togetherai.ts
index 27b3546873cb..bf06495368c6 100644
--- a/libs/langchain-community/src/embeddings/togetherai.ts
+++ b/libs/langchain-community/src/embeddings/togetherai.ts
@@ -87,7 +87,7 @@ export class TogetherAIEmbeddings
timeout?: number;
- private embeddingsAPIUrl = "https://api.together.xyz/api/v1/embeddings";
+ private embeddingsAPIUrl = "https://api.together.xyz/v1/embeddings";
constructor(fields?: Partial) {
super(fields ?? {});
diff --git a/libs/langchain-community/src/llms/ibm.ts b/libs/langchain-community/src/llms/ibm.ts
index 75e65fd6873d..97fb287a982a 100644
--- a/libs/langchain-community/src/llms/ibm.ts
+++ b/libs/langchain-community/src/llms/ibm.ts
@@ -3,7 +3,6 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import { BaseLLM, BaseLLMParams } from "@langchain/core/language_models/llms";
import { WatsonXAI } from "@ibm-cloud/watsonx-ai";
import {
- DeploymentTextGenProperties,
RequestCallbacks,
ReturnOptionProperties,
TextGenLengthPenalty,
@@ -21,9 +20,11 @@ import { AsyncCaller } from "@langchain/core/utils/async_caller";
import { authenticateAndSetInstance } from "../utils/ibm.js";
import {
GenerationInfo,
+ Neverify,
ResponseChunk,
TokenUsage,
WatsonxAuth,
+ WatsonxDeployedParams,
WatsonxParams,
} from "../types/ibm.js";
@@ -31,15 +32,7 @@ import {
* Input to LLM class.
*/
-export interface WatsonxCallOptionsLLM extends BaseLanguageModelCallOptions {
- maxRetries?: number;
- parameters?: Partial;
- idOrName?: string;
- watsonxCallbacks?: RequestCallbacks;
-}
-
-export interface WatsonxInputLLM extends WatsonxParams, BaseLLMParams {
- streaming?: boolean;
+export interface WatsonxLLMParams {
maxNewTokens?: number;
decodingMethod?: TextGenParameters.Constants.DecodingMethod | string;
lengthPenalty?: TextGenLengthPenalty;
@@ -54,9 +47,36 @@ export interface WatsonxInputLLM extends WatsonxParams, BaseLLMParams {
truncateInpuTokens?: number;
returnOptions?: ReturnOptionProperties;
includeStopSequence?: boolean;
+}
+
+export interface WatsonxDeploymentLLMParams {
+ idOrName: string;
+}
+
+export interface WatsonxCallOptionsLLM extends BaseLanguageModelCallOptions {
+ maxRetries?: number;
+ parameters?: Partial;
watsonxCallbacks?: RequestCallbacks;
}
+export interface WatsonxInputLLM
+ extends WatsonxParams,
+ BaseLLMParams,
+ WatsonxLLMParams,
+ Neverify {}
+
+export interface WatsonxDeployedInputLLM
+ extends WatsonxDeployedParams,
+ BaseLLMParams,
+ Neverify {
+ model?: never;
+}
+
+export type WatsonxLLMConstructor = BaseLLMParams &
+ WatsonxLLMParams &
+ Partial &
+ WatsonxDeployedParams;
+
/**
* Integration with an LLM.
*/
@@ -64,7 +84,7 @@ export class WatsonxLLM<
CallOptions extends WatsonxCallOptionsLLM = WatsonxCallOptionsLLM
>
extends BaseLLM
- implements WatsonxInputLLM
+ implements WatsonxLLMConstructor
{
// Used for tracing, replace with the same name as your class
static lc_name() {
@@ -123,43 +143,51 @@ export class WatsonxLLM<
private service: WatsonXAI;
- constructor(fields: WatsonxInputLLM & WatsonxAuth) {
+ constructor(
+ fields: (WatsonxInputLLM | WatsonxDeployedInputLLM) & WatsonxAuth
+ ) {
super(fields);
- this.model = fields.model ?? this.model;
- this.version = fields.version;
- this.maxNewTokens = fields.maxNewTokens ?? this.maxNewTokens;
- this.serviceUrl = fields.serviceUrl;
- this.decodingMethod = fields.decodingMethod;
- this.lengthPenalty = fields.lengthPenalty;
- this.minNewTokens = fields.minNewTokens;
- this.randomSeed = fields.randomSeed;
- this.stopSequence = fields.stopSequence;
- this.temperature = fields.temperature;
- this.timeLimit = fields.timeLimit;
- this.topK = fields.topK;
- this.topP = fields.topP;
- this.repetitionPenalty = fields.repetitionPenalty;
- this.truncateInpuTokens = fields.truncateInpuTokens;
- this.returnOptions = fields.returnOptions;
- this.includeStopSequence = fields.includeStopSequence;
+
+ if (fields.model) {
+ this.model = fields.model ?? this.model;
+ this.version = fields.version;
+ this.maxNewTokens = fields.maxNewTokens ?? this.maxNewTokens;
+ this.serviceUrl = fields.serviceUrl;
+ this.decodingMethod = fields.decodingMethod;
+ this.lengthPenalty = fields.lengthPenalty;
+ this.minNewTokens = fields.minNewTokens;
+ this.randomSeed = fields.randomSeed;
+ this.stopSequence = fields.stopSequence;
+ this.temperature = fields.temperature;
+ this.timeLimit = fields.timeLimit;
+ this.topK = fields.topK;
+ this.topP = fields.topP;
+ this.repetitionPenalty = fields.repetitionPenalty;
+ this.truncateInpuTokens = fields.truncateInpuTokens;
+ this.returnOptions = fields.returnOptions;
+ this.includeStopSequence = fields.includeStopSequence;
+ this.projectId = fields?.projectId;
+ this.spaceId = fields?.spaceId;
+ } else {
+ this.idOrName = fields?.idOrName;
+ }
+
this.maxRetries = fields.maxRetries || this.maxRetries;
this.maxConcurrency = fields.maxConcurrency;
this.streaming = fields.streaming || this.streaming;
this.watsonxCallbacks = fields.watsonxCallbacks || this.watsonxCallbacks;
+
if (
- (fields.projectId && fields.spaceId) ||
- (fields.idOrName && fields.projectId) ||
- (fields.spaceId && fields.idOrName)
+ ("projectId" in fields && "spaceId" in fields) ||
+ ("projectId" in fields && "idOrName" in fields) ||
+ ("spaceId" in fields && "idOrName" in fields)
)
throw new Error("Maximum 1 id type can be specified per instance");
- if (!fields.projectId && !fields.spaceId && !fields.idOrName)
+ if (!("projectId" in fields || "spaceId" in fields || "idOrName" in fields))
throw new Error(
"No id specified! At least id of 1 type has to be specified"
);
- this.projectId = fields?.projectId;
- this.spaceId = fields?.spaceId;
- this.idOrName = fields?.idOrName;
this.serviceUrl = fields?.serviceUrl;
const {
@@ -215,11 +243,12 @@ export class WatsonxLLM<
};
}
- invocationParams(
- options: this["ParsedCallOptions"]
- ): TextGenParameters | DeploymentTextGenProperties {
+ invocationParams(options: this["ParsedCallOptions"]) {
const { parameters } = options;
-
+ const { signal, ...rest } = options;
+ if (this.idOrName && Object.keys(rest).length > 0)
+ throw new Error("Options cannot be provided to a deployed model");
+ if (this.idOrName) return undefined;
return {
max_new_tokens: parameters?.maxNewTokens ?? this.maxNewTokens,
decoding_method: parameters?.decodingMethod ?? this.decodingMethod,
@@ -293,7 +322,7 @@ export class WatsonxLLM<
...requestOptions
} = options;
const tokenUsage = { generated_token_count: 0, input_token_count: 0 };
- const idOrName = options?.idOrName ?? this.idOrName;
+ const idOrName = this.idOrName;
const parameters = this.invocationParams(options);
const watsonxCallbacks = this.invocationCallbacks(options);
if (stream) {
diff --git a/libs/langchain-community/src/llms/tests/ibm.test.ts b/libs/langchain-community/src/llms/tests/ibm.test.ts
index 6237cb1d14c1..0669af2f811b 100644
--- a/libs/langchain-community/src/llms/tests/ibm.test.ts
+++ b/libs/langchain-community/src/llms/tests/ibm.test.ts
@@ -1,7 +1,7 @@
/* eslint-disable no-process-env */
/* eslint-disable @typescript-eslint/no-explicit-any */
import WatsonxAiMlVml_v1 from "@ibm-cloud/watsonx-ai/dist/watsonx-ai-ml/vml_v1.js";
-import { WatsonxLLM, WatsonxInputLLM } from "../ibm.js";
+import { WatsonxLLM, WatsonxInputLLM, WatsonxLLMConstructor } from "../ibm.js";
import { authenticateAndSetInstance } from "../../utils/ibm.js";
import { WatsonxEmbeddings } from "../../embeddings/ibm.js";
@@ -14,7 +14,7 @@ export function getKey(key: K): K {
}
export const testProperties = (
instance: WatsonxLLM | WatsonxEmbeddings,
- testProps: WatsonxInputLLM,
+ testProps: WatsonxLLMConstructor,
notExTestProps?: { [key: string]: any }
) => {
const checkProperty = (
@@ -63,6 +63,17 @@ describe("LLM unit tests", () => {
testProperties(instance, testProps);
});
+ test("Test basic properties after init with idOrName", async () => {
+ const testProps = {
+ version: "2024-05-31",
+ serviceUrl: process.env.WATSONX_AI_SERVICE_URL as string,
+ idOrName: process.env.WATSONX_AI_ID_OR_NAME || "testString",
+ };
+ const instance = new WatsonxLLM({ ...testProps, ...fakeAuthProp });
+
+ testProperties(instance, testProps);
+ });
+
test("Test methods after init", () => {
const testProps: WatsonxInputLLM = {
model: "ibm/granite-13b-chat-v2",
diff --git a/libs/langchain-community/src/tools/google_calendar/base.ts b/libs/langchain-community/src/tools/google_calendar/base.ts
index 1f8916e4c2ab..f1962091acf9 100644
--- a/libs/langchain-community/src/tools/google_calendar/base.ts
+++ b/libs/langchain-community/src/tools/google_calendar/base.ts
@@ -1,7 +1,7 @@
import { google } from "googleapis";
import { Tool } from "@langchain/core/tools";
import { getEnvironmentVariable } from "@langchain/core/utils/env";
-import { BaseLLM } from "@langchain/core/language_models/llms";
+import { BaseLanguageModel } from "@langchain/core/language_models/base";
export interface GoogleCalendarAgentParams {
credentials?: {
@@ -10,7 +10,7 @@ export interface GoogleCalendarAgentParams {
calendarId?: string;
};
scopes?: string[];
- model?: BaseLLM;
+ model?: BaseLanguageModel;
}
export class GoogleCalendarBase extends Tool {
@@ -27,7 +27,7 @@ export class GoogleCalendarBase extends Tool {
protected scopes: string[];
- protected llm: BaseLLM;
+ protected llm: BaseLanguageModel;
constructor(
fields: GoogleCalendarAgentParams = {
diff --git a/libs/langchain-community/src/tools/google_calendar/commands/run-create-events.ts b/libs/langchain-community/src/tools/google_calendar/commands/run-create-events.ts
index 8ec8e46bdc08..2eea293ab68c 100644
--- a/libs/langchain-community/src/tools/google_calendar/commands/run-create-events.ts
+++ b/libs/langchain-community/src/tools/google_calendar/commands/run-create-events.ts
@@ -2,7 +2,7 @@ import { google, calendar_v3 } from "googleapis";
import type { JWT, GaxiosResponse } from "googleapis-common";
import { PromptTemplate } from "@langchain/core/prompts";
import { CallbackManagerForToolRun } from "@langchain/core/callbacks/manager";
-import { BaseLLM } from "@langchain/core/language_models/llms";
+import { BaseLanguageModel } from "@langchain/core/language_models/base";
import { StringOutputParser } from "@langchain/core/output_parsers";
import { CREATE_EVENT_PROMPT } from "../prompts/index.js";
import { getTimezoneOffsetInHours } from "../utils/get-timezone-offset-in-hours.js";
@@ -61,7 +61,7 @@ const createEvent = async (
type RunCreateEventParams = {
calendarId: string;
auth: JWT;
- model: BaseLLM;
+ model: BaseLanguageModel;
};
const runCreateEvent = async (
diff --git a/libs/langchain-community/src/tools/google_calendar/commands/run-view-events.ts b/libs/langchain-community/src/tools/google_calendar/commands/run-view-events.ts
index 1cc721535916..52767d80bff9 100644
--- a/libs/langchain-community/src/tools/google_calendar/commands/run-view-events.ts
+++ b/libs/langchain-community/src/tools/google_calendar/commands/run-view-events.ts
@@ -1,7 +1,7 @@
import { calendar_v3 } from "googleapis";
import type { JWT } from "googleapis-common";
import { PromptTemplate } from "@langchain/core/prompts";
-import { BaseLLM } from "@langchain/core/language_models/llms";
+import { BaseLanguageModel } from "@langchain/core/language_models/base";
import { CallbackManagerForToolRun } from "@langchain/core/callbacks/manager";
import { StringOutputParser } from "@langchain/core/output_parsers";
@@ -11,7 +11,7 @@ import { getTimezoneOffsetInHours } from "../utils/get-timezone-offset-in-hours.
type RunViewEventParams = {
calendarId: string;
auth: JWT;
- model: BaseLLM;
+ model: BaseLanguageModel;
};
const runViewEvents = async (
diff --git a/libs/langchain-community/src/tools/tests/google_calendar.test.ts b/libs/langchain-community/src/tools/tests/google_calendar.test.ts
index f777ae8999a2..10531cc72991 100644
--- a/libs/langchain-community/src/tools/tests/google_calendar.test.ts
+++ b/libs/langchain-community/src/tools/tests/google_calendar.test.ts
@@ -1,5 +1,5 @@
import { jest, expect, describe } from "@jest/globals";
-import { LLM } from "@langchain/core/language_models/llms";
+import { BaseChatModel } from "@langchain/core/language_models/chat_models";
import {
GoogleCalendarCreateTool,
GoogleCalendarViewTool,
@@ -25,13 +25,13 @@ jest.mock("@langchain/core/utils/env", () => ({
// runViewEvents: jest.fn(),
// }));
-class FakeLLM extends LLM {
+class FakeLLM extends BaseChatModel {
_llmType() {
return "fake";
}
- async _call(prompt: string): Promise {
- return prompt;
+ async _generate() {
+ return {} as any;
}
}
diff --git a/libs/langchain-community/src/types/ibm.ts b/libs/langchain-community/src/types/ibm.ts
index ee5db8532036..f5d4b72de7b4 100644
--- a/libs/langchain-community/src/types/ibm.ts
+++ b/libs/langchain-community/src/types/ibm.ts
@@ -1,3 +1,5 @@
+import { RequestCallbacks } from "@ibm-cloud/watsonx-ai/dist/watsonx-ai-ml/vml_v1.js";
+
export interface TokenUsage {
generated_token_count: number;
input_token_count: number;
@@ -17,13 +19,27 @@ export interface WatsonxInit {
version: string;
}
-export interface WatsonxParams extends WatsonxInit {
+export interface WatsonxChatBasicOptions {
+ maxConcurrency?: number;
+ maxRetries?: number;
+ streaming?: boolean;
+ watsonxCallbacks?: RequestCallbacks;
+}
+
+export interface WatsonxParams extends WatsonxInit, WatsonxChatBasicOptions {
model: string;
spaceId?: string;
projectId?: string;
+}
+
+export type Neverify = {
+ [K in keyof T]?: never;
+};
+
+export interface WatsonxDeployedParams
+ extends WatsonxInit,
+ WatsonxChatBasicOptions {
idOrName?: string;
- maxConcurrency?: number;
- maxRetries?: number;
}
export interface GenerationInfo {
diff --git a/libs/langchain-community/src/utils/bedrock/index.ts b/libs/langchain-community/src/utils/bedrock/index.ts
index 9b3203e8ce76..c59692943049 100644
--- a/libs/langchain-community/src/utils/bedrock/index.ts
+++ b/libs/langchain-community/src/utils/bedrock/index.ts
@@ -284,7 +284,6 @@ export class BedrockLLMInputOutputAdapter {
if (tools.length > 0) {
inputBody.tools = tools;
}
- return { ...inputBody, ...modelKwargs };
} else if (provider === "cohere") {
const {
system,
diff --git a/libs/langchain-community/src/utils/ibm.ts b/libs/langchain-community/src/utils/ibm.ts
index ccbe1204ef60..8786a0263198 100644
--- a/libs/langchain-community/src/utils/ibm.ts
+++ b/libs/langchain-community/src/utils/ibm.ts
@@ -184,10 +184,18 @@ export class WatsonxToolsOutputParser<
const tool = message.tool_calls;
return tool;
});
+
if (tools[0] === undefined) {
- if (this.latestCorrect) tools.push(this.latestCorrect);
+ if (this.latestCorrect) {
+ tools.push(this.latestCorrect);
+ } else {
+ const toolCall: ToolCall = { name: "", args: {} };
+ tools.push(toolCall);
+ }
}
+
const [tool] = tools;
+ tool.name = "";
this.latestCorrect = tool;
return tool.args as T;
}
diff --git a/yarn.lock b/yarn.lock
index 3f5039ba8eaf..f341b53827c6 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -10669,14 +10669,15 @@ __metadata:
languageName: node
linkType: hard
-"@ibm-cloud/watsonx-ai@npm:^1.3.0":
- version: 1.3.0
- resolution: "@ibm-cloud/watsonx-ai@npm:1.3.0"
+"@ibm-cloud/watsonx-ai@npm:^1.4.0":
+ version: 1.4.0
+ resolution: "@ibm-cloud/watsonx-ai@npm:1.4.0"
dependencies:
+ "@langchain/textsplitters": ^0.1.0
"@types/node": ^18.0.0
extend: 3.0.2
ibm-cloud-sdk-core: ^5.0.2
- checksum: 6a2127391ca70005b942d3c4ab1abc738946c42bbf3ee0f8eb6f778434b5f8806d622f1f36446f00b9fb82dc2c8aea3526426ec46cc53fa8a075ba7a294da096
+ checksum: 5250816f9ad93839cf26e3788eeace8155721765c39c65547eff8ebbd5fc8a0dfa107f6e799593f1209f4b3489be24aa674aa92b7ecbc5fc2bd29390a28e84ff
languageName: node
linkType: hard
@@ -11899,7 +11900,7 @@ __metadata:
"@gradientai/nodejs-sdk": ^1.2.0
"@huggingface/inference": ^2.6.4
"@huggingface/transformers": ^3.2.3
- "@ibm-cloud/watsonx-ai": ^1.3.0
+ "@ibm-cloud/watsonx-ai": ^1.4.0
"@jest/globals": ^29.5.0
"@lancedb/lancedb": ^0.13.0
"@langchain/core": "workspace:*"
@@ -12010,7 +12011,7 @@ __metadata:
jsdom: ^22.1.0
jsonwebtoken: ^9.0.2
langchain: ">=0.2.3 <0.3.0 || >=0.3.4 <0.4.0"
- langsmith: ^0.2.8
+ langsmith: ">=0.2.8 <0.4.0"
llmonitor: ^0.5.9
lodash: ^4.17.21
lunary: ^0.7.10
@@ -12448,7 +12449,7 @@ __metadata:
jest: ^29.5.0
jest-environment-node: ^29.6.4
js-tiktoken: ^1.0.12
- langsmith: ^0.2.8
+ langsmith: ">=0.2.8 <0.4.0"
ml-matrix: ^6.10.4
mustache: ^4.2.0
p-queue: ^6.6.2
@@ -13188,7 +13189,7 @@ __metadata:
languageName: unknown
linkType: soft
-"@langchain/textsplitters@>=0.0.0 <0.2.0, @langchain/textsplitters@workspace:*, @langchain/textsplitters@workspace:libs/langchain-textsplitters":
+"@langchain/textsplitters@>=0.0.0 <0.2.0, @langchain/textsplitters@^0.1.0, @langchain/textsplitters@workspace:*, @langchain/textsplitters@workspace:libs/langchain-textsplitters":
version: 0.0.0-use.local
resolution: "@langchain/textsplitters@workspace:libs/langchain-textsplitters"
dependencies:
@@ -27852,7 +27853,7 @@ __metadata:
ioredis: ^5.3.2
js-yaml: ^4.1.0
langchain: "workspace:*"
- langsmith: ^0.2.8
+ langsmith: ">=0.2.8 <0.4.0"
mongodb: ^6.3.0
pg: ^8.11.0
pickleparser: ^0.2.1
@@ -33464,7 +33465,7 @@ __metadata:
js-tiktoken: ^1.0.12
js-yaml: ^4.1.0
jsonpointer: ^5.0.1
- langsmith: ^0.2.8
+ langsmith: ">=0.2.8 <0.4.0"
openai: ^4.41.1
openapi-types: ^12.1.3
p-retry: 4
@@ -33549,9 +33550,9 @@ __metadata:
languageName: unknown
linkType: soft
-"langsmith@npm:^0.2.8":
- version: 0.2.8
- resolution: "langsmith@npm:0.2.8"
+"langsmith@npm:>=0.2.8 <0.4.0":
+ version: 0.2.15
+ resolution: "langsmith@npm:0.2.15"
dependencies:
"@types/uuid": ^10.0.0
commander: ^10.0.1
@@ -33564,7 +33565,7 @@ __metadata:
peerDependenciesMeta:
openai:
optional: true
- checksum: 8695df08a09b9885b0308c66fbf9802edbe20e286fec3db8faa75ed1893a7aafae014441e311677bb60abb33af49da7f7d8404f55fffbdad5aec61cf65215fc8
+ checksum: 72dbc86c72e186b3228ec754abc1c98fd17e62aee337375c228ba83da6dd6aaa0eea0acb0ca2c754e6c33f69ab0e11d39a122a7e51b6b33100ce95395aa57e9a
languageName: node
linkType: hard