diff --git a/docs/core_docs/docs/integrations/chat/deepseek.ipynb b/docs/core_docs/docs/integrations/chat/deepseek.ipynb index 85d36d5455f4..cde68ff0a773 100644 --- a/docs/core_docs/docs/integrations/chat/deepseek.ipynb +++ b/docs/core_docs/docs/integrations/chat/deepseek.ipynb @@ -193,16 +193,6 @@ ")" ] }, - { - "cell_type": "markdown", - "id": "d1ee55bc-ffc8-4cfa-801c-993953a08cfd", - "metadata": {}, - "source": [ - "## TODO: Any functionality specific to this model provider\n", - "\n", - "E.g. creating/using finetuned models via this provider. Delete if not relevant." - ] - }, { "cell_type": "markdown", "id": "3a5bb5ca-c3ae-4a58-be67-2cd18574b9a3", diff --git a/libs/langchain-deepseek/src/chat_models.ts b/libs/langchain-deepseek/src/chat_models.ts index 2c8c375eadf5..7afc479745ed 100644 --- a/libs/langchain-deepseek/src/chat_models.ts +++ b/libs/langchain-deepseek/src/chat_models.ts @@ -1,5 +1,10 @@ import { getEnvironmentVariable } from "@langchain/core/utils/env"; -import { ChatOpenAI, ChatOpenAICallOptions, ChatOpenAIFields, OpenAIClient } from "@langchain/openai"; +import { + ChatOpenAI, + ChatOpenAICallOptions, + ChatOpenAIFields, + OpenAIClient, +} from "@langchain/openai"; export interface ChatDeepseekCallOptions extends ChatOpenAICallOptions { headers?: Record<string, string>; } @@ -317,8 +322,7 @@ export class ChatDeepseek extends ChatOpenAI<ChatDeepseekCallOptions> { rawResponse, defaultRole ); - messageChunk.additional_kwargs.reasoning_content = - delta.reasoning_content; + messageChunk.additional_kwargs.reasoning_content = delta.reasoning_content; return messageChunk; } @@ -336,4 +340,4 @@ export class ChatDeepseek extends ChatOpenAI<ChatDeepseekCallOptions> { ).reasoning_content; return langChainMessage; } -} \ No newline at end of file +} diff --git a/libs/langchain-deepseek/src/tests/chat_models.int.test.ts b/libs/langchain-deepseek/src/tests/chat_models.int.test.ts index 4e1408e19824..5ca45ffd92ea 100644 --- a/libs/langchain-deepseek/src/tests/chat_models.int.test.ts +++
b/libs/langchain-deepseek/src/tests/chat_models.int.test.ts @@ -11,5 +11,7 @@ test("Can send deepseek-reasoner requests", async () => { // Models also accept a list of chat messages or a formatted prompt const result = await llm.invoke(input); console.log(result); - expect((result.additional_kwargs.reasoning_content as any).length).toBeGreaterThan(10); + expect( + (result.additional_kwargs.reasoning_content as any).length + ).toBeGreaterThan(10); });