Skip to content

Commit

Permalink
Merge branch 'main' into aoai-ua
Browse files Browse the repository at this point in the history
  • Loading branch information
sinedied authored Feb 10, 2025
2 parents 977d4a2 + 193d1e9 commit 8fc8162
Show file tree
Hide file tree
Showing 19 changed files with 378 additions and 35 deletions.
2 changes: 1 addition & 1 deletion docs/core_docs/docs/integrations/llms/replicate.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import IntegrationInstallTooltip from "@mdx_components/integration_install_toolt
<IntegrationInstallTooltip></IntegrationInstallTooltip>

```bash npm2yarn
npm install replicate @langchain/community @langchain/core
npm install replicate@1 @langchain/community @langchain/core
```

import ReplicateLlama2 from "@examples/models/llm/replicate_llama2.ts";
Expand Down
6 changes: 5 additions & 1 deletion examples/src/document_loaders/openai_whisper_audio.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,11 @@ import { OpenAIWhisperAudio } from "@langchain/community/document_loaders/fs/ope

const filePath = "./src/document_loaders/example_data/test.mp3";

// Extra options are forwarded verbatim to OpenAI's audio transcription
// endpoint; pinning `language` skips server-side language auto-detection.
const loader = new OpenAIWhisperAudio(filePath, {
  transcriptionCreateParams: {
    language: "en",
  },
});

const docs = await loader.load();

Expand Down
2 changes: 1 addition & 1 deletion langchain-core/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@langchain/core",
"version": "0.3.38",
"version": "0.3.39",
"description": "Core LangChain.js abstractions and schemas",
"type": "module",
"engines": {
Expand Down
4 changes: 4 additions & 0 deletions langchain-core/src/callbacks/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,10 @@ export abstract class BaseCallbackHandler
return undefined;
}

// Optional allow-list of constructor kwargs to serialize. Returning
// `undefined` (the default) means no filtering is applied; subclasses may
// override to restrict which keys are serialized.
get lc_serializable_keys(): string[] | undefined {
return undefined;
}

/**
* The name of the serializable. Override to provide an alias or
* to preserve the serialized module name in minified environments.
Expand Down
18 changes: 17 additions & 1 deletion langchain-core/src/load/serializable.ts
Original file line number Diff line number Diff line change
Expand Up @@ -140,8 +140,24 @@ export abstract class Serializable implements SerializableInterface {
return undefined;
}

/**
 * A manual list of keys that should be serialized.
 * If not overridden, all fields passed into the constructor will be serialized.
 * Returning `undefined` (the default) disables filtering entirely.
 */
get lc_serializable_keys(): string[] | undefined {
return undefined;
}

constructor(kwargs?: SerializedFields, ..._args: never[]) {
  // Read the getter once: subclasses may override `lc_serializable_keys`,
  // and there is no need to re-invoke it for every entry in the filter.
  const serializableKeys = this.lc_serializable_keys;
  if (serializableKeys !== undefined) {
    // Keep only the constructor kwargs explicitly allow-listed by the
    // subclass; everything else is dropped from the serialized form.
    this.lc_kwargs = Object.fromEntries(
      Object.entries(kwargs ?? {}).filter(([key]) =>
        serializableKeys.includes(key)
      )
    );
  } else {
    // No allow-list: serialize every kwarg as-is.
    this.lc_kwargs = kwargs ?? {};
  }
}

toJSON(): Serialized {
Expand Down
6 changes: 3 additions & 3 deletions libs/langchain-community/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@langchain/community",
"version": "0.3.28",
"version": "0.3.29",
"description": "Third-party integrations for LangChain.js",
"type": "module",
"engines": {
Expand Down Expand Up @@ -206,7 +206,7 @@
"pyodide": "^0.26.2",
"redis": "^4.6.6",
"release-it": "^17.6.0",
"replicate": "^0.29.4",
"replicate": "^1.0.1",
"rollup": "^3.19.1",
"sonix-speech-recognition": "^2.1.1",
"srt-parser-2": "^1.2.3",
Expand Down Expand Up @@ -336,7 +336,7 @@
"puppeteer": "*",
"pyodide": ">=0.24.1 <0.27.0",
"redis": "*",
"replicate": "^0.29.4",
"replicate": "*",
"sonix-speech-recognition": "^2.1.1",
"srt-parser-2": "^1.2.3",
"typeorm": "^0.3.20",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,18 @@ const MODEL_NAME = "whisper-1";
export class OpenAIWhisperAudio extends BufferLoader {
private readonly openAIClient: OpenAIClient;

private readonly transcriptionCreateParams?: Partial<OpenAIClient.Audio.TranscriptionCreateParams>;

constructor(
  filePathOrBlob: string | Blob,
  fields?: {
    clientOptions?: ClientOptions;
    transcriptionCreateParams?: Partial<OpenAIClient.Audio.TranscriptionCreateParams>;
  }
) {
  super(filePathOrBlob);
  // Pull both options out in one step; absent fields fall back to defaults.
  const { clientOptions, transcriptionCreateParams } = fields ?? {};
  this.openAIClient = new OpenAIClient(clientOptions);
  this.transcriptionCreateParams = transcriptionCreateParams ?? {};
}

protected async parse(
Expand All @@ -38,6 +42,7 @@ export class OpenAIWhisperAudio extends BufferLoader {
await this.openAIClient.audio.transcriptions.create({
file: await toFile(raw, fileName),
model: MODEL_NAME,
...this.transcriptionCreateParams,
});
const document = new Document({
pageContent: transcriptionResponse.text,
Expand Down
12 changes: 6 additions & 6 deletions libs/langchain-ollama/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@langchain/ollama",
"version": "0.1.5",
"version": "0.1.6",
"description": "Ollama integration for LangChain.js",
"type": "module",
"engines": {
Expand Down Expand Up @@ -32,8 +32,10 @@
"author": "LangChain",
"license": "MIT",
"dependencies": {
"ollama": "^0.5.9",
"uuid": "^10.0.0"
"ollama": "^0.5.12",
"uuid": "^10.0.0",
"zod": "^3.24.1",
"zod-to-json-schema": "^3.24.1"
},
"peerDependencies": {
"@langchain/core": ">=0.2.21 <0.4.0"
Expand Down Expand Up @@ -62,9 +64,7 @@
"release-it": "^17.6.0",
"rollup": "^4.5.2",
"ts-jest": "^29.1.0",
"typescript": "<5.2.0",
"zod": "^3.22.4",
"zod-to-json-schema": "^3.23.0"
"typescript": "<5.2.0"
},
"publishConfig": {
"access": "public"
Expand Down
133 changes: 128 additions & 5 deletions libs/langchain-ollama/src/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@ import {
UsageMetadata,
type BaseMessage,
} from "@langchain/core/messages";
import { BaseLanguageModelInput } from "@langchain/core/language_models/base";
import {
BaseLanguageModelInput,
StructuredOutputMethodOptions,
} from "@langchain/core/language_models/base";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import {
type BaseChatModelParams,
Expand All @@ -21,9 +24,20 @@ import type {
Message as OllamaMessage,
Tool as OllamaTool,
} from "ollama";
import { Runnable } from "@langchain/core/runnables";
import {
Runnable,
RunnablePassthrough,
RunnableSequence,
} from "@langchain/core/runnables";
import { convertToOpenAITool } from "@langchain/core/utils/function_calling";
import { concat } from "@langchain/core/utils/stream";
import {
JsonOutputParser,
StructuredOutputParser,
} from "@langchain/core/output_parsers";
import { isZodSchema } from "@langchain/core/utils/types";
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import {
convertOllamaMessagesToLangChain,
convertToOllamaMessages,
Expand All @@ -36,6 +50,8 @@ export interface ChatOllamaCallOptions extends BaseChatModelCallOptions {
*/
stop?: string[];
tools?: BindToolsInput[];
// eslint-disable-next-line @typescript-eslint/no-explicit-any
format?: string | Record<string, any>;
}

export interface PullModelOptions {
Expand Down Expand Up @@ -82,7 +98,8 @@ export interface ChatOllamaInput
*/
checkOrPullModel?: boolean;
streaming?: boolean;
format?: string;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
format?: string | Record<string, any>;
}

/**
Expand Down Expand Up @@ -453,7 +470,8 @@ export class ChatOllama

streaming?: boolean;

format?: string;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
format?: string | Record<string, any>;

keepAlive?: string | number;

Expand Down Expand Up @@ -575,7 +593,7 @@ export class ChatOllama

return {
model: this.model,
format: this.format,
format: options?.format ?? this.format,
keep_alive: this.keepAlive,
options: {
numa: this.numa,
Expand Down Expand Up @@ -763,4 +781,109 @@ export class ChatOllama
}),
});
}

/**
 * Returns a runnable that produces output matching `outputSchema`.
 *
 * With `config.method === "jsonSchema"` the schema is passed to Ollama via
 * the request `format` option (see the `format` handling in this class) and
 * the model response is parsed locally; any other method delegates to the
 * base-class implementation (tool calling).
 *
 * @param outputSchema A zod schema or a plain JSON-schema-like object.
 * @param config Structured-output options (`method`, `includeRaw`, …).
 */
withStructuredOutput<
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  RunOutput extends Record<string, any> = Record<string, any>
>(
  outputSchema:
    | z.ZodType<RunOutput>
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    | Record<string, any>,
  config?: StructuredOutputMethodOptions<false>
): Runnable<BaseLanguageModelInput, RunOutput>;

withStructuredOutput<
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  RunOutput extends Record<string, any> = Record<string, any>
>(
  outputSchema:
    | z.ZodType<RunOutput>
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    | Record<string, any>,
  config?: StructuredOutputMethodOptions<true>
): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;

withStructuredOutput<
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  RunOutput extends Record<string, any> = Record<string, any>
>(
  outputSchema:
    | z.ZodType<RunOutput>
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    | Record<string, any>,
  config?: StructuredOutputMethodOptions<boolean>
):
  | Runnable<BaseLanguageModelInput, RunOutput>
  | Runnable<
      BaseLanguageModelInput,
      {
        raw: BaseMessage;
        parsed: RunOutput;
      }
    >;

withStructuredOutput<
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  RunOutput extends Record<string, any> = Record<string, any>
>(
  outputSchema:
    | z.ZodType<RunOutput>
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    | Record<string, any>,
  config?: StructuredOutputMethodOptions<boolean>
):
  | Runnable<BaseLanguageModelInput, RunOutput>
  | Runnable<
      BaseLanguageModelInput,
      {
        raw: BaseMessage;
        parsed: RunOutput;
      }
    > {
  // TODO: Make this method the default in a minor bump
  if (config?.method === "jsonSchema") {
    // Convert zod schemas to JSON schema; plain objects are assumed to
    // already be JSON-schema-shaped and are passed through unchanged.
    const outputSchemaIsZod = isZodSchema(outputSchema);
    const jsonSchema = outputSchemaIsZod
      ? zodToJsonSchema(outputSchema)
      : outputSchema;
    // Bind the schema as the per-call `format` option so Ollama constrains
    // its generation to match it.
    const llm = this.bind({
      format: jsonSchema,
    });
    // A zod schema gets full validation; otherwise fall back to plain JSON
    // parsing with no structural checks.
    const outputParser = outputSchemaIsZod
      ? StructuredOutputParser.fromZodSchema(outputSchema)
      : new JsonOutputParser<RunOutput>();

    if (!config?.includeRaw) {
      return llm.pipe(outputParser) as Runnable<
        BaseLanguageModelInput,
        RunOutput
      >;
    }

    // includeRaw: keep the raw message alongside the parsed value. If
    // parsing fails, fall back to `parsed: null` rather than throwing.
    const parserAssign = RunnablePassthrough.assign({
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      parsed: (input: any, config) => outputParser.invoke(input.raw, config),
    });
    const parserNone = RunnablePassthrough.assign({
      parsed: () => null,
    });
    const parsedWithFallback = parserAssign.withFallbacks({
      fallbacks: [parserNone],
    });
    return RunnableSequence.from<
      BaseLanguageModelInput,
      { raw: BaseMessage; parsed: RunOutput }
    >([
      {
        raw: llm,
      },
      parsedWithFallback,
    ]);
  } else {
    // Any other method (e.g. tool calling) is handled by the base class.
    // TODO: Fix this type in core
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    return super.withStructuredOutput<RunOutput>(outputSchema, config as any);
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,24 @@ test("Ollama can call withStructuredOutput", async () => {
expect(result.location).not.toBe("");
});

test("Ollama can call withStructuredOutput includeRaw", async () => {
test("Ollama can call withStructuredOutput includeRaw JSON Schema", async () => {
const model = new ChatOllama({
model: "llama3-groq-tool-use",
maxRetries: 1,
}).withStructuredOutput(weatherTool.schema, {
name: weatherTool.name,
includeRaw: true,
method: "jsonSchema",
});

const result = await model.invoke(messageHistory);
expect(result).toBeDefined();
expect(result.parsed.location).toBeDefined();
expect(result.parsed.location).not.toBe("");
expect((result.raw as AIMessage).tool_calls?.length).toBe(0);
});

test("Ollama can call withStructuredOutput includeRaw with tool calling", async () => {
const model = new ChatOllama({
model: "llama3-groq-tool-use",
maxRetries: 1,
Expand Down
4 changes: 2 additions & 2 deletions libs/langchain-openai/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@langchain/openai",
"version": "0.4.2",
"version": "0.4.3",
"description": "OpenAI integrations for LangChain.js",
"type": "module",
"engines": {
Expand Down Expand Up @@ -41,7 +41,7 @@
"zod-to-json-schema": "^3.22.3"
},
"peerDependencies": {
"@langchain/core": ">=0.3.29 <0.4.0"
"@langchain/core": ">=0.3.39 <0.4.0"
},
"devDependencies": {
"@azure/identity": "^4.2.1",
Expand Down
15 changes: 15 additions & 0 deletions libs/langchain-openai/src/azure/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -473,6 +473,21 @@ export class AzureChatOpenAI extends ChatOpenAI {
};
}

get lc_serializable_keys(): string[] {
  // Azure-specific credential/endpoint fields, appended to whatever keys
  // the parent class already serializes.
  const azureKeys = [
    "azureOpenAIApiKey",
    "azureOpenAIApiVersion",
    "azureOpenAIBasePath",
    "azureOpenAIEndpoint",
    "azureOpenAIApiInstanceName",
    "azureOpenAIApiDeploymentName",
    "deploymentName",
    "openAIApiKey",
    "openAIApiVersion",
  ];
  return [...super.lc_serializable_keys, ...azureKeys];
}

constructor(
fields?: Partial<OpenAIChatInput> &
Partial<AzureOpenAIInput> & {
Expand Down
Loading

0 comments on commit 8fc8162

Please sign in to comment.