core: Add signal/timeout options to RunnableConfig
- Handled by all built-in runnables
- Handled by all utility methods in base runnable, which should propagate to basically all runnables
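As a usage sketch (not part of this commit): with `signal` and `timeout` available on `RunnableConfig`, either option can be passed in the config argument of `invoke`/`stream`/`batch` on any runnable, not only on chat models and LLMs. The `FakeListChatModel` test helper and the inputs below are illustrative assumptions:

```ts
// Sketch only: exercising the new config-level options on an arbitrary runnable.
// Assumes the FakeListChatModel helper from "@langchain/core/utils/testing".
import { FakeListChatModel } from "@langchain/core/utils/testing";

const model = new FakeListChatModel({ responses: ["hello"] });

// Abort automatically after 5 seconds via the config-level timeout...
const viaTimeout = await model.invoke("hi", { timeout: 5000 });

// ...or pass an AbortSignal and cancel from the caller's side.
const controller = new AbortController();
const pending = model.invoke("hi", { signal: controller.signal });
controller.abort();
await pending.catch((e) => console.error("aborted:", e));
```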
nfcampos committed Aug 1, 2024
1 parent fa376c6 commit c2f9818
Showing 8 changed files with 231 additions and 79 deletions.
12 changes: 0 additions & 12 deletions langchain-core/src/language_models/base.ts
```diff
@@ -207,18 +207,6 @@ export interface BaseLanguageModelCallOptions extends RunnableConfig {
    * If not provided, the default stop tokens for the model will be used.
    */
   stop?: string[];
-
-  /**
-   * Timeout for this call in milliseconds.
-   */
-  timeout?: number;
-
-  /**
-   * Abort signal for this call.
-   * If provided, the call will be aborted when the signal is aborted.
-   * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal
-   */
-  signal?: AbortSignal;
 }
 
 export interface FunctionDefinition {
```
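The two options deleted above are exactly what the commit title moves onto `RunnableConfig`, which `BaseLanguageModelCallOptions` extends, so both remain available to callers. The `RunnableConfig` side of the change lives in one of the changed files not shown on this page; a hedged sketch of what those additions presumably look like, mirroring the removed declarations:

```ts
// Assumption: the counterpart additions to RunnableConfig (not shown in the
// loaded diff) mirror the fields removed from BaseLanguageModelCallOptions.
export interface RunnableConfig {
  // ...existing fields such as callbacks, tags, metadata, runName ...

  /** Timeout for this call in milliseconds. */
  timeout?: number;

  /**
   * Abort signal for this call.
   * If provided, the call will be aborted when the signal is aborted.
   */
  signal?: AbortSignal;
}
```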
18 changes: 9 additions & 9 deletions langchain-core/src/language_models/chat_models.ts
```diff
@@ -145,7 +145,7 @@ export abstract class BaseChatModel<
 > extends BaseLanguageModel<OutputMessageType, CallOptions> {
   declare ParsedCallOptions: Omit<
     CallOptions,
-    keyof RunnableConfig & "timeout"
+    Exclude<keyof RunnableConfig, "signal">
   >;
 
   // Only ever instantiated in main LangChain
@@ -159,14 +159,13 @@ export abstract class BaseChatModel<
     ...llmOutputs: LLMResult["llmOutput"][]
   ): LLMResult["llmOutput"];
 
-  protected _separateRunnableConfigFromCallOptions(
+  protected _separateRunnableConfigFromCallOptionsCompat(
     options?: Partial<CallOptions>
   ): [RunnableConfig, this["ParsedCallOptions"]] {
+    // For backwards compat, keep `signal` in both runnableConfig and callOptions
     const [runnableConfig, callOptions] =
       super._separateRunnableConfigFromCallOptions(options);
-    if (callOptions?.timeout && !callOptions.signal) {
-      callOptions.signal = AbortSignal.timeout(callOptions.timeout);
-    }
+    (callOptions as this["ParsedCallOptions"]).signal = runnableConfig.signal;
     return [runnableConfig, callOptions as this["ParsedCallOptions"]];
   }
 
@@ -232,7 +231,7 @@ export abstract class BaseChatModel<
     const prompt = BaseChatModel._convertInputToPromptValue(input);
     const messages = prompt.toChatMessages();
     const [runnableConfig, callOptions] =
-      this._separateRunnableConfigFromCallOptions(options);
+      this._separateRunnableConfigFromCallOptionsCompat(options);
 
     const inheritableMetadata = {
       ...runnableConfig.metadata,
@@ -578,16 +577,17 @@ export abstract class BaseChatModel<
     );
 
     const [runnableConfig, callOptions] =
-      this._separateRunnableConfigFromCallOptions(parsedOptions);
+      this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);
     runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;
 
     if (!this.cache) {
       return this._generateUncached(baseMessages, callOptions, runnableConfig);
     }
 
     const { cache } = this;
-    const llmStringKey =
-      this._getSerializedCacheKeyParametersForCall(callOptions);
+    const llmStringKey = this._getSerializedCacheKeyParametersForCall(
+      callOptions as CallOptions
+    );
 
     const { generations, missingPromptIndices } = await this._generateCached({
       messages: baseMessages,
```
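The renamed compat helper above copies the config-level `signal` back onto the parsed call options, so model implementations that already read `options.signal` keep working no matter where the caller supplied the signal. A sketch under that assumption (the `SimpleChatModel` subclass and endpoint are illustrative, not part of this diff):

```ts
// Illustrative custom model: it only looks at options.signal, yet still gets
// aborted when the caller sets `signal` (or `timeout`) on the RunnableConfig,
// because the compat helper copies the config signal into the call options.
import { SimpleChatModel } from "@langchain/core/language_models/chat_models";
import type { BaseMessage } from "@langchain/core/messages";

class MyChatModel extends SimpleChatModel {
  _llmType() {
    return "my-chat-model";
  }

  async _call(
    messages: BaseMessage[],
    options: this["ParsedCallOptions"]
  ): Promise<string> {
    const res = await fetch("https://example.invalid/chat", {
      method: "POST",
      body: JSON.stringify(messages.map((m) => m.content)),
      signal: options.signal, // populated from the config by the compat helper
    });
    return res.text();
  }
}
```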
18 changes: 9 additions & 9 deletions langchain-core/src/language_models/llms.ts
```diff
@@ -63,7 +63,7 @@ export abstract class BaseLLM<
 > extends BaseLanguageModel<string, CallOptions> {
   declare ParsedCallOptions: Omit<
     CallOptions,
-    keyof RunnableConfig & "timeout"
+    Exclude<keyof RunnableConfig, "signal">
   >;
 
   // Only ever instantiated in main LangChain
@@ -103,14 +103,13 @@ export abstract class BaseLLM<
     throw new Error("Not implemented.");
   }
 
-  protected _separateRunnableConfigFromCallOptions(
+  protected _separateRunnableConfigFromCallOptionsCompat(
     options?: Partial<CallOptions>
   ): [RunnableConfig, this["ParsedCallOptions"]] {
+    // For backwards compat, keep `signal` in both runnableConfig and callOptions
     const [runnableConfig, callOptions] =
       super._separateRunnableConfigFromCallOptions(options);
-    if (callOptions?.timeout && !callOptions.signal) {
-      callOptions.signal = AbortSignal.timeout(callOptions.timeout);
-    }
+    (callOptions as this["ParsedCallOptions"]).signal = runnableConfig.signal;
     return [runnableConfig, callOptions as this["ParsedCallOptions"]];
   }
 
@@ -126,7 +125,7 @@ export abstract class BaseLLM<
     } else {
       const prompt = BaseLLM._convertInputToPromptValue(input);
       const [runnableConfig, callOptions] =
-        this._separateRunnableConfigFromCallOptions(options);
+        this._separateRunnableConfigFromCallOptionsCompat(options);
       const callbackManager_ = await CallbackManager.configure(
         runnableConfig.callbacks,
         this.callbacks,
@@ -461,16 +460,17 @@ export abstract class BaseLLM<
     }
 
     const [runnableConfig, callOptions] =
-      this._separateRunnableConfigFromCallOptions(parsedOptions);
+      this._separateRunnableConfigFromCallOptionsCompat(parsedOptions);
     runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;
 
     if (!this.cache) {
       return this._generateUncached(prompts, callOptions, runnableConfig);
     }
 
     const { cache } = this;
-    const llmStringKey =
-      this._getSerializedCacheKeyParametersForCall(callOptions);
+    const llmStringKey = this._getSerializedCacheKeyParametersForCall(
+      callOptions as CallOptions
+    );
     const { generations, missingPromptIndices } = await this._generateCached({
       prompts,
       cache,
```
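Because the compat separation runs in the shared base-class code paths shown above, a config-level signal should also cut off streaming (and, per the commit message, propagate to other runnables). An illustrative sketch, assuming the `FakeStreamingLLM` test helper from `@langchain/core/utils/testing`:

```ts
// Sketch: cancelling a stream via a signal passed in the RunnableConfig.
// FakeStreamingLLM and the prompt are stand-ins; the behaviour described is
// the expected effect of this change, not verified output.
import { FakeStreamingLLM } from "@langchain/core/utils/testing";

const llm = new FakeStreamingLLM({ responses: ["a long streamed answer"] });
const controller = new AbortController();

try {
  const stream = await llm.stream("tell me a story", {
    signal: controller.signal,
  });
  for await (const chunk of stream) {
    console.log(chunk);
    controller.abort(); // request cancellation after the first chunk
  }
} catch (e) {
  console.error("stream aborted:", e); // expected once the signal fires
}
```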
(Diffs for the remaining 5 changed files are not shown here.)
