Skip to content

Commit

Permalink
Naming
Browse files Browse the repository at this point in the history
  • Loading branch information
jacoblee93 committed Jan 21, 2025
1 parent fab9ae4 commit 02c44de
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 18 deletions.
18 changes: 9 additions & 9 deletions langchain-core/src/language_models/chat_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -346,18 +346,18 @@ export abstract class BaseChatModel<
messages: BaseMessageLike[][],
parsedOptions: this["ParsedCallOptions"],
handledOptions: RunnableConfig,
-    existingRunManagers?: CallbackManagerForLLMRun[]
+    startedRunManagers?: CallbackManagerForLLMRun[]
): Promise<LLMResult> {
const baseMessages = messages.map((messageList) =>
messageList.map(coerceMessageLikeToMessage)
);

let runManagers: CallbackManagerForLLMRun[] | undefined;
if (
-      existingRunManagers !== undefined &&
-      existingRunManagers.length === baseMessages.length
+      startedRunManagers !== undefined &&
+      startedRunManagers.length === baseMessages.length
) {
-      runManagers = existingRunManagers;
+      runManagers = startedRunManagers;
} else {
const inheritableMetadata = {
...handledOptions.metadata,
Expand Down Expand Up @@ -525,7 +525,7 @@ export abstract class BaseChatModel<
}): Promise<
LLMResult & {
missingPromptIndices: number[];
-      existingRunManagers?: CallbackManagerForLLMRun[];
+      startedRunManagers?: CallbackManagerForLLMRun[];
}
> {
const baseMessages = messages.map((messageList) =>
Expand Down Expand Up @@ -633,7 +633,7 @@ export abstract class BaseChatModel<
const output = {
generations,
missingPromptIndices,
-      existingRunManagers: runManagers,
+      startedRunManagers: runManagers,
};

// This defines RUN_KEY as a non-enumerable property on the output object
Expand Down Expand Up @@ -686,7 +686,7 @@ export abstract class BaseChatModel<
callOptions as CallOptions
);

-    const { generations, missingPromptIndices, existingRunManagers } =
+    const { generations, missingPromptIndices, startedRunManagers } =
await this._generateCached({
messages: baseMessages,
cache,
Expand All @@ -701,8 +701,8 @@ export abstract class BaseChatModel<
missingPromptIndices.map((i) => baseMessages[i]),
callOptions,
runnableConfig,
-      existingRunManagers !== undefined
-        ? missingPromptIndices.map((i) => existingRunManagers?.[i])
+      startedRunManagers !== undefined
+        ? missingPromptIndices.map((i) => startedRunManagers?.[i])
: undefined
);
await Promise.all(
Expand Down
18 changes: 9 additions & 9 deletions langchain-core/src/language_models/llms.ts
Original file line number Diff line number Diff line change
Expand Up @@ -241,14 +241,14 @@ export abstract class BaseLLM<
prompts: string[],
parsedOptions: this["ParsedCallOptions"],
handledOptions: BaseCallbackConfig,
-    existingRunManagers?: CallbackManagerForLLMRun[]
+    startedRunManagers?: CallbackManagerForLLMRun[]
): Promise<LLMResult> {
let runManagers: CallbackManagerForLLMRun[] | undefined;
if (
-      existingRunManagers !== undefined &&
-      existingRunManagers.length === prompts.length
+      startedRunManagers !== undefined &&
+      startedRunManagers.length === prompts.length
) {
-      runManagers = existingRunManagers;
+      runManagers = startedRunManagers;
} else {
const callbackManager_ = await CallbackManager.configure(
handledOptions.callbacks,
Expand Down Expand Up @@ -358,7 +358,7 @@ export abstract class BaseLLM<
}): Promise<
LLMResult & {
missingPromptIndices: number[];
-      existingRunManagers?: CallbackManagerForLLMRun[];
+      startedRunManagers?: CallbackManagerForLLMRun[];
}
> {
const callbackManager_ = await CallbackManager.configure(
Expand Down Expand Up @@ -440,7 +440,7 @@ export abstract class BaseLLM<
const output = {
generations,
missingPromptIndices,
-      existingRunManagers: runManagers,
+      startedRunManagers: runManagers,
};

// This defines RUN_KEY as a non-enumerable property on the output object
Expand Down Expand Up @@ -487,7 +487,7 @@ export abstract class BaseLLM<
const llmStringKey = this._getSerializedCacheKeyParametersForCall(
callOptions as CallOptions
);
-    const { generations, missingPromptIndices, existingRunManagers } =
+    const { generations, missingPromptIndices, startedRunManagers } =
await this._generateCached({
prompts,
cache,
Expand All @@ -503,8 +503,8 @@ export abstract class BaseLLM<
missingPromptIndices.map((i) => prompts[i]),
callOptions,
runnableConfig,
-      existingRunManagers !== undefined
-        ? missingPromptIndices.map((i) => existingRunManagers?.[i])
+      startedRunManagers !== undefined
+        ? missingPromptIndices.map((i) => startedRunManagers?.[i])
: undefined
);
await Promise.all(
Expand Down

0 comments on commit 02c44de

Please sign in to comment.