diff --git a/libs/langchain-openai/src/azure/chat_models.ts b/libs/langchain-openai/src/azure/chat_models.ts
index 391d1b98b7e8..c57640627da1 100644
--- a/libs/langchain-openai/src/azure/chat_models.ts
+++ b/libs/langchain-openai/src/azure/chat_models.ts
@@ -266,7 +266,7 @@ export type { AzureOpenAIInput };
  * const Joke = z.object({
  *   setup: z.string().describe("The setup of the joke"),
  *   punchline: z.string().describe("The punchline to the joke"),
- *   rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
+ *   rating: z.number().nullable().describe("How funny the joke is, from 1 to 10")
  * }).describe('Joke to tell user.');
  *
  * const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
diff --git a/libs/langchain-openai/src/azure/embeddings.ts b/libs/langchain-openai/src/azure/embeddings.ts
index f50e3d2a8212..37e684ede2f4 100644
--- a/libs/langchain-openai/src/azure/embeddings.ts
+++ b/libs/langchain-openai/src/azure/embeddings.ts
@@ -37,10 +37,12 @@ export class AzureOpenAIEmbeddings extends OpenAIEmbeddings {
     this.batchSize = fields?.batchSize ?? 1;
     this.azureOpenAIApiKey =
       fields?.azureOpenAIApiKey ??
+      fields?.apiKey ??
       getEnvironmentVariable("AZURE_OPENAI_API_KEY");
 
     this.azureOpenAIApiVersion =
       fields?.azureOpenAIApiVersion ??
+      fields?.openAIApiVersion ??
       getEnvironmentVariable("AZURE_OPENAI_API_VERSION");
 
     this.azureOpenAIBasePath =
diff --git a/libs/langchain-openai/src/chat_models.ts b/libs/langchain-openai/src/chat_models.ts
index b6da672416fa..98670b1a5278 100644
--- a/libs/langchain-openai/src/chat_models.ts
+++ b/libs/langchain-openai/src/chat_models.ts
@@ -715,7 +715,7 @@ export interface ChatOpenAIFields
  * const Joke = z.object({
  *   setup: z.string().describe("The setup of the joke"),
  *   punchline: z.string().describe("The punchline to the joke"),
- *   rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
+ *   rating: z.number().nullable().describe("How funny the joke is, from 1 to 10")
  * }).describe('Joke to tell user.');
  *
  * const structuredLlm = llm.withStructuredOutput(Joke, {