diff --git a/demos/chatgpt-nextjs/src/app/api/openai/route.ts b/demos/chatgpt-nextjs/src/app/api/openai/route.ts
index 5aa87303..0117b65e 100644
--- a/demos/chatgpt-nextjs/src/app/api/openai/route.ts
+++ b/demos/chatgpt-nextjs/src/app/api/openai/route.ts
@@ -9,9 +9,16 @@ export async function POST(request: Request) {
     baseURL: 'base url', // if you don't need to change the baseURL, you can delete this line
   });
 
+  const PickMessages = messages.map((message) => {
+    return {
+      role: message.role,
+      content: message.content,
+    };
+  });
+
   const response = await openai.chat.completions.create({
     model: 'gpt-3.5-turbo',
-    messages: [...messages],
+    messages: [...PickMessages],
     stream: true,
   });
 
diff --git a/docs/guide/chatgpt.md b/docs/guide/chatgpt.md
index 0a293b74..723b2e9d 100644
--- a/docs/guide/chatgpt.md
+++ b/docs/guide/chatgpt.md
@@ -33,6 +33,8 @@ bun add openai
 
 We use Vercel's library to parse the data stream, so there is no need to configure a Reader by hand.
 
+> Here we need to pick out just the role and content fields, because the messages array carries more than that, while ChatGPT only needs these two.
+
 ```ts
 import OpenAI from 'openai';
 import { OpenAIStream, StreamingTextResponse } from 'ai';
@@ -45,6 +47,13 @@ export const POST = async (request: Request) => {
     baseURL: 'base url',
   });
 
+  const PickMessages = messages.map((message) => {
+    return {
+      role: message.role,
+      content: message.content,
+    };
+  });
+
   const response = await openai.chat.completions.create({
     model: 'gpt-3.5-turbo',
     messages: [...messages],
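
For context, here is roughly what the demo route handler ends up looking like once the patch is applied: a minimal sketch, assuming the chat UI posts a `{ messages }` body and that the API key comes from an `OPENAI_API_KEY` environment variable (neither detail appears in the hunks above).

```ts
import OpenAI from 'openai';
import { OpenAIStream, StreamingTextResponse } from 'ai';

export async function POST(request: Request) {
  // Assumed request shape: the chat UI posts { messages: [...] }.
  const { messages } = await request.json();

  const openai = new OpenAI({
    apiKey: process.env.OPENAI_API_KEY, // assumed env var; not shown in the diff context
    // baseURL: 'base url',             // optional override, as in the demo
  });

  // Keep only the fields the Chat Completions API expects; the client-side
  // message objects may carry extra fields (ids, timestamps, ...) that the API does not need.
  const PickMessages = messages.map((message: any) => {
    return {
      role: message.role,
      content: message.content,
    };
  });

  const response = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    messages: [...PickMessages],
    stream: true,
  });

  // Vercel's `ai` package converts the streamed OpenAI response into a web
  // ReadableStream and wraps it in a streaming HTTP response, so no manual
  // Reader setup is needed.
  const stream = OpenAIStream(response);
  return new StreamingTextResponse(stream);
}
```

Stripping the extra fields on the server keeps the client free to attach whatever metadata it likes to each message, without forwarding fields the Chat Completions API may reject.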