From 3cb89e513e3daf5135957fe08e8fca2e32b74d5f Mon Sep 17 00:00:00 2001 From: Eric Date: Tue, 13 Jun 2023 10:28:20 +0800 Subject: [PATCH] Revert "simple chatgpt interface" This reverts commit a4269df7ac5cf79613925fe6e7945ccc93e4ec5c. --- .env.example | 3 +- components/Form.tsx | 183 ------------------------------------------ layouts/main.tsx | 6 -- pages/api/response.ts | 36 --------- pages/chatgpt.tsx | 12 --- utils/OpenAIStream.ts | 80 ------------------ yarn.lock | 17 +--- 7 files changed, 2 insertions(+), 335 deletions(-) delete mode 100644 components/Form.tsx delete mode 100644 pages/api/response.ts delete mode 100644 pages/chatgpt.tsx delete mode 100644 utils/OpenAIStream.ts diff --git a/.env.example b/.env.example index ea4b29d..edfebf3 100644 --- a/.env.example +++ b/.env.example @@ -3,5 +3,4 @@ SALAI_TOKEN="Token of the Account from which you paid MidJourney" SERVER_ID="Server id here" CHANNEL_ID="Channel in which commands are sent" NEXT_PUBLIC_IMAGE_PREFIX="/" -HUGGINGFACE_TOKEN="huggingface token here https://huggingface.co/docs/hub/security-tokens" -OPENAI_API_KEY="openai api key here" +HUGGINGFACE_TOKEN="huggingface token here https://huggingface.co/docs/hub/security-tokens" \ No newline at end of file diff --git a/components/Form.tsx b/components/Form.tsx deleted file mode 100644 index ae41831..0000000 --- a/components/Form.tsx +++ /dev/null @@ -1,183 +0,0 @@ -'use client' -import { useRef, useState } from 'react' -import useSWR from 'swr' - -interface ModelType { - object: 'engine' - id: string - ready: boolean - owner: string - permissions: null - created: string -} - -const Form = () => { - const messageInput = useRef(null) - const [response, setResponse] = useState([]) - const [isLoading, setIsLoading] = useState(false) - const [models, setModels] = useState([]) - const [currentModel, setCurrentModel] = useState('gpt-4') - - const handleEnter = ( - e: React.KeyboardEvent & - React.FormEvent - ) => { - if (e.key === 'Enter' && isLoading === false) { - e.preventDefault() - setIsLoading(true) - handleSubmit(e) - } - } - - const handleSubmit = async (e: React.FormEvent) => { - e.preventDefault() - const message = messageInput.current?.value - if (message !== undefined) { - setResponse((prev) => [...prev, message]) - messageInput.current!.value = '' - } - - if (!message) { - return - } - - const response = await fetch('/api/response', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - message, - currentModel, - }), - }) - console.log('Edge function returned.') - - console.log(response) - - if (!response.ok) { - throw new Error(response.statusText) - } - - const data = response.body - if (!data) { - return - } - - const reader = data.getReader() - const decoder = new TextDecoder() - let done = false - - setResponse((prev) => [...prev, message]) - - let currentResponse: string[] = [] - while (!done) { - const { value, done: doneReading } = await reader.read() - done = doneReading - const chunkValue = decoder.decode(value) - // currentResponse = [...currentResponse, message, chunkValue]; - currentResponse = [...currentResponse, chunkValue] - setResponse((prev) => [...prev.slice(0, -1), currentResponse.join('')]) - } - // breaks text indent on refresh due to streaming - // localStorage.setItem('response', JSON.stringify(currentResponse)); - } - - const handleReset = () => { - localStorage.removeItem('response') - setResponse([]) - } - - useSWR('fetchingResponse', async () => { - const storedResponse = 
-      localStorage.getItem('response')
-    if (storedResponse) {
-      setResponse(JSON.parse(storedResponse))
-    }
-  })
-
-  const handleModelChange = (e: React.ChangeEvent<HTMLSelectElement>) => {
-    setCurrentModel(e.target.value)
-  }
-
-  return (
-    <div>
-      <div>
-        {isLoading
-          ? response.map((item: any, index: number) => {
-              return (
-                <div key={index}>
-                  <p>{item}</p>
-                </div>
-              )
-            })
-          : response
-          ? response.map((item: string, index: number) => {
-              return (
-                <div key={index}>
-                  <p>{item}</p>
-                </div>
-              )
-            })
-          : null}
-      </div>
-      <form onSubmit={handleSubmit}>