diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index fd2a759b99b..c67fca63019 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -57,7 +57,6 @@ services: # ports: # - 7700:7700 # if exposing these ports, make sure your master key is not the default value environment: - - MEILI_HTTP_ADDR=meilisearch:7700 - MEILI_NO_ANALYTICS=true - MEILI_MASTER_KEY=5c71cf56d672d009e36070b5bc5e47b743535ae55c818ae3b735bb6ebfb4ba63 volumes: diff --git a/.dockerignore b/.dockerignore index 0f03be58859..396f0da3e57 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,5 +1,17 @@ +**/.circleci +**/.editorconfig +**/.dockerignore +**/.git +**/.DS_Store +**/.vscode **/node_modules -client/dist/images + +# Specific patterns to ignore data-node -.env -**/.env \ No newline at end of file +meili_data* +librechat* +Dockerfile* +docs + +# Ignore all hidden files +.* diff --git a/.env.example b/.env.example index 22b7743e0d0..2e23a09a349 100644 --- a/.env.example +++ b/.env.example @@ -101,7 +101,7 @@ GOOGLE_KEY=user_provided #============# OPENAI_API_KEY=user_provided -# OPENAI_MODELS=gpt-3.5-turbo-1106,gpt-4-1106-preview,gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-0301,text-davinci-003,gpt-4,gpt-4-0314,gpt-4-0613 +# OPENAI_MODELS=gpt-3.5-turbo-1106,gpt-4-1106-preview,gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-0301,gpt-4,gpt-4-0314,gpt-4-0613 DEBUG_OPENAI=false @@ -115,6 +115,8 @@ DEBUG_OPENAI=false # OPENAI_REVERSE_PROXY= +# OPENAI_ORGANIZATION= + #============# # OpenRouter # #============# @@ -143,11 +145,22 @@ AZURE_AI_SEARCH_SEARCH_OPTION_QUERY_TYPE= AZURE_AI_SEARCH_SEARCH_OPTION_TOP= AZURE_AI_SEARCH_SEARCH_OPTION_SELECT= -# DALL·E 3 +# DALL·E +#---------------- +# DALLE_API_KEY= # Key for both DALL-E-2 and DALL-E-3 +# DALLE3_API_KEY= # Key for DALL-E-3 only +# DALLE2_API_KEY= # Key for DALL-E-2 only +# DALLE3_SYSTEM_PROMPT="Your DALL-E-3 System Prompt here" +# DALLE2_SYSTEM_PROMPT="Your DALL-E-2 System Prompt here" 
+# DALLE_REVERSE_PROXY= # Reverse proxy for DALL-E-2 and DALL-E-3 +# DALLE3_BASEURL= # Base URL for DALL-E-3 +# DALLE2_BASEURL= # Base URL for DALL-E-2 + +# DALL·E (via Azure OpenAI) +# Note: requires some of the variables above to be set #---------------- -# DALLE_API_KEY= -# DALLE3_SYSTEM_PROMPT="Your System Prompt here" -# DALLE_REVERSE_PROXY= +# DALLE3_AZURE_API_VERSION= # Azure OpenAI API version for DALL-E-3 +# DALLE2_AZURE_API_VERSION= # Azure OpenAI API versiion for DALL-E-2 # Google #----------------- @@ -177,7 +190,6 @@ ZAPIER_NLA_API_KEY= SEARCH=true MEILI_NO_ANALYTICS=true MEILI_HOST=http://0.0.0.0:7700 -MEILI_HTTP_ADDR=0.0.0.0:7700 MEILI_MASTER_KEY=DrhYf7zENyR6AlUCKmnz0eYASOQdl6zxH7s7MKFSfFCt #===================================================# diff --git a/.github/workflows/container.yml b/.github/workflows/container.yml index 949d39cfe04..23c6ad48cc8 100644 --- a/.github/workflows/container.yml +++ b/.github/workflows/container.yml @@ -19,6 +19,10 @@ jobs: - name: Set up Docker uses: docker/setup-buildx-action@v3 + # Set up QEMU for cross-platform builds + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + # Log in to GitHub Container Registry - name: Log in to GitHub Container Registry uses: docker/login-action@v2 @@ -27,26 +31,53 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - # Run docker-compose build + # Prepare Docker Build - name: Build Docker images run: | cp .env.example .env - docker-compose build - docker build -f Dockerfile.multi --target api-build -t librechat-api . 
- # Get Tag Name - - name: Get Tag Name - id: tag_name - run: echo "TAG_NAME=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV + # Tag and push librechat-api + - name: Docker metadata for librechat-api + id: meta-librechat-api + uses: docker/metadata-action@v5 + with: + images: | + ghcr.io/${{ github.repository_owner }}/librechat-api + tags: | + type=raw,value=latest + type=semver,pattern={{version}} + type=semver,pattern={{major}} + type=semver,pattern={{major}}.{{minor}} - # Tag it properly before push to github - - name: tag image and push - run: | - docker tag librechat:latest ghcr.io/${{ github.repository_owner }}/librechat:${{ env.TAG_NAME }} - docker push ghcr.io/${{ github.repository_owner }}/librechat:${{ env.TAG_NAME }} - docker tag librechat:latest ghcr.io/${{ github.repository_owner }}/librechat:latest - docker push ghcr.io/${{ github.repository_owner }}/librechat:latest - docker tag librechat-api:latest ghcr.io/${{ github.repository_owner }}/librechat-api:${{ env.TAG_NAME }} - docker push ghcr.io/${{ github.repository_owner }}/librechat-api:${{ env.TAG_NAME }} - docker tag librechat-api:latest ghcr.io/${{ github.repository_owner }}/librechat-api:latest - docker push ghcr.io/${{ github.repository_owner }}/librechat-api:latest + - name: Build and librechat-api + uses: docker/build-push-action@v5 + with: + file: Dockerfile.multi + context: . + push: true + tags: ${{ steps.meta-librechat-api.outputs.tags }} + platforms: linux/amd64,linux/arm64 + target: api-build + + # Tag and push librechat + - name: Docker metadata for librechat + id: meta-librechat + uses: docker/metadata-action@v5 + with: + images: | + ghcr.io/${{ github.repository_owner }}/librechat + tags: | + type=raw,value=latest + type=semver,pattern={{version}} + type=semver,pattern={{major}} + type=semver,pattern={{major}}.{{minor}} + + - name: Build and librechat + uses: docker/build-push-action@v5 + with: + file: Dockerfile + context: . 
+ push: true + tags: ${{ steps.meta-librechat.outputs.tags }} + platforms: linux/amd64,linux/arm64 + target: node diff --git a/.gitignore b/.gitignore index f360cbba0ac..765de5cb799 100644 --- a/.gitignore +++ b/.gitignore @@ -48,6 +48,9 @@ bower_components/ .floo .flooignore +#config file +librechat.yaml + # Environment .npmrc .env* @@ -66,6 +69,7 @@ src/style - official.css .DS_Store *.code-workspace .idea +*.iml *.pem config.local.ts **/storageState.json diff --git a/Dockerfile b/Dockerfile index 1dac186b17f..edc79c2497a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,14 @@ # Base node image -FROM node:19-alpine AS node +FROM node:18-alpine AS node COPY . /app WORKDIR /app +# Allow mounting of these files, which have no default +# values. +RUN touch .env # Install call deps - Install curl for health check RUN apk --no-cache add curl && \ - # We want to inherit env from the container, not the file - # This will preserve any existing env file if it's already in source - # otherwise it will create a new one - touch .env && \ - # Build deps in seperate npm ci # React client build diff --git a/LICENSE b/LICENSE index 752f1a45809..49a224977b1 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 LibreChat +Copyright (c) 2024 LibreChat Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index cfd0d8ffd45..00cd890b073 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@

- + @@ -26,6 +26,18 @@

+

+ + Deploy on Railway + +

+ +

+ + Deploy on Sealos + +

+ # 📃 Features - 🖥️ UI matching ChatGPT, including Dark mode, Streaming, and 11-2023 updates - 💬 Multimodal Chat: @@ -73,6 +85,10 @@ Please consult the breaking changes before updating. ## ⭐ Star History +

+danny-avila%2FLibreChat | Trendshift +

+ Star History Chart diff --git a/api/app/clients/BaseClient.js b/api/app/clients/BaseClient.js index ea63a3ce904..aa39084b9fa 100644 --- a/api/app/clients/BaseClient.js +++ b/api/app/clients/BaseClient.js @@ -46,6 +46,10 @@ class BaseClient { logger.debug('`[BaseClient] recordTokenUsage` not implemented.', response); } + async addPreviousAttachments(messages) { + return messages; + } + async recordTokenUsage({ promptTokens, completionTokens }) { logger.debug('`[BaseClient] recordTokenUsage` not implemented.', { promptTokens, @@ -484,20 +488,22 @@ class BaseClient { mapMethod = this.getMessageMapMethod(); } - const orderedMessages = this.constructor.getMessagesForConversation({ + let _messages = this.constructor.getMessagesForConversation({ messages, parentMessageId, mapMethod, }); + _messages = await this.addPreviousAttachments(_messages); + if (!this.shouldSummarize) { - return orderedMessages; + return _messages; } // Find the latest message with a 'summary' property - for (let i = orderedMessages.length - 1; i >= 0; i--) { - if (orderedMessages[i]?.summary) { - this.previous_summary = orderedMessages[i]; + for (let i = _messages.length - 1; i >= 0; i--) { + if (_messages[i]?.summary) { + this.previous_summary = _messages[i]; break; } } @@ -512,14 +518,15 @@ class BaseClient { }); } - return orderedMessages; + return _messages; } async saveMessageToDatabase(message, endpointOptions, user = null) { - await saveMessage({ ...message, user, unfinished: false }); + await saveMessage({ ...message, endpoint: this.options.endpoint, user, unfinished: false }); await saveConvo(user, { conversationId: message.conversationId, endpoint: this.options.endpoint, + endpointType: this.options.endpointType, ...endpointOptions, }); } @@ -617,6 +624,11 @@ class BaseClient { * An additional 3 tokens need to be added for assistant label priming after all messages have been counted. * In our implementation, this is accounted for in the getMessagesWithinTokenLimit method. 
* + * The content parts example was adapted from the following example: + * https://github.com/openai/openai-cookbook/pull/881/files + * + * Note: image token calculation is to be done elsewhere where we have access to the image metadata + * * @param {Object} message */ getTokenCountForMessage(message) { @@ -630,11 +642,18 @@ class BaseClient { } const processValue = (value) => { - if (typeof value === 'object' && value !== null) { - for (let [nestedKey, nestedValue] of Object.entries(value)) { - if (nestedKey === 'image_url' || nestedValue === 'image_url') { + if (Array.isArray(value)) { + for (let item of value) { + if (!item || !item.type || item.type === 'image_url') { continue; } + + const nestedValue = item[item.type]; + + if (!nestedValue) { + continue; + } + processValue(nestedValue); } } else { diff --git a/api/app/clients/OpenAIClient.js b/api/app/clients/OpenAIClient.js index 605646a4b6e..ca0c8d84248 100644 --- a/api/app/clients/OpenAIClient.js +++ b/api/app/clients/OpenAIClient.js @@ -1,15 +1,21 @@ const OpenAI = require('openai'); const { HttpsProxyAgent } = require('https-proxy-agent'); -const { getResponseSender, EModelEndpoint } = require('librechat-data-provider'); +const { getResponseSender, ImageDetailCost, ImageDetail } = require('librechat-data-provider'); const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken'); +const { + getModelMaxTokens, + genAzureChatCompletion, + extractBaseURL, + constructAzureURL, +} = require('~/utils'); const { encodeAndFormat, validateVisionModel } = require('~/server/services/Files/images'); -const { getModelMaxTokens, genAzureChatCompletion, extractBaseURL } = require('~/utils'); const { truncateText, formatMessage, CUT_OFF_PROMPT } = require('./prompts'); const { handleOpenAIErrors } = require('./tools/util'); const spendTokens = require('~/models/spendTokens'); const { createLLM, RunManager } = require('./llm'); -const { isEnabled } = require('~/server/utils'); const 
ChatGPTClient = require('./ChatGPTClient'); +const { isEnabled } = require('~/server/utils'); +const { getFiles } = require('~/models/File'); const { summaryBuffer } = require('./memory'); const { runTitleChain } = require('./chains'); const { tokenSplit } = require('./document'); @@ -31,6 +37,7 @@ class OpenAIClient extends BaseClient { ? options.contextStrategy.toLowerCase() : 'discard'; this.shouldSummarize = this.contextStrategy === 'summarize'; + /** @type {AzureOptions} */ this.azure = options.azure || false; this.setOptions(options); } @@ -76,16 +83,7 @@ class OpenAIClient extends BaseClient { }; } - this.isVisionModel = validateVisionModel(this.modelOptions.model); - - if (this.options.attachments && !this.isVisionModel) { - this.modelOptions.model = 'gpt-4-vision-preview'; - this.isVisionModel = true; - } - - if (this.isVisionModel) { - delete this.modelOptions.stop; - } + this.checkVisionRequest(this.options.attachments); const { OPENROUTER_API_KEY, OPENAI_FORCE_PROMPT } = process.env ?? 
{}; if (OPENROUTER_API_KEY && !this.azure) { @@ -94,15 +92,28 @@ class OpenAIClient extends BaseClient { } const { reverseProxyUrl: reverseProxy } = this.options; + + if ( + !this.useOpenRouter && + reverseProxy && + reverseProxy.includes('https://openrouter.ai/api/v1') + ) { + this.useOpenRouter = true; + } + this.FORCE_PROMPT = isEnabled(OPENAI_FORCE_PROMPT) || (reverseProxy && reverseProxy.includes('completions') && !reverseProxy.includes('chat')); + if (typeof this.options.forcePrompt === 'boolean') { + this.FORCE_PROMPT = this.options.forcePrompt; + } + if (this.azure && process.env.AZURE_OPENAI_DEFAULT_MODEL) { - this.azureEndpoint = genAzureChatCompletion(this.azure, this.modelOptions.model); + this.azureEndpoint = genAzureChatCompletion(this.azure, this.modelOptions.model, this); this.modelOptions.model = process.env.AZURE_OPENAI_DEFAULT_MODEL; } else if (this.azure) { - this.azureEndpoint = genAzureChatCompletion(this.azure, this.modelOptions.model); + this.azureEndpoint = genAzureChatCompletion(this.azure, this.modelOptions.model, this); } const { model } = this.modelOptions; @@ -146,8 +157,10 @@ class OpenAIClient extends BaseClient { this.options.sender ?? getResponseSender({ model: this.modelOptions.model, - endpoint: EModelEndpoint.openAI, + endpoint: this.options.endpoint, + endpointType: this.options.endpointType, chatGptLabel: this.options.chatGptLabel, + modelDisplayLabel: this.options.modelDisplayLabel, }); this.userLabel = this.options.userLabel || 'User'; @@ -189,6 +202,27 @@ class OpenAIClient extends BaseClient { return this; } + /** + * + * Checks if the model is a vision model based on request attachments and sets the appropriate options: + * - Sets `this.modelOptions.model` to `gpt-4-vision-preview` if the request is a vision request. + * - Sets `this.isVisionModel` to `true` if vision request. + * - Deletes `this.modelOptions.stop` if vision request. 
+ * @param {Array | MongoFile[]> | Record} attachments + */ + checkVisionRequest(attachments) { + this.isVisionModel = validateVisionModel(this.modelOptions.model); + + if (attachments && !this.isVisionModel) { + this.modelOptions.model = 'gpt-4-vision-preview'; + this.isVisionModel = true; + } + + if (this.isVisionModel) { + delete this.modelOptions.stop; + } + } + setupTokens() { if (this.isChatCompletion) { this.startToken = '||>'; @@ -273,7 +307,11 @@ class OpenAIClient extends BaseClient { tokenizerCallsCount++; } - // Returns the token count of a given text. It also checks and resets the tokenizers if necessary. + /** + * Returns the token count of a given text. It also checks and resets the tokenizers if necessary. + * @param {string} text - The text to get the token count for. + * @returns {number} The token count of the given text. + */ getTokenCount(text) { this.resetTokenizersIfNecessary(); try { @@ -286,10 +324,33 @@ class OpenAIClient extends BaseClient { } } + /** + * Calculate the token cost for an image based on its dimensions and detail level. + * + * @param {Object} image - The image object. + * @param {number} image.width - The width of the image. + * @param {number} image.height - The height of the image. + * @param {'low'|'high'|string|undefined} [image.detail] - The detail level ('low', 'high', or other). + * @returns {number} The calculated token cost. 
+ */ + calculateImageTokenCost({ width, height, detail }) { + if (detail === 'low') { + return ImageDetailCost.LOW; + } + + // Calculate the number of 512px squares + const numSquares = Math.ceil(width / 512) * Math.ceil(height / 512); + + // Default to high detail cost calculation + return numSquares * ImageDetailCost.HIGH + ImageDetailCost.ADDITIONAL; + } + getSaveOptions() { return { chatGptLabel: this.options.chatGptLabel, promptPrefix: this.options.promptPrefix, + resendImages: this.options.resendImages, + imageDetail: this.options.imageDetail, ...this.modelOptions, }; } @@ -302,6 +363,69 @@ class OpenAIClient extends BaseClient { }; } + /** + * + * @param {TMessage[]} _messages + * @returns {TMessage[]} + */ + async addPreviousAttachments(_messages) { + if (!this.options.resendImages) { + return _messages; + } + + /** + * + * @param {TMessage} message + */ + const processMessage = async (message) => { + if (!this.message_file_map) { + /** @type {Record */ + this.message_file_map = {}; + } + + const fileIds = message.files.map((file) => file.file_id); + const files = await getFiles({ + file_id: { $in: fileIds }, + }); + + await this.addImageURLs(message, files); + + this.message_file_map[message.messageId] = files; + return message; + }; + + const promises = []; + + for (const message of _messages) { + if (!message.files) { + promises.push(message); + continue; + } + + promises.push(processMessage(message)); + } + + const messages = await Promise.all(promises); + + this.checkVisionRequest(this.message_file_map); + return messages; + } + + /** + * + * Adds image URLs to the message object and returns the files + * + * @param {TMessage[]} messages + * @param {MongoFile[]} files + * @returns {Promise} + */ + async addImageURLs(message, attachments) { + const { files, image_urls } = await encodeAndFormat(this.options.req, attachments); + + message.image_urls = image_urls; + return files; + } + async buildMessages( messages, parentMessageId, @@ -340,13 +464,23 @@ 
class OpenAIClient extends BaseClient { } if (this.options.attachments) { - const attachments = await this.options.attachments; - const { files, image_urls } = await encodeAndFormat( - this.options.req, - attachments.filter((file) => file.type.includes('image')), + const attachments = (await this.options.attachments).filter((file) => + file.type.includes('image'), + ); + + if (this.message_file_map) { + this.message_file_map[orderedMessages[orderedMessages.length - 1].messageId] = attachments; + } else { + this.message_file_map = { + [orderedMessages[orderedMessages.length - 1].messageId]: attachments, + }; + } + + const files = await this.addImageURLs( + orderedMessages[orderedMessages.length - 1], + attachments, ); - orderedMessages[orderedMessages.length - 1].image_urls = image_urls; this.options.attachments = files; } @@ -357,10 +491,25 @@ class OpenAIClient extends BaseClient { assistantName: this.options?.chatGptLabel, }); - if (this.contextStrategy && !orderedMessages[i].tokenCount) { + const needsTokenCount = this.contextStrategy && !orderedMessages[i].tokenCount; + + /* If tokens were never counted, or, is a Vision request and the message has files, count again */ + if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) { orderedMessages[i].tokenCount = this.getTokenCountForMessage(formattedMessage); } + /* If message has files, calculate image token cost */ + if (this.message_file_map && this.message_file_map[message.messageId]) { + const attachments = this.message_file_map[message.messageId]; + for (const file of attachments) { + orderedMessages[i].tokenCount += this.calculateImageTokenCost({ + width: file.width, + height: file.height, + detail: this.options.imageDetail ?? 
ImageDetail.auto, + }); + } + } + return formattedMessage; }); @@ -434,7 +583,7 @@ class OpenAIClient extends BaseClient { }, opts.abortController || new AbortController(), ); - } else if (typeof opts.onProgress === 'function') { + } else if (typeof opts.onProgress === 'function' || this.options.useChatCompletion) { reply = await this.chatCompletion({ payload, clientOptions: opts, @@ -530,6 +679,19 @@ class OpenAIClient extends BaseClient { return llm; } + /** + * Generates a concise title for a conversation based on the user's input text and response. + * Uses either specified method or starts with the OpenAI `functions` method (using LangChain). + * If the `functions` method fails, it falls back to the `completion` method, + * which involves sending a chat completion request with specific instructions for title generation. + * + * @param {Object} params - The parameters for the conversation title generation. + * @param {string} params.text - The user's input. + * @param {string} [params.responseText=''] - The AI's immediate response to the user. + * + * @returns {Promise} A promise that resolves to the generated conversation title. + * In case of failure, it will return the default title, "New Chat". + */ async titleConvo({ text, responseText = '' }) { let title = 'New Chat'; const convo = `||>User: @@ -539,32 +701,25 @@ class OpenAIClient extends BaseClient { const { OPENAI_TITLE_MODEL } = process.env ?? {}; + const model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo'; + const modelOptions = { - model: OPENAI_TITLE_MODEL ?? 
'gpt-3.5-turbo', + // TODO: remove the gpt fallback and make it specific to endpoint + model, temperature: 0.2, presence_penalty: 0, frequency_penalty: 0, max_tokens: 16, }; - try { - this.abortController = new AbortController(); - const llm = this.initializeLLM({ ...modelOptions, context: 'title', tokenBuffer: 150 }); - title = await runTitleChain({ llm, text, convo, signal: this.abortController.signal }); - } catch (e) { - if (e?.message?.toLowerCase()?.includes('abort')) { - logger.debug('[OpenAIClient] Aborted title generation'); - return; - } - logger.error( - '[OpenAIClient] There was an issue generating title with LangChain, trying the old method...', - e, - ); - modelOptions.model = OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo'; + const titleChatCompletion = async () => { + modelOptions.model = model; + if (this.azure) { modelOptions.model = process.env.AZURE_OPENAI_DEFAULT_MODEL ?? modelOptions.model; - this.azureEndpoint = genAzureChatCompletion(this.azure, modelOptions.model); + this.azureEndpoint = genAzureChatCompletion(this.azure, modelOptions.model, this); } + const instructionsPayload = [ { role: 'system', @@ -578,10 +733,38 @@ ${convo} ]; try { - title = (await this.sendPayload(instructionsPayload, { modelOptions })).replaceAll('"', ''); + title = ( + await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion: true }) + ).replaceAll('"', ''); } catch (e) { - logger.error('[OpenAIClient] There was another issue generating the title', e); + logger.error( + '[OpenAIClient] There was an issue generating the title with the completion method', + e, + ); } + }; + + if (this.options.titleMethod === 'completion') { + await titleChatCompletion(); + logger.debug('[OpenAIClient] Convo Title: ' + title); + return title; + } + + try { + this.abortController = new AbortController(); + const llm = this.initializeLLM({ ...modelOptions, context: 'title', tokenBuffer: 150 }); + title = await runTitleChain({ llm, text, convo, signal: 
this.abortController.signal }); + } catch (e) { + if (e?.message?.toLowerCase()?.includes('abort')) { + logger.debug('[OpenAIClient] Aborted title generation'); + return; + } + logger.error( + '[OpenAIClient] There was an issue generating title with LangChain, trying completion method...', + e, + ); + + await titleChatCompletion(); } logger.debug('[OpenAIClient] Convo Title: ' + title); @@ -593,8 +776,11 @@ ${convo} let context = messagesToRefine; let prompt; + // TODO: remove the gpt fallback and make it specific to endpoint const { OPENAI_SUMMARY_MODEL = 'gpt-3.5-turbo' } = process.env ?? {}; - const maxContextTokens = getModelMaxTokens(OPENAI_SUMMARY_MODEL) ?? 4095; + const model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL; + const maxContextTokens = getModelMaxTokens(model) ?? 4095; + // 3 tokens for the assistant label, and 98 for the summarizer prompt (101) let promptBuffer = 101; @@ -644,7 +830,7 @@ ${convo} logger.debug('[OpenAIClient] initialPromptTokens', initialPromptTokens); const llm = this.initializeLLM({ - model: OPENAI_SUMMARY_MODEL, + model, temperature: 0.2, context: 'summary', tokenBuffer: initialPromptTokens, @@ -719,14 +905,15 @@ ${convo} if (!abortController) { abortController = new AbortController(); } - const modelOptions = { ...this.modelOptions }; + + let modelOptions = { ...this.modelOptions }; + if (typeof onProgress === 'function') { modelOptions.stream = true; } if (this.isChatCompletion) { modelOptions.messages = payload; } else { - // TODO: unreachable code. Need to implement completions call for non-chat models modelOptions.prompt = payload; } @@ -768,17 +955,47 @@ ${convo} // Azure does not accept `model` in the body, so we need to remove it. delete modelOptions.model; - opts.baseURL = this.azureEndpoint.split('/chat')[0]; + opts.baseURL = this.langchainProxy + ? 
constructAzureURL({ + baseURL: this.langchainProxy, + azure: this.azure, + }) + : this.azureEndpoint.split(/\/(chat|completion)/)[0]; opts.defaultQuery = { 'api-version': this.azure.azureOpenAIApiVersion }; opts.defaultHeaders = { ...opts.defaultHeaders, 'api-key': this.apiKey }; } + if (process.env.OPENAI_ORGANIZATION) { + opts.organization = process.env.OPENAI_ORGANIZATION; + } + let chatCompletion; const openai = new OpenAI({ apiKey: this.apiKey, ...opts, }); + /* hacky fix for Mistral AI API not allowing a singular system message in payload */ + if (opts.baseURL.includes('https://api.mistral.ai/v1') && modelOptions.messages) { + const { messages } = modelOptions; + if (messages.length === 1 && messages[0].role === 'system') { + modelOptions.messages[0].role = 'user'; + } + } + + if (this.options.addParams && typeof this.options.addParams === 'object') { + modelOptions = { + ...modelOptions, + ...this.options.addParams, + }; + } + + if (this.options.dropParams && Array.isArray(this.options.dropParams)) { + this.options.dropParams.forEach((param) => { + delete modelOptions[param]; + }); + } + let UnexpectedRoleError = false; if (modelOptions.stream) { const stream = await openai.beta.chat.completions @@ -841,6 +1058,8 @@ ${convo} clientOptions.addMetadata({ finish_reason }); } + logger.debug('[OpenAIClient] chatCompletion response', chatCompletion); + return message.content; } catch (err) { if ( diff --git a/api/app/clients/PluginsClient.js b/api/app/clients/PluginsClient.js index f26df8a2d10..6118c3547a1 100644 --- a/api/app/clients/PluginsClient.js +++ b/api/app/clients/PluginsClient.js @@ -112,7 +112,7 @@ class PluginsClient extends OpenAIClient { signal: this.abortController.signal, openAIApiKey: this.openAIApiKey, conversationId: this.conversationId, - debug: this.options?.debug, + fileStrategy: this.options.req.app.locals.fileStrategy, message, }, }); diff --git a/api/app/clients/llm/createLLM.js b/api/app/clients/llm/createLLM.js index 
020fba65034..62f2fe86f95 100644 --- a/api/app/clients/llm/createLLM.js +++ b/api/app/clients/llm/createLLM.js @@ -1,38 +1,6 @@ const { ChatOpenAI } = require('langchain/chat_models/openai'); -const { sanitizeModelName } = require('../../../utils'); -const { isEnabled } = require('../../../server/utils'); - -/** - * @typedef {Object} ModelOptions - * @property {string} modelName - The name of the model. - * @property {number} [temperature] - The temperature setting for the model. - * @property {number} [presence_penalty] - The presence penalty setting. - * @property {number} [frequency_penalty] - The frequency penalty setting. - * @property {number} [max_tokens] - The maximum number of tokens to generate. - */ - -/** - * @typedef {Object} ConfigOptions - * @property {string} [basePath] - The base path for the API requests. - * @property {Object} [baseOptions] - Base options for the API requests, including headers. - * @property {Object} [httpAgent] - The HTTP agent for the request. - * @property {Object} [httpsAgent] - The HTTPS agent for the request. - */ - -/** - * @typedef {Object} Callbacks - * @property {Function} [handleChatModelStart] - A callback function for handleChatModelStart - * @property {Function} [handleLLMEnd] - A callback function for handleLLMEnd - * @property {Function} [handleLLMError] - A callback function for handleLLMError - */ - -/** - * @typedef {Object} AzureOptions - * @property {string} [azureOpenAIApiKey] - The Azure OpenAI API key. - * @property {string} [azureOpenAIApiInstanceName] - The Azure OpenAI API instance name. - * @property {string} [azureOpenAIApiDeploymentName] - The Azure OpenAI API deployment name. - * @property {string} [azureOpenAIApiVersion] - The Azure OpenAI API version. - */ +const { sanitizeModelName, constructAzureURL } = require('~/utils'); +const { isEnabled } = require('~/server/utils'); /** * Creates a new instance of a language model (LLM) for chat interactions. 
@@ -68,6 +36,7 @@ function createLLM({ apiKey: openAIApiKey, }; + /** @type {AzureOptions} */ let azureOptions = {}; if (azure) { const useModelName = isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME); @@ -85,8 +54,12 @@ function createLLM({ modelOptions.modelName = process.env.AZURE_OPENAI_DEFAULT_MODEL; } - // console.debug('createLLM: configOptions'); - // console.debug(configOptions); + if (azure && configOptions.basePath) { + configOptions.basePath = constructAzureURL({ + baseURL: configOptions.basePath, + azure: azureOptions, + }); + } return new ChatOpenAI( { @@ -96,6 +69,7 @@ function createLLM({ configuration, ...azureOptions, ...modelOptions, + ...credentials, callbacks, }, configOptions, diff --git a/api/app/clients/specs/OpenAIClient.test.js b/api/app/clients/specs/OpenAIClient.test.js index a31d82fe126..8c2226215c1 100644 --- a/api/app/clients/specs/OpenAIClient.test.js +++ b/api/app/clients/specs/OpenAIClient.test.js @@ -546,6 +546,39 @@ describe('OpenAIClient', () => { expect(totalTokens).toBe(testCase.expected); }); }); + + const vision_request = [ + { + role: 'user', + content: [ + { + type: 'text', + text: 'describe what is in this image?', + }, + { + type: 'image_url', + image_url: { + url: 'https://venturebeat.com/wp-content/uploads/2019/03/openai-1.png', + detail: 'high', + }, + }, + ], + }, + ]; + + const expectedTokens = 14; + const visionModel = 'gpt-4-vision-preview'; + + it(`should return ${expectedTokens} tokens for model ${visionModel} (Vision Request)`, () => { + client.modelOptions.model = visionModel; + client.selectTokenizer(); + // 3 tokens for assistant label + let totalTokens = 3; + for (let message of vision_request) { + totalTokens += client.getTokenCountForMessage(message); + } + expect(totalTokens).toBe(expectedTokens); + }); }); describe('sendMessage/getCompletion/chatCompletion', () => { diff --git a/api/app/clients/tools/AzureAiSearch.js b/api/app/clients/tools/AzureAiSearch.js index 2d74c005439..9b50aa2c433 100644 
--- a/api/app/clients/tools/AzureAiSearch.js +++ b/api/app/clients/tools/AzureAiSearch.js @@ -16,6 +16,9 @@ class AzureAISearch extends StructuredTool { constructor(fields = {}) { super(); + this.name = 'azure-ai-search'; + this.description = + 'Use the \'azure-ai-search\' tool to retrieve search results relevant to your input'; // Initialize properties using helper function this.serviceEndpoint = this._initializeField( @@ -68,15 +71,6 @@ class AzureAISearch extends StructuredTool { }); } - // Simplified getter methods - get name() { - return 'azure-ai-search'; - } - - get description() { - return 'Use the \'azure-ai-search\' tool to retrieve search results relevant to your input'; - } - // Improved error handling and logging async _call(data) { const { query } = data; diff --git a/api/app/clients/tools/DALL-E.js b/api/app/clients/tools/DALL-E.js index 387294a1cbb..4eca7f7932e 100644 --- a/api/app/clients/tools/DALL-E.js +++ b/api/app/clients/tools/DALL-E.js @@ -1,61 +1,46 @@ -// From https://platform.openai.com/docs/api-reference/images/create -// To use this tool, you must pass in a configured OpenAIApi object. 
-const fs = require('fs'); const path = require('path'); const OpenAI = require('openai'); -// const { genAzureEndpoint } = require('~/utils/genAzureEndpoints'); const { v4: uuidv4 } = require('uuid'); const { Tool } = require('langchain/tools'); const { HttpsProxyAgent } = require('https-proxy-agent'); -const { - saveImageToFirebaseStorage, - getFirebaseStorageImageUrl, - getFirebaseStorage, -} = require('~/server/services/Files/Firebase'); const { getImageBasename } = require('~/server/services/Files/images'); +const { processFileURL } = require('~/server/services/Files/process'); const extractBaseURL = require('~/utils/extractBaseURL'); -const saveImageFromUrl = require('./saveImageFromUrl'); const { logger } = require('~/config'); -const { DALLE_REVERSE_PROXY, PROXY } = process.env; +const { + DALLE2_SYSTEM_PROMPT, + DALLE_REVERSE_PROXY, + PROXY, + DALLE2_AZURE_API_VERSION, + DALLE2_BASEURL, + DALLE2_API_KEY, + DALLE_API_KEY, +} = process.env; class OpenAICreateImage extends Tool { constructor(fields = {}) { super(); this.userId = fields.userId; - let apiKey = fields.DALLE_API_KEY || this.getApiKey(); + this.fileStrategy = fields.fileStrategy; + let apiKey = fields.DALLE2_API_KEY ?? fields.DALLE_API_KEY ?? 
this.getApiKey(); const config = { apiKey }; if (DALLE_REVERSE_PROXY) { config.baseURL = extractBaseURL(DALLE_REVERSE_PROXY); } + if (DALLE2_AZURE_API_VERSION && DALLE2_BASEURL) { + config.baseURL = DALLE2_BASEURL; + config.defaultQuery = { 'api-version': DALLE2_AZURE_API_VERSION }; + config.defaultHeaders = { 'api-key': DALLE2_API_KEY, 'Content-Type': 'application/json' }; + config.apiKey = DALLE2_API_KEY; + } + if (PROXY) { config.httpAgent = new HttpsProxyAgent(PROXY); } - // let azureKey = fields.AZURE_API_KEY || process.env.AZURE_API_KEY; - - // if (azureKey) { - // apiKey = azureKey; - // const azureConfig = { - // apiKey, - // azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE_NAME || fields.azureOpenAIApiInstanceName, - // azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME || fields.azureOpenAIApiDeploymentName, - // azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION || fields.azureOpenAIApiVersion - // }; - // config = { - // apiKey, - // basePath: genAzureEndpoint({ - // ...azureConfig, - // }), - // baseOptions: { - // headers: { 'api-key': apiKey }, - // params: { - // 'api-version': azureConfig.azureOpenAIApiVersion // this might change. I got the current value from the sample code at https://oai.azure.com/portal/chat - // } - // } - // }; - // } + this.openai = new OpenAI(config); this.name = 'dall-e'; this.description = `You can generate images with 'dall-e'. This tool is exclusively for visual content. @@ -65,10 +50,24 @@ Guidelines: - It's best to follow this format for image creation. Come up with the optional inputs yourself if none are given: "Subject: [subject], Style: [style], Color: [color], Details: [details], Emotion: [emotion]" - Generate images only once per human query unless explicitly requested by the user`; + this.description_for_model = + DALLE2_SYSTEM_PROMPT ?? 
+ `// Whenever a description of an image is given, generate prompts (following these rules), and use dalle to create the image. If the user does not ask for a specific number of images, default to creating 2 prompts to send to dalle that are written to be as diverse as possible. All prompts sent to dalle must abide by the following policies: +// 1. Prompts must be in English. Translate to English if needed. +// 2. One image per function call. Create only 1 image per request unless explicitly told to generate more than 1 image. +// 3. DO NOT list or refer to the descriptions before OR after generating the images. They should ONLY ever be written out ONCE, in the \`"prompts"\` field of the request. You do not need to ask for permission to generate, just do it! +// 4. Always mention the image type (photo, oil painting, watercolor painting, illustration, cartoon, drawing, vector, render, etc.) at the beginning of the caption. Unless the captions suggests otherwise, make one of the images a photo. +// 5. Diversify depictions of ALL images with people to always include always DESCENT and GENDER for EACH person using direct terms. Adjust only human descriptions. +// - EXPLICITLY specify these attributes, not abstractly reference them. The attributes should be specified in a minimal way and should directly describe their physical form. +// - Your choices should be grounded in reality. For example, all of a given OCCUPATION should not be the same gender or race. Additionally, focus on creating diverse, inclusive, and exploratory scenes via the properties you choose during rewrites. Make choices that may be insightful or unique sometimes. +// - Use "various" or "diverse" ONLY IF the description refers to groups of more than 3 people. Do not change the number of people requested in the original description. +// - Don't alter memes, fictional character origins, or unseen people. Maintain the original prompt's intent and prioritize quality. 
+// The prompt must intricately describe every part of the image in concrete, objective detail. THINK about what the end goal of the description is, and extrapolate that to what would make satisfying images. +// All descriptions sent to dalle should be a paragraph of text that is extremely descriptive and detailed. Each should be more than 3 sentences long.`; } getApiKey() { - const apiKey = process.env.DALLE_API_KEY || ''; + const apiKey = DALLE2_API_KEY ?? DALLE_API_KEY ?? ''; if (!apiKey) { throw new Error('Missing DALLE_API_KEY environment variable.'); } @@ -82,12 +81,8 @@ Guidelines: .trim(); } - getMarkdownImageUrl(imageName) { - const imageUrl = path - .join(this.relativeImageUrl, imageName) - .replace(/\\/g, '/') - .replace('public/', ''); - return `![generated image](/${imageUrl})`; + wrapInMarkdown(imageUrl) { + return `![generated image](${imageUrl})`; } async _call(input) { @@ -106,57 +101,35 @@ Guidelines: } const imageBasename = getImageBasename(theImageUrl); - let imageName = `image_${uuidv4()}.png`; - - if (imageBasename) { - imageName = imageBasename; - logger.debug('[DALL-E]', { imageName }); // Output: img-lgCf7ppcbhqQrz6a5ear6FOb.png - } else { - logger.debug('[DALL-E] No image name found in the string.', { - theImageUrl, - data: resp.data[0], + const imageExt = path.extname(imageBasename); + + const extension = imageExt.startsWith('.') ? 
imageExt.slice(1) : imageExt; + const imageName = `img-${uuidv4()}.${extension}`; + + logger.debug('[DALL-E-2]', { + imageName, + imageBasename, + imageExt, + extension, + theImageUrl, + data: resp.data[0], + }); + + try { + const result = await processFileURL({ + fileStrategy: this.fileStrategy, + userId: this.userId, + URL: theImageUrl, + fileName: imageName, + basePath: 'images', }); - } - this.outputPath = path.resolve( - __dirname, - '..', - '..', - '..', - '..', - 'client', - 'public', - 'images', - this.userId, - ); - - const appRoot = path.resolve(__dirname, '..', '..', '..', '..', 'client'); - this.relativeImageUrl = path.relative(appRoot, this.outputPath); - - // Check if directory exists, if not create it - if (!fs.existsSync(this.outputPath)) { - fs.mkdirSync(this.outputPath, { recursive: true }); + this.result = this.wrapInMarkdown(result); + } catch (error) { + logger.error('Error while saving the image:', error); + this.result = `Failed to save the image locally. ${error.message}`; } - const storage = getFirebaseStorage(); - if (storage) { - try { - await saveImageToFirebaseStorage(this.userId, theImageUrl, imageName); - this.result = await getFirebaseStorageImageUrl(`${this.userId}/${imageName}`); - logger.debug('[DALL-E] result: ' + this.result); - } catch (error) { - logger.error('Error while saving the image to Firebase Storage:', error); - this.result = `Failed to save the image to Firebase Storage. ${error.message}`; - } - } else { - try { - await saveImageFromUrl(theImageUrl, this.outputPath, imageName); - this.result = this.getMarkdownImageUrl(imageName); - } catch (error) { - logger.error('Error while saving the image locally:', error); - this.result = `Failed to save the image locally. 
${error.message}`; - } - } return this.result; } } diff --git a/api/app/clients/tools/manifest.json b/api/app/clients/tools/manifest.json index d5f2c75d3d5..bde4c8a87a9 100644 --- a/api/app/clients/tools/manifest.json +++ b/api/app/clients/tools/manifest.json @@ -89,7 +89,7 @@ "icon": "https://i.imgur.com/u2TzXzH.png", "authConfig": [ { - "authField": "DALLE_API_KEY", + "authField": "DALLE2_API_KEY", "label": "OpenAI API Key", "description": "You can use DALL-E with your API Key from OpenAI." } @@ -102,7 +102,7 @@ "icon": "https://i.imgur.com/u2TzXzH.png", "authConfig": [ { - "authField": "DALLE_API_KEY", + "authField": "DALLE3_API_KEY", "label": "OpenAI API Key", "description": "You can use DALL-E with your API Key from OpenAI." } diff --git a/api/app/clients/tools/saveImageFromUrl.js b/api/app/clients/tools/saveImageFromUrl.js deleted file mode 100644 index d8b14ad4783..00000000000 --- a/api/app/clients/tools/saveImageFromUrl.js +++ /dev/null @@ -1,40 +0,0 @@ -const fs = require('fs'); -const path = require('path'); -const axios = require('axios'); -const { logger } = require('~/config'); - -async function saveImageFromUrl(url, outputPath, outputFilename) { - try { - // Fetch the image from the URL - const response = await axios({ - url, - responseType: 'stream', - }); - - // Check if the output directory exists, if not, create it - if (!fs.existsSync(outputPath)) { - fs.mkdirSync(outputPath, { recursive: true }); - } - - // Ensure the output filename has a '.png' extension - const filenameWithPngExt = outputFilename.endsWith('.png') - ? 
outputFilename - : `${outputFilename}.png`; - - // Create a writable stream for the output path - const outputFilePath = path.join(outputPath, filenameWithPngExt); - const writer = fs.createWriteStream(outputFilePath); - - // Pipe the response data to the output file - response.data.pipe(writer); - - return new Promise((resolve, reject) => { - writer.on('finish', resolve); - writer.on('error', reject); - }); - } catch (error) { - logger.error('[saveImageFromUrl] Error while saving the image:', error); - } -} - -module.exports = saveImageFromUrl; diff --git a/api/app/clients/tools/structured/AzureAISearch.js b/api/app/clients/tools/structured/AzureAISearch.js index 2d74c005439..9b50aa2c433 100644 --- a/api/app/clients/tools/structured/AzureAISearch.js +++ b/api/app/clients/tools/structured/AzureAISearch.js @@ -16,6 +16,9 @@ class AzureAISearch extends StructuredTool { constructor(fields = {}) { super(); + this.name = 'azure-ai-search'; + this.description = + 'Use the \'azure-ai-search\' tool to retrieve search results relevant to your input'; // Initialize properties using helper function this.serviceEndpoint = this._initializeField( @@ -68,15 +71,6 @@ class AzureAISearch extends StructuredTool { }); } - // Simplified getter methods - get name() { - return 'azure-ai-search'; - } - - get description() { - return 'Use the \'azure-ai-search\' tool to retrieve search results relevant to your input'; - } - // Improved error handling and logging async _call(data) { const { query } = data; diff --git a/api/app/clients/tools/structured/DALLE3.js b/api/app/clients/tools/structured/DALLE3.js index 17d0368f395..33df93e7fcf 100644 --- a/api/app/clients/tools/structured/DALLE3.js +++ b/api/app/clients/tools/structured/DALLE3.js @@ -1,34 +1,41 @@ -// From https://platform.openai.com/docs/guides/images/usage?context=node -// To use this tool, you must pass in a configured OpenAIApi object. 
-const fs = require('fs'); -const path = require('path'); const { z } = require('zod'); +const path = require('path'); const OpenAI = require('openai'); const { v4: uuidv4 } = require('uuid'); const { Tool } = require('langchain/tools'); const { HttpsProxyAgent } = require('https-proxy-agent'); -const { - saveImageToFirebaseStorage, - getFirebaseStorageImageUrl, - getFirebaseStorage, -} = require('~/server/services/Files/Firebase'); const { getImageBasename } = require('~/server/services/Files/images'); +const { processFileURL } = require('~/server/services/Files/process'); const extractBaseURL = require('~/utils/extractBaseURL'); -const saveImageFromUrl = require('../saveImageFromUrl'); const { logger } = require('~/config'); -const { DALLE3_SYSTEM_PROMPT, DALLE_REVERSE_PROXY, PROXY } = process.env; +const { + DALLE3_SYSTEM_PROMPT, + DALLE_REVERSE_PROXY, + PROXY, + DALLE3_AZURE_API_VERSION, + DALLE3_BASEURL, + DALLE3_API_KEY, +} = process.env; class DALLE3 extends Tool { constructor(fields = {}) { super(); this.userId = fields.userId; - let apiKey = fields.DALLE_API_KEY || this.getApiKey(); + this.fileStrategy = fields.fileStrategy; + let apiKey = fields.DALLE3_API_KEY ?? fields.DALLE_API_KEY ?? this.getApiKey(); const config = { apiKey }; if (DALLE_REVERSE_PROXY) { config.baseURL = extractBaseURL(DALLE_REVERSE_PROXY); } + if (DALLE3_AZURE_API_VERSION && DALLE3_BASEURL) { + config.baseURL = DALLE3_BASEURL; + config.defaultQuery = { 'api-version': DALLE3_AZURE_API_VERSION }; + config.defaultHeaders = { 'api-key': DALLE3_API_KEY, 'Content-Type': 'application/json' }; + config.apiKey = DALLE3_API_KEY; + } + if (PROXY) { config.httpAgent = new HttpsProxyAgent(PROXY); } @@ -52,7 +59,8 @@ class DALLE3 extends Tool { // - Use "various" or "diverse" ONLY IF the description refers to groups of more than 3 people. Do not change the number of people requested in the original description. // - Don't alter memes, fictional character origins, or unseen people. 
Maintain the original prompt's intent and prioritize quality. // The prompt must intricately describe every part of the image in concrete, objective detail. THINK about what the end goal of the description is, and extrapolate that to what would make satisfying images. - // All descriptions sent to dalle should be a paragraph of text that is extremely descriptive and detailed. Each should be more than 3 sentences long.`; + // All descriptions sent to dalle should be a paragraph of text that is extremely descriptive and detailed. Each should be more than 3 sentences long. + // - The "vivid" style is HIGHLY preferred, but "natural" is also supported.`; this.schema = z.object({ prompt: z .string() @@ -77,7 +85,7 @@ class DALLE3 extends Tool { } getApiKey() { - const apiKey = process.env.DALLE_API_KEY || ''; + const apiKey = process.env.DALLE3_API_KEY ?? process.env.DALLE_API_KEY ?? ''; if (!apiKey) { throw new Error('Missing DALLE_API_KEY environment variable.'); } @@ -91,12 +99,8 @@ class DALLE3 extends Tool { .trim(); } - getMarkdownImageUrl(imageName) { - const imageUrl = path - .join(this.relativeImageUrl, imageName) - .replace(/\\/g, '/') - .replace('public/', ''); - return `![generated image](/${imageUrl})`; + wrapInMarkdown(imageUrl) { + return `![generated image](${imageUrl})`; } async _call(data) { @@ -131,55 +135,33 @@ Error Message: ${error.message}`; } const imageBasename = getImageBasename(theImageUrl); - let imageName = `image_${uuidv4()}.png`; - - if (imageBasename) { - imageName = imageBasename; - logger.debug('[DALL-E-3]', { imageName }); // Output: img-lgCf7ppcbhqQrz6a5ear6FOb.png - } else { - logger.debug('[DALL-E-3] No image name found in the string.', { - theImageUrl, - data: resp.data[0], + const imageExt = path.extname(imageBasename); + + const extension = imageExt.startsWith('.') ? 
imageExt.slice(1) : imageExt; + const imageName = `img-${uuidv4()}.${extension}`; + + logger.debug('[DALL-E-3]', { + imageName, + imageBasename, + imageExt, + extension, + theImageUrl, + data: resp.data[0], + }); + + try { + const result = await processFileURL({ + fileStrategy: this.fileStrategy, + userId: this.userId, + URL: theImageUrl, + fileName: imageName, + basePath: 'images', }); - } - this.outputPath = path.resolve( - __dirname, - '..', - '..', - '..', - '..', - '..', - 'client', - 'public', - 'images', - this.userId, - ); - const appRoot = path.resolve(__dirname, '..', '..', '..', '..', '..', 'client'); - this.relativeImageUrl = path.relative(appRoot, this.outputPath); - - // Check if directory exists, if not create it - if (!fs.existsSync(this.outputPath)) { - fs.mkdirSync(this.outputPath, { recursive: true }); - } - const storage = getFirebaseStorage(); - if (storage) { - try { - await saveImageToFirebaseStorage(this.userId, theImageUrl, imageName); - this.result = await getFirebaseStorageImageUrl(`${this.userId}/${imageName}`); - logger.debug('[DALL-E-3] result: ' + this.result); - } catch (error) { - logger.error('Error while saving the image to Firebase Storage:', error); - this.result = `Failed to save the image to Firebase Storage. ${error.message}`; - } - } else { - try { - await saveImageFromUrl(theImageUrl, this.outputPath, imageName); - this.result = this.getMarkdownImageUrl(imageName); - } catch (error) { - logger.error('Error while saving the image locally:', error); - this.result = `Failed to save the image locally. ${error.message}`; - } + this.result = this.wrapInMarkdown(result); + } catch (error) { + logger.error('Error while saving the image:', error); + this.result = `Failed to save the image locally. 
${error.message}`; } return this.result; diff --git a/api/app/clients/tools/structured/specs/DALLE3.spec.js b/api/app/clients/tools/structured/specs/DALLE3.spec.js index 34fa3ebf00a..58771b1459e 100644 --- a/api/app/clients/tools/structured/specs/DALLE3.spec.js +++ b/api/app/clients/tools/structured/specs/DALLE3.spec.js @@ -1,20 +1,13 @@ -const fs = require('fs'); -const path = require('path'); const OpenAI = require('openai'); const DALLE3 = require('../DALLE3'); -const { - getFirebaseStorage, - saveImageToFirebaseStorage, -} = require('~/server/services/Files/Firebase'); -const saveImageFromUrl = require('../../saveImageFromUrl'); +const { processFileURL } = require('~/server/services/Files/process'); + const { logger } = require('~/config'); jest.mock('openai'); -jest.mock('~/server/services/Files/Firebase', () => ({ - getFirebaseStorage: jest.fn(), - saveImageToFirebaseStorage: jest.fn(), - getFirebaseStorageImageUrl: jest.fn(), +jest.mock('~/server/services/Files/process', () => ({ + processFileURL: jest.fn(), })); jest.mock('~/server/services/Files/images', () => ({ @@ -50,15 +43,14 @@ jest.mock('fs', () => { }; }); -jest.mock('../../saveImageFromUrl', () => { - return jest.fn(); -}); - jest.mock('path', () => { return { resolve: jest.fn(), join: jest.fn(), relative: jest.fn(), + extname: jest.fn().mockImplementation((filename) => { + return filename.slice(filename.lastIndexOf('.')); + }), }; }); @@ -99,10 +91,8 @@ describe('DALLE3', () => { it('should generate markdown image URL correctly', () => { const imageName = 'test.png'; - path.join.mockReturnValue('images/test.png'); - path.relative.mockReturnValue('images/test.png'); - const markdownImage = dalle.getMarkdownImageUrl(imageName); - expect(markdownImage).toBe('![generated image](/images/test.png)'); + const markdownImage = dalle.wrapInMarkdown(imageName); + expect(markdownImage).toBe('![generated image](test.png)'); }); it('should call OpenAI API with correct parameters', async () => { @@ -122,11 
+112,7 @@ describe('DALLE3', () => { }; generate.mockResolvedValue(mockResponse); - saveImageFromUrl.mockResolvedValue(true); - fs.existsSync.mockReturnValue(true); - path.resolve.mockReturnValue('/fakepath/images'); - path.join.mockReturnValue('/fakepath/images/img-test.png'); - path.relative.mockReturnValue('images/img-test.png'); + processFileURL.mockResolvedValue('http://example.com/img-test.png'); const result = await dalle._call(mockData); @@ -138,6 +124,7 @@ describe('DALLE3', () => { prompt: mockData.prompt, n: 1, }); + expect(result).toContain('![generated image]'); }); @@ -164,7 +151,7 @@ describe('DALLE3', () => { await expect(dalle._call(mockData)).rejects.toThrow('Missing required field: prompt'); }); - it('should log to console if no image name is found in the URL', async () => { + it('should log appropriate debug values', async () => { const mockData = { prompt: 'A test prompt', }; @@ -178,29 +165,16 @@ describe('DALLE3', () => { generate.mockResolvedValue(mockResponse); await dalle._call(mockData); - expect(logger.debug).toHaveBeenCalledWith('[DALL-E-3] No image name found in the string.', { + expect(logger.debug).toHaveBeenCalledWith('[DALL-E-3]', { data: { url: 'http://example.com/invalid-url' }, theImageUrl: 'http://example.com/invalid-url', + extension: expect.any(String), + imageBasename: expect.any(String), + imageExt: expect.any(String), + imageName: expect.any(String), }); }); - it('should create the directory if it does not exist', async () => { - const mockData = { - prompt: 'A test prompt', - }; - const mockResponse = { - data: [ - { - url: 'http://example.com/img-test.png', - }, - ], - }; - generate.mockResolvedValue(mockResponse); - fs.existsSync.mockReturnValue(false); // Simulate directory does not exist - await dalle._call(mockData); - expect(fs.mkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true }); - }); - it('should log an error and return the image URL if there is an error saving the image', async () => { const 
mockData = { prompt: 'A test prompt', @@ -214,31 +188,12 @@ describe('DALLE3', () => { }; const error = new Error('Error while saving the image'); generate.mockResolvedValue(mockResponse); - saveImageFromUrl.mockRejectedValue(error); + processFileURL.mockRejectedValue(error); const result = await dalle._call(mockData); - expect(logger.error).toHaveBeenCalledWith('Error while saving the image locally:', error); + expect(logger.error).toHaveBeenCalledWith('Error while saving the image:', error); expect(result).toBe('Failed to save the image locally. Error while saving the image'); }); - it('should save image to Firebase Storage if Firebase is initialized', async () => { - const mockData = { - prompt: 'A test prompt', - }; - const mockImageUrl = 'http://example.com/img-test.png'; - const mockResponse = { data: [{ url: mockImageUrl }] }; - generate.mockResolvedValue(mockResponse); - getFirebaseStorage.mockReturnValue({}); // Simulate Firebase being initialized - - await dalle._call(mockData); - - expect(getFirebaseStorage).toHaveBeenCalled(); - expect(saveImageToFirebaseStorage).toHaveBeenCalledWith( - undefined, - mockImageUrl, - expect.any(String), - ); - }); - it('should handle error when saving image to Firebase Storage fails', async () => { const mockData = { prompt: 'A test prompt', @@ -247,17 +202,11 @@ describe('DALLE3', () => { const mockResponse = { data: [{ url: mockImageUrl }] }; const error = new Error('Error while saving to Firebase'); generate.mockResolvedValue(mockResponse); - getFirebaseStorage.mockReturnValue({}); // Simulate Firebase being initialized - saveImageToFirebaseStorage.mockRejectedValue(error); + processFileURL.mockRejectedValue(error); const result = await dalle._call(mockData); - expect(logger.error).toHaveBeenCalledWith( - 'Error while saving the image to Firebase Storage:', - error, - ); - expect(result).toBe( - 'Failed to save the image to Firebase Storage. 
Error while saving to Firebase', - ); + expect(logger.error).toHaveBeenCalledWith('Error while saving the image:', error); + expect(result).toContain('Failed to save the image'); }); }); diff --git a/api/app/clients/tools/util/handleTools.js b/api/app/clients/tools/util/handleTools.js index 352dd5dec74..1d9a3a00749 100644 --- a/api/app/clients/tools/util/handleTools.js +++ b/api/app/clients/tools/util/handleTools.js @@ -170,6 +170,8 @@ const loadTools = async ({ const toolOptions = { serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' }, + dalle: { fileStrategy: options.fileStrategy }, + 'dall-e': { fileStrategy: options.fileStrategy }, }; const toolAuthFields = {}; diff --git a/api/cache/getCustomConfig.js b/api/cache/getCustomConfig.js new file mode 100644 index 00000000000..62082c5cbae --- /dev/null +++ b/api/cache/getCustomConfig.js @@ -0,0 +1,23 @@ +const { CacheKeys } = require('librechat-data-provider'); +const loadCustomConfig = require('~/server/services/Config/loadCustomConfig'); +const getLogStores = require('./getLogStores'); + +/** + * Retrieves the configuration object + * @function getCustomConfig */ +async function getCustomConfig() { + const cache = getLogStores(CacheKeys.CONFIG_STORE); + let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG); + + if (!customConfig) { + customConfig = await loadCustomConfig(); + } + + if (!customConfig) { + return null; + } + + return customConfig; +} + +module.exports = getCustomConfig; diff --git a/api/cache/getLogStores.js b/api/cache/getLogStores.js index 77949dacd3c..016c7700009 100644 --- a/api/cache/getLogStores.js +++ b/api/cache/getLogStores.js @@ -1,9 +1,10 @@ const Keyv = require('keyv'); -const keyvMongo = require('./keyvMongo'); -const keyvRedis = require('./keyvRedis'); -const { CacheKeys } = require('~/common/enums'); -const { math, isEnabled } = require('~/server/utils'); +const { CacheKeys } = require('librechat-data-provider'); const { logFile, violationFile } = 
require('./keyvFiles'); +const { math, isEnabled } = require('~/server/utils'); +const keyvRedis = require('./keyvRedis'); +const keyvMongo = require('./keyvMongo'); + const { BAN_DURATION, USE_REDIS } = process.env ?? {}; const duration = math(BAN_DURATION, 7200000); @@ -20,10 +21,10 @@ const pending_req = isEnabled(USE_REDIS) const config = isEnabled(USE_REDIS) ? new Keyv({ store: keyvRedis }) - : new Keyv({ namespace: CacheKeys.CONFIG }); + : new Keyv({ namespace: CacheKeys.CONFIG_STORE }); const namespaces = { - config, + [CacheKeys.CONFIG_STORE]: config, pending_req, ban: new Keyv({ store: keyvMongo, namespace: 'bans', ttl: duration }), general: new Keyv({ store: logFile, namespace: 'violations' }), @@ -39,19 +40,15 @@ const namespaces = { * Returns the keyv cache specified by type. * If an invalid type is passed, an error will be thrown. * - * @module getLogStores - * @requires keyv - a simple key-value storage that allows you to easily switch out storage adapters. - * @requires keyvFiles - a module that includes the logFile and violationFile. - * - * @param {string} type - The type of violation, which can be 'concurrent', 'message_limit', 'registrations' or 'logins'. - * @returns {Keyv} - If a valid type is passed, returns an object containing the logs for violations of the specified type. - * @throws Will throw an error if an invalid violation type is passed. + * @param {string} key - The key for the namespace to access + * @returns {Keyv} - If a valid key is passed, returns an object containing the cache store of the specified key. + * @throws Will throw an error if an invalid key is passed. 
*/ -const getLogStores = (type) => { - if (!type || !namespaces[type]) { - throw new Error(`Invalid store type: ${type}`); +const getLogStores = (key) => { + if (!key || !namespaces[key]) { + throw new Error(`Invalid store key: ${key}`); } - return namespaces[type]; + return namespaces[key]; }; module.exports = getLogStores; diff --git a/api/cache/keyvRedis.js b/api/cache/keyvRedis.js index f723429ee27..9501045e4e1 100644 --- a/api/cache/keyvRedis.js +++ b/api/cache/keyvRedis.js @@ -10,10 +10,11 @@ if (REDIS_URI && isEnabled(USE_REDIS)) { keyvRedis = new KeyvRedis(REDIS_URI, { useRedisSets: false }); keyvRedis.on('error', (err) => logger.error('KeyvRedis connection error:', err)); keyvRedis.setMaxListeners(20); -} else { logger.info( - '`REDIS_URI` not provided, or `USE_REDIS` not set. Redis module will not be initialized.', + '[Optional] Redis initialized. Note: Redis support is experimental. If you have issues, disable it. Cache needs to be flushed for values to refresh.', ); +} else { + logger.info('[Optional] Redis not initialized. Note: Redis support is experimental.'); } module.exports = keyvRedis; diff --git a/api/common/enums.js b/api/common/enums.js deleted file mode 100644 index 849ae43f59c..00000000000 --- a/api/common/enums.js +++ /dev/null @@ -1,17 +0,0 @@ -/** - * @typedef {Object} CacheKeys - * @property {'config'} CONFIG - Key for the config cache. - * @property {'plugins'} PLUGINS - Key for the plugins cache. - * @property {'modelsConfig'} MODELS_CONFIG - Key for the model config cache. - * @property {'defaultConfig'} DEFAULT_CONFIG - Key for the default config cache. - * @property {'overrideConfig'} OVERRIDE_CONFIG - Key for the override config cache. 
- */ -const CacheKeys = { - CONFIG: 'config', - PLUGINS: 'plugins', - MODELS_CONFIG: 'modelsConfig', - DEFAULT_CONFIG: 'defaultConfig', - OVERRIDE_CONFIG: 'overrideConfig', -}; - -module.exports = { CacheKeys }; diff --git a/api/config/paths.js b/api/config/paths.js index 2f577a183fd..41e3ac5054f 100644 --- a/api/config/paths.js +++ b/api/config/paths.js @@ -1,6 +1,7 @@ const path = require('path'); module.exports = { + dist: path.resolve(__dirname, '..', '..', 'client', 'dist'), publicPath: path.resolve(__dirname, '..', '..', 'client', 'public'), imageOutput: path.resolve(__dirname, '..', '..', 'client', 'public', 'images'), }; diff --git a/api/models/Message.js b/api/models/Message.js index 7accf9285a8..fe615f3283f 100644 --- a/api/models/Message.js +++ b/api/models/Message.js @@ -9,6 +9,7 @@ module.exports = { async saveMessage({ user, + endpoint, messageId, newMessageId, conversationId, @@ -34,6 +35,7 @@ module.exports = { const update = { user, + endpoint, messageId: newMessageId || messageId, conversationId, parentMessageId, diff --git a/api/models/schema/convoSchema.js b/api/models/schema/convoSchema.js index 46555ba3534..a282287eccb 100644 --- a/api/models/schema/convoSchema.js +++ b/api/models/schema/convoSchema.js @@ -18,36 +18,29 @@ const convoSchema = mongoose.Schema( user: { type: String, index: true, - // default: null, }, messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }], // google only - examples: [{ type: mongoose.Schema.Types.Mixed }], + examples: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined }, agentOptions: { type: mongoose.Schema.Types.Mixed, - // default: null, }, ...conversationPreset, // for bingAI only bingConversationId: { type: String, - // default: null, }, jailbreakConversationId: { type: String, - // default: null, }, conversationSignature: { type: String, - // default: null, }, clientId: { type: String, - // default: null, }, invocationId: { type: Number, - // default: 1, }, }, { timestamps: 
true }, diff --git a/api/models/schema/defaults.js b/api/models/schema/defaults.js index 338ee120891..39a6430f46b 100644 --- a/api/models/schema/defaults.js +++ b/api/models/schema/defaults.js @@ -5,6 +5,9 @@ const conversationPreset = { default: null, required: true, }, + endpointType: { + type: String, + }, // for azureOpenAI, openAI, chatGPTBrowser only model: { type: String, @@ -81,6 +84,12 @@ const conversationPreset = { type: String, // default: null, }, + resendImages: { + type: Boolean, + }, + imageDetail: { + type: String, + }, }; const agentOptions = { @@ -95,7 +104,6 @@ const agentOptions = { // default: null, required: false, }, - // for google only modelLabel: { type: String, // default: null, diff --git a/api/models/schema/fileSchema.js b/api/models/schema/fileSchema.js index bf9db4864d0..471b7bfd70a 100644 --- a/api/models/schema/fileSchema.js +++ b/api/models/schema/fileSchema.js @@ -1,3 +1,4 @@ +const { FileSources } = require('librechat-data-provider'); const mongoose = require('mongoose'); /** @@ -12,6 +13,7 @@ const mongoose = require('mongoose'); * @property {'file'} object - Type of object, always 'file' * @property {string} type - Type of file * @property {number} usage - Number of uses of the file + * @property {string} [source] - The source of the file * @property {number} [width] - Optional width of the file * @property {number} [height] - Optional height of the file * @property {Date} [expiresAt] - Optional height of the file @@ -42,11 +44,6 @@ const fileSchema = mongoose.Schema( type: Number, required: true, }, - usage: { - type: Number, - required: true, - default: 0, - }, filename: { type: String, required: true, @@ -64,6 +61,15 @@ const fileSchema = mongoose.Schema( type: String, required: true, }, + usage: { + type: Number, + required: true, + default: 0, + }, + source: { + type: String, + default: FileSources.local, + }, width: Number, height: Number, expiresAt: { diff --git a/api/models/schema/messageSchema.js 
b/api/models/schema/messageSchema.js index 33d799544b2..06da19e476d 100644 --- a/api/models/schema/messageSchema.js +++ b/api/models/schema/messageSchema.js @@ -23,9 +23,11 @@ const messageSchema = mongoose.Schema( type: String, default: null, }, + endpoint: { + type: String, + }, conversationSignature: { type: String, - // required: true }, clientId: { type: String, @@ -35,7 +37,6 @@ const messageSchema = mongoose.Schema( }, parentMessageId: { type: String, - // required: true }, tokenCount: { type: Number, @@ -82,22 +83,26 @@ const messageSchema = mongoose.Schema( select: false, default: false, }, - files: [{ type: mongoose.Schema.Types.Mixed }], + files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined }, plugin: { - latest: { - type: String, - required: false, - }, - inputs: { - type: [mongoose.Schema.Types.Mixed], - required: false, - }, - outputs: { - type: String, - required: false, + type: { + latest: { + type: String, + required: false, + }, + inputs: { + type: [mongoose.Schema.Types.Mixed], + required: false, + default: undefined, + }, + outputs: { + type: String, + required: false, + }, }, + default: undefined, }, - plugins: [{ type: mongoose.Schema.Types.Mixed }], + plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined }, }, { timestamps: true }, ); diff --git a/api/package.json b/api/package.json index 56d1a7e59b9..292a3f5a1c3 100644 --- a/api/package.json +++ b/api/package.json @@ -1,6 +1,6 @@ { "name": "@librechat/backend", - "version": "0.6.5", + "version": "0.6.6", "description": "", "scripts": { "start": "echo 'please run this from the root directory'", @@ -31,7 +31,7 @@ "@azure/search-documents": "^12.0.0", "@keyv/mongo": "^2.1.8", "@keyv/redis": "^2.8.1", - "@langchain/google-genai": "^0.0.2", + "@langchain/google-genai": "^0.0.7", "axios": "^1.3.4", "bcryptjs": "^2.4.3", "cheerio": "^1.0.0-rc.12", @@ -54,7 +54,7 @@ "keyv": "^4.5.4", "keyv-file": "^0.2.0", "klona": "^2.0.6", - "langchain": "^0.0.213", + 
"langchain": "^0.0.214", "librechat-data-provider": "*", "lodash": "^4.17.21", "meilisearch": "^0.33.0", diff --git a/api/server/controllers/AskController.js b/api/server/controllers/AskController.js index 78933feebc1..67d7c67e9f7 100644 --- a/api/server/controllers/AskController.js +++ b/api/server/controllers/AskController.js @@ -9,6 +9,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => { text, endpointOption, conversationId, + modelDisplayLabel, parentMessageId = null, overrideParentMessageId = null, } = req.body; @@ -22,7 +23,11 @@ const AskController = async (req, res, next, initializeClient, addTitle) => { let responseMessageId; let lastSavedTimestamp = 0; let saveDelay = 100; - const sender = getResponseSender({ ...endpointOption, model: endpointOption.modelOptions.model }); + const sender = getResponseSender({ + ...endpointOption, + model: endpointOption.modelOptions.model, + modelDisplayLabel, + }); const newConvo = !conversationId; const user = req.user.id; @@ -113,6 +118,8 @@ const AskController = async (req, res, next, initializeClient, addTitle) => { response = { ...response, ...metadata }; } + response.endpoint = endpointOption.endpoint; + if (client.options.attachments) { userMessage.files = client.options.attachments; delete userMessage.image_urls; diff --git a/api/server/controllers/EditController.js b/api/server/controllers/EditController.js index 72ee58026a4..43b82e7193f 100644 --- a/api/server/controllers/EditController.js +++ b/api/server/controllers/EditController.js @@ -10,6 +10,7 @@ const EditController = async (req, res, next, initializeClient) => { generation, endpointOption, conversationId, + modelDisplayLabel, responseMessageId, isContinued = false, parentMessageId = null, @@ -29,7 +30,11 @@ const EditController = async (req, res, next, initializeClient) => { let promptTokens; let lastSavedTimestamp = 0; let saveDelay = 100; - const sender = getResponseSender({ ...endpointOption, model: 
endpointOption.modelOptions.model }); + const sender = getResponseSender({ + ...endpointOption, + model: endpointOption.modelOptions.model, + modelDisplayLabel, + }); const userMessageId = parentMessageId; const user = req.user.id; diff --git a/api/server/controllers/EndpointController.js b/api/server/controllers/EndpointController.js index 0cc21f96ac3..5069bb33e0b 100644 --- a/api/server/controllers/EndpointController.js +++ b/api/server/controllers/EndpointController.js @@ -1,17 +1,22 @@ +const { CacheKeys } = require('librechat-data-provider'); +const { loadDefaultEndpointsConfig, loadConfigEndpoints } = require('~/server/services/Config'); const { getLogStores } = require('~/cache'); -const { CacheKeys } = require('~/common/enums'); -const { loadDefaultEndpointsConfig } = require('~/server/services/Config'); async function endpointController(req, res) { - const cache = getLogStores(CacheKeys.CONFIG); - const config = await cache.get(CacheKeys.DEFAULT_CONFIG); - if (config) { - res.send(config); + const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cachedEndpointsConfig = await cache.get(CacheKeys.ENDPOINT_CONFIG); + if (cachedEndpointsConfig) { + res.send(cachedEndpointsConfig); return; } - const defaultConfig = await loadDefaultEndpointsConfig(); - await cache.set(CacheKeys.DEFAULT_CONFIG, defaultConfig); - res.send(JSON.stringify(defaultConfig)); + + const defaultEndpointsConfig = await loadDefaultEndpointsConfig(); + const customConfigEndpoints = await loadConfigEndpoints(); + + const endpointsConfig = { ...defaultEndpointsConfig, ...customConfigEndpoints }; + + await cache.set(CacheKeys.ENDPOINT_CONFIG, endpointsConfig); + res.send(JSON.stringify(endpointsConfig)); } module.exports = endpointController; diff --git a/api/server/controllers/ModelController.js b/api/server/controllers/ModelController.js index 61ca82ecf03..2d23961e154 100644 --- a/api/server/controllers/ModelController.js +++ b/api/server/controllers/ModelController.js @@ -1,15 +1,19 @@ 
+const { CacheKeys } = require('librechat-data-provider'); +const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config'); const { getLogStores } = require('~/cache'); -const { CacheKeys } = require('~/common/enums'); -const { loadDefaultModels } = require('~/server/services/Config'); async function modelController(req, res) { - const cache = getLogStores(CacheKeys.CONFIG); - let modelConfig = await cache.get(CacheKeys.MODELS_CONFIG); - if (modelConfig) { - res.send(modelConfig); + const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cachedModelsConfig = await cache.get(CacheKeys.MODELS_CONFIG); + if (cachedModelsConfig) { + res.send(cachedModelsConfig); return; } - modelConfig = await loadDefaultModels(); + const defaultModelsConfig = await loadDefaultModels(); + const customModelsConfig = await loadConfigModels(); + + const modelConfig = { ...defaultModelsConfig, ...customModelsConfig }; + await cache.set(CacheKeys.MODELS_CONFIG, modelConfig); res.send(modelConfig); } diff --git a/api/server/controllers/OverrideController.js b/api/server/controllers/OverrideController.js index 0abd27a7a24..677fb87bdcb 100644 --- a/api/server/controllers/OverrideController.js +++ b/api/server/controllers/OverrideController.js @@ -1,9 +1,9 @@ -const { getLogStores } = require('~/cache'); -const { CacheKeys } = require('~/common/enums'); +const { CacheKeys } = require('librechat-data-provider'); const { loadOverrideConfig } = require('~/server/services/Config'); +const { getLogStores } = require('~/cache'); async function overrideController(req, res) { - const cache = getLogStores(CacheKeys.CONFIG); + const cache = getLogStores(CacheKeys.CONFIG_STORE); let overrideConfig = await cache.get(CacheKeys.OVERRIDE_CONFIG); if (overrideConfig) { res.send(overrideConfig); @@ -15,7 +15,7 @@ async function overrideController(req, res) { overrideConfig = await loadOverrideConfig(); const { endpointsConfig, modelsConfig } = overrideConfig; if (endpointsConfig) { - 
await cache.set(CacheKeys.DEFAULT_CONFIG, endpointsConfig); + await cache.set(CacheKeys.ENDPOINT_CONFIG, endpointsConfig); } if (modelsConfig) { await cache.set(CacheKeys.MODELS_CONFIG, modelsConfig); diff --git a/api/server/controllers/PluginController.js b/api/server/controllers/PluginController.js index 697a499796c..c37b36974e0 100644 --- a/api/server/controllers/PluginController.js +++ b/api/server/controllers/PluginController.js @@ -1,7 +1,7 @@ const path = require('path'); const { promises: fs } = require('fs'); +const { CacheKeys } = require('librechat-data-provider'); const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs'); -const { CacheKeys } = require('~/common/enums'); const { getLogStores } = require('~/cache'); const filterUniquePlugins = (plugins) => { @@ -29,7 +29,7 @@ const isPluginAuthenticated = (plugin) => { const getAvailablePluginsController = async (req, res) => { try { - const cache = getLogStores(CacheKeys.CONFIG); + const cache = getLogStores(CacheKeys.CONFIG_STORE); const cachedPlugins = await cache.get(CacheKeys.PLUGINS); if (cachedPlugins) { res.status(200).json(cachedPlugins); diff --git a/api/server/index.js b/api/server/index.js index 698620c56f3..86806b59146 100644 --- a/api/server/index.js +++ b/api/server/index.js @@ -1,34 +1,32 @@ +require('dotenv').config(); const path = require('path'); require('module-alias')({ base: path.resolve(__dirname, '..') }); const cors = require('cors'); const express = require('express'); const passport = require('passport'); const mongoSanitize = require('express-mongo-sanitize'); -const { initializeFirebase } = require('~/server/services/Files/Firebase/initialize'); const errorController = require('./controllers/ErrorController'); +const { jwtLogin, passportLogin } = require('~/strategies'); const configureSocialLogins = require('./socialLogins'); const { connectDb, indexSync } = require('~/lib/db'); -const { logger } = require('~/config'); +const AppService = 
require('./services/AppService'); const noIndex = require('./middleware/noIndex'); +const { logger } = require('~/config'); -const paths = require('~/config/paths'); const routes = require('./routes'); const { PORT, HOST, ALLOW_SOCIAL_LOGIN } = process.env ?? {}; const port = Number(PORT) || 3080; const host = HOST || 'localhost'; -const projectPath = path.join(__dirname, '..', '..', 'client'); -const { jwtLogin, passportLogin } = require('~/strategies'); const startServer = async () => { await connectDb(); logger.info('Connected to MongoDB'); - initializeFirebase(); await indexSync(); const app = express(); - app.locals.config = paths; + await AppService(app); // Middleware app.use(noIndex); @@ -36,14 +34,14 @@ const startServer = async () => { app.use(express.json({ limit: '3mb' })); app.use(mongoSanitize()); app.use(express.urlencoded({ extended: true, limit: '3mb' })); - app.use(express.static(path.join(projectPath, 'dist'))); - app.use(express.static(path.join(projectPath, 'public'))); + app.use(express.static(app.locals.paths.dist)); + app.use(express.static(app.locals.paths.publicPath)); app.set('trust proxy', 1); // trust first proxy app.use(cors()); if (!ALLOW_SOCIAL_LOGIN) { console.warn( - 'Social logins are disabled. Set Envrionment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.', + 'Social logins are disabled. 
Set Environment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.', ); } @@ -78,7 +76,7 @@ const startServer = async () => { app.use('/api/files', routes.files); app.use((req, res) => { - res.status(404).sendFile(path.join(projectPath, 'dist', 'index.html')); + res.status(404).sendFile(path.join(app.locals.paths.dist, 'index.html')); }); app.listen(port, host, () => { diff --git a/api/server/middleware/abortMiddleware.js b/api/server/middleware/abortMiddleware.js index 811963174c9..cc9b9fc0513 100644 --- a/api/server/middleware/abortMiddleware.js +++ b/api/server/middleware/abortMiddleware.js @@ -14,7 +14,7 @@ async function abortMessage(req, res) { } if (!abortControllers.has(abortKey) && !res.headersSent) { - return res.status(404).send({ message: 'Request not found' }); + return res.status(204).send({ message: 'Request not found' }); } const { abortController } = abortControllers.get(abortKey); @@ -26,6 +26,8 @@ async function abortMessage(req, res) { return sendMessage(res, finalEvent); } + res.setHeader('Content-Type', 'application/json'); + res.send(JSON.stringify(finalEvent)); } diff --git a/api/server/middleware/buildEndpointOption.js b/api/server/middleware/buildEndpointOption.js index d98fe92d2ce..91d0caceaaf 100644 --- a/api/server/middleware/buildEndpointOption.js +++ b/api/server/middleware/buildEndpointOption.js @@ -1,22 +1,28 @@ -const { processFiles } = require('~/server/services/Files'); +const { parseConvo, EModelEndpoint } = require('librechat-data-provider'); +const { processFiles } = require('~/server/services/Files/process'); +const gptPlugins = require('~/server/services/Endpoints/gptPlugins'); +const anthropic = require('~/server/services/Endpoints/anthropic'); const openAI = require('~/server/services/Endpoints/openAI'); +const custom = require('~/server/services/Endpoints/custom'); const google = require('~/server/services/Endpoints/google'); -const anthropic = require('~/server/services/Endpoints/anthropic'); -const gptPlugins = 
require('~/server/services/Endpoints/gptPlugins'); -const { parseConvo, EModelEndpoint } = require('librechat-data-provider'); const buildFunction = { [EModelEndpoint.openAI]: openAI.buildOptions, [EModelEndpoint.google]: google.buildOptions, + [EModelEndpoint.custom]: custom.buildOptions, [EModelEndpoint.azureOpenAI]: openAI.buildOptions, [EModelEndpoint.anthropic]: anthropic.buildOptions, [EModelEndpoint.gptPlugins]: gptPlugins.buildOptions, }; function buildEndpointOption(req, res, next) { - const { endpoint } = req.body; - const parsedBody = parseConvo(endpoint, req.body); - req.body.endpointOption = buildFunction[endpoint](endpoint, parsedBody); + const { endpoint, endpointType } = req.body; + const parsedBody = parseConvo({ endpoint, endpointType, conversation: req.body }); + req.body.endpointOption = buildFunction[endpointType ?? endpoint]( + endpoint, + parsedBody, + endpointType, + ); if (req.body.files) { // hold the promise req.body.endpointOption.attachments = processFiles(req.body.files); diff --git a/api/server/middleware/validateEndpoint.js b/api/server/middleware/validateEndpoint.js index 6e9c914c8eb..0eeaaeb97dc 100644 --- a/api/server/middleware/validateEndpoint.js +++ b/api/server/middleware/validateEndpoint.js @@ -1,7 +1,8 @@ const { handleError } = require('../utils'); function validateEndpoint(req, res, next) { - const { endpoint } = req.body; + const { endpoint: _endpoint, endpointType } = req.body; + const endpoint = endpointType ?? 
_endpoint; if (!req.body.text || req.body.text.length === 0) { return handleError(res, { text: 'Prompt empty or too short' }); diff --git a/api/server/routes/ask/custom.js b/api/server/routes/ask/custom.js new file mode 100644 index 00000000000..ef979bf0000 --- /dev/null +++ b/api/server/routes/ask/custom.js @@ -0,0 +1,20 @@ +const express = require('express'); +const AskController = require('~/server/controllers/AskController'); +const { initializeClient } = require('~/server/services/Endpoints/custom'); +const { addTitle } = require('~/server/services/Endpoints/openAI'); +const { + handleAbort, + setHeaders, + validateEndpoint, + buildEndpointOption, +} = require('~/server/middleware'); + +const router = express.Router(); + +router.post('/abort', handleAbort()); + +router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req, res, next) => { + await AskController(req, res, next, initializeClient, addTitle); +}); + +module.exports = router; diff --git a/api/server/routes/ask/index.js b/api/server/routes/ask/index.js index 669fd87e6fb..b5156ed8d10 100644 --- a/api/server/routes/ask/index.js +++ b/api/server/routes/ask/index.js @@ -1,5 +1,6 @@ const express = require('express'); const openAI = require('./openAI'); +const custom = require('./custom'); const google = require('./google'); const bingAI = require('./bingAI'); const anthropic = require('./anthropic'); @@ -42,5 +43,6 @@ router.use(`/${EModelEndpoint.gptPlugins}`, gptPlugins); router.use(`/${EModelEndpoint.anthropic}`, anthropic); router.use(`/${EModelEndpoint.google}`, google); router.use(`/${EModelEndpoint.bingAI}`, bingAI); +router.use(`/${EModelEndpoint.custom}`, custom); module.exports = router; diff --git a/api/server/routes/edit/custom.js b/api/server/routes/edit/custom.js new file mode 100644 index 00000000000..dd63c96c8f9 --- /dev/null +++ b/api/server/routes/edit/custom.js @@ -0,0 +1,20 @@ +const express = require('express'); +const EditController = 
require('~/server/controllers/EditController'); +const { initializeClient } = require('~/server/services/Endpoints/custom'); +const { addTitle } = require('~/server/services/Endpoints/openAI'); +const { + handleAbort, + setHeaders, + validateEndpoint, + buildEndpointOption, +} = require('~/server/middleware'); + +const router = express.Router(); + +router.post('/abort', handleAbort()); + +router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req, res, next) => { + await EditController(req, res, next, initializeClient, addTitle); +}); + +module.exports = router; diff --git a/api/server/routes/edit/index.js b/api/server/routes/edit/index.js index 01dd06ced98..fa19f9effdc 100644 --- a/api/server/routes/edit/index.js +++ b/api/server/routes/edit/index.js @@ -1,5 +1,6 @@ const express = require('express'); const openAI = require('./openAI'); +const custom = require('./custom'); const google = require('./google'); const anthropic = require('./anthropic'); const gptPlugins = require('./gptPlugins'); @@ -38,5 +39,6 @@ router.use([`/${EModelEndpoint.azureOpenAI}`, `/${EModelEndpoint.openAI}`], open router.use(`/${EModelEndpoint.gptPlugins}`, gptPlugins); router.use(`/${EModelEndpoint.anthropic}`, anthropic); router.use(`/${EModelEndpoint.google}`, google); +router.use(`/${EModelEndpoint.custom}`, custom); module.exports = router; diff --git a/api/server/routes/files/avatar.js b/api/server/routes/files/avatar.js index a7bb07c0f95..5abba85f9e8 100644 --- a/api/server/routes/files/avatar.js +++ b/api/server/routes/files/avatar.js @@ -1,7 +1,7 @@ const express = require('express'); const multer = require('multer'); -const uploadAvatar = require('~/server/services/Files/images/avatar/uploadAvatar'); +const uploadAvatar = require('~/server/services/Files/images/avatar'); const { requireJwtAuth } = require('~/server/middleware/'); const User = require('~/models/User'); @@ -23,7 +23,12 @@ router.post('/', requireJwtAuth, upload.single('input'), async (req, res) 
=> { if (!user) { throw new Error('User not found'); } - const url = await uploadAvatar(userId, input, manual); + const url = await uploadAvatar({ + input, + userId, + manual, + fileStrategy: req.app.locals.fileStrategy, + }); res.json({ url }); } catch (error) { diff --git a/api/server/routes/files/files.js b/api/server/routes/files/files.js index d9df1bdd759..3fea2e5d07b 100644 --- a/api/server/routes/files/files.js +++ b/api/server/routes/files/files.js @@ -1,36 +1,29 @@ const { z } = require('zod'); -const path = require('path'); -const fs = require('fs').promises; const express = require('express'); -const { deleteFiles } = require('~/models'); +const { FileSources } = require('librechat-data-provider'); +const { getStrategyFunctions } = require('~/server/services/Files/strategies'); +const { deleteFiles, getFiles } = require('~/models'); const { logger } = require('~/config'); const router = express.Router(); const isUUID = z.string().uuid(); -const isValidPath = (req, base, subfolder, filepath) => { - const normalizedBase = path.resolve(base, subfolder, req.user.id); - const normalizedFilepath = path.resolve(filepath); - return normalizedFilepath.startsWith(normalizedBase); -}; - -const deleteFile = async (req, file) => { - const { publicPath } = req.app.locals.config; - const parts = file.filepath.split(path.sep); - const subfolder = parts[1]; - const filepath = path.join(publicPath, file.filepath); - - if (!isValidPath(req, publicPath, subfolder, filepath)) { - throw new Error('Invalid file path'); +router.get('/', async (req, res) => { + try { + const files = await getFiles({ user: req.user.id }); + res.status(200).send(files); + } catch (error) { + logger.error('[/files] Error getting files:', error); + res.status(400).json({ message: 'Error in request', error: error.message }); } - - await fs.unlink(filepath); -}; +}); router.delete('/', async (req, res) => { try { const { files: _files } = req.body; + + /** @type {MongoFile[]} */ const files = 
_files.filter((file) => { if (!file.file_id) { return false; @@ -47,9 +40,24 @@ router.delete('/', async (req, res) => { } const file_ids = files.map((file) => file.file_id); + const deletionMethods = {}; const promises = []; promises.push(await deleteFiles(file_ids)); + for (const file of files) { + const source = file.source ?? FileSources.local; + + if (deletionMethods[source]) { + promises.push(deletionMethods[source](req, file)); + continue; + } + + const { deleteFile } = getStrategyFunctions(source); + if (!deleteFile) { + throw new Error(`Delete function not implemented for ${source}`); + } + + deletionMethods[source] = deleteFile; promises.push(deleteFile(req, file)); } diff --git a/api/server/routes/files/images.js b/api/server/routes/files/images.js index f88b7f2c7a2..30d3c3cac60 100644 --- a/api/server/routes/files/images.js +++ b/api/server/routes/files/images.js @@ -2,7 +2,7 @@ const { z } = require('zod'); const fs = require('fs').promises; const express = require('express'); const upload = require('./multer'); -const { localStrategy } = require('~/server/services/Files'); +const { processImageUpload } = require('~/server/services/Files/process'); const { logger } = require('~/config'); const router = express.Router(); @@ -34,7 +34,8 @@ router.post('/', upload.single('file'), async (req, res) => { uuidSchema.parse(metadata.file_id); metadata.temp_file_id = metadata.file_id; metadata.file_id = req.file_id; - await localStrategy({ req, res, file, metadata }); + + await processImageUpload({ req, res, file, metadata }); } catch (error) { logger.error('[/files/images] Error processing file:', error); try { diff --git a/api/server/routes/files/index.js b/api/server/routes/files/index.js index 74b200c8066..9afb900bbe6 100644 --- a/api/server/routes/files/index.js +++ b/api/server/routes/files/index.js @@ -11,6 +11,7 @@ const { const files = require('./files'); const images = require('./images'); +const avatar = require('./avatar'); 
router.use(requireJwtAuth); router.use(checkBan); @@ -18,6 +19,6 @@ router.use(uaParser); router.use('/', files); router.use('/images', images); -router.use('/images/avatar', require('./avatar')); +router.use('/images/avatar', avatar); module.exports = router; diff --git a/api/server/routes/files/multer.js b/api/server/routes/files/multer.js index ae0158f7d5c..d5aea05a373 100644 --- a/api/server/routes/files/multer.js +++ b/api/server/routes/files/multer.js @@ -8,7 +8,7 @@ const sizeLimit = 20 * 1024 * 1024; // 20 MB const storage = multer.diskStorage({ destination: function (req, file, cb) { - const outputPath = path.join(req.app.locals.config.imageOutput, 'temp'); + const outputPath = path.join(req.app.locals.paths.imageOutput, 'temp'); if (!fs.existsSync(outputPath)) { fs.mkdirSync(outputPath, { recursive: true }); } diff --git a/api/server/services/AppService.js b/api/server/services/AppService.js new file mode 100644 index 00000000000..b1f7cf57d8b --- /dev/null +++ b/api/server/services/AppService.js @@ -0,0 +1,27 @@ +const { FileSources } = require('librechat-data-provider'); +const { initializeFirebase } = require('./Files/Firebase/initialize'); +const loadCustomConfig = require('./Config/loadCustomConfig'); +const paths = require('~/config/paths'); + +/** + * + * Loads custom config and initializes app-wide variables. + * @function AppService + * @param {Express.Application} app - The Express application object. + */ +const AppService = async (app) => { + const config = (await loadCustomConfig()) ?? {}; + const fileStrategy = config.fileStrategy ?? 
FileSources.local; + process.env.CDN_PROVIDER = fileStrategy; + + if (fileStrategy === FileSources.firebase) { + initializeFirebase(); + } + + app.locals = { + fileStrategy, + paths, + }; +}; + +module.exports = AppService; diff --git a/api/server/services/Config/index.js b/api/server/services/Config/index.js index 13cbc09f3b3..57a00bf515e 100644 --- a/api/server/services/Config/index.js +++ b/api/server/services/Config/index.js @@ -1,13 +1,19 @@ const { config } = require('./EndpointService'); +const loadCustomConfig = require('./loadCustomConfig'); +const loadConfigModels = require('./loadConfigModels'); const loadDefaultModels = require('./loadDefaultModels'); const loadOverrideConfig = require('./loadOverrideConfig'); const loadAsyncEndpoints = require('./loadAsyncEndpoints'); +const loadConfigEndpoints = require('./loadConfigEndpoints'); const loadDefaultEndpointsConfig = require('./loadDefaultEConfig'); module.exports = { config, + loadCustomConfig, + loadConfigModels, loadDefaultModels, loadOverrideConfig, loadAsyncEndpoints, + loadConfigEndpoints, loadDefaultEndpointsConfig, }; diff --git a/api/server/services/Config/loadConfigEndpoints.js b/api/server/services/Config/loadConfigEndpoints.js new file mode 100644 index 00000000000..1b435e144e9 --- /dev/null +++ b/api/server/services/Config/loadConfigEndpoints.js @@ -0,0 +1,54 @@ +const { CacheKeys, EModelEndpoint } = require('librechat-data-provider'); +const { isUserProvided, extractEnvVariable } = require('~/server/utils'); +const loadCustomConfig = require('./loadCustomConfig'); +const { getLogStores } = require('~/cache'); + +/** + * Load config endpoints from the cached configuration object + * @function loadConfigEndpoints */ +async function loadConfigEndpoints() { + const cache = getLogStores(CacheKeys.CONFIG_STORE); + let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG); + + if (!customConfig) { + customConfig = await loadCustomConfig(); + } + + if (!customConfig) { + return {}; + } + + const { 
endpoints = {} } = customConfig ?? {}; + const endpointsConfig = {}; + + if (Array.isArray(endpoints[EModelEndpoint.custom])) { + const customEndpoints = endpoints[EModelEndpoint.custom].filter( + (endpoint) => + endpoint.baseURL && + endpoint.apiKey && + endpoint.name && + endpoint.models && + (endpoint.models.fetch || endpoint.models.default), + ); + + for (let i = 0; i < customEndpoints.length; i++) { + const endpoint = customEndpoints[i]; + const { baseURL, apiKey, name, iconURL, modelDisplayLabel } = endpoint; + + const resolvedApiKey = extractEnvVariable(apiKey); + const resolvedBaseURL = extractEnvVariable(baseURL); + + endpointsConfig[name] = { + type: EModelEndpoint.custom, + userProvide: isUserProvided(resolvedApiKey), + userProvideURL: isUserProvided(resolvedBaseURL), + modelDisplayLabel, + iconURL, + }; + } + } + + return endpointsConfig; +} + +module.exports = loadConfigEndpoints; diff --git a/api/server/services/Config/loadConfigModels.js b/api/server/services/Config/loadConfigModels.js new file mode 100644 index 00000000000..0abe15a8a1f --- /dev/null +++ b/api/server/services/Config/loadConfigModels.js @@ -0,0 +1,79 @@ +const { CacheKeys, EModelEndpoint } = require('librechat-data-provider'); +const { isUserProvided, extractEnvVariable } = require('~/server/utils'); +const { fetchModels } = require('~/server/services/ModelService'); +const loadCustomConfig = require('./loadCustomConfig'); +const { getLogStores } = require('~/cache'); + +/** + * Load config models from the cached configuration object + * @function loadConfigModels */ +async function loadConfigModels() { + const cache = getLogStores(CacheKeys.CONFIG_STORE); + let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG); + + if (!customConfig) { + customConfig = await loadCustomConfig(); + } + + if (!customConfig) { + return {}; + } + + const { endpoints = {} } = customConfig ??
{}; + const modelsConfig = {}; + + if (!Array.isArray(endpoints[EModelEndpoint.custom])) { + return modelsConfig; + } + + const customEndpoints = endpoints[EModelEndpoint.custom].filter( + (endpoint) => + endpoint.baseURL && + endpoint.apiKey && + endpoint.name && + endpoint.models && + (endpoint.models.fetch || endpoint.models.default), + ); + + const fetchPromisesMap = {}; // Map for promises keyed by baseURL + const baseUrlToNameMap = {}; // Map to associate baseURLs with names + + for (let i = 0; i < customEndpoints.length; i++) { + const endpoint = customEndpoints[i]; + const { models, name, baseURL, apiKey } = endpoint; + + const API_KEY = extractEnvVariable(apiKey); + const BASE_URL = extractEnvVariable(baseURL); + + modelsConfig[name] = []; + + if (models.fetch && !isUserProvided(API_KEY) && !isUserProvided(BASE_URL)) { + fetchPromisesMap[BASE_URL] = + fetchPromisesMap[BASE_URL] || fetchModels({ baseURL: BASE_URL, apiKey: API_KEY }); + baseUrlToNameMap[BASE_URL] = baseUrlToNameMap[BASE_URL] || []; + baseUrlToNameMap[BASE_URL].push(name); + continue; + } + + if (Array.isArray(models.default)) { + modelsConfig[name] = models.default; + } + } + + const fetchedData = await Promise.all(Object.values(fetchPromisesMap)); + const baseUrls = Object.keys(fetchPromisesMap); + + for (let i = 0; i < fetchedData.length; i++) { + const currentBaseUrl = baseUrls[i]; + const modelData = fetchedData[i]; + const associatedNames = baseUrlToNameMap[currentBaseUrl]; + + for (const name of associatedNames) { + modelsConfig[name] = modelData; + } + } + + return modelsConfig; +} + +module.exports = loadConfigModels; diff --git a/api/server/services/Config/loadCustomConfig.js b/api/server/services/Config/loadCustomConfig.js new file mode 100644 index 00000000000..c17d3283b47 --- /dev/null +++ b/api/server/services/Config/loadCustomConfig.js @@ -0,0 +1,41 @@ +const path = require('path'); +const { CacheKeys, configSchema } = require('librechat-data-provider'); +const loadYaml = 
require('~/utils/loadYaml'); +const { getLogStores } = require('~/cache'); +const { logger } = require('~/config'); + +const projectRoot = path.resolve(__dirname, '..', '..', '..', '..'); +const configPath = path.resolve(projectRoot, 'librechat.yaml'); + +/** + * Load custom configuration files and caches the object if the `cache` field at root is true. + * Validation via parsing the config file with the config schema. + * @function loadCustomConfig + * @returns {Promise} A promise that resolves to null or the custom config object. + * */ + +async function loadCustomConfig() { + const customConfig = loadYaml(configPath); + if (!customConfig) { + return null; + } + + const result = configSchema.strict().safeParse(customConfig); + if (!result.success) { + logger.error(`Invalid custom config file at ${configPath}`, result.error); + return null; + } else { + logger.info('Loaded custom config file'); + } + + if (customConfig.cache) { + const cache = getLogStores(CacheKeys.CONFIG_STORE); + await cache.set(CacheKeys.CUSTOM_CONFIG, customConfig); + } + + // TODO: handle remote config + + return customConfig; +} + +module.exports = loadCustomConfig; diff --git a/api/server/services/Endpoints/custom/buildOptions.js b/api/server/services/Endpoints/custom/buildOptions.js new file mode 100644 index 00000000000..0bba48e2b95 --- /dev/null +++ b/api/server/services/Endpoints/custom/buildOptions.js @@ -0,0 +1,18 @@ +const buildOptions = (endpoint, parsedBody, endpointType) => { + const { chatGptLabel, promptPrefix, resendImages, imageDetail, ...rest } = parsedBody; + const endpointOption = { + endpoint, + endpointType, + chatGptLabel, + promptPrefix, + resendImages, + imageDetail, + modelOptions: { + ...rest, + }, + }; + + return endpointOption; +}; + +module.exports = buildOptions; diff --git a/api/server/services/Endpoints/custom/index.js b/api/server/services/Endpoints/custom/index.js new file mode 100644 index 00000000000..3cda8d5fece --- /dev/null +++ 
b/api/server/services/Endpoints/custom/index.js @@ -0,0 +1,7 @@ +const initializeClient = require('./initializeClient'); +const buildOptions = require('./buildOptions'); + +module.exports = { + initializeClient, + buildOptions, +}; diff --git a/api/server/services/Endpoints/custom/initializeClient.js b/api/server/services/Endpoints/custom/initializeClient.js new file mode 100644 index 00000000000..978506b7b47 --- /dev/null +++ b/api/server/services/Endpoints/custom/initializeClient.js @@ -0,0 +1,97 @@ +const { EModelEndpoint } = require('librechat-data-provider'); +const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService'); +const { isUserProvided, extractEnvVariable } = require('~/server/utils'); +const getCustomConfig = require('~/cache/getCustomConfig'); +const { OpenAIClient } = require('~/app'); + +const envVarRegex = /^\${(.+)}$/; + +const { PROXY } = process.env; + +const initializeClient = async ({ req, res, endpointOption }) => { + const { key: expiresAt, endpoint } = req.body; + const customConfig = await getCustomConfig(); + if (!customConfig) { + throw new Error(`Config not found for the ${endpoint} custom endpoint.`); + } + + const { endpoints = {} } = customConfig; + const customEndpoints = endpoints[EModelEndpoint.custom] ?? 
[]; + const endpointConfig = customEndpoints.find((endpointConfig) => endpointConfig.name === endpoint); + + const CUSTOM_API_KEY = extractEnvVariable(endpointConfig.apiKey); + const CUSTOM_BASE_URL = extractEnvVariable(endpointConfig.baseURL); + + let resolvedHeaders = {}; + if (endpointConfig.headers && typeof endpointConfig.headers === 'object') { + Object.keys(endpointConfig.headers).forEach((key) => { + resolvedHeaders[key] = extractEnvVariable(endpointConfig.headers[key]); + }); + } + + if (CUSTOM_API_KEY.match(envVarRegex)) { + throw new Error(`Missing API Key for ${endpoint}.`); + } + + if (CUSTOM_BASE_URL.match(envVarRegex)) { + throw new Error(`Missing Base URL for ${endpoint}.`); + } + + const customOptions = { + headers: resolvedHeaders, + addParams: endpointConfig.addParams, + dropParams: endpointConfig.dropParams, + titleConvo: endpointConfig.titleConvo, + titleModel: endpointConfig.titleModel, + forcePrompt: endpointConfig.forcePrompt, + summaryModel: endpointConfig.summaryModel, + modelDisplayLabel: endpointConfig.modelDisplayLabel, + titleMethod: endpointConfig.titleMethod ?? 'completion', + contextStrategy: endpointConfig.summarize ? 'summarize' : null, + }; + + const useUserKey = isUserProvided(CUSTOM_API_KEY); + const useUserURL = isUserProvided(CUSTOM_BASE_URL); + + let userValues = null; + if (expiresAt && (useUserKey || useUserURL)) { + checkUserKeyExpiry( + expiresAt, + `Your API values for ${endpoint} have expired. Please configure them again.`, + ); + userValues = await getUserKey({ userId: req.user.id, name: endpoint }); + try { + userValues = JSON.parse(userValues); + } catch (e) { + throw new Error(`Invalid JSON provided for ${endpoint} user values.`); + } + } + + let apiKey = useUserKey ? userValues.apiKey : CUSTOM_API_KEY; + let baseURL = useUserURL ? 
userValues.baseURL : CUSTOM_BASE_URL; + + if (!apiKey) { + throw new Error(`${endpoint} API key not provided.`); + } + + if (!baseURL) { + throw new Error(`${endpoint} Base URL not provided.`); + } + + const clientOptions = { + reverseProxyUrl: baseURL ?? null, + proxy: PROXY ?? null, + req, + res, + ...customOptions, + ...endpointOption, + }; + + const client = new OpenAIClient(apiKey, clientOptions); + return { + client, + openAIApiKey: apiKey, + }; +}; + +module.exports = initializeClient; diff --git a/api/server/services/Endpoints/gptPlugins/initializeClient.js b/api/server/services/Endpoints/gptPlugins/initializeClient.js index 4abb2d2de5c..54ea822e494 100644 --- a/api/server/services/Endpoints/gptPlugins/initializeClient.js +++ b/api/server/services/Endpoints/gptPlugins/initializeClient.js @@ -1,7 +1,8 @@ -const { PluginsClient } = require('~/app'); -const { isEnabled } = require('~/server/utils'); -const { getAzureCredentials } = require('~/utils'); +const { EModelEndpoint } = require('librechat-data-provider'); const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService'); +const { getAzureCredentials } = require('~/utils'); +const { isEnabled } = require('~/server/utils'); +const { PluginsClient } = require('~/app'); const initializeClient = async ({ req, res, endpointOption }) => { const { @@ -10,26 +11,40 @@ const initializeClient = async ({ req, res, endpointOption }) => { AZURE_API_KEY, PLUGINS_USE_AZURE, OPENAI_REVERSE_PROXY, + AZURE_OPENAI_BASEURL, OPENAI_SUMMARIZE, DEBUG_PLUGINS, } = process.env; + const { key: expiresAt } = req.body; const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 'summarize' : null; + + const useAzure = isEnabled(PLUGINS_USE_AZURE); + const endpoint = useAzure ? EModelEndpoint.azureOpenAI : EModelEndpoint.openAI; + + const baseURLOptions = { + [EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY, + [EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL, + }; + + const reverseProxyUrl = baseURLOptions[endpoint] ?? 
null; + const clientOptions = { contextStrategy, debug: isEnabled(DEBUG_PLUGINS), - reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null, + reverseProxyUrl, proxy: PROXY ?? null, req, res, ...endpointOption, }; - const useAzure = isEnabled(PLUGINS_USE_AZURE); + const credentials = { + [EModelEndpoint.openAI]: OPENAI_API_KEY, + [EModelEndpoint.azureOpenAI]: AZURE_API_KEY, + }; - const isUserProvided = useAzure - ? AZURE_API_KEY === 'user_provided' - : OPENAI_API_KEY === 'user_provided'; + const isUserProvided = credentials[endpoint] === 'user_provided'; let userKey = null; if (expiresAt && isUserProvided) { @@ -39,11 +54,11 @@ const initializeClient = async ({ req, res, endpointOption }) => { ); userKey = await getUserKey({ userId: req.user.id, - name: useAzure ? 'azureOpenAI' : 'openAI', + name: endpoint, }); } - let apiKey = isUserProvided ? userKey : OPENAI_API_KEY; + let apiKey = isUserProvided ? userKey : credentials[endpoint]; if (useAzure || (apiKey && apiKey.includes('azure') && !clientOptions.azure)) { clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials(); diff --git a/api/server/services/Endpoints/openAI/addTitle.js b/api/server/services/Endpoints/openAI/addTitle.js index f630638643f..ab15443f942 100644 --- a/api/server/services/Endpoints/openAI/addTitle.js +++ b/api/server/services/Endpoints/openAI/addTitle.js @@ -7,6 +7,10 @@ const addTitle = async (req, { text, response, client }) => { return; } + if (client.options.titleConvo === false) { + return; + } + // If the request was aborted and is not azure, don't generate the title. 
if (!client.azure && client.abortController.signal.aborted) { return; diff --git a/api/server/services/Endpoints/openAI/buildOptions.js b/api/server/services/Endpoints/openAI/buildOptions.js index a1ad232bb73..80037fb4b8e 100644 --- a/api/server/services/Endpoints/openAI/buildOptions.js +++ b/api/server/services/Endpoints/openAI/buildOptions.js @@ -1,9 +1,11 @@ const buildOptions = (endpoint, parsedBody) => { - const { chatGptLabel, promptPrefix, ...rest } = parsedBody; + const { chatGptLabel, promptPrefix, resendImages, imageDetail, ...rest } = parsedBody; const endpointOption = { endpoint, chatGptLabel, promptPrefix, + resendImages, + imageDetail, modelOptions: { ...rest, }, diff --git a/api/server/services/Endpoints/openAI/initializeClient.js b/api/server/services/Endpoints/openAI/initializeClient.js index 37681485b27..b6427823e12 100644 --- a/api/server/services/Endpoints/openAI/initializeClient.js +++ b/api/server/services/Endpoints/openAI/initializeClient.js @@ -1,7 +1,8 @@ -const { OpenAIClient } = require('~/app'); -const { isEnabled } = require('~/server/utils'); -const { getAzureCredentials } = require('~/utils'); +const { EModelEndpoint } = require('librechat-data-provider'); const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService'); +const { getAzureCredentials } = require('~/utils'); +const { isEnabled } = require('~/server/utils'); +const { OpenAIClient } = require('~/app'); const initializeClient = async ({ req, res, endpointOption }) => { const { @@ -9,15 +10,24 @@ const initializeClient = async ({ req, res, endpointOption }) => { OPENAI_API_KEY, AZURE_API_KEY, OPENAI_REVERSE_PROXY, + AZURE_OPENAI_BASEURL, OPENAI_SUMMARIZE, DEBUG_OPENAI, } = process.env; const { key: expiresAt, endpoint } = req.body; const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 
'summarize' : null; + + const baseURLOptions = { + [EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY, + [EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL, + }; + + const reverseProxyUrl = baseURLOptions[endpoint] ?? null; + const clientOptions = { debug: isEnabled(DEBUG_OPENAI), contextStrategy, - reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null, + reverseProxyUrl, proxy: PROXY ?? null, req, res, @@ -25,8 +35,8 @@ const initializeClient = async ({ req, res, endpointOption }) => { }; const credentials = { - openAI: OPENAI_API_KEY, - azureOpenAI: AZURE_API_KEY, + [EModelEndpoint.openAI]: OPENAI_API_KEY, + [EModelEndpoint.azureOpenAI]: AZURE_API_KEY, }; const isUserProvided = credentials[endpoint] === 'user_provided'; @@ -42,7 +52,7 @@ const initializeClient = async ({ req, res, endpointOption }) => { let apiKey = isUserProvided ? userKey : credentials[endpoint]; - if (endpoint === 'azureOpenAI') { + if (endpoint === EModelEndpoint.azureOpenAI) { clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials(); apiKey = clientOptions.azure.azureOpenAIApiKey; } diff --git a/api/server/services/Files/Firebase/crud.js b/api/server/services/Files/Firebase/crud.js new file mode 100644 index 00000000000..68f534bcb6d --- /dev/null +++ b/api/server/services/Files/Firebase/crud.js @@ -0,0 +1,174 @@ +const fetch = require('node-fetch'); +const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage'); +const { getFirebaseStorage } = require('./initialize'); + +/** + * Deletes a file from Firebase Storage. + * @param {string} directory - The directory name + * @param {string} fileName - The name of the file to delete. + * @returns {Promise} A promise that resolves when the file is deleted. + */ +async function deleteFile(basePath, fileName) { + const storage = getFirebaseStorage(); + if (!storage) { + console.error('Firebase is not initialized. 
Cannot delete file from Firebase Storage.'); + throw new Error('Firebase is not initialized'); + } + + const storageRef = ref(storage, `${basePath}/${fileName}`); + + try { + await deleteObject(storageRef); + console.log('File deleted successfully from Firebase Storage'); + } catch (error) { + console.error('Error deleting file from Firebase Storage:', error.message); + throw error; + } +} + +/** + * Saves an file from a given URL to Firebase Storage. The function first initializes the Firebase Storage + * reference, then uploads the file to a specified basePath in the Firebase Storage. It handles initialization + * errors and upload errors, logging them to the console. If the upload is successful, the file name is returned. + * + * @param {Object} params - The parameters object. + * @param {string} params.userId - The user's unique identifier. This is used to create a user-specific basePath + * in Firebase Storage. + * @param {string} params.URL - The URL of the file to be uploaded. The file at this URL will be fetched + * and uploaded to Firebase Storage. + * @param {string} params.fileName - The name that will be used to save the file in Firebase Storage. This + * should include the file extension. + * @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file will + * be stored. Defaults to 'images' if not specified. + * + * @returns {Promise} + * A promise that resolves to the file name if the file is successfully uploaded, or null if there + * is an error in initialization or upload. + */ +async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' }) { + const storage = getFirebaseStorage(); + if (!storage) { + console.error('Firebase is not initialized. 
Cannot save file to Firebase Storage.'); + return null; + } + + const storageRef = ref(storage, `${basePath}/${userId.toString()}/${fileName}`); + + try { + await uploadBytes(storageRef, await fetch(URL).then((response) => response.buffer())); + return fileName; + } catch (error) { + console.error('Error uploading file to Firebase Storage:', error.message); + return null; + } +} + +/** + * Retrieves the download URL for a specified file from Firebase Storage. This function initializes the + * Firebase Storage and generates a reference to the file based on the provided basePath and file name. If + * Firebase Storage is not initialized or if there is an error in fetching the URL, the error is logged + * to the console. + * + * @param {Object} params - The parameters object. + * @param {string} params.fileName - The name of the file for which the URL is to be retrieved. This should + * include the file extension. + * @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file is + * stored. Defaults to 'images' if not specified. + * + * @returns {Promise} + * A promise that resolves to the download URL of the file if successful, or null if there is an + * error in initialization or fetching the URL. + */ +async function getFirebaseURL({ fileName, basePath = 'images' }) { + const storage = getFirebaseStorage(); + if (!storage) { + console.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.'); + return null; + } + + const storageRef = ref(storage, `${basePath}/${fileName}`); + + try { + return await getDownloadURL(storageRef); + } catch (error) { + console.error('Error fetching file URL from Firebase Storage:', error.message); + return null; + } +} + +/** + * Uploads a buffer to Firebase Storage. + * + * @param {Object} params - The parameters object. + * @param {string} params.userId - The user's unique identifier. This is used to create a user-specific basePath + * in Firebase Storage. 
+ * @param {string} params.fileName - The name of the file to be saved in Firebase Storage. + * @param {string} params.buffer - The buffer to be uploaded. + * @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file will + * be stored. Defaults to 'images' if not specified. + * + * @returns {Promise} - A promise that resolves to the download URL of the uploaded file. + */ +async function saveBufferToFirebase({ userId, buffer, fileName, basePath = 'images' }) { + const storage = getFirebaseStorage(); + if (!storage) { + throw new Error('Firebase is not initialized'); + } + + const storageRef = ref(storage, `${basePath}/${userId}/${fileName}`); + await uploadBytes(storageRef, buffer); + + // Assuming you have a function to get the download URL + return await getFirebaseURL({ fileName, basePath: `${basePath}/${userId}` }); +} + +/** + * Extracts and decodes the file path from a Firebase Storage URL. + * + * @param {string} urlString - The Firebase Storage URL. + * @returns {string} The decoded file path. + */ +function extractFirebaseFilePath(urlString) { + try { + const url = new URL(urlString); + const pathRegex = /\/o\/(.+?)(\?|$)/; + const match = url.pathname.match(pathRegex); + + if (match && match[1]) { + return decodeURIComponent(match[1]); + } + + return ''; + } catch (error) { + // If URL parsing fails, return an empty string + return ''; + } +} + +/** + * Deletes a file from Firebase storage. This function determines the filepath from the + * Firebase storage URL via regex for deletion. Validated by the user's ID. + * + * @param {Express.Request} req - The request object from Express. + * It should contain a `user` object with an `id` property. + * @param {MongoFile} file - The file object to be deleted. + * + * @returns {Promise} + * A promise that resolves when the file has been successfully deleted from Firebase storage. + * Throws an error if there is an issue with deletion. 
+ */ +const deleteFirebaseFile = async (req, file) => { + const fileName = extractFirebaseFilePath(file.filepath); + if (!fileName.includes(req.user.id)) { + throw new Error('Invalid file path'); + } + await deleteFile('', fileName); +}; + +module.exports = { + deleteFile, + getFirebaseURL, + saveURLToFirebase, + deleteFirebaseFile, + saveBufferToFirebase, +}; diff --git a/api/server/services/Files/Firebase/images.js b/api/server/services/Files/Firebase/images.js index e04902c02fe..95b600962f6 100644 --- a/api/server/services/Files/Firebase/images.js +++ b/api/server/services/Files/Firebase/images.js @@ -1,45 +1,105 @@ -const fetch = require('node-fetch'); -const { ref, uploadBytes, getDownloadURL } = require('firebase/storage'); -const { getFirebaseStorage } = require('./initialize'); - -async function saveImageToFirebaseStorage(userId, imageUrl, imageName) { - const storage = getFirebaseStorage(); - if (!storage) { - console.error('Firebase is not initialized. Cannot save image to Firebase Storage.'); - return null; - } +const fs = require('fs'); +const path = require('path'); +const sharp = require('sharp'); +const { resizeImage } = require('../images/resize'); +const { saveBufferToFirebase } = require('./crud'); +const { updateFile } = require('~/models/File'); +const { logger } = require('~/config'); - const storageRef = ref(storage, `images/${userId.toString()}/${imageName}`); +/** + * Converts an image file to the WebP format. The function first resizes the image based on the specified + * resolution. + * + * + * @param {Object} req - The request object from Express. It should have a `user` property with an `id` + * representing the user, and an `app.locals.paths` object with an `imageOutput` path. + * @param {Express.Multer.File} file - The file object, which is part of the request. The file object should + * have a `path` property that points to the location of the uploaded file. + * @param {string} [resolution='high'] - Optional. 
The desired resolution for the image resizing. Default is 'high'. + * + * @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>} + * A promise that resolves to an object containing: + * - filepath: The path where the converted WebP image is saved. + * - bytes: The size of the converted image in bytes. + * - width: The width of the converted image. + * - height: The height of the converted image. + */ +async function uploadImageToFirebase(req, file, resolution = 'high') { + const inputFilePath = file.path; + const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution); + const extension = path.extname(inputFilePath); + const userId = req.user.id; - try { - // Upload image to Firebase Storage using the image URL - await uploadBytes(storageRef, await fetch(imageUrl).then((response) => response.buffer())); - return imageName; - } catch (error) { - console.error('Error uploading image to Firebase Storage:', error.message); - return null; + let webPBuffer; + let fileName = path.basename(inputFilePath); + if (extension.toLowerCase() === '.webp') { + webPBuffer = resizedBuffer; + } else { + webPBuffer = await sharp(resizedBuffer).toFormat('webp').toBuffer(); + // Replace or append the correct extension + const extRegExp = new RegExp(path.extname(fileName) + '$'); + fileName = fileName.replace(extRegExp, '.webp'); + if (!path.extname(fileName)) { + fileName += '.webp'; + } } -} -async function getFirebaseStorageImageUrl(imageName) { - const storage = getFirebaseStorage(); - if (!storage) { - console.error('Firebase is not initialized. 
Cannot get image URL from Firebase Storage.'); - return null; - } + const downloadURL = await saveBufferToFirebase({ userId, buffer: webPBuffer, fileName }); + + await fs.promises.unlink(inputFilePath); - const storageRef = ref(storage, `images/${imageName}`); + const bytes = Buffer.byteLength(webPBuffer); + return { filepath: downloadURL, bytes, width, height }; +} +/** + * Local: Updates the file and returns the URL in expected order/format + * for image payload handling: tuple order of [filepath, URL]. + * @param {Object} req - The request object. + * @param {MongoFile} file - The file object. + * @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage. + */ +async function prepareImageURL(req, file) { + const { filepath } = file; + const promises = []; + promises.push(updateFile({ file_id: file.file_id })); + promises.push(filepath); + return await Promise.all(promises); +} + +/** + * Uploads a user's avatar to Firebase Storage and returns the URL. + * If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database. + * + * @param {object} params - The parameters object. + * @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format. + * @param {object} params.User - The User document (mongoose); TODO: remove direct use of Model, `User` + * @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false'). + * @returns {Promise} - A promise that resolves with the URL of the uploaded avatar. + * @throws {Error} - Throws an error if Firebase is not initialized or if there is an error in uploading. 
+ */ +async function processFirebaseAvatar({ buffer, User, manual }) { try { - // Get the download URL for the image from Firebase Storage - return `![generated image](${await getDownloadURL(storageRef)})`; + const downloadURL = await saveBufferToFirebase({ + userId: User._id.toString(), + buffer, + fileName: 'avatar.png', + }); + + const isManual = manual === 'true'; + + const url = `${downloadURL}?manual=${isManual}`; + + if (isManual) { + User.avatar = url; + await User.save(); + } + + return url; } catch (error) { - console.error('Error fetching image URL from Firebase Storage:', error.message); - return null; + logger.error('Error uploading profile picture:', error); + throw error; } } -module.exports = { - saveImageToFirebaseStorage, - getFirebaseStorageImageUrl, -}; +module.exports = { uploadImageToFirebase, prepareImageURL, processFirebaseAvatar }; diff --git a/api/server/services/Files/Firebase/index.js b/api/server/services/Files/Firebase/index.js index 905bf660d4f..27ad97a8520 100644 --- a/api/server/services/Files/Firebase/index.js +++ b/api/server/services/Files/Firebase/index.js @@ -1,7 +1,9 @@ +const crud = require('./crud'); const images = require('./images'); const initialize = require('./initialize'); module.exports = { + ...crud, ...images, ...initialize, }; diff --git a/api/server/services/Files/Firebase/initialize.js b/api/server/services/Files/Firebase/initialize.js index 5dc1f937915..67d923c44f8 100644 --- a/api/server/services/Files/Firebase/initialize.js +++ b/api/server/services/Files/Firebase/initialize.js @@ -21,16 +21,13 @@ const initializeFirebase = () => { }; if (Object.values(firebaseConfig).some((value) => !value)) { - i === 0 && - logger.info( - '[Optional] Firebase configuration missing or incomplete. 
Firebase will not be initialized.', - ); + i === 0 && logger.info('[Optional] CDN not initialized.'); i++; return null; } firebaseApp = firebase.initializeApp(firebaseConfig); - logger.info('Firebase initialized'); + logger.info('Firebase CDN initialized'); return firebaseApp; }; diff --git a/api/server/services/Files/Local/crud.js b/api/server/services/Files/Local/crud.js new file mode 100644 index 00000000000..d81c063031a --- /dev/null +++ b/api/server/services/Files/Local/crud.js @@ -0,0 +1,174 @@ +const fs = require('fs'); +const path = require('path'); +const axios = require('axios'); +const { logger } = require('~/config'); +const paths = require('~/config/paths'); + +/** + * Saves a file to a specified output path with a new filename. + * + * @param {Express.Multer.File} file - The file object to be saved. Should contain properties like 'originalname' and 'path'. + * @param {string} outputPath - The path where the file should be saved. + * @param {string} outputFilename - The new filename for the saved file (without extension). + * @returns {Promise} The full path of the saved file. + * @throws Will throw an error if the file saving process fails. + */ +async function saveFile(file, outputPath, outputFilename) { + try { + if (!fs.existsSync(outputPath)) { + fs.mkdirSync(outputPath, { recursive: true }); + } + + const fileExtension = path.extname(file.originalname); + const filenameWithExt = outputFilename + fileExtension; + const outputFilePath = path.join(outputPath, filenameWithExt); + fs.copyFileSync(file.path, outputFilePath); + fs.unlinkSync(file.path); + + return outputFilePath; + } catch (error) { + logger.error('[saveFile] Error while saving the file:', error); + throw error; + } +} + +/** + * Saves an uploaded image file to a specified directory based on the user's ID and a filename. + * + * @param {Express.Request} req - The Express request object, containing the user's information and app configuration. 
+ * @param {Express.Multer.File} file - The uploaded file object. + * @param {string} filename - The new filename to assign to the saved image (without extension). + * @returns {Promise} + * @throws Will throw an error if the image saving process fails. + */ +const saveLocalImage = async (req, file, filename) => { + const imagePath = req.app.locals.paths.imageOutput; + const outputPath = path.join(imagePath, req.user.id ?? ''); + await saveFile(file, outputPath, filename); +}; + +/** + * Saves a file from a given URL to a local directory. The function fetches the file using the provided URL, + * determines the content type, and saves it to a specified local directory with the correct file extension. + * If the specified directory does not exist, it is created. The function returns the name of the saved file + * or null in case of an error. + * + * @param {Object} params - The parameters object. + * @param {string} params.userId - The user's unique identifier. This is used to create a user-specific path + * in the local file system. + * @param {string} params.URL - The URL of the file to be downloaded and saved. + * @param {string} params.fileName - The desired file name for the saved file. This may be modified to include + * the correct file extension based on the content type. + * @param {string} [params.basePath='images'] - Optional. The base directory where the file will be saved. + * Defaults to 'images' if not specified. + * + * @returns {Promise} + * A promise that resolves to the file name if the file is successfully saved, or null if there is an error. 
+ */ +async function saveFileFromURL({ userId, URL, fileName, basePath = 'images' }) { + try { + // Fetch the file from the URL + const response = await axios({ + url: URL, + responseType: 'stream', + }); + + // Get the content type from the response headers + const contentType = response.headers['content-type']; + let extension = contentType.split('/').pop(); + + // Construct the outputPath based on the basePath and userId + const outputPath = path.join(paths.publicPath, basePath, userId.toString()); + + // Check if the output directory exists, if not, create it + if (!fs.existsSync(outputPath)) { + fs.mkdirSync(outputPath, { recursive: true }); + } + + // Replace or append the correct extension + const extRegExp = new RegExp(path.extname(fileName) + '$'); + fileName = fileName.replace(extRegExp, `.${extension}`); + if (!path.extname(fileName)) { + fileName += `.${extension}`; + } + + // Create a writable stream for the output path + const outputFilePath = path.join(outputPath, fileName); + const writer = fs.createWriteStream(outputFilePath); + + // Pipe the response data to the output file + response.data.pipe(writer); + + return new Promise((resolve, reject) => { + writer.on('finish', () => resolve(fileName)); + writer.on('error', reject); + }); + } catch (error) { + logger.error('[saveFileFromURL] Error while saving the file:', error); + return null; + } +} + +/** + * Constructs a local file path for a given file name and base path. This function simply joins the base + * path and the file name to create a file path. It does not check for the existence of the file at the path. + * + * @param {Object} params - The parameters object. + * @param {string} params.fileName - The name of the file for which the path is to be constructed. This should + * include the file extension. + * @param {string} [params.basePath='images'] - Optional. The base directory to be used for constructing the file path. + * Defaults to 'images' if not specified. 
+ * + * @returns {string} + * The constructed local file path. + */ +async function getLocalFileURL({ fileName, basePath = 'images' }) { + return path.posix.join('/', basePath, fileName); +} + +/** + * Validates if a given filepath is within a specified subdirectory under a base path. This function constructs + * the expected base path using the base, subfolder, and user id from the request, and then checks if the + * provided filepath starts with this constructed base path. + * + * @param {Express.Request} req - The request object from Express. It should contain a `user` property with an `id`. + * @param {string} base - The base directory path. + * @param {string} subfolder - The subdirectory under the base path. + * @param {string} filepath - The complete file path to be validated. + * + * @returns {boolean} + * Returns true if the filepath is within the specified base and subfolder, false otherwise. + */ +const isValidPath = (req, base, subfolder, filepath) => { + const normalizedBase = path.resolve(base, subfolder, req.user.id); + const normalizedFilepath = path.resolve(filepath); + return normalizedFilepath.startsWith(normalizedBase); +}; + +/** + * Deletes a file from the filesystem. This function takes a file object, constructs the full path, and + * verifies the path's validity before deleting the file. If the path is invalid, an error is thrown. + * + * @param {Express.Request} req - The request object from Express. It should have an `app.locals.paths` object with + * a `publicPath` property. + * @param {MongoFile} file - The file object to be deleted. It should have a `filepath` property that is + * a string representing the path of the file relative to the publicPath. + * + * @returns {Promise} + * A promise that resolves when the file has been successfully deleted, or throws an error if the + * file path is invalid or if there is an error in deletion. 
+ */ +const deleteLocalFile = async (req, file) => { + const { publicPath } = req.app.locals.paths; + const parts = file.filepath.split(path.sep); + const subfolder = parts[1]; + const filepath = path.join(publicPath, file.filepath); + + if (!isValidPath(req, publicPath, subfolder, filepath)) { + throw new Error('Invalid file path'); + } + + await fs.promises.unlink(filepath); +}; + +module.exports = { saveFile, saveLocalImage, saveFileFromURL, getLocalFileURL, deleteLocalFile }; diff --git a/api/server/services/Files/Local/images.js b/api/server/services/Files/Local/images.js new file mode 100644 index 00000000000..63ed5b2f64b --- /dev/null +++ b/api/server/services/Files/Local/images.js @@ -0,0 +1,140 @@ +const fs = require('fs'); +const path = require('path'); +const sharp = require('sharp'); +const { resizeImage } = require('../images/resize'); +const { updateFile } = require('~/models/File'); + +/** + * Converts an image file to the WebP format. The function first resizes the image based on the specified + * resolution. + * + * If the original image is already in WebP format, it writes the resized image back. Otherwise, + * it converts the image to WebP format before saving. + * + * The original image is deleted after conversion. + * + * @param {Object} req - The request object from Express. It should have a `user` property with an `id` + * representing the user, and an `app.locals.paths` object with an `imageOutput` path. + * @param {Express.Multer.File} file - The file object, which is part of the request. The file object should + * have a `path` property that points to the location of the uploaded file. + * @param {string} [resolution='high'] - Optional. The desired resolution for the image resizing. Default is 'high'. + * + * @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>} + * A promise that resolves to an object containing: + * - filepath: The path where the converted WebP image is saved. 
+ * - bytes: The size of the converted image in bytes. + * - width: The width of the converted image. + * - height: The height of the converted image. + */ +async function uploadLocalImage(req, file, resolution = 'high') { + const inputFilePath = file.path; + const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution); + const extension = path.extname(inputFilePath); + + const { imageOutput } = req.app.locals.paths; + const userPath = path.join(imageOutput, req.user.id); + + if (!fs.existsSync(userPath)) { + fs.mkdirSync(userPath, { recursive: true }); + } + + const newPath = path.join(userPath, path.basename(inputFilePath)); + + if (extension.toLowerCase() === '.webp') { + const bytes = Buffer.byteLength(resizedBuffer); + await fs.promises.writeFile(newPath, resizedBuffer); + const filepath = path.posix.join('/', 'images', req.user.id, path.basename(newPath)); + return { filepath, bytes, width, height }; + } + + const outputFilePath = newPath.replace(extension, '.webp'); + const data = await sharp(resizedBuffer).toFormat('webp').toBuffer(); + await fs.promises.writeFile(outputFilePath, data); + const bytes = Buffer.byteLength(data); + const filepath = path.posix.join('/', 'images', req.user.id, path.basename(outputFilePath)); + await fs.promises.unlink(inputFilePath); + return { filepath, bytes, width, height }; +} + +/** + * Encodes an image file to base64. + * @param {string} imagePath - The path to the image file. + * @returns {Promise} A promise that resolves with the base64 encoded image data. + */ +function encodeImage(imagePath) { + return new Promise((resolve, reject) => { + fs.readFile(imagePath, (err, data) => { + if (err) { + reject(err); + } else { + resolve(data.toString('base64')); + } + }); + }); +} + +/** + * Local: Updates the file and encodes the image to base64, + * for image payload handling: tuple order of [filepath, base64]. + * @param {Object} req - The request object. 
+ * @param {MongoFile} file - The file object.
+ * @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage.
+ */
+async function prepareImagesLocal(req, file) {
+  const { publicPath, imageOutput } = req.app.locals.paths;
+  const userPath = path.join(imageOutput, req.user.id);
+
+  if (!fs.existsSync(userPath)) {
+    fs.mkdirSync(userPath, { recursive: true });
+  }
+  const filepath = path.join(publicPath, file.filepath);
+
+  const promises = [];
+  promises.push(updateFile({ file_id: file.file_id }));
+  promises.push(encodeImage(filepath));
+  return await Promise.all(promises);
+}
+
+/**
+ * Uploads a user's avatar to local server storage and returns the URL.
+ * If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
+ *
+ * @param {object} params - The parameters object.
+ * @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
+ * @param {object} params.User - The User document (mongoose); TODO: remove direct use of Model, `User`
+ * @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
+ * @returns {Promise} - A promise that resolves with the URL of the uploaded avatar.
+ * @throws {Error} - Throws an error if there is an error in uploading.
+ */ +async function processLocalAvatar({ buffer, User, manual }) { + const userDir = path.resolve( + __dirname, + '..', + '..', + '..', + '..', + '..', + 'client', + 'public', + 'images', + User._id.toString(), + ); + const fileName = `avatar-${new Date().getTime()}.png`; + const urlRoute = `/images/${User._id.toString()}/${fileName}`; + const avatarPath = path.join(userDir, fileName); + + await fs.promises.mkdir(userDir, { recursive: true }); + await fs.promises.writeFile(avatarPath, buffer); + + const isManual = manual === 'true'; + let url = `${urlRoute}?manual=${isManual}`; + + if (isManual) { + User.avatar = url; + await User.save(); + } + + return url; +} + +module.exports = { uploadLocalImage, encodeImage, prepareImagesLocal, processLocalAvatar }; diff --git a/api/server/services/Files/Local/index.js b/api/server/services/Files/Local/index.js new file mode 100644 index 00000000000..cb44238bcc9 --- /dev/null +++ b/api/server/services/Files/Local/index.js @@ -0,0 +1,7 @@ +const images = require('./images'); +const crud = require('./crud'); + +module.exports = { + ...crud, + ...images, +}; diff --git a/api/server/services/Files/images/avatar.js b/api/server/services/Files/images/avatar.js new file mode 100644 index 00000000000..490fc86171d --- /dev/null +++ b/api/server/services/Files/images/avatar.js @@ -0,0 +1,78 @@ +const sharp = require('sharp'); +const fs = require('fs').promises; +const fetch = require('node-fetch'); +const User = require('~/models/User'); +const { getStrategyFunctions } = require('~/server/services/Files/strategies'); +const { logger } = require('~/config'); + +async function convertToWebP(inputBuffer) { + return sharp(inputBuffer).resize({ width: 150 }).toFormat('webp').toBuffer(); +} + +/** + * Uploads an avatar image for a user. 
This function can handle various types of input (URL, Buffer, or File object), + * processes the image to a square format, converts it to WebP format, and then uses a specified file strategy for + * further processing. It performs validation on the user ID and the input type. The function can throw errors for + * invalid input types, fetching issues, or other processing errors. + * + * @param {Object} params - The parameters object. + * @param {string} params.userId - The unique identifier of the user for whom the avatar is being uploaded. + * @param {FileSources} params.fileStrategy - The file handling strategy to use, determining how the avatar is processed. + * @param {(string|Buffer|File)} params.input - The input representing the avatar image. Can be a URL (string), + * a Buffer, or a File object. + * @param {string} params.manual - A string flag indicating whether the upload process is manual. + * + * @returns {Promise} + * A promise that resolves to the result of the `processAvatar` function, specific to the chosen file + * strategy. Throws an error if any step in the process fails. + * + * @throws {Error} Throws an error if the user ID is undefined, the input type is invalid, the image fetching fails, + * or any other error occurs during the processing. + */ +async function uploadAvatar({ userId, fileStrategy, input, manual }) { + try { + if (userId === undefined) { + throw new Error('User ID is undefined'); + } + const _id = userId; + // TODO: remove direct use of Model, `User` + const oldUser = await User.findOne({ _id }); + + let imageBuffer; + if (typeof input === 'string') { + const response = await fetch(input); + + if (!response.ok) { + throw new Error(`Failed to fetch image from URL. 
Status: ${response.status}`); + } + imageBuffer = await response.buffer(); + } else if (input instanceof Buffer) { + imageBuffer = input; + } else if (typeof input === 'object' && input instanceof File) { + const fileContent = await fs.readFile(input.path); + imageBuffer = Buffer.from(fileContent); + } else { + throw new Error('Invalid input type. Expected URL, Buffer, or File.'); + } + + const { width, height } = await sharp(imageBuffer).metadata(); + const minSize = Math.min(width, height); + const squaredBuffer = await sharp(imageBuffer) + .extract({ + left: Math.floor((width - minSize) / 2), + top: Math.floor((height - minSize) / 2), + width: minSize, + height: minSize, + }) + .toBuffer(); + + const webPBuffer = await convertToWebP(squaredBuffer); + const { processAvatar } = getStrategyFunctions(fileStrategy); + return await processAvatar({ buffer: webPBuffer, User: oldUser, manual }); + } catch (error) { + logger.error('Error uploading the avatar:', error); + throw error; + } +} + +module.exports = uploadAvatar; diff --git a/api/server/services/Files/images/avatar/firebaseStrategy.js b/api/server/services/Files/images/avatar/firebaseStrategy.js deleted file mode 100644 index 9c000b43ecc..00000000000 --- a/api/server/services/Files/images/avatar/firebaseStrategy.js +++ /dev/null @@ -1,29 +0,0 @@ -const { ref, uploadBytes, getDownloadURL } = require('firebase/storage'); -const { getFirebaseStorage } = require('~/server/services/Files/Firebase/initialize'); -const { logger } = require('~/config'); - -async function firebaseStrategy(userId, webPBuffer, oldUser, manual) { - try { - const storage = getFirebaseStorage(); - if (!storage) { - throw new Error('Firebase is not initialized.'); - } - const avatarRef = ref(storage, `images/${userId.toString()}/avatar`); - - await uploadBytes(avatarRef, webPBuffer); - const urlFirebase = await getDownloadURL(avatarRef); - const isManual = manual === 'true'; - - const url = `${urlFirebase}?manual=${isManual}`; - if (isManual) 
{
-      oldUser.avatar = url;
-      await oldUser.save();
-    }
-    return url;
-  } catch (error) {
-    logger.error('Error uploading profile picture:', error);
-    throw error;
-  }
-}
-
-module.exports = firebaseStrategy;
diff --git a/api/server/services/Files/images/avatar/localStrategy.js b/api/server/services/Files/images/avatar/localStrategy.js
deleted file mode 100644
index 021beda7d13..00000000000
--- a/api/server/services/Files/images/avatar/localStrategy.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const fs = require('fs').promises;
-const path = require('path');
-
-async function localStrategy(userId, webPBuffer, oldUser, manual) {
-  const userDir = path.resolve(
-    __dirname,
-    '..',
-    '..',
-    '..',
-    '..',
-    '..',
-    '..',
-    'client',
-    'public',
-    'images',
-    userId,
-  );
-  let avatarPath = path.join(userDir, 'avatar.png');
-  const urlRoute = `/images/${userId}/avatar.png`;
-  await fs.mkdir(userDir, { recursive: true });
-  await fs.writeFile(avatarPath, webPBuffer);
-  const isManual = manual === 'true';
-  let url = `${urlRoute}?manual=${isManual}&timestamp=${new Date().getTime()}`;
-  if (isManual) {
-    oldUser.avatar = url;
-    await oldUser.save();
-  }
-
-  return url;
-}
-
-module.exports = localStrategy;
diff --git a/api/server/services/Files/images/avatar/uploadAvatar.js b/api/server/services/Files/images/avatar/uploadAvatar.js
deleted file mode 100644
index 0726df9a4dd..00000000000
--- a/api/server/services/Files/images/avatar/uploadAvatar.js
+++ /dev/null
@@ -1,63 +0,0 @@
-const sharp = require('sharp');
-const fetch = require('node-fetch');
-const fs = require('fs').promises;
-const User = require('~/models/User');
-const { getFirebaseStorage } = require('~/server/services/Files/Firebase/initialize');
-const firebaseStrategy = require('./firebaseStrategy');
-const localStrategy = require('./localStrategy');
-const { logger } = require('~/config');
-
-async function convertToWebP(inputBuffer) {
-  return sharp(inputBuffer).resize({ width: 150 }).toFormat('webp').toBuffer();
-}
-
-async function uploadAvatar(userId, input, manual) { - try { - if (userId === undefined) { - throw new Error('User ID is undefined'); - } - const _id = userId; - // TODO: remove direct use of Model, `User` - const oldUser = await User.findOne({ _id }); - let imageBuffer; - if (typeof input === 'string') { - const response = await fetch(input); - - if (!response.ok) { - throw new Error(`Failed to fetch image from URL. Status: ${response.status}`); - } - imageBuffer = await response.buffer(); - } else if (input instanceof Buffer) { - imageBuffer = input; - } else if (typeof input === 'object' && input instanceof File) { - const fileContent = await fs.readFile(input.path); - imageBuffer = Buffer.from(fileContent); - } else { - throw new Error('Invalid input type. Expected URL, Buffer, or File.'); - } - const { width, height } = await sharp(imageBuffer).metadata(); - const minSize = Math.min(width, height); - const squaredBuffer = await sharp(imageBuffer) - .extract({ - left: Math.floor((width - minSize) / 2), - top: Math.floor((height - minSize) / 2), - width: minSize, - height: minSize, - }) - .toBuffer(); - const webPBuffer = await convertToWebP(squaredBuffer); - const storage = getFirebaseStorage(); - if (storage) { - const url = await firebaseStrategy(userId, webPBuffer, oldUser, manual); - return url; - } - - const url = await localStrategy(userId, webPBuffer, oldUser, manual); - return url; - } catch (error) { - logger.error('Error uploading the avatar:', error); - throw error; - } -} - -module.exports = uploadAvatar; diff --git a/api/server/services/Files/images/convert.js b/api/server/services/Files/images/convert.js deleted file mode 100644 index 2de0fd21776..00000000000 --- a/api/server/services/Files/images/convert.js +++ /dev/null @@ -1,36 +0,0 @@ -const path = require('path'); -const sharp = require('sharp'); -const fs = require('fs'); -const { resizeImage } = require('./resize'); - -async function convertToWebP(req, file, resolution = 'high') { - const 
inputFilePath = file.path; - const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution); - const extension = path.extname(inputFilePath); - - const { imageOutput } = req.app.locals.config; - const userPath = path.join(imageOutput, req.user.id); - - if (!fs.existsSync(userPath)) { - fs.mkdirSync(userPath, { recursive: true }); - } - - const newPath = path.join(userPath, path.basename(inputFilePath)); - - if (extension.toLowerCase() === '.webp') { - const bytes = Buffer.byteLength(resizedBuffer); - await fs.promises.writeFile(newPath, resizedBuffer); - const filepath = path.posix.join('/', 'images', req.user.id, path.basename(newPath)); - return { filepath, bytes, width, height }; - } - - const outputFilePath = newPath.replace(extension, '.webp'); - const data = await sharp(resizedBuffer).toFormat('webp').toBuffer(); - await fs.promises.writeFile(outputFilePath, data); - const bytes = Buffer.byteLength(data); - const filepath = path.posix.join('/', 'images', req.user.id, path.basename(outputFilePath)); - await fs.promises.unlink(inputFilePath); - return { filepath, bytes, width, height }; -} - -module.exports = { convertToWebP }; diff --git a/api/server/services/Files/images/encode.js b/api/server/services/Files/images/encode.js index 30428ffabc1..809ec0e8401 100644 --- a/api/server/services/Files/images/encode.js +++ b/api/server/services/Files/images/encode.js @@ -1,45 +1,5 @@ -const fs = require('fs'); -const path = require('path'); -const { EModelEndpoint } = require('librechat-data-provider'); -const { updateFile } = require('~/models'); - -/** - * Encodes an image file to base64. - * @param {string} imagePath - The path to the image file. - * @returns {Promise} A promise that resolves with the base64 encoded image data. 
- */ -function encodeImage(imagePath) { - return new Promise((resolve, reject) => { - fs.readFile(imagePath, (err, data) => { - if (err) { - reject(err); - } else { - resolve(data.toString('base64')); - } - }); - }); -} - -/** - * Updates the file and encodes the image. - * @param {Object} req - The request object. - * @param {Object} file - The file object. - * @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage. - */ -async function updateAndEncode(req, file) { - const { publicPath, imageOutput } = req.app.locals.config; - const userPath = path.join(imageOutput, req.user.id); - - if (!fs.existsSync(userPath)) { - fs.mkdirSync(userPath, { recursive: true }); - } - const filepath = path.join(publicPath, file.filepath); - - const promises = []; - promises.push(updateFile({ file_id: file.file_id })); - promises.push(encodeImage(filepath)); - return await Promise.all(promises); -} +const { EModelEndpoint, FileSources } = require('librechat-data-provider'); +const { getStrategyFunctions } = require('../strategies'); /** * Encodes and formats the given files. @@ -50,25 +10,42 @@ async function updateAndEncode(req, file) { */ async function encodeAndFormat(req, files, endpoint) { const promises = []; + const encodingMethods = {}; + for (let file of files) { - promises.push(updateAndEncode(req, file)); + const source = file.source ?? FileSources.local; + + if (encodingMethods[source]) { + promises.push(encodingMethods[source](req, file)); + continue; + } + + const { prepareImagePayload } = getStrategyFunctions(source); + if (!prepareImagePayload) { + throw new Error(`Encoding function not implemented for ${source}`); + } + + encodingMethods[source] = prepareImagePayload; + promises.push(prepareImagePayload(req, file)); } - // TODO: make detail configurable, as of now resizing is done - // to prefer "high" but "low" may be used if the image is small enough - const detail = req.body.detail ?? 
'auto'; - const encodedImages = await Promise.all(promises); + const detail = req.body.imageDetail ?? 'auto'; + + /** @type {Array<[MongoFile, string]>} */ + const formattedImages = await Promise.all(promises); const result = { files: [], image_urls: [], }; - for (const [file, base64] of encodedImages) { + for (const [file, imageContent] of formattedImages) { const imagePart = { type: 'image_url', image_url: { - url: `data:image/webp;base64,${base64}`, + url: imageContent.startsWith('http') + ? imageContent + : `data:image/webp;base64,${imageContent}`, detail, }, }; @@ -81,17 +58,16 @@ async function encodeAndFormat(req, files, endpoint) { result.files.push({ file_id: file.file_id, - filepath: file.filepath, - filename: file.filename, - type: file.type, - height: file.height, - width: file.width, + // filepath: file.filepath, + // filename: file.filename, + // type: file.type, + // height: file.height, + // width: file.width, }); } return result; } module.exports = { - encodeImage, encodeAndFormat, }; diff --git a/api/server/services/Files/images/index.js b/api/server/services/Files/images/index.js index fa49eb95356..1438887e6d1 100644 --- a/api/server/services/Files/images/index.js +++ b/api/server/services/Files/images/index.js @@ -1,15 +1,13 @@ -const convert = require('./convert'); +const avatar = require('./avatar'); const encode = require('./encode'); const parse = require('./parse'); const resize = require('./resize'); const validate = require('./validate'); -const uploadAvatar = require('./avatar/uploadAvatar'); module.exports = { - ...convert, ...encode, ...parse, ...resize, ...validate, - uploadAvatar, + avatar, }; diff --git a/api/server/services/Files/images/parse.js b/api/server/services/Files/images/parse.js index 5a1113c97e4..1b0f7e47385 100644 --- a/api/server/services/Files/images/parse.js +++ b/api/server/services/Files/images/parse.js @@ -1,7 +1,7 @@ const URL = require('url').URL; const path = require('path'); -const imageExtensionRegex = 
/\.(jpg|jpeg|png|gif|bmp|tiff|svg)$/i; +const imageExtensionRegex = /\.(jpg|jpeg|png|gif|bmp|tiff|svg|webp)$/i; /** * Extracts the image basename from a given URL. @@ -22,6 +22,24 @@ function getImageBasename(urlString) { } } +/** + * Extracts the basename of a file from a given URL. + * + * @param {string} urlString - The URL string from which the file basename is to be extracted. + * @returns {string} The basename of the file from the URL. + * Returns an empty string if the URL parsing fails. + */ +function getFileBasename(urlString) { + try { + const url = new URL(urlString); + return path.basename(url.pathname); + } catch (error) { + // If URL parsing fails, return an empty string + return ''; + } +} + module.exports = { getImageBasename, + getFileBasename, }; diff --git a/api/server/services/Files/index.js b/api/server/services/Files/index.js deleted file mode 100644 index 47d47690cba..00000000000 --- a/api/server/services/Files/index.js +++ /dev/null @@ -1,9 +0,0 @@ -const localStrategy = require('./localStrategy'); -const process = require('./process'); -const save = require('./save'); - -module.exports = { - ...save, - ...process, - localStrategy, -}; diff --git a/api/server/services/Files/localStrategy.js b/api/server/services/Files/localStrategy.js deleted file mode 100644 index 0b711c77631..00000000000 --- a/api/server/services/Files/localStrategy.js +++ /dev/null @@ -1,36 +0,0 @@ -const { createFile } = require('~/models'); -const { convertToWebP } = require('./images/convert'); - -/** - * Applies the local strategy for image uploads. - * Saves file metadata to the database with an expiry TTL. - * Files must be deleted from the server filesystem manually. - * - * @param {Object} params - The parameters object. - * @param {Express.Request} params.req - The Express request object. - * @param {Express.Response} params.res - The Express response object. - * @param {Express.Multer.File} params.file - The uploaded file. 
- * @param {ImageMetadata} params.metadata - Additional metadata for the file. - * @returns {Promise} - */ -const localStrategy = async ({ req, res, file, metadata }) => { - const { file_id, temp_file_id } = metadata; - const { filepath, bytes, width, height } = await convertToWebP(req, file); - const result = await createFile( - { - user: req.user.id, - file_id, - temp_file_id, - bytes, - filepath, - filename: file.originalname, - type: 'image/webp', - width, - height, - }, - true, - ); - res.status(200).json({ message: 'File uploaded and processed successfully', ...result }); -}; - -module.exports = localStrategy; diff --git a/api/server/services/Files/process.js b/api/server/services/Files/process.js index c6ab3ca1399..4ee9510b4f1 100644 --- a/api/server/services/Files/process.js +++ b/api/server/services/Files/process.js @@ -1,17 +1,6 @@ -const { updateFileUsage } = require('~/models'); - -// const mapImageUrls = (files, detail) => { -// return files -// .filter((file) => file.type.includes('image')) -// .map((file) => ({ -// type: 'image_url', -// image_url: { -// /* Temporarily set to path to encode later */ -// url: file.filepath, -// detail, -// }, -// })); -// }; +const { updateFileUsage, createFile } = require('~/models'); +const { getStrategyFunctions } = require('./strategies'); +const { logger } = require('~/config'); const processFiles = async (files) => { const promises = []; @@ -24,6 +13,76 @@ const processFiles = async (files) => { return await Promise.all(promises); }; +/** + * Processes a file URL using a specified file handling strategy. This function accepts a strategy name, + * fetches the corresponding file processing functions (for saving and retrieving file URLs), and then + * executes these functions in sequence. It first saves the file using the provided URL and then retrieves + * the URL of the saved file. If any error occurs during this process, it logs the error and throws an + * exception with an appropriate message. 
+ * + * @param {Object} params - The parameters object. + * @param {FileSources} params.fileStrategy - The file handling strategy to use. Must be a value from the + * `FileSources` enum, which defines different file handling + * strategies (like saving to Firebase, local storage, etc.). + * @param {string} params.userId - The user's unique identifier. Used for creating user-specific paths or + * references in the file handling process. + * @param {string} params.URL - The URL of the file to be processed. + * @param {string} params.fileName - The name that will be used to save the file. This should include the + * file extension. + * @param {string} params.basePath - The base path or directory where the file will be saved or retrieved from. + * + * @returns {Promise} + * A promise that resolves to the URL of the processed file. It throws an error if the file processing + * fails at any stage. + */ +const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath }) => { + const { saveURL, getFileURL } = getStrategyFunctions(fileStrategy); + try { + await saveURL({ userId, URL, fileName, basePath }); + return await getFileURL({ fileName: `${userId}/${fileName}`, basePath }); + } catch (error) { + logger.error(`Error while processing the image with ${fileStrategy}:`, error); + throw new Error(`Failed to process the image with ${fileStrategy}. ${error.message}`); + } +}; + +/** + * Applies the current strategy for image uploads. + * Saves file metadata to the database with an expiry TTL. + * Files must be deleted from the server filesystem manually. + * + * @param {Object} params - The parameters object. + * @param {Express.Request} params.req - The Express request object. + * @param {Express.Response} params.res - The Express response object. + * @param {Express.Multer.File} params.file - The uploaded file. + * @param {ImageMetadata} params.metadata - Additional metadata for the file. 
+ * @returns {Promise} + */ +const processImageUpload = async ({ req, res, file, metadata }) => { + const source = req.app.locals.fileStrategy; + const { handleImageUpload } = getStrategyFunctions(source); + const { file_id, temp_file_id } = metadata; + const { filepath, bytes, width, height } = await handleImageUpload(req, file); + const result = await createFile( + { + user: req.user.id, + file_id, + temp_file_id, + bytes, + filepath, + filename: file.originalname, + source, + type: 'image/webp', + width, + height, + }, + true, + ); + res.status(200).json({ message: 'File uploaded and processed successfully', ...result }); +}; + module.exports = { + processImageUpload, processFiles, + processFileURL, }; diff --git a/api/server/services/Files/save.js b/api/server/services/Files/save.js deleted file mode 100644 index 08f6a0d5cc0..00000000000 --- a/api/server/services/Files/save.js +++ /dev/null @@ -1,48 +0,0 @@ -const fs = require('fs'); -const path = require('path'); -const { logger } = require('~/config'); - -/** - * Saves a file to a specified output path with a new filename. - * - * @param {Express.Multer.File} file - The file object to be saved. Should contain properties like 'originalname' and 'path'. - * @param {string} outputPath - The path where the file should be saved. - * @param {string} outputFilename - The new filename for the saved file (without extension). - * @returns {Promise} The full path of the saved file. - * @throws Will throw an error if the file saving process fails. 
- */ -async function saveFile(file, outputPath, outputFilename) { - try { - if (!fs.existsSync(outputPath)) { - fs.mkdirSync(outputPath, { recursive: true }); - } - - const fileExtension = path.extname(file.originalname); - const filenameWithExt = outputFilename + fileExtension; - const outputFilePath = path.join(outputPath, filenameWithExt); - fs.copyFileSync(file.path, outputFilePath); - fs.unlinkSync(file.path); - - return outputFilePath; - } catch (error) { - logger.error('[saveFile] Error while saving the file:', error); - throw error; - } -} - -/** - * Saves an uploaded image file to a specified directory based on the user's ID and a filename. - * - * @param {Express.Request} req - The Express request object, containing the user's information and app configuration. - * @param {Express.Multer.File} file - The uploaded file object. - * @param {string} filename - The new filename to assign to the saved image (without extension). - * @returns {Promise} - * @throws Will throw an error if the image saving process fails. - */ -const saveLocalImage = async (req, file, filename) => { - const imagePath = req.app.locals.config.imageOutput; - const outputPath = path.join(imagePath, req.user.id ?? 
''); - await saveFile(file, outputPath, filename); -}; - -module.exports = { saveFile, saveLocalImage }; diff --git a/api/server/services/Files/strategies.js b/api/server/services/Files/strategies.js new file mode 100644 index 00000000000..4e201860434 --- /dev/null +++ b/api/server/services/Files/strategies.js @@ -0,0 +1,54 @@ +const { FileSources } = require('librechat-data-provider'); +const { + getFirebaseURL, + prepareImageURL, + saveURLToFirebase, + deleteFirebaseFile, + uploadImageToFirebase, + processFirebaseAvatar, +} = require('./Firebase'); +const { + getLocalFileURL, + saveFileFromURL, + deleteLocalFile, + uploadLocalImage, + prepareImagesLocal, + processLocalAvatar, +} = require('./Local'); + +// Firebase Strategy Functions +const firebaseStrategy = () => ({ + // saveFile: + saveURL: saveURLToFirebase, + getFileURL: getFirebaseURL, + deleteFile: deleteFirebaseFile, + prepareImagePayload: prepareImageURL, + processAvatar: processFirebaseAvatar, + handleImageUpload: uploadImageToFirebase, +}); + +// Local Strategy Functions +const localStrategy = () => ({ + // saveFile: , + saveURL: saveFileFromURL, + getFileURL: getLocalFileURL, + deleteFile: deleteLocalFile, + processAvatar: processLocalAvatar, + handleImageUpload: uploadLocalImage, + prepareImagePayload: prepareImagesLocal, +}); + +// Strategy Selector +const getStrategyFunctions = (fileSource) => { + if (fileSource === FileSources.firebase) { + return firebaseStrategy(); + } else if (fileSource === FileSources.local) { + return localStrategy(); + } else { + throw new Error('Invalid file source'); + } +}; + +module.exports = { + getStrategyFunctions, +}; diff --git a/api/server/services/ModelService.js b/api/server/services/ModelService.js index 08c9ae71d29..76ac061546d 100644 --- a/api/server/services/ModelService.js +++ b/api/server/services/ModelService.js @@ -24,15 +24,57 @@ const { PROXY, } = process.env ?? 
{}; +/** + * Fetches OpenAI models from the specified base API path or Azure, based on the provided configuration. + * + * @param {Object} params - The parameters for fetching the models. + * @param {string} params.apiKey - The API key for authentication with the API. + * @param {string} params.baseURL - The base path URL for the API. + * @param {string} [params.name='OpenAI'] - The name of the API; defaults to 'OpenAI'. + * @param {boolean} [params.azure=false] - Whether to fetch models from Azure. + * @returns {Promise} A promise that resolves to an array of model identifiers. + * @async + */ +const fetchModels = async ({ apiKey, baseURL, name = 'OpenAI', azure = false }) => { + let models = []; + + if (!baseURL && !azure) { + return models; + } + + try { + const payload = { + headers: { + Authorization: `Bearer ${apiKey}`, + }, + }; + + if (PROXY) { + payload.httpsAgent = new HttpsProxyAgent(PROXY); + } + + if (process.env.OPENAI_ORGANIZATION && baseURL.includes('openai')) { + payload.headers['OpenAI-Organization'] = process.env.OPENAI_ORGANIZATION; + } + + const res = await axios.get(`${baseURL}${azure ? '' : '/models'}`, payload); + models = res.data.data.map((item) => item.id); + } catch (err) { + logger.error(`Failed to fetch models from ${azure ? 'Azure ' : ''}${name} API`, err); + } + + return models; +}; + const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _models = []) => { let models = _models.slice() ?? 
[]; let apiKey = openAIApiKey; - let basePath = 'https://api.openai.com/v1'; + let baseURL = 'https://api.openai.com/v1'; let reverseProxyUrl = OPENAI_REVERSE_PROXY; if (opts.azure) { return models; // const azure = getAzureCredentials(); - // basePath = (genAzureChatCompletion(azure)) + // baseURL = (genAzureChatCompletion(azure)) // .split('/deployments')[0] // .concat(`/models?api-version=${azure.azureOpenAIApiVersion}`); // apiKey = azureOpenAIApiKey; @@ -42,32 +84,20 @@ const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _model } if (reverseProxyUrl) { - basePath = extractBaseURL(reverseProxyUrl); + baseURL = extractBaseURL(reverseProxyUrl); } - const cachedModels = await modelsCache.get(basePath); + const cachedModels = await modelsCache.get(baseURL); if (cachedModels) { return cachedModels; } - if (basePath || opts.azure) { - try { - const payload = { - headers: { - Authorization: `Bearer ${apiKey}`, - }, - }; - - if (PROXY) { - payload.httpsAgent = new HttpsProxyAgent(PROXY); - } - const res = await axios.get(`${basePath}${opts.azure ? '' : '/models'}`, payload); - - models = res.data.data.map((item) => item.id); - // logger.debug(`Fetched ${models.length} models from ${opts.azure ? 'Azure ' : ''}OpenAI API`); - } catch (err) { - logger.error(`Failed to fetch models from ${opts.azure ? 
'Azure ' : ''}OpenAI API`, err); - } + if (baseURL || opts.azure) { + models = await fetchModels({ + apiKey, + baseURL, + azure: opts.azure, + }); } if (!reverseProxyUrl) { @@ -75,7 +105,7 @@ const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _model models = models.filter((model) => regex.test(model)); } - await modelsCache.set(basePath, models); + await modelsCache.set(baseURL, models); return models; }; @@ -142,6 +172,7 @@ const getGoogleModels = () => { }; module.exports = { + fetchModels, getOpenAIModels, getChatGPTBrowserModels, getAnthropicModels, diff --git a/api/server/services/PluginService.js b/api/server/services/PluginService.js index 1eaa6eedab5..61582382914 100644 --- a/api/server/services/PluginService.js +++ b/api/server/services/PluginService.js @@ -2,18 +2,38 @@ const PluginAuth = require('~/models/schema/pluginAuthSchema'); const { encrypt, decrypt } = require('~/server/utils/'); const { logger } = require('~/config'); -const getUserPluginAuthValue = async (user, authField) => { +/** + * Asynchronously retrieves and decrypts the authentication value for a user's plugin, based on a specified authentication field. + * + * @param {string} userId - The unique identifier of the user for whom the plugin authentication value is to be retrieved. + * @param {string} authField - The specific authentication field (e.g., 'API_KEY', 'URL') whose value is to be retrieved and decrypted. + * @returns {Promise} A promise that resolves to the decrypted authentication value if found, or `null` if no such authentication value exists for the given user and field. + * + * The function throws an error if it encounters any issue during the retrieval or decryption process, or if the authentication value does not exist. 
+ * + * @example + * // To get the decrypted value of the 'token' field for a user with userId '12345': + * getUserPluginAuthValue('12345', 'token').then(value => { + * console.log(value); + * }).catch(err => { + * console.error(err); + * }); + * + * @throws {Error} Throws an error if there's an issue during the retrieval or decryption process, or if the authentication value does not exist. + * @async + */ +const getUserPluginAuthValue = async (userId, authField) => { try { - const pluginAuth = await PluginAuth.findOne({ user, authField }).lean(); + const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean(); if (!pluginAuth) { - return null; + throw new Error(`No plugin auth ${authField} found for user ${userId}`); } const decryptedValue = decrypt(pluginAuth.value); return decryptedValue; } catch (err) { logger.error('[getUserPluginAuthValue]', err); - return err; + throw err; } }; diff --git a/api/server/socialLogins.js b/api/server/socialLogins.js index af61db73e9d..4abe278b84d 100644 --- a/api/server/socialLogins.js +++ b/api/server/socialLogins.js @@ -10,6 +10,10 @@ const { } = require('../strategies'); const client = require('../cache/redis'); +/** + * + * @param {Express.Application} app + */ const configureSocialLogins = (app) => { if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) { passport.use(googleLogin()); diff --git a/api/server/utils/handleText.js b/api/server/utils/handleText.js index 4cd1b7ce994..b8d17106622 100644 --- a/api/server/utils/handleText.js +++ b/api/server/utils/handleText.js @@ -165,6 +165,27 @@ function isEnabled(value) { return false; } +/** + * Checks if the provided value is 'user_provided'. + * + * @param {string} value - The value to check. + * @returns {boolean} - Returns true if the value is 'user_provided', otherwise false. + */ +const isUserProvided = (value) => value === 'user_provided'; + +/** + * Extracts the value of an environment variable from a string. 
+ * @param {string} value - The value to be processed, possibly containing an env variable placeholder. + * @returns {string} - The actual value from the environment variable or the original value. + */ +function extractEnvVariable(value) { + const envVarMatch = value.match(/^\${(.+)}$/); + if (envVarMatch) { + return process.env[envVarMatch[1]] || value; + } + return value; +} + module.exports = { createOnProgress, isEnabled, @@ -172,4 +193,6 @@ module.exports = { formatSteps, formatAction, addSpaceIfNeeded, + isUserProvided, + extractEnvVariable, }; diff --git a/api/server/utils/handleText.spec.js b/api/server/utils/handleText.spec.js index ea440a89a57..a5566fb1b2b 100644 --- a/api/server/utils/handleText.spec.js +++ b/api/server/utils/handleText.spec.js @@ -1,4 +1,4 @@ -const { isEnabled } = require('./handleText'); +const { isEnabled, extractEnvVariable } = require('./handleText'); describe('isEnabled', () => { test('should return true when input is "true"', () => { @@ -48,4 +48,51 @@ describe('isEnabled', () => { test('should return false when input is an array', () => { expect(isEnabled([])).toBe(false); }); + + describe('extractEnvVariable', () => { + const originalEnv = process.env; + + beforeEach(() => { + jest.resetModules(); + process.env = { ...originalEnv }; + }); + + afterAll(() => { + process.env = originalEnv; + }); + + test('should return the value of the environment variable', () => { + process.env.TEST_VAR = 'test_value'; + expect(extractEnvVariable('${TEST_VAR}')).toBe('test_value'); + }); + + test('should return the original string if the environment variable is not defined correctly', () => { + process.env.TEST_VAR = 'test_value'; + expect(extractEnvVariable('${ TEST_VAR }')).toBe('${ TEST_VAR }'); + }); + + test('should return the original string if environment variable is not set', () => { + expect(extractEnvVariable('${NON_EXISTENT_VAR}')).toBe('${NON_EXISTENT_VAR}'); + }); + + test('should return the original string if it does not contain 
an environment variable', () => { + expect(extractEnvVariable('some_string')).toBe('some_string'); + }); + + test('should handle empty strings', () => { + expect(extractEnvVariable('')).toBe(''); + }); + + test('should handle strings without variable format', () => { + expect(extractEnvVariable('no_var_here')).toBe('no_var_here'); + }); + + test('should not process multiple variable formats', () => { + process.env.FIRST_VAR = 'first'; + process.env.SECOND_VAR = 'second'; + expect(extractEnvVariable('${FIRST_VAR} and ${SECOND_VAR}')).toBe( + '${FIRST_VAR} and ${SECOND_VAR}', + ); + }); + }); }); diff --git a/api/server/utils/streamResponse.js b/api/server/utils/streamResponse.js index 1933839fac2..3511f144cc7 100644 --- a/api/server/utils/streamResponse.js +++ b/api/server/utils/streamResponse.js @@ -1,6 +1,8 @@ const crypto = require('crypto'); +const { parseConvo } = require('librechat-data-provider'); const { saveMessage, getMessages } = require('~/models/Message'); const { getConvo } = require('~/models/Conversation'); +const { logger } = require('~/config'); /** * Sends error data in Server Sent Events format and ends the response. @@ -65,12 +67,21 @@ const sendError = async (res, options, callback) => { if (!errorMessage.error) { const requestMessage = { messageId: parentMessageId, conversationId }; - const query = await getMessages(requestMessage); + let query = [], + convo = {}; + try { + query = await getMessages(requestMessage); + convo = await getConvo(user, conversationId); + } catch (err) { + logger.error('[sendError] Error retrieving conversation data:', err); + convo = parseConvo(errorMessage); + } + return sendMessage(res, { final: true, requestMessage: query?.[0] ? 
query[0] : requestMessage, responseMessage: errorMessage, - conversation: await getConvo(user, conversationId), + conversation: convo, }); } diff --git a/api/strategies/discordStrategy.js b/api/strategies/discordStrategy.js index 994554200cd..f7b042f9406 100644 --- a/api/strategies/discordStrategy.js +++ b/api/strategies/discordStrategy.js @@ -1,12 +1,14 @@ const { Strategy: DiscordStrategy } = require('passport-discord'); +const { createNewUser, handleExistingUser } = require('./process'); const { logger } = require('~/config'); const User = require('~/models/User'); -const { useFirebase, uploadAvatar } = require('~/server/services/Files/images'); const discordLogin = async (accessToken, refreshToken, profile, cb) => { try { const email = profile.email; const discordId = profile.id; + + // TODO: remove direct access of User model const oldUser = await User.findOne({ email }); const ALLOW_SOCIAL_REGISTRATION = process.env.ALLOW_SOCIAL_REGISTRATION?.toLowerCase() === 'true'; @@ -21,12 +23,20 @@ const discordLogin = async (accessToken, refreshToken, profile, cb) => { } if (oldUser) { - await handleExistingUser(oldUser, avatarUrl, useFirebase); + await handleExistingUser(oldUser, avatarUrl); return cb(null, oldUser); } if (ALLOW_SOCIAL_REGISTRATION) { - const newUser = await createNewUser(profile, discordId, email, avatarUrl, useFirebase); + const newUser = await createNewUser({ + email, + avatarUrl, + provider: 'discord', + providerKey: 'discordId', + providerId: discordId, + username: profile.username, + name: profile.global_name, + }); return cb(null, newUser); } } catch (err) { @@ -35,38 +45,6 @@ const discordLogin = async (accessToken, refreshToken, profile, cb) => { } }; -const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => { - if (!useFirebase && !oldUser.avatar.includes('?manual=true')) { - oldUser.avatar = avatarUrl; - await oldUser.save(); - } else if (useFirebase && !oldUser.avatar.includes('?manual=true')) { - const userId = oldUser._id; - 
const newavatarUrl = await uploadAvatar(userId, avatarUrl); - oldUser.avatar = newavatarUrl; - await oldUser.save(); - } -}; - -const createNewUser = async (profile, discordId, email, avatarUrl, useFirebase) => { - const newUser = await new User({ - provider: 'discord', - discordId, - username: profile.username, - email, - name: profile.global_name, - avatar: avatarUrl, - }).save(); - - if (useFirebase) { - const userId = newUser._id; - const newavatarUrl = await uploadAvatar(userId, avatarUrl); - newUser.avatar = newavatarUrl; - await newUser.save(); - } - - return newUser; -}; - module.exports = () => new DiscordStrategy( { diff --git a/api/strategies/facebookStrategy.js b/api/strategies/facebookStrategy.js index b8915b2cc4b..dcf7beb7972 100644 --- a/api/strategies/facebookStrategy.js +++ b/api/strategies/facebookStrategy.js @@ -1,7 +1,7 @@ const FacebookStrategy = require('passport-facebook').Strategy; +const { createNewUser, handleExistingUser } = require('./process'); const { logger } = require('~/config'); const User = require('~/models/User'); -const { useFirebase, uploadAvatar } = require('~/server/services/Files/images'); const facebookLogin = async (accessToken, refreshToken, profile, cb) => { try { @@ -13,12 +13,20 @@ const facebookLogin = async (accessToken, refreshToken, profile, cb) => { const avatarUrl = profile.photos[0]?.value; if (oldUser) { - await handleExistingUser(oldUser, avatarUrl, useFirebase); + await handleExistingUser(oldUser, avatarUrl); return cb(null, oldUser); } if (ALLOW_SOCIAL_REGISTRATION) { - const newUser = await createNewUser(profile, facebookId, email, avatarUrl, useFirebase); + const newUser = await createNewUser({ + email, + avatarUrl, + provider: 'facebook', + providerKey: 'facebookId', + providerId: facebookId, + username: profile.displayName, + name: profile.name?.givenName + ' ' + profile.name?.familyName, + }); return cb(null, newUser); } } catch (err) { @@ -27,38 +35,6 @@ const facebookLogin = async (accessToken, 
refreshToken, profile, cb) => { } }; -const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => { - if (!useFirebase && !oldUser.avatar.includes('?manual=true')) { - oldUser.avatar = avatarUrl; - await oldUser.save(); - } else if (useFirebase && !oldUser.avatar.includes('?manual=true')) { - const userId = oldUser._id; - const newavatarUrl = await uploadAvatar(userId, avatarUrl); - oldUser.avatar = newavatarUrl; - await oldUser.save(); - } -}; - -const createNewUser = async (profile, facebookId, email, avatarUrl, useFirebase) => { - const newUser = await new User({ - provider: 'facebook', - facebookId, - username: profile.displayName, - email, - name: profile.name?.givenName + ' ' + profile.name?.familyName, - avatar: avatarUrl, - }).save(); - - if (useFirebase) { - const userId = newUser._id; - const newavatarUrl = await uploadAvatar(userId, avatarUrl); - newUser.avatar = newavatarUrl; - await newUser.save(); - } - - return newUser; -}; - module.exports = () => new FacebookStrategy( { diff --git a/api/strategies/githubStrategy.js b/api/strategies/githubStrategy.js index c8480d50c13..065568f92f0 100644 --- a/api/strategies/githubStrategy.js +++ b/api/strategies/githubStrategy.js @@ -1,7 +1,7 @@ const { Strategy: GitHubStrategy } = require('passport-github2'); +const { createNewUser, handleExistingUser } = require('./process'); const { logger } = require('~/config'); const User = require('~/models/User'); -const { useFirebase, uploadAvatar } = require('~/server/services/Files/images'); const githubLogin = async (accessToken, refreshToken, profile, cb) => { try { @@ -13,12 +13,21 @@ const githubLogin = async (accessToken, refreshToken, profile, cb) => { const avatarUrl = profile.photos[0].value; if (oldUser) { - await handleExistingUser(oldUser, avatarUrl, useFirebase); + await handleExistingUser(oldUser, avatarUrl); return cb(null, oldUser); } if (ALLOW_SOCIAL_REGISTRATION) { - const newUser = await createNewUser(profile, githubId, email, avatarUrl, 
useFirebase); + const newUser = await createNewUser({ + email, + avatarUrl, + provider: 'github', + providerKey: 'githubId', + providerId: githubId, + username: profile.username, + name: profile.displayName, + emailVerified: profile.emails[0].verified, + }); return cb(null, newUser); } } catch (err) { @@ -27,39 +36,6 @@ const githubLogin = async (accessToken, refreshToken, profile, cb) => { } }; -const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => { - if (!useFirebase && !oldUser.avatar.includes('?manual=true')) { - oldUser.avatar = avatarUrl; - await oldUser.save(); - } else if (useFirebase && !oldUser.avatar.includes('?manual=true')) { - const userId = oldUser._id; - const avatarURL = await uploadAvatar(userId, avatarUrl); - oldUser.avatar = avatarURL; - await oldUser.save(); - } -}; - -const createNewUser = async (profile, githubId, email, avatarUrl, useFirebase) => { - const newUser = await new User({ - provider: 'github', - githubId, - username: profile.username, - email, - emailVerified: profile.emails[0].verified, - name: profile.displayName, - avatar: avatarUrl, - }).save(); - - if (useFirebase) { - const userId = newUser._id; - const avatarURL = await uploadAvatar(userId, avatarUrl); - newUser.avatar = avatarURL; - await newUser.save(); - } - - return newUser; -}; - module.exports = () => new GitHubStrategy( { diff --git a/api/strategies/googleStrategy.js b/api/strategies/googleStrategy.js index d013cc8e8fd..0eff48ee094 100644 --- a/api/strategies/googleStrategy.js +++ b/api/strategies/googleStrategy.js @@ -1,7 +1,7 @@ const { Strategy: GoogleStrategy } = require('passport-google-oauth20'); +const { createNewUser, handleExistingUser } = require('./process'); const { logger } = require('~/config'); const User = require('~/models/User'); -const { useFirebase, uploadAvatar } = require('~/server/services/Files/images'); const googleLogin = async (accessToken, refreshToken, profile, cb) => { try { @@ -13,12 +13,21 @@ const googleLogin = async 
(accessToken, refreshToken, profile, cb) => { const avatarUrl = profile.photos[0].value; if (oldUser) { - await handleExistingUser(oldUser, avatarUrl, useFirebase); + await handleExistingUser(oldUser, avatarUrl); return cb(null, oldUser); } if (ALLOW_SOCIAL_REGISTRATION) { - const newUser = await createNewUser(profile, googleId, email, avatarUrl, useFirebase); + const newUser = await createNewUser({ + email, + avatarUrl, + provider: 'google', + providerKey: 'googleId', + providerId: googleId, + username: profile.name.givenName, + name: `${profile.name.givenName} ${profile.name.familyName}`, + emailVerified: profile.emails[0].verified, + }); return cb(null, newUser); } } catch (err) { @@ -27,39 +36,6 @@ const googleLogin = async (accessToken, refreshToken, profile, cb) => { } }; -const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => { - if ((!useFirebase && !oldUser.avatar.includes('?manual=true')) || oldUser.avatar === null) { - oldUser.avatar = avatarUrl; - await oldUser.save(); - } else if (useFirebase && !oldUser.avatar.includes('?manual=true')) { - const userId = oldUser._id; - const avatarURL = await uploadAvatar(userId, avatarUrl); - oldUser.avatar = avatarURL; - await oldUser.save(); - } -}; - -const createNewUser = async (profile, googleId, email, avatarUrl, useFirebase) => { - const newUser = await new User({ - provider: 'google', - googleId, - username: profile.name.givenName, - email, - emailVerified: profile.emails[0].verified, - name: `${profile.name.givenName} ${profile.name.familyName}`, - avatar: avatarUrl, - }).save(); - - if (useFirebase) { - const userId = newUser._id; - const avatarURL = await uploadAvatar(userId, avatarUrl); - newUser.avatar = avatarURL; - await newUser.save(); - } - - return newUser; -}; - module.exports = () => new GoogleStrategy( { diff --git a/api/strategies/process.js b/api/strategies/process.js new file mode 100644 index 00000000000..f5a12a26a25 --- /dev/null +++ b/api/strategies/process.js @@ -0,0 +1,92 
@@ +const { FileSources } = require('librechat-data-provider'); +const uploadAvatar = require('~/server/services/Files/images/avatar'); +const User = require('~/models/User'); + +/** + * Updates the avatar URL of an existing user. If the user's avatar URL does not include the query parameter + * '?manual=true', it updates the user's avatar with the provided URL. For local file storage, it directly updates + * the avatar URL, while for other storage types, it processes the avatar URL using the specified file strategy. + * + * @param {User} oldUser - The existing user object that needs to be updated. Expected to have an 'avatar' property. + * @param {string} avatarUrl - The new avatar URL to be set for the user. + * + * @returns {Promise} + * The function updates the user's avatar and saves the user object. It does not return any value. + * + * @throws {Error} Throws an error if there's an issue saving the updated user object. + */ +const handleExistingUser = async (oldUser, avatarUrl) => { + const fileStrategy = process.env.CDN_PROVIDER; + const isLocal = fileStrategy === FileSources.local; + + if (isLocal && !oldUser.avatar.includes('?manual=true')) { + oldUser.avatar = avatarUrl; + await oldUser.save(); + } else if (!isLocal && !oldUser.avatar.includes('?manual=true')) { + const userId = oldUser._id; + const newavatarUrl = await uploadAvatar({ userId, input: avatarUrl, fileStrategy }); + oldUser.avatar = newavatarUrl; + await oldUser.save(); + } +}; + +/** + * Creates a new user with the provided user details. If the file strategy is not local, the avatar URL is + * processed using the specified file strategy. The new user is saved to the database with the processed or + * original avatar URL. + * + * @param {Object} params - The parameters object for user creation. + * @param {string} params.email - The email of the new user. + * @param {string} params.avatarUrl - The avatar URL of the new user. 
+ * @param {string} params.provider - The provider of the user's account. + * @param {string} params.providerKey - The key to identify the provider in the user model. + * @param {string} params.providerId - The provider-specific ID of the user. + * @param {string} params.username - The username of the new user. + * @param {string} params.name - The name of the new user. + * @param {boolean} [params.emailVerified=false] - Optional. Indicates whether the user's email is verified. Defaults to false. + * + * @returns {Promise} + * A promise that resolves to the newly created user object. + * + * @throws {Error} Throws an error if there's an issue creating or saving the new user object. + */ +const createNewUser = async ({ + email, + avatarUrl, + provider, + providerKey, + providerId, + username, + name, + emailVerified, +}) => { + const update = { + email, + avatar: avatarUrl, + provider, + [providerKey]: providerId, + username, + name, + emailVerified, + }; + + // TODO: remove direct access of User model + const newUser = await new User(update).save(); + + const fileStrategy = process.env.CDN_PROVIDER; + const isLocal = fileStrategy === FileSources.local; + + if (!isLocal) { + const userId = newUser._id; + const newavatarUrl = await uploadAvatar({ userId, input: avatarUrl, fileStrategy }); + newUser.avatar = newavatarUrl; + await newUser.save(); + } + + return newUser; +}; + +module.exports = { + handleExistingUser, + createNewUser, +}; diff --git a/api/test/__mocks__/logger.js b/api/test/__mocks__/logger.js index 455ada0de0d..caeb004e394 100644 --- a/api/test/__mocks__/logger.js +++ b/api/test/__mocks__/logger.js @@ -1,3 +1,43 @@ +jest.mock('winston', () => { + const mockFormatFunction = jest.fn((fn) => fn); + + mockFormatFunction.colorize = jest.fn(); + mockFormatFunction.combine = jest.fn(); + mockFormatFunction.label = jest.fn(); + mockFormatFunction.timestamp = jest.fn(); + mockFormatFunction.printf = jest.fn(); + mockFormatFunction.errors = jest.fn(); + 
mockFormatFunction.splat = jest.fn(); + return { + format: mockFormatFunction, + createLogger: jest.fn().mockReturnValue({ + info: jest.fn(), + warn: jest.fn(), + debug: jest.fn(), + error: jest.fn(), + }), + transports: { + Console: jest.fn(), + DailyRotateFile: jest.fn(), + }, + addColors: jest.fn(), + }; +}); + +jest.mock('winston-daily-rotate-file', () => { + return jest.fn().mockImplementation(() => { + return { + level: 'error', + filename: '../logs/error-%DATE%.log', + datePattern: 'YYYY-MM-DD', + zippedArchive: true, + maxSize: '20m', + maxFiles: '14d', + format: 'format', + }; + }); +}); + jest.mock('~/config', () => { return { logger: { @@ -8,3 +48,11 @@ jest.mock('~/config', () => { }, }; }); + +jest.mock('~/config/parsers', () => { + return { + redactMessage: jest.fn(), + redactFormat: jest.fn(), + debugTraverse: jest.fn(), + }; +}); diff --git a/api/typedefs.js b/api/typedefs.js index 1ab9f645718..7bb956c9aec 100644 --- a/api/typedefs.js +++ b/api/typedefs.js @@ -20,6 +20,24 @@ * @memberof typedefs */ +/** + * @exports TConfig + * @typedef {import('librechat-data-provider').TConfig} TConfig + * @memberof typedefs + */ + +/** + * @exports TMessage + * @typedef {import('librechat-data-provider').TMessage} TMessage + * @memberof typedefs + */ + +/** + * @exports FileSources + * @typedef {import('librechat-data-provider').FileSources} FileSources + * @memberof typedefs + */ + /** * @exports ImageMetadata * @typedef {Object} ImageMetadata @@ -280,8 +298,8 @@ * @property {boolean|{userProvide: boolean}} [chatGPTBrowser] - Flag to indicate if ChatGPT Browser endpoint is user provided, or its configuration. * @property {boolean|{userProvide: boolean}} [anthropic] - Flag to indicate if Anthropic endpoint is user provided, or its configuration. * @property {boolean|{userProvide: boolean}} [bingAI] - Flag to indicate if BingAI endpoint is user provided, or its configuration. 
- * @property {boolean|{userProvide: boolean}} [bingAI] - Flag to indicate if BingAI endpoint is user provided, or its configuration. - * @property {boolean|{userProvide: boolean}} [bingAI] - Flag to indicate if BingAI endpoint is user provided, or its configuration. + * @property {boolean|{userProvide: boolean}} [google] - Flag to indicate if Google endpoint is user provided, or its configuration. + * @property {boolean|{userProvide: boolean, userProvideURL: boolean, name: string}} [custom] - Custom Endpoint configuration. * @memberof typedefs */ @@ -313,13 +331,14 @@ * @property {boolean|{userProvide: boolean}} [anthropic] - Flag to indicate if Anthropic endpoint is user provided, or its configuration. * @property {boolean|{userProvide: boolean}} [bingAI] - Flag to indicate if BingAI endpoint is user provided, or its configuration. * @property {boolean|{userProvide: boolean}} [google] - Flag to indicate if Google endpoint is user provided, or its configuration. + * @property {boolean|{userProvide: boolean, userProvideURL: boolean, name: string}} [custom] - Custom Endpoint configuration. * @property {boolean|GptPlugins} [gptPlugins] - Configuration for GPT plugins. * @memberof typedefs */ /** * @exports EndpointConfig - * @typedef {boolean|{userProvide: boolean}|GptPlugins} EndpointConfig + * @typedef {boolean|TConfig} EndpointConfig * @memberof typedefs */ @@ -330,3 +349,39 @@ * @property {number} order - The order of the endpoint. * @memberof typedefs */ + +/** + * @typedef {Object} ModelOptions + * @property {string} modelName - The name of the model. + * @property {number} [temperature] - The temperature setting for the model. + * @property {number} [presence_penalty] - The presence penalty setting. + * @property {number} [frequency_penalty] - The frequency penalty setting. + * @property {number} [max_tokens] - The maximum number of tokens to generate. 
+ * @memberof typedefs + */ + +/** + * @typedef {Object} ConfigOptions + * @property {string} [basePath] - The base path for the API requests. + * @property {Object} [baseOptions] - Base options for the API requests, including headers. + * @property {Object} [httpAgent] - The HTTP agent for the request. + * @property {Object} [httpsAgent] - The HTTPS agent for the request. + * @memberof typedefs + */ + +/** + * @typedef {Object} Callbacks + * @property {Function} [handleChatModelStart] - A callback function for handleChatModelStart + * @property {Function} [handleLLMEnd] - A callback function for handleLLMEnd + * @property {Function} [handleLLMError] - A callback function for handleLLMError + * @memberof typedefs + */ + +/** + * @typedef {Object} AzureOptions + * @property {string} [azureOpenAIApiKey] - The Azure OpenAI API key. + * @property {string} [azureOpenAIApiInstanceName] - The Azure OpenAI API instance name. + * @property {string} [azureOpenAIApiDeploymentName] - The Azure OpenAI API deployment name. + * @property {string} [azureOpenAIApiVersion] - The Azure OpenAI API version. + * @memberof typedefs + */ diff --git a/api/utils/azureUtils.js b/api/utils/azureUtils.js index 58b8fcde3fa..8083ff4fb3b 100644 --- a/api/utils/azureUtils.js +++ b/api/utils/azureUtils.js @@ -1,11 +1,3 @@ -/** - * @typedef {Object} AzureCredentials - * @property {string} azureOpenAIApiKey - The Azure OpenAI API key. - * @property {string} azureOpenAIApiInstanceName - The Azure OpenAI API instance name. - * @property {string} azureOpenAIApiDeploymentName - The Azure OpenAI API deployment name. - * @property {string} azureOpenAIApiVersion - The Azure OpenAI API version. - */ - const { isEnabled } = require('~/server/utils'); /** @@ -40,22 +32,29 @@ const genAzureEndpoint = ({ azureOpenAIApiInstanceName, azureOpenAIApiDeployment * @param {string} [AzureConfig.azureOpenAIApiDeploymentName] - The Azure OpenAI API deployment name (optional). 
* @param {string} AzureConfig.azureOpenAIApiVersion - The Azure OpenAI API version. * @param {string} [modelName] - The model name to be included in the deployment name (optional). + * @param {Object} [client] - The API Client class for optionally setting properties (optional). * @returns {string} The complete chat completion endpoint URL for the Azure OpenAI API. * @throws {Error} If neither azureOpenAIApiDeploymentName nor modelName is provided. */ const genAzureChatCompletion = ( { azureOpenAIApiInstanceName, azureOpenAIApiDeploymentName, azureOpenAIApiVersion }, modelName, + client, ) => { // Determine the deployment segment of the URL based on provided modelName or azureOpenAIApiDeploymentName let deploymentSegment; if (isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME) && modelName) { const sanitizedModelName = sanitizeModelName(modelName); deploymentSegment = `${sanitizedModelName}`; + client && + typeof client === 'object' && + (client.azure.azureOpenAIApiDeploymentName = sanitizedModelName); } else if (azureOpenAIApiDeploymentName) { deploymentSegment = azureOpenAIApiDeploymentName; - } else { - throw new Error('Either a model name or a deployment name must be provided.'); + } else if (!process.env.AZURE_OPENAI_BASEURL) { + throw new Error( + 'Either a model name with the `AZURE_USE_MODEL_AS_DEPLOYMENT_NAME` setting or a deployment name must be provided if `AZURE_OPENAI_BASEURL` is omitted.', + ); } console.log('genAzureChatCompletion'); console.log( @@ -66,7 +65,7 @@ const genAzureChatCompletion = ( /** * Retrieves the Azure OpenAI API credentials from environment variables. - * @returns {AzureCredentials} An object containing the Azure OpenAI API credentials. + * @returns {AzureOptions} An object containing the Azure OpenAI API credentials. */ const getAzureCredentials = () => { return { @@ -77,9 +76,33 @@ const getAzureCredentials = () => { }; }; +/** + * Constructs a URL by replacing placeholders in the baseURL with values from the azure object. 
+ It specifically looks for '${INSTANCE_NAME}' and '${DEPLOYMENT_NAME}' within the baseURL and replaces + * them with 'azureOpenAIApiInstanceName' and 'azureOpenAIApiDeploymentName' from the azure object. + * If the respective azure property is not provided, the placeholder is replaced with an empty string. + * + * @param {Object} params - The parameters object. + * @param {string} params.baseURL - The baseURL to inspect for replacement placeholders. + * @param {AzureOptions} params.azure - The Azure credentials whose values replace the placeholders. + * @returns {string} The complete baseURL with credentials injected for the Azure OpenAI API. + */ +function constructAzureURL({ baseURL, azure }) { + let finalURL = baseURL; + + // Replace INSTANCE_NAME and DEPLOYMENT_NAME placeholders with actual values if available + if (azure) { + finalURL = finalURL.replace('${INSTANCE_NAME}', azure.azureOpenAIApiInstanceName ?? ''); + finalURL = finalURL.replace('${DEPLOYMENT_NAME}', azure.azureOpenAIApiDeploymentName ?? 
''); + } + + return finalURL; +} + module.exports = { sanitizeModelName, genAzureEndpoint, genAzureChatCompletion, getAzureCredentials, + constructAzureURL, }; diff --git a/api/utils/azureUtils.spec.js b/api/utils/azureUtils.spec.js new file mode 100644 index 00000000000..77db26b0911 --- /dev/null +++ b/api/utils/azureUtils.spec.js @@ -0,0 +1,268 @@ +const { + sanitizeModelName, + genAzureEndpoint, + genAzureChatCompletion, + getAzureCredentials, + constructAzureURL, +} = require('./azureUtils'); + +describe('sanitizeModelName', () => { + test('removes periods from the model name', () => { + const sanitized = sanitizeModelName('model.name'); + expect(sanitized).toBe('modelname'); + }); + + test('leaves model name unchanged if no periods are present', () => { + const sanitized = sanitizeModelName('modelname'); + expect(sanitized).toBe('modelname'); + }); +}); + +describe('genAzureEndpoint', () => { + test('generates correct endpoint URL', () => { + const url = genAzureEndpoint({ + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiDeploymentName: 'deploymentName', + }); + expect(url).toBe('https://instanceName.openai.azure.com/openai/deployments/deploymentName'); + }); +}); + +describe('genAzureChatCompletion', () => { + // Test with both deployment name and model name provided + test('prefers model name over deployment name when both are provided and feature enabled', () => { + process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'true'; + const url = genAzureChatCompletion( + { + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiDeploymentName: 'deploymentName', + azureOpenAIApiVersion: 'v1', + }, + 'modelName', + ); + expect(url).toBe( + 'https://instanceName.openai.azure.com/openai/deployments/modelName/chat/completions?api-version=v1', + ); + }); + + // Test with only deployment name provided + test('uses deployment name when model name is not provided', () => { + const url = genAzureChatCompletion({ + azureOpenAIApiInstanceName: 'instanceName', 
+ azureOpenAIApiDeploymentName: 'deploymentName', + azureOpenAIApiVersion: 'v1', + }); + expect(url).toBe( + 'https://instanceName.openai.azure.com/openai/deployments/deploymentName/chat/completions?api-version=v1', + ); + }); + + // Test with only model name provided + test('uses model name when deployment name is not provided and feature enabled', () => { + process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'true'; + const url = genAzureChatCompletion( + { + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiVersion: 'v1', + }, + 'modelName', + ); + expect(url).toBe( + 'https://instanceName.openai.azure.com/openai/deployments/modelName/chat/completions?api-version=v1', + ); + }); + + // Test with neither deployment name nor model name provided + test('throws error if neither deployment name nor model name is provided', () => { + expect(() => { + genAzureChatCompletion({ + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiVersion: 'v1', + }); + }).toThrow( + 'Either a model name with the `AZURE_USE_MODEL_AS_DEPLOYMENT_NAME` setting or a deployment name must be provided if `AZURE_OPENAI_BASEURL` is omitted.', + ); + }); + + // Test with feature disabled but model name provided + test('ignores model name and uses deployment name when feature is disabled', () => { + process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'false'; + const url = genAzureChatCompletion( + { + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiDeploymentName: 'deploymentName', + azureOpenAIApiVersion: 'v1', + }, + 'modelName', + ); + expect(url).toBe( + 'https://instanceName.openai.azure.com/openai/deployments/deploymentName/chat/completions?api-version=v1', + ); + }); + + // Test with sanitized model name + test('sanitizes model name when used in URL', () => { + process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'true'; + const url = genAzureChatCompletion( + { + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiVersion: 'v1', + }, + 'model.name', + ); + 
expect(url).toBe( + 'https://instanceName.openai.azure.com/openai/deployments/modelname/chat/completions?api-version=v1', + ); + }); + + // Test with client parameter and model name + test('updates client with sanitized model name when provided and feature enabled', () => { + process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'true'; + const clientMock = { azure: {} }; + const url = genAzureChatCompletion( + { + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiVersion: 'v1', + }, + 'model.name', + clientMock, + ); + expect(url).toBe( + 'https://instanceName.openai.azure.com/openai/deployments/modelname/chat/completions?api-version=v1', + ); + expect(clientMock.azure.azureOpenAIApiDeploymentName).toBe('modelname'); + }); + + // Test with client parameter but without model name + test('does not update client when model name is not provided', () => { + const clientMock = { azure: {} }; + const url = genAzureChatCompletion( + { + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiDeploymentName: 'deploymentName', + azureOpenAIApiVersion: 'v1', + }, + undefined, + clientMock, + ); + expect(url).toBe( + 'https://instanceName.openai.azure.com/openai/deployments/deploymentName/chat/completions?api-version=v1', + ); + expect(clientMock.azure.azureOpenAIApiDeploymentName).toBeUndefined(); + }); + + // Test with client parameter and deployment name when feature is disabled + test('does not update client when feature is disabled', () => { + process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'false'; + const clientMock = { azure: {} }; + const url = genAzureChatCompletion( + { + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiDeploymentName: 'deploymentName', + azureOpenAIApiVersion: 'v1', + }, + 'modelName', + clientMock, + ); + expect(url).toBe( + 'https://instanceName.openai.azure.com/openai/deployments/deploymentName/chat/completions?api-version=v1', + ); + expect(clientMock.azure.azureOpenAIApiDeploymentName).toBeUndefined(); + }); + + // Reset 
environment variable after tests + afterEach(() => { + delete process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME; + }); +}); + +describe('getAzureCredentials', () => { + beforeEach(() => { + process.env.AZURE_API_KEY = 'testApiKey'; + process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'instanceName'; + process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'deploymentName'; + process.env.AZURE_OPENAI_API_VERSION = 'v1'; + }); + + test('retrieves Azure OpenAI API credentials from environment variables', () => { + const credentials = getAzureCredentials(); + expect(credentials).toEqual({ + azureOpenAIApiKey: 'testApiKey', + azureOpenAIApiInstanceName: 'instanceName', + azureOpenAIApiDeploymentName: 'deploymentName', + azureOpenAIApiVersion: 'v1', + }); + }); +}); + +describe('constructAzureURL', () => { + test('replaces both placeholders when both properties are provided', () => { + const url = constructAzureURL({ + baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}', + azure: { + azureOpenAIApiInstanceName: 'instance1', + azureOpenAIApiDeploymentName: 'deployment1', + }, + }); + expect(url).toBe('https://example.com/instance1/deployment1'); + }); + + test('replaces only INSTANCE_NAME when only azureOpenAIApiInstanceName is provided', () => { + const url = constructAzureURL({ + baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}', + azure: { + azureOpenAIApiInstanceName: 'instance2', + }, + }); + expect(url).toBe('https://example.com/instance2/'); + }); + + test('replaces only DEPLOYMENT_NAME when only azureOpenAIApiDeploymentName is provided', () => { + const url = constructAzureURL({ + baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}', + azure: { + azureOpenAIApiDeploymentName: 'deployment2', + }, + }); + expect(url).toBe('https://example.com//deployment2'); + }); + + test('does not replace any placeholders when azure object is empty', () => { + const url = constructAzureURL({ + baseURL: 
'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}', + azure: {}, + }); + expect(url).toBe('https://example.com//'); + }); + + test('returns baseURL as is when azure object is not provided', () => { + const url = constructAzureURL({ + baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}', + }); + expect(url).toBe('https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}'); + }); + + test('returns baseURL as is when no placeholders are set', () => { + const url = constructAzureURL({ + baseURL: 'https://example.com/my_custom_instance/my_deployment', + azure: { + azureOpenAIApiInstanceName: 'instance1', + azureOpenAIApiDeploymentName: 'deployment1', + }, + }); + expect(url).toBe('https://example.com/my_custom_instance/my_deployment'); + }); + + test('returns regular Azure OpenAI baseURL with placeholders set', () => { + const baseURL = + 'https://${INSTANCE_NAME}.openai.azure.com/openai/deployments/${DEPLOYMENT_NAME}'; + const url = constructAzureURL({ + baseURL, + azure: { + azureOpenAIApiInstanceName: 'instance1', + azureOpenAIApiDeploymentName: 'deployment1', + }, + }); + expect(url).toBe('https://instance1.openai.azure.com/openai/deployments/deployment1'); + }); +}); diff --git a/api/utils/extractBaseURL.js b/api/utils/extractBaseURL.js index cc95f4481d9..730473c4102 100644 --- a/api/utils/extractBaseURL.js +++ b/api/utils/extractBaseURL.js @@ -1,13 +1,15 @@ /** - * Extracts a valid OpenAI baseURL from a given string, matching "url/v1," also an added suffix, - * ending with "/openai" (to allow the Cloudflare, LiteLLM pattern). - * Returns the original URL if no match is found. + * Extracts a valid OpenAI baseURL from a given string, matching "url/v1," followed by an optional suffix. + * The suffix can be one of several predefined values (e.g., 'openai', 'azure-openai', etc.), + * accommodating different proxy patterns like Cloudflare, LiteLLM, etc. + * Returns the original URL if no valid pattern is found. 
* * Examples: * - `https://open.ai/v1/chat` -> `https://open.ai/v1` * - `https://open.ai/v1/chat/completions` -> `https://open.ai/v1` - * - `https://open.ai/v1/ACCOUNT/GATEWAY/openai/completions` -> `https://open.ai/v1/ACCOUNT/GATEWAY/openai` + * - `https://gateway.ai.cloudflare.com/v1/account/gateway/azure-openai/completions` -> `https://gateway.ai.cloudflare.com/v1/account/gateway/azure-openai` * - `https://open.ai/v1/hi/openai` -> `https://open.ai/v1/hi/openai` + * - `https://api.example.com/v1/replicate` -> `https://api.example.com/v1/replicate` * * @param {string} url - The URL to be processed. * @returns {string} The matched pattern or input if no match is found. @@ -23,8 +25,27 @@ function extractBaseURL(url) { // Extract the part of the URL up to and including '/v1'. let baseUrl = url.substring(0, v1Index + 3); + const openai = 'openai'; + // Find which suffix is present. + const suffixes = [ + 'azure-openai', + openai, + 'replicate', + 'huggingface', + 'workers-ai', + 'aws-bedrock', + ]; + const suffixUsed = suffixes.find((suffix) => url.includes(`/${suffix}`)); + + if (suffixUsed === 'azure-openai') { + return url.split(/\/(chat|completion)/)[0]; + } + // Check if the URL has '/openai' immediately after '/v1'. - const openaiIndex = url.indexOf('/openai', v1Index + 3); + const openaiIndex = url.indexOf(`/${openai}`, v1Index + 3); + // Find which suffix is present in the URL, if any. + const suffixIndex = + suffixUsed === openai ? openaiIndex : url.indexOf(`/${suffixUsed}`, v1Index + 3); // If '/openai' is found right after '/v1', include it in the base URL. if (openaiIndex === v1Index + 3) { @@ -37,9 +58,9 @@ function extractBaseURL(url) { // If there is a next slash, the base URL goes up to but not including the slash. baseUrl = url.substring(0, nextSlashIndex); } - } else if (openaiIndex > 0) { - // If '/openai' is present but not immediately after '/v1', we need to include the reverse proxy pattern. 
- baseUrl = url.substring(0, openaiIndex + 7); + } else if (suffixIndex > 0) { + // If a suffix is present but not immediately after '/v1', we need to include the reverse proxy pattern. + baseUrl = url.substring(0, suffixIndex + suffixUsed.length + 1); } return baseUrl; diff --git a/api/utils/extractBaseURL.spec.js b/api/utils/extractBaseURL.spec.js index 299b9c1397e..fe647b06997 100644 --- a/api/utils/extractBaseURL.spec.js +++ b/api/utils/extractBaseURL.spec.js @@ -53,4 +53,59 @@ describe('extractBaseURL', () => { const url = 'https://open.ai/v1/hi/openai'; expect(extractBaseURL(url)).toBe('https://open.ai/v1/hi/openai'); }); + + test('should handle Azure OpenAI Cloudflare endpoint correctly', () => { + const url = 'https://gateway.ai.cloudflare.com/v1/account/gateway/azure-openai/completions'; + expect(extractBaseURL(url)).toBe( + 'https://gateway.ai.cloudflare.com/v1/account/gateway/azure-openai', + ); + }); + + test('should include various suffixes in the extracted URL when present', () => { + const urls = [ + 'https://api.example.com/v1/azure-openai/something', + 'https://api.example.com/v1/replicate/anotherthing', + 'https://api.example.com/v1/huggingface/yetanotherthing', + 'https://api.example.com/v1/workers-ai/differentthing', + 'https://api.example.com/v1/aws-bedrock/somethingelse', + ]; + + const expected = [ + /* Note: exception for azure-openai to allow credential injection */ + 'https://api.example.com/v1/azure-openai/something', + 'https://api.example.com/v1/replicate', + 'https://api.example.com/v1/huggingface', + 'https://api.example.com/v1/workers-ai', + 'https://api.example.com/v1/aws-bedrock', + ]; + + urls.forEach((url, index) => { + expect(extractBaseURL(url)).toBe(expected[index]); + }); + }); + + test('should handle URLs with suffixes not immediately after /v1', () => { + const url = 'https://api.example.com/v1/some/path/azure-openai'; + expect(extractBaseURL(url)).toBe('https://api.example.com/v1/some/path/azure-openai'); + }); + + 
test('should handle URLs with complex paths after the suffix', () => { + const url = 'https://api.example.com/v1/replicate/deep/path/segment'; + expect(extractBaseURL(url)).toBe('https://api.example.com/v1/replicate'); + }); + + test('should leave a regular Azure OpenAI baseURL as is', () => { + const url = 'https://instance-name.openai.azure.com/openai/deployments/deployment-name'; + expect(extractBaseURL(url)).toBe(url); + }); + + test('should leave a regular Azure OpenAI baseURL with placeholders as is', () => { + const url = 'https://${INSTANCE_NAME}.openai.azure.com/openai/deployments/${DEPLOYMENT_NAME}'; + expect(extractBaseURL(url)).toBe(url); + }); + + test('should leave an alternate Azure OpenAI baseURL with placeholders as is', () => { + const url = 'https://${INSTANCE_NAME}.com/resources/deployments/${DEPLOYMENT_NAME}'; + expect(extractBaseURL(url)).toBe(url); + }); }); diff --git a/api/utils/index.js b/api/utils/index.js index f9194858e82..a40c53b6aba 100644 --- a/api/utils/index.js +++ b/api/utils/index.js @@ -1,3 +1,4 @@ +const loadYaml = require('./loadYaml'); const tokenHelpers = require('./tokens'); const azureUtils = require('./azureUtils'); const extractBaseURL = require('./extractBaseURL'); @@ -8,4 +9,5 @@ module.exports = { ...tokenHelpers, extractBaseURL, findMessageContent, + loadYaml, }; diff --git a/api/utils/loadYaml.js b/api/utils/loadYaml.js new file mode 100644 index 00000000000..b7068e209f0 --- /dev/null +++ b/api/utils/loadYaml.js @@ -0,0 +1,13 @@ +const fs = require('fs'); +const yaml = require('js-yaml'); + +function loadYaml(filepath) { + try { + let fileContents = fs.readFileSync(filepath, 'utf8'); + return yaml.load(fileContents); + } catch (e) { + // console.error(e); + } +} + +module.exports = loadYaml; diff --git a/api/utils/tokens.js b/api/utils/tokens.js index cda4755717d..ce6c51732aa 100644 --- a/api/utils/tokens.js +++ b/api/utils/tokens.js @@ -39,22 +39,26 @@ const models = [ 'gpt-3.5-turbo-0301', ]; +const openAIModels = 
{ + 'gpt-4': 8187, // -5 from max + 'gpt-4-0613': 8187, // -5 from max + 'gpt-4-32k': 32758, // -10 from max + 'gpt-4-32k-0314': 32758, // -10 from max + 'gpt-4-32k-0613': 32758, // -10 from max + 'gpt-3.5-turbo': 4092, // -5 from max + 'gpt-3.5-turbo-0613': 4092, // -5 from max + 'gpt-3.5-turbo-0301': 4092, // -5 from max + 'gpt-3.5-turbo-16k': 16375, // -10 from max + 'gpt-3.5-turbo-16k-0613': 16375, // -10 from max + 'gpt-3.5-turbo-1106': 16375, // -10 from max + 'gpt-4-1106': 127990, // -10 from max + 'mistral-': 31990, // -10 from max +}; + // Order is important here: by model series and context size (gpt-4 then gpt-3, ascending) const maxTokensMap = { - [EModelEndpoint.openAI]: { - 'gpt-4': 8191, - 'gpt-4-0613': 8191, - 'gpt-4-32k': 32767, - 'gpt-4-32k-0314': 32767, - 'gpt-4-32k-0613': 32767, - 'gpt-3.5-turbo': 4095, - 'gpt-3.5-turbo-0613': 4095, - 'gpt-3.5-turbo-0301': 4095, - 'gpt-3.5-turbo-16k': 15999, - 'gpt-3.5-turbo-16k-0613': 15999, - 'gpt-3.5-turbo-1106': 16380, // -5 from max - 'gpt-4-1106': 127995, // -5 from max - }, + [EModelEndpoint.openAI]: openAIModels, + [EModelEndpoint.custom]: openAIModels, [EModelEndpoint.google]: { /* Max I/O is combined so we subtract the amount from max response tokens for actual total */ gemini: 32750, // -10 from max diff --git a/bun.lockb b/bun.lockb index 8ef4b75685f..9fd1d656d17 100755 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/client/index.html b/client/index.html index dae28cb7057..3654c8766ba 100644 --- a/client/index.html +++ b/client/index.html @@ -4,6 +4,7 @@ + LibreChat + /etc/nginx/ssl/dhparam +# ssl_dhparam /etc/nginx/ssl/dhparam; + +# # intermediate configuration +# ssl_protocols TLSv1.2 TLSv1.3; +# ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-CHACHA20-POLY1305; +# ssl_prefer_server_ciphers 
off; + +# # HSTS (ngx_http_headers_module is required) (63072000 seconds) +# add_header Strict-Transport-Security "max-age=63072000" always; + +# # OCSP stapling +# ssl_stapling on; +# ssl_stapling_verify on; + +# # verify chain of trust of OCSP response using Root CA and Intermediate certs +# ssl_trusted_certificate /etc/nginx/ssl/ca.crt; + +# # replace with the IP address of your resolver +# resolver 127.0.0.1; + +# server_name localhost; + +# # Increase the client_max_body_size to allow larger file uploads +# # The default limits for image uploads as of 11/22/23 is 20MB/file, and 25MB/request +# client_max_body_size 25M; + +# location /api { +# proxy_pass http://api:3080/api; +# } + +# location / { +# proxy_pass http://api:3080; +# } +#} diff --git a/client/package.json b/client/package.json index 218ed7fa30c..19c8c01caf6 100644 --- a/client/package.json +++ b/client/package.json @@ -1,6 +1,6 @@ { "name": "@librechat/frontend", - "version": "0.6.5", + "version": "0.6.6", "description": "", "type": "module", "scripts": { @@ -112,7 +112,7 @@ "postcss": "^8.4.31", "postcss-loader": "^7.1.0", "postcss-preset-env": "^8.2.0", - "tailwindcss": "^3.2.6", + "tailwindcss": "^3.4.1", "ts-jest": "^29.1.0", "typescript": "^5.0.4", "vite": "^5.0.7", diff --git a/client/public/assets/apple-touch-icon-180x180.png b/client/public/assets/apple-touch-icon-180x180.png new file mode 100644 index 00000000000..91dde5d139d Binary files /dev/null and b/client/public/assets/apple-touch-icon-180x180.png differ diff --git a/client/public/assets/mistral.png b/client/public/assets/mistral.png new file mode 100644 index 00000000000..ff2f3e8b63b Binary files /dev/null and b/client/public/assets/mistral.png differ diff --git a/client/public/assets/openrouter.png b/client/public/assets/openrouter.png new file mode 100644 index 00000000000..5d47b23fc64 Binary files /dev/null and b/client/public/assets/openrouter.png differ diff --git a/client/src/common/types.ts b/client/src/common/types.ts index 
2daf2d8ba0f..1ca169a0c1c 100644 --- a/client/src/common/types.ts +++ b/client/src/common/types.ts @@ -1,4 +1,12 @@ -import type { TConversation, TMessage, TPreset, TLoginUser, TUser } from 'librechat-data-provider'; +import { FileSources } from 'librechat-data-provider'; +import type { + TConversation, + TMessage, + TPreset, + TLoginUser, + TUser, + EModelEndpoint, +} from 'librechat-data-provider'; import type { UseMutationResult } from '@tanstack/react-query'; export type TSetOption = (param: number | string) => (newValue: number | string | boolean) => void; @@ -141,7 +149,7 @@ export type TDisplayProps = TText & export type TConfigProps = { userKey: string; setUserKey: React.Dispatch>; - endpoint: string; + endpoint: EModelEndpoint | string; }; export type TDangerButtonProps = { @@ -194,9 +202,11 @@ export type IconProps = Pick & Pick & { size?: number; button?: boolean; + iconURL?: string; message?: boolean; className?: string; - endpoint?: string | null; + endpoint?: EModelEndpoint | string | null; + endpointType?: EModelEndpoint | null; }; export type Option = Record & { @@ -221,6 +231,7 @@ export interface ExtendedFile { size: number; preview: string; progress: number; + source?: FileSources; } export type ContextType = { navVisible: boolean; setNavVisible: (visible: boolean) => void }; diff --git a/client/src/components/Chat/ChatView.tsx b/client/src/components/Chat/ChatView.tsx index d582f4b3e9d..30a7edc187e 100644 --- a/client/src/components/Chat/ChatView.tsx +++ b/client/src/components/Chat/ChatView.tsx @@ -6,11 +6,12 @@ import { useChatHelpers, useSSE } from '~/hooks'; // import GenerationButtons from './Input/GenerationButtons'; import MessagesView from './Messages/MessagesView'; // import OptionsBar from './Input/OptionsBar'; +import { useGetFiles } from '~/data-provider'; +import { buildTree, mapFiles } from '~/utils'; import { Spinner } from '~/components/svg'; import { ChatContext } from '~/Providers'; import Presentation from './Presentation'; 
import ChatForm from './Input/ChatForm'; -import { buildTree } from '~/utils'; import Landing from './Landing'; import Header from './Header'; import Footer from './Footer'; @@ -21,11 +22,16 @@ function ChatView({ index = 0 }: { index?: number }) { const submissionAtIndex = useRecoilValue(store.submissionByIndex(0)); useSSE(submissionAtIndex); + const { data: fileMap } = useGetFiles({ + select: mapFiles, + }); + const { data: messagesTree = null, isLoading } = useGetMessagesByConvoId(conversationId ?? '', { select: (data) => { - const dataTree = buildTree(data, false); + const dataTree = buildTree({ messages: data, fileMap }); return dataTree?.length === 0 ? null : dataTree ?? null; }, + enabled: !!fileMap, }); const chatHelpers = useChatHelpers(index, conversationId); diff --git a/client/src/components/Chat/Footer.tsx b/client/src/components/Chat/Footer.tsx index ec5e6e65f02..cba1b6083d1 100644 --- a/client/src/components/Chat/Footer.tsx +++ b/client/src/components/Chat/Footer.tsx @@ -17,7 +17,7 @@ export default function Footer() { rel="noreferrer" className="underline" > - {config?.appTitle || 'LibreChat'} v0.6.5 + {config?.appTitle || 'LibreChat'} v0.6.6 {' - '} {localize('com_ui_new_footer')} diff --git a/client/src/components/Chat/Input/ChatForm.tsx b/client/src/components/Chat/Input/ChatForm.tsx index 9fce650c83d..f52ff943e1f 100644 --- a/client/src/components/Chat/Input/ChatForm.tsx +++ b/client/src/components/Chat/Input/ChatForm.tsx @@ -30,6 +30,8 @@ export default function ChatForm({ index = 0 }) { }; const { requiresKey } = useRequiresKey(); + const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null }; + const endpoint = endpointType ?? _endpoint; return (
-