From b8ef53cc10b890cb6c4628209d889aa954d31aa1 Mon Sep 17 00:00:00 2001
From: Ivan Charapanau
Date: Sun, 17 Nov 2024 19:14:55 +0100
Subject: [PATCH] feat: v0.2.17 - promptfoo, omniparser, webtop, failed perplexideez integration

---
 .scripts/seed.ts                             |   2 +-
 README.md                                    |   5 +-
 app/package.json                             |   2 +-
 app/src-tauri/Cargo.toml                     |   2 +-
 app/src-tauri/tauri.conf.json                |   2 +-
 app/src/serviceMetadata.tsx                  |   3 +
 compose.omniparser.yml                       |  13 ++
 compose.perplexideez.yml                     | 137 ++++++++++++++
 compose.promptfoo.yml                        |  28 +++
 compose.webtop.yml                           |  33 ++++
 compose.x.perplexideez.ollama.yml            |   5 +
 compose.x.perplexideez.searxng.yml           |   4 +
 compose.x.promptfoo.ollama.yml               |   5 +
 harbor.sh                                    |  96 +++++++++-
 http-catalog/perplexideez.http               |  10 ++
 omniparser/Dockerfile                        |  92 ++++++++++
 omniparser/override.env                      |   0
 package.json                                 |   2 +-
 perplexideez/override.env                    |  56 ++++++
 profiles/default.env                         |  23 +++
 promptfoo/.gitignore                         |   1 +
 promptfoo/examples/hello-promptfoo/README.md |  10 ++
 .../hello-promptfoo/promptfooconfig.yaml     |  39 ++++
 promptfoo/examples/temp-test/README.md       |  10 ++
 .../examples/temp-test/promptfooconfig.yaml  |  64 +++++++
 promptfoo/override.env                       |   3 +
 pyproject.toml                               |   2 +-
 searxng/settings.yml.new                     | 170 +++++++++---------
 webtop/.gitignore                            |   1 +
 webtop/Dockerfile                            |  40 +++++
 webtop/init/fix_desktop_app.sh               |   9 +
 webtop/init/provision_docker_groups.sh       |  22 +++
 webtop/override.env                          |   3 +
 33 files changed, 798 insertions(+), 96 deletions(-)
 create mode 100644 compose.omniparser.yml
 create mode 100644 compose.perplexideez.yml
 create mode 100644 compose.promptfoo.yml
 create mode 100644 compose.webtop.yml
 create mode 100644 compose.x.perplexideez.ollama.yml
 create mode 100644 compose.x.perplexideez.searxng.yml
 create mode 100644 compose.x.promptfoo.ollama.yml
 create mode 100644 http-catalog/perplexideez.http
 create mode 100644 omniparser/Dockerfile
 create mode 100644 omniparser/override.env
 create mode 100644 perplexideez/override.env
 create mode 100644 promptfoo/.gitignore
 create mode 100644 promptfoo/examples/hello-promptfoo/README.md
 create mode 100644 promptfoo/examples/hello-promptfoo/promptfooconfig.yaml
 create mode 100644 promptfoo/examples/temp-test/README.md
 create mode 100644 promptfoo/examples/temp-test/promptfooconfig.yaml
 create mode 100644 promptfoo/override.env
 create mode 100644 webtop/.gitignore
 create mode 100644 webtop/Dockerfile
 create mode 100755 webtop/init/fix_desktop_app.sh
 create mode 100755 webtop/init/provision_docker_groups.sh
 create mode 100644 webtop/override.env

diff --git a/.scripts/seed.ts b/.scripts/seed.ts
index 4716e60..1fcd136 100644
--- a/.scripts/seed.ts
+++ b/.scripts/seed.ts
@@ -2,7 +2,7 @@ import * as toml from 'jsr:@std/toml';
 import * as path from 'jsr:@std/path';
 import * as collections from "jsr:@std/collections/deep-merge";
 
-const VERSION = "0.2.16";
+const VERSION = "0.2.17";
 
 type ValueSeed = {
   // Path relative to the project root
diff --git a/README.md b/README.md
index 3b6abc3..5f23bed 100644
--- a/README.md
+++ b/README.md
@@ -74,7 +74,10 @@ Harbor is a containerized LLM toolkit that allows you to run LLMs and additional
 [Bolt.new](https://github.com/av/harbor/wiki/2.3.24-Satellite:-Bolt.new) ⦁︎
 [Open WebUI Pipelines](https://github.com/av/harbor/wiki/2.3.25-Satellite:-Open-WebUI-Pipelines) ⦁︎
 [Qdrant](https://github.com/av/harbor/wiki/2.3.26-Satellite:-Qdrant) ⦁︎
-[K6](https://github.com/av/harbor/wiki/2.3.26-Satellite:-K6)
+[K6](https://github.com/av/harbor/wiki/2.3.27-Satellite:-K6) ⦁︎
+[Promptfoo](https://github.com/av/harbor/wiki/2.3.28-Satellite:-Promptfoo) ⦁︎
+[Webtop](https://github.com/av/harbor/wiki/2.3.29-Satellite:-Webtop) ⦁︎
+[OmniParser](https://github.com/av/harbor/wiki/2.3.30-Satellite:-OmniParser)
 
 ## Blitz Tour
 
diff --git a/app/package.json b/app/package.json
index 6504ed9..fd002de 100644
--- a/app/package.json
+++ b/app/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@avcodes/harbor-app",
   "private": true,
-  "version": "0.2.16",
+  "version": "0.2.17",
   "type": "module",
   "scripts": {
     "dev": "vite",
diff --git a/app/src-tauri/Cargo.toml b/app/src-tauri/Cargo.toml
index 6030e6f..43f2748 100644
--- a/app/src-tauri/Cargo.toml
+++ b/app/src-tauri/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "harbor-app"
-version = "0.2.16"
+version = "0.2.17"
 description = "A companion app for Harbor LLM toolkit"
 authors = ["av"]
 edition = "2021"
diff --git a/app/src-tauri/tauri.conf.json b/app/src-tauri/tauri.conf.json
index 386b062..060aa4b 100644
--- a/app/src-tauri/tauri.conf.json
+++ b/app/src-tauri/tauri.conf.json
@@ -1,7 +1,7 @@
 {
   "$schema": "https://schema.tauri.app/config/2.0.0-rc",
   "productName": "Harbor",
-  "version": "0.2.16",
+  "version": "0.2.17",
   "identifier": "com.harbor.app",
   "build": {
     "beforeDevCommand": "bun run dev",
diff --git a/app/src/serviceMetadata.tsx b/app/src/serviceMetadata.tsx
index dbd4541..d13c4dd 100644
--- a/app/src/serviceMetadata.tsx
+++ b/app/src/serviceMetadata.tsx
@@ -208,5 +208,8 @@ export const serviceMetadata: Record> = {
   },
   k6: {
     tags: [HST.satellite, HST.cli],
+  },
+  promptfoo: {
+    tags: [HST.satellite, HST.cli],
   }
 };
\ No newline at end of file
diff --git a/compose.omniparser.yml b/compose.omniparser.yml
new file mode 100644
index 0000000..c924aa2
--- /dev/null
+++ b/compose.omniparser.yml
@@ -0,0 +1,13 @@
+services:
+  omniparser:
+    build:
+      context: ./omniparser
+      dockerfile: Dockerfile
+    container_name: ${HARBOR_CONTAINER_PREFIX}.omniparser
+    env_file:
+      - ./.env
+      - ./omniparser/override.env
+    ports:
+      - ${HARBOR_OMNIPARSER_HOST_PORT}:7861
+    networks:
+      - harbor-network
\ No newline at end of file
diff --git a/compose.perplexideez.yml b/compose.perplexideez.yml
new file mode 100644
index 0000000..cd256f6
--- /dev/null
+++ b/compose.perplexideez.yml
@@ -0,0 +1,137 @@
+services:
+  # Proxy to work around secure cookies
+  # and CORS for localhost deployment
+  perplexideez:
+    container_name: ${HARBOR_CONTAINER_PREFIX}.perplexideez
+    image: nginx:alpine
+    ports:
+      - ${HARBOR_PERPLEXIDEEZ_HOST_PORT}:80
+    configs:
+      - source: perplexideez_proxy_config
+        target: /etc/nginx/conf.d/default.conf
+    depends_on:
+      perplexideez-service:
+        condition: service_healthy
+    networks:
+      - harbor-network
+
+  perplexideez-service:
+    container_name: ${HARBOR_CONTAINER_PREFIX}.perplexideez-service
+    image: ${HARBOR_PERPLEXIDEEZ_IMAGE}:${HARBOR_PERPLEXIDEEZ_VERSION}
+    env_file:
+      - ./.env
+      - ./perplexideez/override.env
+    healthcheck:
+      # TODO:
+      # https://github.com/brunostjohn/perplexideez/issues/12
+      test: ["CMD", "true"]
+      interval: 2s
+      timeout: 60s
+      retries: 5
+      start_period: 10s
+    depends_on:
+      perplexideez-db:
+        condition: service_healthy
+      perplexideez-migrate:
+        condition: service_completed_successfully
+    networks:
+      - harbor-network
+
+  perplexideez-db:
+    container_name: ${HARBOR_CONTAINER_PREFIX}.perplexideez-db
+    image: postgres:16-alpine
+    restart: unless-stopped
+    volumes:
+      - ./perplexideez/pgdata:/var/lib/postgresql/data
+    healthcheck:
+      test: "pg_isready -d postgres -U postgres -h localhost"
+      interval: 2s
+      timeout: 60s
+      retries: 5
+    env_file:
+      - ./.env
+      - ./perplexideez/override.env
+    environment:
+      - POSTGRES_USER=postgres
+      - POSTGRES_PASSWORD=postgres
+      - POSTGRES_DB=postgres
+    networks:
+      - harbor-network
+
+  perplexideez-migrate:
+    container_name: ${HARBOR_CONTAINER_PREFIX}.perplexideez-migrate
+    image: ${HARBOR_PERPLEXIDEEZ_MIGRATE_IMAGE}
+    env_file:
+      - ./.env
+      - ./perplexideez/override.env
+    depends_on:
+      perplexideez-db:
+        condition: service_healthy
+    networks:
+      - harbor-network
+
+configs:
+  perplexideez_proxy_config:
+    content: |
+      server {
+        listen 80;
+        gzip_static off;
+        server_name perplexideez.com;
+
+        location / {
+          proxy_pass http://perplexideez-service:3000;
+          proxy_set_header Host perplexideez.com;
+          proxy_set_header X-Real-IP $$remote_addr;
+          proxy_set_header X-Forwarded-For $$proxy_add_x_forwarded_for;
+          proxy_set_header X-Forwarded-Proto https;
+
+          # Disable CSRF protection in Nginx
+          proxy_set_header X-Forwarded-Host $$host;
+          proxy_set_header Origin https://perplexideez.com;
+          proxy_set_header Referer https://perplexideez.com;
+
+          # Rewrite Location header to use localhost
+          proxy_redirect https://perplexideez.com/ http://localhost:34261/;
+          proxy_redirect http://perplexideez.com/ http://localhost:34261/;
+
+          # Cookie rewriting
+          proxy_cookie_domain perplexideez.com localhost;
+          proxy_cookie_path / /;
+          proxy_cookie_flags ~ -secure;
+
+          # Force uncompressed response to allow substitutions
+          proxy_set_header Accept-Encoding "";
+
+          # Rewrite Secure-prefixed cookies
+          proxy_hide_header set-cookie;
+          sub_filter_types *;
+          sub_filter_once off;
+          sub_filter '__Secure-' '';
+          sub_filter '; Secure' '';
+          sub_filter 'perplexideez.com' 'localhost:34261';
+          sub_filter "https://localhost:34261" "http://localhost:34261";
+          sub_filter "https%3A%2F%2Flocalhost%3A34261" "http%3A%2F%2Flocalhost%3A34261";
+          sub_filter 'href="https://localhost:34261' 'href="http://localhost:34261';
+          sub_filter 'content="https://localhost:34261' 'content="http://localhost:34261';
+
+          # CORS headers
+          add_header 'Access-Control-Allow-Origin' '*';
+          add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
+          add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization';
+
+          # Handle OPTIONS method
+          if ($$request_method = 'OPTIONS') {
+            add_header 'Access-Control-Allow-Origin' '*';
+            add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
+            add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization';
+            add_header 'Access-Control-Max-Age' 1728000;
+            add_header 'Content-Type' 'text/plain; charset=utf-8';
+            add_header 'Content-Length' 0;
+            return 204;
+          }
+
+          # Force processing of response
+          proxy_force_ranges on;
+        }
+      }
\ No newline at end of file
diff --git a/compose.promptfoo.yml b/compose.promptfoo.yml
new file mode 100644
index 0000000..df72e19
--- /dev/null
+++ b/compose.promptfoo.yml
@@ -0,0 +1,28 @@
+services:
+  promptfoo:
+    container_name: ${HARBOR_CONTAINER_PREFIX}.promptfoo
+    env_file:
+      - ./.env
+      - ./promptfoo/override.env
+    environment:
+      - PROMPTFOO_REMOTE_API_BASE_URL=${HARBOR_PROMPTFOO_REMOTE_API_BASE_URL}
+      - PROMPTFOO_REMOTE_APP_BASE_URL=${HARBOR_PROMPTFOO_REMOTE_APP_BASE_URL}
+      - PROMPTFOO_DISABLE_TELEMETRY=1
+      - PROMPTFOO_DISABLE_UPDATE=1
+    image: ${HARBOR_PROMPTFOO_IMAGE}:${HARBOR_PROMPTFOO_VERSION}
+    ports:
+      - ${HARBOR_PROMPTFOO_HOST_PORT}:3000
+    volumes:
+      - ${HARBOR_PROMPTFOO_WORKSPACE}:/home/promptfoo/.promptfoo
+    # Original healthcheck is very
+    # slow; the server starts in ~2s, so we override it
+    healthcheck:
+      test: [ "CMD-SHELL", "exit 0" ]
+      interval: 2s
+      timeout: 1s
+      start_period: 1s
+      retries: 5
+    # Avoiding issues with SIGTERM in the original image
+    command: ["ash", "-c", "node /app/dist/src/server/index.js"]
+    networks:
+      - harbor-network
\ No newline at end of file
diff --git a/compose.webtop.yml b/compose.webtop.yml
new file mode 100644
index 0000000..ad2dbe8
--- /dev/null
+++ b/compose.webtop.yml
@@ -0,0 +1,33 @@
+services:
+  webtop:
+    # We add a few extra dependencies to the image
+    build:
+      context: ./webtop
+      dockerfile: Dockerfile
+      args:
+        HARBOR_WEBTOP_IMAGE: ${HARBOR_WEBTOP_IMAGE}
+        HARBOR_WEBTOP_VERSION: ${HARBOR_WEBTOP_VERSION}
+    container_name: ${HARBOR_CONTAINER_PREFIX}.webtop
+    env_file:
+      - ./.env
+      - ./webtop/override.env
+    environment:
+      - HARBOR_HOME=/harbor
+      - PUID=${HARBOR_USER_ID}
+    volumes:
+      # Mount the host's Harbor home - required for interop
+      - .:/harbor
+      # Webtop homedir, can be cleared with "harbor webtop reset"
+      - ${HARBOR_WEBTOP_WORKSPACE}:/config
+      # Docker access
+      - /var/run/docker.sock:/var/run/docker.sock
+      # Pre-provision docker groups
+      - ./webtop/init:/custom-cont-init.d:ro
+    ports:
+      - ${HARBOR_WEBTOP_HOST_PORT}:3000
+    # There's also an HTTPS port, but we're not using it
+    # - ${HARBOR_WEBTOP_HOST_PORT}:3001
+    # Avoid crashes in modern browsers
+    shm_size: "1gb"
+    networks:
+      - harbor-network
\ No newline at end of file
diff --git a/compose.x.perplexideez.ollama.yml b/compose.x.perplexideez.ollama.yml
new file mode 100644
index 0000000..1025d5e
--- /dev/null
+++ b/compose.x.perplexideez.ollama.yml
@@ -0,0 +1,5 @@
+services:
+  perplexideez:
+    environment:
+      - LLM_MODE=ollama
+      - OLLAMA_URL=${HARBOR_OLLAMA_INTERNAL_URL}
\ No newline at end of file
diff --git a/compose.x.perplexideez.searxng.yml b/compose.x.perplexideez.searxng.yml
new file mode 100644
index 0000000..44cda2c
--- /dev/null
+++ b/compose.x.perplexideez.searxng.yml
@@ -0,0 +1,4 @@
+services:
+  perplexideez:
+    environment:
+      - SEARXNG_URL=http://searxng:8080
\ No newline at end of file
diff --git a/compose.x.promptfoo.ollama.yml b/compose.x.promptfoo.ollama.yml
new file mode 100644
index 0000000..a010c42
--- /dev/null
+++ b/compose.x.promptfoo.ollama.yml
@@ -0,0 +1,5 @@
+services:
+  promptfoo:
+    environment:
+      - OLLAMA_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}
+      - OLLAMA_API_KEY=sk-ollama
\ No newline at end of file
diff --git a/harbor.sh b/harbor.sh
index 9066a0f..6aa4858 100755
--- a/harbor.sh
+++ b/harbor.sh
@@ -1778,7 +1778,16 @@ hf_spec_2_folder_spec() {
 
 docker_fsacl() {
   local folder=$1
-  sudo setfacl --recursive -m user:1000:rwx $folder && sudo setfacl --recursive -m user:1002:rwx $folder && sudo setfacl --recursive -m user:1001:rwx $folder
+  log_debug "fsacl: $folder"
+
+  # 1000, 1001, 1002 - most common default user IDs on Debian
+  # 100 - most common default on Alpine
+  # 911 - the "abc" user from LinuxServer.io images
+  sudo setfacl --recursive -m user:1000:rwx $folder \
+    && sudo setfacl --recursive -m user:1002:rwx $folder \
+    && sudo setfacl --recursive -m user:1001:rwx $folder \
+    && sudo setfacl --recursive -m user:100:rwx $folder \
+    && sudo setfacl --recursive -m user:911:rwx $folder
 }
 
 run_fixfs() {
@@ -2072,6 +2081,13 @@ run_harbor_env() {
   local env_file="$service/override.env"
 
   log_debug "'env' $env_file - $mgr_cmd $env_var $env_val"
+
+  if [ ! -f "$env_file" ]; then
+    log_error "Unknown service: $service. Please provide a valid service name."
+    return 1
+  fi
+
+  env_manager --env-file "$env_file" --prefix "" "$mgr_cmd" "$env_var" "$env_val"
 }
@@ -3595,17 +3611,83 @@ run_k6_command() {
   k6 run "$@"
 }
 
+run_promptfoo_command() {
+  local services=$(get_active_services)
+  log_debug "Active services: $services"
+
+  # Check if the specified service is running
+  if ! echo "$services" | grep -q "promptfoo"; then
+    log_debug "Promptfoo backend stopped, launching..."
+    run_up --no-defaults promptfoo
+  else
+    log_debug "Promptfoo backend already running."
+  fi
+
+  case "$1" in
+    view|open|o)
+      shift
+      run_open promptfoo
+      return 0
+      ;;
+  esac
+
+  $(compose_with_options $services "promptfoo") run \
+    --rm \
+    -it \
+    --name $default_container_prefix.promptfoo-cli-$RANDOM \
+    -e "TERM=xterm-256color" \
+    -v "$original_dir:$original_dir" \
+    --workdir "$original_dir" \
+    --entrypoint promptfoo \
+    promptfoo "$@"
+}
+
+run_webtop_command() {
+  local services=$(get_active_services)
+  local is_running=false
+
+  if echo "$services" | grep -q "webtop"; then
+    is_running=true
+  fi
+
+  case "$1" in
+    reset)
+      shift
+      # Just in case
+      run_down webtop
+      # Cleanup data directory
+      local data_dir=$(env_manager get webtop.workspace)
+      log_info "Deleting Webtop workspace at '$data_dir'"
+      rm -rf $data_dir
+      return 0
+      ;;
+  esac
+
+  if [ "$is_running" = true ] ; then
+    log_error "Webtop is already running, use 'harbor exec webtop' to interact with it."
+    return 1
+  fi
+
+  $(compose_with_options $services "webtop") run \
+    --rm \
+    --service-ports \
+    --name $default_container_prefix.webtop-cli-$RANDOM \
+    -e "TERM=xterm-256color" \
+    -v "$original_dir:$original_dir" \
+    --workdir "$original_dir" \
+    webtop with-contenv "$@"
+}
+
 # ========================================================================
 # == Main script
 # ========================================================================
 
 # Globals
-version="0.2.16"
+version="0.2.17"
 harbor_repo_url="https://github.com/av/harbor.git"
 harbor_release_url="https://api.github.com/repos/av/harbor/releases/latest"
 delimiter="|"
 scramble_exit_code=42
-harbor_home=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
+harbor_home=${HARBOR_HOME:-$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")}
 profiles_dir="$harbor_home/profiles"
 default_profile="$profiles_dir/default.env"
 default_current_env="$harbor_home/.env"
@@ -3881,6 +3963,14 @@ main_entrypoint() {
       shift
       run_k6_command "$@"
       ;;
+    promptfoo|pf)
+      shift
+      run_promptfoo_command "$@"
+      ;;
+    webtop)
+      shift
+      run_webtop_command "$@"
+      ;;
     tunnel | t)
       shift
       establish_tunnel "$@"
diff --git a/http-catalog/perplexideez.http b/http-catalog/perplexideez.http
new file mode 100644
index 0000000..41c9f0a
--- /dev/null
+++ b/http-catalog/perplexideez.http
@@ -0,0 +1,10 @@
+@host = http://localhost:34261
+
+###
+
+POST /auth/signUp?email=perplexideez%40gmail.com&password=perplexideez&username=perplexideez
+Content-Type: application/x-www-form-urlencoded
+
+###
+
+GET /
\ No newline at end of file
diff --git a/omniparser/Dockerfile b/omniparser/Dockerfile
new file mode 100644
index 0000000..608fd06
--- /dev/null
+++ b/omniparser/Dockerfile
@@ -0,0 +1,92 @@
+# Dockerfile for OmniParser with GPU and OpenGL support.
+#
+# Base: nvidia/cuda:12.3.1-devel-ubuntu22.04
+# Features:
+# - Python 3.12 with Miniconda environment.
+# - Git LFS for large file support.
+# - Required libraries: OpenCV, Hugging Face, Gradio, OpenGL.
+# - Gradio server on port 7861.
+#
+# 1. Build the image with CUDA support.
+# Example:
+# ```bash
+# sudo docker build -t omniparser .
+# ```
+#
+# 2. Run the Docker container with GPU access and port mapping for Gradio.
+# Example:
+# ```bash
+# sudo docker run -d -p 7861:7861 --gpus all --name omniparser-container omniparser
+# ```
+
+FROM nvidia/cuda:12.3.1-devel-ubuntu22.04
+
+# Install system dependencies with explicit OpenGL libraries
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \
+    git \
+    git-lfs \
+    wget \
+    libgl1 \
+    libglib2.0-0 \
+    libsm6 \
+    libxext6 \
+    libxrender1 \
+    libglu1-mesa \
+    python3-opencv \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/* \
+    && git lfs install
+
+# Install Miniconda for Python 3.12
+RUN wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh && \
+    bash miniconda.sh -b -p /opt/conda && \
+    rm miniconda.sh
+ENV PATH="/opt/conda/bin:$PATH"
+
+# Create and activate Conda environment with Python 3.12, and set it as the default
+RUN conda create -n omni python=3.12 && \
+    echo "source activate omni" > ~/.bashrc
+ENV CONDA_DEFAULT_ENV=omni
+ENV PATH="/opt/conda/envs/omni/bin:$PATH"
+
+RUN git clone https://github.com/OpenAdaptAI/OmniParser.git /usr/src/app
+
+# Set the working directory in the container
+WORKDIR /usr/src/app
+
+# Initialize Git LFS and pull LFS files
+RUN git lfs install && \
+    git lfs pull
+
+# Install dependencies from requirements.txt with specific opencv-python-headless version
+RUN . /opt/conda/etc/profile.d/conda.sh && conda activate omni && \
+    pip uninstall -y opencv-python opencv-python-headless && \
+    pip install --no-cache-dir opencv-python-headless==4.8.1.78 && \
+    pip install -r requirements.txt && \
+    pip install huggingface_hub
+
+# Run download.py to fetch model weights and convert safetensors to .pt format
+RUN . /opt/conda/etc/profile.d/conda.sh && conda activate omni && \
+    python download.py && \
+    echo "Contents of weights directory:" && \
+    ls -lR weights && \
+    python weights/convert_safetensor_to_pt.py
+
+# Expose the default Gradio port
+EXPOSE 7861
+
+# Configure Gradio to be accessible externally
+ENV GRADIO_SERVER_NAME="0.0.0.0"
+
+# Make the entrypoint script from the cloned repo executable
+RUN chmod +x /usr/src/app/entrypoint.sh
+
+# To debug, keep the container running
+# CMD ["tail", "-f", "/dev/null"]
+
+# Set the entrypoint
+ENTRYPOINT ["/usr/src/app/entrypoint.sh"]
\ No newline at end of file
diff --git a/omniparser/override.env b/omniparser/override.env
new file mode 100644
index 0000000..e69de29
diff --git a/package.json b/package.json
index 4b0e5f2..55f50f7 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@avcodes/harbor",
-  "version": "0.2.16",
+  "version": "0.2.17",
   "description": "Effortlessly run LLM backends, APIs, frontends, and services with one command.",
   "private": false,
   "author": "av (https://av.codes)",
diff --git a/perplexideez/override.env b/perplexideez/override.env
new file mode 100644
index 0000000..5d8d6b2
--- /dev/null
+++ b/perplexideez/override.env
@@ -0,0 +1,56 @@
+# This file can be used for additional environment variables
+# specifically for the Perplexideez service.
+# You can also use the "harbor env" command to set these variables.
+
+###########
+### APP ###
+###########
+# Make sure this matches the URL you are using.
+PUBLIC_BASE_URL="https://localhost:34261"
+# ORIGIN="http://localhost:34261"
+
+DATABASE_URL="postgresql://postgres:postgres@perplexideez-db:5432/postgres?schema=public"
+# CHANGE THIS !!!!!!
+AUTH_SECRET="sk-harbor-perplexideez"
+# CHANGE THIS !!!!!!
+RATE_LIMIT_SECRET="sk-harbor-perplexideez-rate-limit"
+DISABLE_SIGN_UP="false"
+LOG_LEVEL="debug"
+LOG_MODE="json"
+METRICS_PORT="9001"
+
+############
+### SSO ###
+############
+# Set up your OIDC provider here
+OIDC_CLIENT_ID="client-id"
+OIDC_CLIENT_SECRET="client-secret"
+OIDC_ISSUER="https://auth.authentik.com/application/o/perplexideez/.well-known/openid-configuration"
+OIDC_SCOPES="openid email profile"
+PUBLIC_OIDC_NAME="Name Shown in the UI"
+DISABLE_PASSWORD_LOGIN="false"
+
+###############
+### SEARXNG ###
+###############
+REDIS_URL="redis://redis:6379/0"
+SEARXNG_URL="http://searxng:8080"
+
+###########
+### LLM ###
+###########
+
+#### LLM_MODE=ollama is set via compose.x.perplexideez.ollama.yml, or:
+# LLM_MODE="openai"
+# OPENAI_BASE_URL=""
+# OPENAI_API_KEY=""
+####
+# Change these to the models you want to use
+LLM_SPEED_MODEL="gemma2:2b"
+LLM_BALANCED_MODEL="llama3.1:latest"
+LLM_QUALITY_MODEL="qwen2.5:32b"
+LLM_EMBEDDINGS_MODEL="nomic-embed-text:latest"
+LLM_TITLE_MODEL="llama3.1:latest"
+LLM_EMOJI_MODEL="llama3.1:latest"
+LLM_IMAGE_SEARCH_MODEL="llama3.1:latest"
+LLM_VIDEO_SEARCH_MODEL="llama3.1:latest"
\ No newline at end of file
diff --git a/profiles/default.env b/profiles/default.env
index f23d389..b7b3444 100644
--- a/profiles/default.env
+++ b/profiles/default.env
@@ -451,6 +451,29 @@ HARBOR_K6_INFLUXDB_DB="k6"
 HARBOR_K6_GRAFANA_IMAGE="grafana/grafana"
 HARBOR_K6_GRAFANA_VERSION="10.2.4"
 
+# Promptfoo
+HARBOR_PROMPTFOO_HOST_PORT=34241
+HARBOR_PROMPTFOO_IMAGE="ghcr.io/promptfoo/promptfoo"
+HARBOR_PROMPTFOO_VERSION="latest"
+HARBOR_PROMPTFOO_REMOTE_API_BASE_URL=http://promptfoo:3000
+HARBOR_PROMPTFOO_REMOTE_APP_BASE_URL=http://localhost:34241
+HARBOR_PROMPTFOO_WORKSPACE="./promptfoo/data"
+
+# Webtop
+HARBOR_WEBTOP_HOST_PORT=34251
+HARBOR_WEBTOP_IMAGE="lscr.io/linuxserver/webtop"
+HARBOR_WEBTOP_VERSION="ubuntu-kde"
+HARBOR_WEBTOP_WORKSPACE="./webtop/data"
+
+# Perplexideez
+HARBOR_PERPLEXIDEEZ_HOST_PORT=34261
+HARBOR_PERPLEXIDEEZ_IMAGE="ghcr.io/brunostjohn/perplexideez/app"
+HARBOR_PERPLEXIDEEZ_VERSION="latest"
+HARBOR_PERPLEXIDEEZ_MIGRATE_IMAGE="ghcr.io/brunostjohn/perplexideez/migrate:latest"
+
+# Omniparser
+HARBOR_OMNIPARSER_HOST_PORT=34271
+
 # ============================================
 # Service Configuration.
 # You can specify any of the service's own environment variables here.
diff --git a/promptfoo/.gitignore b/promptfoo/.gitignore
new file mode 100644
index 0000000..adbb97d
--- /dev/null
+++ b/promptfoo/.gitignore
@@ -0,0 +1 @@
+data/
\ No newline at end of file
diff --git a/promptfoo/examples/hello-promptfoo/README.md b/promptfoo/examples/hello-promptfoo/README.md
new file mode 100644
index 0000000..a128bdf
--- /dev/null
+++ b/promptfoo/examples/hello-promptfoo/README.md
@@ -0,0 +1,10 @@
+To get started, set your OPENAI_API_KEY environment variable, or other required keys for the providers you selected.
+
+Next, edit promptfooconfig.yaml.
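+
+For example, a minimal providers entry that points the eval at a local
+Ollama model (a sketch - any model tag you have pulled will work):
+
+```yaml
+providers:
+  - "ollama:llama3.1:8b"
+```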
+
+Then run:
+```
+promptfoo eval
+```
+
+Afterwards, you can view the results by running `promptfoo view`
diff --git a/promptfoo/examples/hello-promptfoo/promptfooconfig.yaml b/promptfoo/examples/hello-promptfoo/promptfooconfig.yaml
new file mode 100644
index 0000000..60cd7a5
--- /dev/null
+++ b/promptfoo/examples/hello-promptfoo/promptfooconfig.yaml
@@ -0,0 +1,39 @@
+# yaml-language-server: $schema=https://promptfoo.dev/config-schema.json
+
+# Learn more about building a configuration: https://promptfoo.dev/docs/configuration/guide
+
+description: "My eval"
+
+prompts:
+  - "Write a tweet about {{topic}}"
+  - "Write a concise, funny tweet about {{topic}}"
+
+
+providers:
+  - "ollama:llama3.1:8b"
+
+tests:
+  - vars:
+      topic: bananas
+
+  - vars:
+      topic: avocado toast
+    assert:
+      # For more information on assertions, see https://promptfoo.dev/docs/configuration/expected-outputs
+
+      # Make sure output contains the word "avocado"
+      - type: icontains
+        value: avocado
+
+      # Prefer shorter outputs
+      - type: javascript
+        value: 1 / (output.length + 1)
+
+  - vars:
+      topic: new york city
+    assert:
+      # For more information on model-graded evals, see https://promptfoo.dev/docs/configuration/expected-outputs/model-graded
+      - type: llm-rubric
+        value: ensure that the output is funny
+        provider: ollama:llama3.1:8b
+
diff --git a/promptfoo/examples/temp-test/README.md b/promptfoo/examples/temp-test/README.md
new file mode 100644
index 0000000..a128bdf
--- /dev/null
+++ b/promptfoo/examples/temp-test/README.md
@@ -0,0 +1,10 @@
+To get started, set your OPENAI_API_KEY environment variable, or other required keys for the providers you selected.
+
+Next, edit promptfooconfig.yaml.
+
+Then run:
+```
+promptfoo eval
+```
+
+Afterwards, you can view the results by running `promptfoo view`
diff --git a/promptfoo/examples/temp-test/promptfooconfig.yaml b/promptfoo/examples/temp-test/promptfooconfig.yaml
new file mode 100644
index 0000000..389fde7
--- /dev/null
+++ b/promptfoo/examples/temp-test/promptfooconfig.yaml
@@ -0,0 +1,64 @@
+# yaml-language-server: $schema=https://promptfoo.dev/config-schema.json
+
+# Learn more about building a configuration: https://promptfoo.dev/docs/configuration/guide
+
+description: "My eval"
+
+prompts:
+  - "Write a concise, funny tweet about {{topic}}"
+
+providers:
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.0"
+    config:
+      temperature: 0.0
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.1"
+    config:
+      temperature: 0.1
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.2"
+    config:
+      temperature: 0.2
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.3"
+    config:
+      temperature: 0.3
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.4"
+    config:
+      temperature: 0.4
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.5"
+    config:
+      temperature: 0.5
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.6"
+    config:
+      temperature: 0.6
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.7"
+    config:
+      temperature: 0.7
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.8"
+    config:
+      temperature: 0.8
+  - id: ollama:llama3.1:8b
+    label: "l3.1-0.9"
+    config:
+      temperature: 0.9
+  - id: ollama:llama3.1:8b
+    label: "l3.1-1.0"
+    config:
+      temperature: 1.0
+
+tests:
+  - vars:
+      topic: new york city
+    assert:
+      # For more information on model-graded evals, see https://promptfoo.dev/docs/configuration/expected-outputs/model-graded
+      - type: llm-rubric
+        value: ensure that the output is funny
+        provider: ollama:llama3.1:8b
+
diff --git a/promptfoo/override.env b/promptfoo/override.env
new file mode 100644
index 0000000..2688af4
--- /dev/null
+++ b/promptfoo/override.env
@@ -0,0 +1,3 @@
+# This file can be used for additional environment variables
+# specifically for the Promptfoo CLI.
+# You can also use the "harbor env" command to set these variables.
diff --git a/pyproject.toml b/pyproject.toml
index dcd8865..f421d7a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "llm-harbor"
-version = "0.2.16"
+version = "0.2.17"
 description = "Effortlessly run LLM backends, APIs, frontends, and services with one command."
 repository = "https://github.com/av/harbor"
 documentation = "https://github.com/av/harbor/wiki"
diff --git a/searxng/settings.yml.new b/searxng/settings.yml.new
index 4077113..4f0f1f6 100644
--- a/searxng/settings.yml.new
+++ b/searxng/settings.yml.new
@@ -35,6 +35,9 @@ search:
   autocomplete: ""
   # minimun characters to type before autocompleter starts
   autocomplete_min: 4
+  # backend for the favicon near URL in search results.
+  # Available resolvers: "allesedv", "duckduckgo", "google", "yandex" - leave blank to turn it off by default.
+  favicon_resolver: ""
   # Default search language - leave blank to detect from browser information or
   # use codes from 'languages.py'
   default_lang: "auto"
@@ -88,7 +91,7 @@ server:
   # If your instance owns a /etc/searxng/settings.yml file, then set the following
   # values there.
 
-  secret_key: "c6ece0a034f24958b6bda05542fb4d6e6f3a4d66cdb42d455222ce526e6ed038"  # Is overwritten by ${SEARXNG_SECRET}
+  secret_key: "e77a4285a0681629c9c5f39336850d9b228d4f92580e08584a42b13be03b5bcf"  # Is overwritten by ${SEARXNG_SECRET}
   # Proxy image results through SearXNG. Is overwritten by ${SEARXNG_IMAGE_PROXY}
   image_proxy: false
   # 1.0 and 1.1 are supported
@@ -219,19 +222,16 @@ outgoing:
 #
 # enabled_plugins:
 #   # these plugins are enabled if nothing is configured ..
+#   - 'Basic Calculator'
 #   - 'Hash plugin'
 #   - 'Self Information'
 #   - 'Tracker URL remover'
+#   - 'Unit converter plugin'
 #   - 'Ahmia blacklist'  # activation depends on outgoing.using_tor_proxy
 #   # these plugins are disabled if nothing is configured ..
 #   - 'Hostnames plugin'  # see 'hostnames' configuration below
-#   - 'Basic Calculator'
 #   - 'Open Access DOI rewrite'
 #   - 'Tor check plugin'
-#   # Read the docs before activate: auto-detection of the language could be
-#   # detrimental to users expectations / users can activate the plugin in the
-#   # preferences if they want.
-#   - 'Autodetect search language'
 
 # Configuration of the "Hostnames plugin":
 #
@@ -482,6 +482,23 @@ engines:
     # to show premium or plus results too:
     #  skip_premium: false
 
+  - name: cloudflareai
+    engine: cloudflareai
+    shortcut: cfai
+    # get api token and accont id from https://developers.cloudflare.com/workers-ai/get-started/rest-api/
+    cf_account_id: 'your_cf_accout_id'
+    cf_ai_api: 'your_cf_api'
+    # create your ai gateway by https://developers.cloudflare.com/ai-gateway/get-started/creating-gateway/
+    cf_ai_gateway: 'your_cf_ai_gateway_name'
+    # find the model name from https://developers.cloudflare.com/workers-ai/models/#text-generation
+    cf_ai_model: 'ai_model_name'
+    # custom your preferences
+    # cf_ai_model_display_name: 'Cloudflare AI'
+    # cf_ai_model_assistant: 'prompts_for_assistant_role'
+    # cf_ai_model_system: 'prompts_for_system_role'
+    timeout: 30
+    disabled: true
+
   # - name: core.ac.uk
   #   engine: core
   #   categories: science
@@ -562,33 +579,6 @@ engines:
     categories: general
     shortcut: cc
 
-  - name: bahnhof
-    engine: json_engine
-    search_url: https://www.bahnhof.de/api/stations/search/{query}
-    url_prefix: https://www.bahnhof.de/
-    url_query: slug
-    title_query: name
-    content_query: state
-    shortcut: bf
-    disabled: true
-    about:
-      website: https://www.bahn.de
-      wikidata_id: Q22811603
-      use_official_api: false
-      require_api_key: false
-      results: JSON
-    language: de
-    tests:
-      bahnhof:
-        matrix:
-          query: berlin
-          lang: en
-        result_container:
-          - not_empty
-          - ['one_title_contains', 'Berlin Hauptbahnhof']
-        test:
-          - unique_results
-
   - name: deezer
     engine: deezer
     shortcut: dz
@@ -834,24 +824,21 @@ engines:
     timeout: 10
 
   - name: gitlab
-    engine: json_engine
-    paging: true
-    search_url: https://gitlab.com/api/v4/projects?search={query}&page={pageno}
-    url_query: web_url
-    title_query: name_with_namespace
-    content_query: description
-    page_size: 20
-    categories: [it, repos]
+    engine: gitlab
+    base_url: https://gitlab.com
     shortcut: gl
-    timeout: 10.0
     disabled: true
     about:
-      website: https://about.gitlab.com/
+      website: https://gitlab.com/
       wikidata_id: Q16639197
-      official_api_documentation: https://docs.gitlab.com/ee/api/
-      use_official_api: false
-      require_api_key: false
-      results: JSON
+
+  # - name: gnome
+  #   engine: gitlab
+  #   base_url: https://gitlab.gnome.org
+  #   shortcut: gn
+  #   about:
+  #     website: https://gitlab.gnome.org
+  #     wikidata_id: Q44316
 
   - name: github
     engine: github
@@ -930,26 +917,6 @@ engines:
     shortcut: mi
     disabled: true
 
-  - name: gpodder
-    engine: json_engine
-    shortcut: gpod
-    timeout: 4.0
-    paging: false
-    search_url: https://gpodder.net/search.json?q={query}
-    url_query: url
-    title_query: title
-    content_query: description
-    page_size: 19
-    categories: music
-    disabled: true
-    about:
-      website: https://gpodder.net
-      wikidata_id: Q3093354
-      official_api_documentation: https://gpoddernet.readthedocs.io/en/latest/api/
-      use_official_api: false
-      requires_api_key: false
-      results: JSON
-
   - name: habrahabr
     engine: xpath
     paging: true
@@ -1313,6 +1280,12 @@ engines:
       require_api_key: false
       results: JSON
 
+  - name: openlibrary
+    engine: openlibrary
+    shortcut: ol
+    timeout: 5
+    disabled: true
+
   - name: openmeteo
     engine: open_meteo
     shortcut: om
@@ -1575,6 +1548,24 @@ engines:
     page_size: 25
     disabled: true
 
+  - name: right dao
+    engine: xpath
+    paging: true
+    page_size: 12
+    search_url: https://rightdao.com/search?q={query}&start={pageno}
+    results_xpath: //div[contains(@class, "description")]
+    url_xpath: ../div[contains(@class, "title")]/a/@href
+    title_xpath: ../div[contains(@class, "title")]
+    content_xpath: .
+    categories: general
+    shortcut: rd
+    disabled: true
+    about:
+      website: https://rightdao.com/
+      use_official_api: false
+      require_api_key: false
+      results: HTML
+
   - name: rottentomatoes
     engine: rottentomatoes
     shortcut: rt
@@ -1814,6 +1805,22 @@ engines:
     engine: unsplash
     shortcut: us
 
+  - name: yandex
+    engine: yandex
+    categories: general
+    search_type: web
+    shortcut: yd
+    disabled: true
+    inactive: true
+
+  - name: yandex images
+    engine: yandex
+    categories: images
+    search_type: images
+    shortcut: ydi
+    disabled: true
+    inactive: true
+
   - name: yandex music
     engine: yandex_music
     shortcut: ydm
@@ -1862,25 +1869,6 @@ engines:
     about:
       website: https://wiby.me/
 
-  - name: alexandria
-    engine: json_engine
-    shortcut: alx
-    categories: general
-    paging: true
-    search_url: https://api.alexandria.org/?a=1&q={query}&p={pageno}
-    results_query: results
-    title_query: title
-    url_query: url
-    content_query: snippet
-    timeout: 1.5
-    disabled: true
-    about:
-      website: https://alexandria.org/
-      official_api_documentation: https://github.com/alexandria-org/alexandria-api/raw/master/README.md
-      use_official_api: true
-      require_api_key: false
-      results: JSON
-
   - name: wikibooks
     engine: mediawiki
     weight: 0.5
@@ -2049,6 +2037,16 @@ engines:
   #     query_str: 'SELECT * from mytable WHERE fieldname=%(query)s'
   #     shortcut: mysql
 
+  # Required dependency: mariadb
+  # - name: mariadb
+  #   engine: mariadb_server
+  #   database: mydatabase
+  #   username: user
+  #   password: pass
+  #   limit: 10
+  #   query_str: 'SELECT * from mytable WHERE fieldname=%(query)s'
+  #   shortcut: mdb
+
   - name: 1337x
     engine: 1337x
     shortcut: 1337x
diff --git a/webtop/.gitignore b/webtop/.gitignore
new file mode 100644
index 0000000..adbb97d
--- /dev/null
+++ b/webtop/.gitignore
@@ -0,0 +1 @@
+data/
\ No newline at end of file
diff --git a/webtop/Dockerfile b/webtop/Dockerfile
new file mode 100644
index 0000000..06beb4f
--- /dev/null
+++ b/webtop/Dockerfile
@@ -0,0 +1,40 @@
+ARG HARBOR_WEBTOP_IMAGE=lscr.io/linuxserver/webtop
+ARG HARBOR_WEBTOP_VERSION=ubuntu-kde
+
+FROM ${HARBOR_WEBTOP_IMAGE}:${HARBOR_WEBTOP_VERSION}
+
+# Extras for desktop agents
+RUN sudo apt-get update \
+  && sudo apt-get install -y \
+    curl \
+    jq \
+    wget \
+    imagemagick \
+    at-spi2-core \
+    wmctrl \
+    libreoffice \
+    fonts-noto-cjk \
+    neofetch \
+    python3-requests \
+    python3-numpy \
+    python3-pandas \
+    python3-matplotlib \
+    python3-scipy \
+    python3-sklearn \
+    python3-reportlab
+
+
+RUN curl -L https://npmjs.org/install.sh | sh
+RUN npm install -g --unsafe-perm yarn
+RUN sudo chown -R abc:abc /usr/lib/node_modules
+
+# Pre-provision Harbor
+# 1. Install the App
+USER abc
+RUN LATEST_DEB_URL=$(curl -s "https://api.github.com/repos/av/harbor/releases/latest" | jq -r '.assets[] | select(.name | contains("Harbor") and endswith(".deb")) | .browser_download_url') \
+  && mkdir -p /tmp/harbor \
+  && wget -O /tmp/harbor/harbor.deb "$LATEST_DEB_URL" \
+  && sudo apt-get install -y /tmp/harbor/harbor.deb
+# 2. Prepare workspace for the CLI mount
+USER root
+RUN mkdir /harbor
\ No newline at end of file
diff --git a/webtop/init/fix_desktop_app.sh b/webtop/init/fix_desktop_app.sh
new file mode 100755
index 0000000..2c2a274
--- /dev/null
+++ b/webtop/init/fix_desktop_app.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+# Update the desktop entry to use absolute path
+exec_patch="Exec=env PATH=$HOME/.local/bin:/config/.local/bin:$PATH harbor-app"
+
+echo "Updating Harbor.desktop"
+
+# Update original desktop entry
+sed -i "s|^Exec=harbor-app$|Exec=$exec_patch|" "/usr/share/applications/Harbor.desktop"
diff --git a/webtop/init/provision_docker_groups.sh b/webtop/init/provision_docker_groups.sh
new file mode 100755
index 0000000..745ec7f
--- /dev/null
+++ b/webtop/init/provision_docker_groups.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+log() {
+  if [ "$HARBOR_LOG_LEVEL" == "DEBUG" ]; then
+    echo "[harbor-init] $1"
+  fi
+}
+
+log "Provisioning docker groups"
+# Create groups matching the docker socket GIDs of common host distros
+sudo groupadd -g 999 docker-ubuntu
+sudo groupadd -g 992 docker-lsio
+sudo groupadd -g 130 docker-plex
+
+log "Adding lsio user to docker groups"
+# Add "abc" both by group name and by raw GID, in case the
+# groupadd above failed because the GID was already taken
+sudo usermod -aG docker-ubuntu abc
+sudo usermod -aG 999 abc
+
+sudo usermod -aG docker-lsio abc
+sudo usermod -aG 992 abc
+
+sudo usermod -aG docker-plex abc
+sudo usermod -aG 130 abc
\ No newline at end of file
diff --git a/webtop/override.env b/webtop/override.env
new file mode 100644
index 0000000..6d97450
--- /dev/null
+++ b/webtop/override.env
@@ -0,0 +1,3 @@
+# This file can be used for additional environment variables
+# specifically for the Webtop service.
+# You can also use the "harbor env" command to set these variables.
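+#
+# For example, assuming the "harbor env <service> <key> <value>" form from
+# harbor.sh, and CUSTOM_USER (an upstream linuxserver.io Webtop variable):
+#   harbor env webtop CUSTOM_USER desktop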