diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..aff8bab --- /dev/null +++ b/.dockerignore @@ -0,0 +1,2 @@ +/db +/redis \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..2afcde7 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/app/src" + schedule: + interval: "daily" + open-pull-requests-limit: 0 + - package-ecosystem: "docker" + directory: "/app/envs/prod" + schedule: + interval: "weekly" + open-pull-requests-limit: 0 diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml new file mode 100644 index 0000000..8323843 --- /dev/null +++ b/.github/workflows/cd.yml @@ -0,0 +1,25 @@ +name: Deploy to AWS on push to certain branches + +on: + push: + branches: + - 'deploy-*' + +jobs: + deploy: + env: + AWS_ACCESS_KEY_ID: ${{ secrets.DEPLOYMENT_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.DEPLOYMENT_AWS_SECRET_ACCESS_KEY }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: deploy to aws + run: | + set -e + export ENVIRONMENT=${GITHUB_REF_NAME:7} + ./deploy-to-aws.sh $ENVIRONMENT + export TAG=deployed-${ENVIRONMENT}-`date -u +"%Y-%m-%dT%H.%M.%S"` + git tag $TAG + git push origin $TAG \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..3b9082b --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,52 @@ +name: Run linter and tests + +on: + push: + branches: [master, main] + pull_request: + branches: [master, main] + +env: + PYTHON_DEFAULT_VERSION: "3.11" + +jobs: + linter: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }} + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON_DEFAULT_VERSION }} + cache: "pip" + - name: Install dependencies + run: python -m pip install --upgrade nox 'pdm>=2.12,<3' + - name: Run linters + run: nox -vs lint + test: + timeout-minutes: 10 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Set up Python ${{ env.PYTHON_DEFAULT_VERSION }} + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON_DEFAULT_VERSION }} + cache: "pip" + - name: Install dependencies + run: python -m pip install --upgrade nox 'pdm>=2.12,<3' + - name: Prepare environment + run: ./setup-dev.sh + - name: Run dockerized services + run: docker compose up -d --wait + - name: Run migrations + run: cd app/src && pdm run python manage.py wait_for_database --timeout 120 && pdm run python manage.py migrate + - name: Run unit tests + run: nox -vs test + - name: Stop dockerized services + if: success() || failure() + run: docker compose down -v diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5c82755 --- /dev/null +++ b/.gitignore @@ -0,0 +1,19 @@ +*.pyc +*.sqlite3 +*~ +*.egg-info/ +/docker-compose.yml +/.idea/ +/redis/ +/db/ +.env +.venv +venv +media/ +.backups/ +.envrc +.pdm-python +.terraform.lock.hcl +.terraform/ +.nox/ +__pycache__ diff --git a/.shellcheckrc b/.shellcheckrc new file mode 100644 index 0000000..678311c --- /dev/null +++ b/.shellcheckrc @@ -0,0 +1,6 @@ +# disable common false-positive errors to ease adoption in existing projects +disable=SC1090-SC1092 # disable errors related to sourcing files +disable=SC2034 # disable errors related to unused variables +disable=SC2028 # 
swapping echo for printf requires more testing to ensure correctness
+# disable errors related to cookiecutter templating:
+disable=SC1054,SC1056,SC1072,SC1073,SC1083,SC1009
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..9efd100
--- /dev/null
+++ b/README.md
@@ -0,0 +1,190 @@
+# bittensor-panel
+
+Bittensor admin panel
+
+- - -
+
+The skeleton of this project was generated with `cookiecutter-rt-django`, which sometimes gets upgrades that are easy to retrofit into projects generated from its earlier versions.
+
+# Base requirements
+
+- docker with [compose plugin](https://docs.docker.com/compose/install/linux/)
+- python 3.11
+- [pdm](https://pdm-project.org)
+- [nox](https://nox.thea.codes)
+
+# Setup development environment
+
+```sh
+./setup-dev.sh
+docker compose up -d
+cd app/src
+pdm run python manage.py wait_for_database --timeout 10
+pdm run python manage.py migrate
+pdm run python manage.py runserver
+```
+
+# Setup production environment (git deployment)
+
+This sets up "deployment by pushing to git storage on remote", so that:
+
+- `git push origin ...` just pushes code to GitHub / other storage without any consequences;
+- `git push production master` pushes code to a remote server running the app and triggers a git hook to redeploy the application.
+
+```
+Local .git ------------> Origin .git
+           \
+            ------> Production .git (redeploy on push)
+```
+
+- - -
+
+Use `ssh-keygen` to generate a key pair for the server, then grant read-only access to the repository in the "deployment keys" section (`ssh -A` is easy to use, but not safe).
+
+```sh
+# remote server
+mkdir -p ~/repos
+cd ~/repos
+git init --bare --initial-branch=master bittensor-panel.git
+
+mkdir -p ~/domains/bittensor-panel
+```
+
+```sh
+# locally
+git remote add production root@<server>:~/repos/bittensor-panel.git
+git push production master
+```
+
+```sh
+# remote server
+cd ~/repos/bittensor-panel.git
+
+cat <<'EOT' > hooks/post-receive
+#!/bin/bash
+unset GIT_INDEX_FILE
+export ROOT=/root
+export REPO=bittensor-panel
+while read oldrev newrev ref
+do
+  if [[ $ref =~ .*/master$ ]]; then
+    export GIT_DIR="$ROOT/repos/$REPO.git/"
+    export GIT_WORK_TREE="$ROOT/domains/$REPO/"
+    git checkout -f master
+    cd $GIT_WORK_TREE
+    ./deploy.sh
+  else
+    echo "Doing nothing: only the master branch may be deployed on this server."
+  fi
+done
+EOT
+
+chmod +x hooks/post-receive
+./hooks/post-receive
+cd ~/domains/bittensor-panel
+sudo bin/prepare-os.sh
+./setup-prod.sh
+
+# adjust the `.env` file
+
+mkdir letsencrypt
+./letsencrypt_setup.sh
+./deploy.sh
+```
+
+### Deploy another branch
+
+Only the `master` branch is used to redeploy the application.
+To deploy another branch, force-push it to the remote's `master`:
+
+```sh
+git push --force production local-branch-to-deploy:master
+```
+
+## Monitoring execution time of code blocks
+
+Somewhere, probably in `metrics.py`:
+
+```python
+some_calculation_time = prometheus_client.Histogram(
+    'some_calculation_time',
+    'How long it took to calculate something',
+    namespace='django',
+    unit='seconds',
+    labelnames=['task_type_for_example'],
+    buckets=[0.5, 1, *range(2, 30, 2), *range(30, 75, 5), *range(75, 135, 15)]
+)
+```
+
+Somewhere else:
+
+```python
+with some_calculation_time.labels('blabla').time():
+    do_some_work()
+```
+
+# AWS
+
+Initiate the infrastructure with Terraform:
+TODO
+
+To push a new version of the application to AWS, just push to a branch named `deploy-$(ENVIRONMENT_NAME)`.
+Typical values for `$(ENVIRONMENT_NAME)` are `prod` and `staging`.
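+For example, to deploy your current checkout to staging (following the branch convention above):
+
+```sh
+git push origin HEAD:deploy-staging
+```
+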
+For this to work, GitHub Actions needs to be provided with credentials for an account that has the following policies enabled:
+
+- AutoScalingFullAccess
+- AmazonEC2ContainerRegistryFullAccess
+- AmazonS3FullAccess
+
+See `.github/workflows/cd.yml` to find out the secret names.
+
+# Vultr
+
+Initiate the infrastructure with Terraform and cloud-init:
+
+- see the Terraform template in `/devops/vultr_tf/core/`
+- see the scripts for interacting with the Vultr API in `/devops/vultr_scripts/`
+  - note these scripts need `vultr-cli` installed
+
+- for more details see README_vultr.md
+
+# Setting up periodic backups
+
+Add to crontab:
+
+```sh
+# crontab -e
+30 0 * * * cd ~/domains/bittensor-panel && ./bin/backup-db.sh > ~/backup.log 2>&1
+```
+
+Set `BACKUP_LOCAL_ROTATE_KEEP_LAST` to keep only a specific number of the most recent backups in the local `.backups` directory.
+
+## Configuring offsite targets for backups
+
+Backups are put in the `.backups` directory locally; additionally, they can be stored offsite in the following ways:
+
+**Backblaze**
+
+Set in `.env` file:
+
+- `BACKUP_B2_BUCKET_NAME`
+- `BACKUP_B2_KEY_ID`
+- `BACKUP_B2_KEY_SECRET`
+
+**Email**
+
+Set in `.env` file:
+
+- `EMAIL_HOST`
+- `EMAIL_PORT`
+- `EMAIL_HOST_USER`
+- `EMAIL_HOST_PASSWORD`
+- `EMAIL_TARGET`
+
+# Restoring system from backup after a catastrophic failure
+
+1. Follow the instructions above to set up a new production environment
+2. Restore the database using `bin/restore-db.sh`
+3. See if everything works
+4. Set up backups on the new machine
+5. Make sure everything in `.env` is filled in: error reporting integration, email accounts, etc.
diff --git a/README_AWS.md b/README_AWS.md
new file mode 100644
index 0000000..050af38
--- /dev/null
+++ b/README_AWS.md
@@ -0,0 +1,137 @@
+# Deploying to AWS
+
+The deployment is split into two steps: provisioning the infrastructure (Terraform) and deploying the application image.
+
+Files related to AWS deployment have been generated in the `devops/` directory.
+
+By convention, projects that are meant to be deployed to AWS have a `deploy-to-aws.sh` script in the root dir and a `devops` directory.
+The script builds the docker image, uploads it and tells AWS to reload the app (causing a new EC2 machine to be spawned).
+In the `devops` directory you will find the Terraform configuration as well as the Packer files (for building the AMI).
+
+If you want to deploy your app to an AWS environment, you need to do the following:
+
+- configure your environment
+- create an infra S3 bucket
+- deploy `tf/core` (contains resources common to all environments in a given AWS account)
+- deploy the chosen `tf/main/envs/<env>` (by default staging and prod are generated)
+
+## Required software
+
+*AWS CLI*
+
+AWS recommends using profiles when dealing with multiple AWS accounts.
+To choose between environments, rather than switching access and secret keys, we just switch our profiles.
+We can choose our profile name, which makes it easier to recognize in which environment we operate.
+To configure the AWS environment, you need to have the AWS CLI installed.
+It is recommended to use AWS CLI v2, which can be downloaded from the AWS documentation.
+
+*Terraform* You will also need Terraform version 1.0.x. It is recommended to use `tfenv` to install the correct Terraform version.
+You can download and install it from its GitHub repository.
+
+*direnv* To avoid mistakes when switching environments (or regions), it is recommended to use the `direnv` tool, which supports loading environment variables from a `.envrc` file placed in the directory.
+You can read about it in the direnv documentation.
+
+## Configure your environment
+
+To configure your AWS profile, please run:
+
+```
+$ aws configure --profile <profile_name>
+```
+
+and answer the following questions:
+
+```
+AWS Access Key ID: ...
+AWS Secret Access Key: ...
+Default region name: us-east-1 (just an example)
+Default output format [None]: (can be left blank)
+```
+
+Once configured, you can switch your profile using the `AWS_PROFILE=<profile_name>` env variable or by adding the `--profile` option to your aws cli command.
+
+It's handy to create a .envrc file in the project root directory (where deploy-to-aws.sh is created) with the content:
+
+```
+export AWS_PROFILE=<profile_name>
+export AWS_REGION=<region>
+```
+
+and then accept the changes by using the command:
+
+```
+$ direnv allow
+```
+
+After doing that, anytime you enter the project directory, the correct profile will be loaded.
+
+## Configuring infra
+
+You only need to do this if you change anything in the `devops` directory (or if you mess something up in the AWS console and want to revert the changes).
+
+**Create infra bucket**
+
+Before being able to run terraform, we need to create the S3 bucket which will hold its state.
+This bucket is used by all environments and needs to be globally unique.
+
+To create the bucket, run:
+
+```
+aws s3 mb --region us-east-1 s3://bittensor-panel-qxnlar
+```
+
+The Terraform code has the following structure:
+
+```
+|- devops
+  |- tf
+    |- core
+    |- main
+      |- envs
+      |  |- staging
+      |  |- prod
+      |- modules
+```
+
+You can run terraform from the following directories:
+
+- core
+- envs/staging
+- envs/prod
+
+The *core* directory contains infrastructure code which needs to be created BEFORE pushing a docker image.
+It is responsible for creating the docker registries that you push docker images to.
+
+Code placed in *main* is the rest of the infrastructure, which is created after pushing the docker image.
+
+Each of the environments (and core) can be applied by executing:
+
+```
+terraform init
+terraform apply
+```
+
+IMPORTANT! The env variables for the apps (`.env` file) and `docker-compose.yml` are defined in terraform files; if you change any of them, you need to run `terraform apply` AND refresh the EC2 instance.
+The same goes for the AMI built by packer.
+
+## Adding secrets to the projects
+
+Cloud-init is configured to provision EC2 machines spun up as part of this project's infrastructure.
+As part of this provisioning, SSM parameters following a specific naming convention are read and saved as files in the EC2 home directory (RDS access details are managed in another way).
+The naming convention is `/application/bittensor-panel/{env}/{path_of_the_file_to_be_created}`, for example `/application/bittensor-panel/staging/.env`.
+A few such parameters are managed by terraform in this project (e.g. `.env`, `docker-compose.yml`) and more can be added.
+In case you need to add confidential files (like a GCP credentials file) you can simply create appropriate SSM parameters.
+These will only be accessible to people that have access to AWS or the EC2 machines, not to people who have access to this repository.
+One such parameter, namely `/application/bittensor-panel/{env}/secret.env`, is treated specially - if it exists (it doesn't by default), its contents are appended to `.env` during EC2 machine provisioning - this is a convenient way of supplying pieces of confidential information, like external systems' access keys, to `.env`.
+
+## Deploying apps
+
+The docker containers are built with the code you have locally, including any changes.
+Building requires docker.
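+A minimal invocation, assuming the `prod` environment set up above:
+
+```sh
+./deploy-to-aws.sh prod
+```
+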
+To successfully run `deploy-to-aws.sh` you first need to run `./setup-prod.sh`.
+It uses the AWS credentials of the profile named in the `AWS_PROFILE` variable.
+If you don't set this variable, the `default` profile will be used.
diff --git a/README_vultr.md b/README_vultr.md
new file mode 100644
index 0000000..d2a1150
--- /dev/null
+++ b/README_vultr.md
@@ -0,0 +1,56 @@
+# Deploying to Vultr
+
+
+Files related to Vultr deployment are in `devops/vultr_scripts/` and `devops/vultr_tf`.
+
+To use Terraform, you need to:
+- create an API key, which you can find in Vultr -> Account -> API
+- allow your IP in the Access Control section on the same page
+
+- to use SSH keys in Terraform, you need to create them in Vultr -> Account -> SSH Keys
+
+
+## Required software
+
+
+*Terraform* You will also need Terraform version 1.0.x. It is recommended to use `tfenv` to install the correct Terraform version.
+You can download and install it from its GitHub repository.
+
+*direnv* To avoid mistakes when switching environments (or regions), it is recommended to use the `direnv` tool, which supports loading environment variables from a `.envrc` file placed in the directory.
+You can read about it in the direnv documentation.
+
+
+(recommended) *Vultr CLI* to interact with Vultr instances post-deployment, e.g. to get their IP addresses and instance IDs, or to update their cloud-init data.
+
+## Configure your environment
+
+
+To deploy via Terraform, you have to fill in all the variables for cloud-init in `vultr-cloud-init.tftpl`.
+These variables can come from various sources; the recommended approach is to use environment variables in combination with `dotenv`.
+
+To use the Vultr CLI, you need the API key as well, again ideally in an environment variable.
+
+## Configuring infra
+
+You only need to do this if you change anything in the `devops/vultr_tf` directory.
+
+TODO - currently TF Vultr is not configured to use S3 buckets.
+
+
+```
+terraform init
+terraform apply
+```
+
+## Adding secrets to the projects
+
+The project uses a `.env` file in the same directory as `docker-compose.yml`, so any secrets should be sourced via this file.
+
+Do not commit secrets into the repository; the `.env` file can be updated via cloud-init, executed when a new machine is spawned or reinstalled. The cloud-init config is located in the Terraform directory: `vultr-cloud-init.tftpl`.
+
+After spawning the machines, the cloud-init data can be updated via the Vultr CLI, see `devops/vultr_scripts/vultr-update-cloudinit.py`. Updating the cloud data in Terraform would mean destroying & recreating all instances from scratch.
+
+
+## Deploying apps
+
+Deployment is executed via a `post-receive` hook in the git repository on each instance.
See `devops/vultr_scripts/vultr-deploy.py`.
diff --git a/app/envs/prod/Dockerfile b/app/envs/prod/Dockerfile
new file mode 100644
index 0000000..de535d2
--- /dev/null
+++ b/app/envs/prod/Dockerfile
@@ -0,0 +1,47 @@
+ARG BASE_IMAGE=python:3.11-slim
+
+
+FROM $BASE_IMAGE AS base-image
+LABEL builder=true
+
+WORKDIR /root/src/
+
+RUN pip3 install --no-cache-dir 'pdm>=2.12,<3'
+
+COPY pyproject.toml pdm.lock ./
+RUN pdm lock --check
+
+RUN apt-get update && apt-get install -y git
+
+
+RUN pdm config python.use_venv False && pdm sync --prod --group :all
+RUN mkdir -p /opt/ && mv __pypackages__/3.11/ /opt/pypackages/
+ENV PATH=/opt/pypackages/bin:$PATH
+ENV PYTHONPATH=/opt/pypackages/lib:$PYTHONPATH
+
+COPY ./app/src/ ./app/envs/prod/entrypoint.sh ./app/envs/prod/gunicorn.conf.py ./app/envs/prod/celery-entrypoint.sh ./app/envs/prod/prometheus-cleanup.sh /root/src/
+
+RUN python3 -m compileall -b -f -q /root/
+RUN ENV=prod ENV_FILL_MISSING_VALUES=1 SECRET_KEY=dummy python3 manage.py collectstatic --no-input --clear
+
+
+FROM $BASE_IMAGE AS secondary-image
+LABEL builder=false
+
+RUN apt-get update \
+  && apt-get install -y wget \
+  && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /root/src/
+ENV PYTHONUNBUFFERED=1
+ENV PATH=/opt/pypackages/bin:$PATH
+ENV PYTHONPATH=/opt/pypackages/lib:$PYTHONPATH
+
+COPY --from=base-image /root/src/ /root/src/
+COPY --from=base-image /opt/pypackages/ /opt/pypackages/
+
+
+
+EXPOSE 8000
+
+CMD ["./entrypoint.sh"]
diff --git a/app/envs/prod/celery-entrypoint.sh b/app/envs/prod/celery-entrypoint.sh
new file mode 100755
index 0000000..2c2225d
--- /dev/null
+++ b/app/envs/prod/celery-entrypoint.sh
@@ -0,0 +1,30 @@
+#!/bin/sh
+set -eu
+
+./prometheus-cleanup.sh
+
+# below we define two worker types (each may have any concurrency);
+# each worker type may have its own settings
+WORKERS="master worker"
+OPTIONS="-A bittensor_panel -E -l ERROR --pidfile=/var/run/celery-%n.pid --logfile=/var/log/celery-%n.log"
+
+# set up settings for workers and run the latter;
+# here events from the "celery" queue (the default one, used if no queue is specified)
+# will go to "master" workers, and events from the "worker" queue go to "worker" workers;
+# by default no worker processes run, but each worker type may scale up to its configured concurrency.
+# Since celery runs as root inside the docker container, we also need to explicitly allow that (C_FORCE_ROOT).
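+# (note) -Q:<name> binds a queue to a named worker, and --autoscale:<name>=<max>,<min>
+# lets that worker scale between <min> and <max> OS processes on demand.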
+# shellcheck disable=2086
+C_FORCE_ROOT=1 nice celery multi start $WORKERS $OPTIONS \
+    -Q:master celery --autoscale:master=$CELERY_MASTER_CONCURRENCY,0 \
+    -Q:worker worker --autoscale:worker=$CELERY_WORKER_CONCURRENCY,0
+
+# shellcheck disable=2064
+trap "celery multi stop $WORKERS $OPTIONS; exit 0" INT TERM
+
+tail -f /var/log/celery-*.log &
+
+# check celery status periodically to exit if it crashed
+while true; do
+    sleep 30
+    celery -A bittensor_panel status > /dev/null 2>&1 || exit 1
+done
diff --git a/app/envs/prod/entrypoint.sh b/app/envs/prod/entrypoint.sh
new file mode 100755
index 0000000..878cb71
--- /dev/null
+++ b/app/envs/prod/entrypoint.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+# We assume that WORKDIR is defined in Dockerfile
+
+./prometheus-cleanup.sh
+./manage.py wait_for_database --timeout 10
+# this seems to be the only place to put this for AWS deployments to pick it up
+./manage.py migrate
+
+gunicorn -c gunicorn.conf.py
diff --git a/app/envs/prod/gunicorn.conf.py b/app/envs/prod/gunicorn.conf.py
new file mode 100644
index 0000000..4eed1cb
--- /dev/null
+++ b/app/envs/prod/gunicorn.conf.py
@@ -0,0 +1,9 @@
+import multiprocessing
+
+workers = 2 * multiprocessing.cpu_count() + 1
+bind = "0.0.0.0:8000"
+wsgi_app = "bittensor_panel.wsgi:application"
+accesslog = "-"  # log access to stdout
+
+
+
diff --git a/app/envs/prod/prometheus-cleanup.sh b/app/envs/prod/prometheus-cleanup.sh
new file mode 100755
index 0000000..b7f5ce0
--- /dev/null
+++ b/app/envs/prod/prometheus-cleanup.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+# No Prometheus clean up required as metrics are disabled.
diff --git a/app/src/bandit.ini b/app/src/bandit.ini
new file mode 100644
index 0000000..a1be520
--- /dev/null
+++ b/app/src/bandit.ini
@@ -0,0 +1,2 @@
+[bandit]
+skips = B101
diff --git a/app/src/bittensor_panel/__init__.py b/app/src/bittensor_panel/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/src/bittensor_panel/asgi.py b/app/src/bittensor_panel/asgi.py
new file mode 100644
index 0000000..e869d08
--- /dev/null
+++ b/app/src/bittensor_panel/asgi.py
@@ -0,0 +1,9 @@
+import os
+
+
+from django.core.asgi import get_asgi_application
+
+
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bittensor_panel.settings")
+application = get_asgi_application()
diff --git a/app/src/bittensor_panel/celery.py b/app/src/bittensor_panel/celery.py
new file mode 100644
index 0000000..204ec00
--- /dev/null
+++ b/app/src/bittensor_panel/celery.py
@@ -0,0 +1,24 @@
+import logging.config
+import os
+
+from celery import Celery
+from celery.signals import setup_logging
+from django.conf import settings
+from django_structlog.celery.steps import DjangoStructLogInitStep
+
+from .settings import configure_structlog
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bittensor_panel.settings")
+
+app = Celery("bittensor_panel")
+
+app.config_from_object("django.conf:settings", namespace="CELERY")
+app.steps["worker"].add(DjangoStructLogInitStep)
+app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
+
+
+@setup_logging.connect
+def receiver_setup_logging(loglevel, logfile, format, colorize, **kwargs):  # pragma: no cover
+    logging.config.dictConfig(settings.LOGGING)
+    configure_structlog()
+
+
+def route_task(name, args, kwargs, options, task=None, **kw):
+    return {"queue": "celery"}
diff --git a/app/src/bittensor_panel/core/__init__.py b/app/src/bittensor_panel/core/__init__.py
new file mode 100644
index 0000000..61637fc
--- /dev/null
+++ b/app/src/bittensor_panel/core/__init__.py
@@ -0,0 +1 @@
+default_app_config = "bittensor_panel.core.apps.CoreConfig"
diff --git 
a/app/src/bittensor_panel/core/admin.py b/app/src/bittensor_panel/core/admin.py new file mode 100644 index 0000000..e5bc44e --- /dev/null +++ b/app/src/bittensor_panel/core/admin.py @@ -0,0 +1,7 @@ +from django.contrib import admin # noqa +from django.contrib.admin import register # noqa + + +admin.site.site_header = "Bittensor Administration Panel" +admin.site.site_title = "Bittensor Administration Panel" +admin.site.index_title = "Welcome to Bittensor Administration Panel" diff --git a/app/src/bittensor_panel/core/apps.py b/app/src/bittensor_panel/core/apps.py new file mode 100644 index 0000000..cd164a1 --- /dev/null +++ b/app/src/bittensor_panel/core/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class CoreConfig(AppConfig): + name = "bittensor_panel.core" diff --git a/app/src/bittensor_panel/core/business_metrics.py b/app/src/bittensor_panel/core/business_metrics.py new file mode 100644 index 0000000..e69de29 diff --git a/app/src/bittensor_panel/core/consumers.py b/app/src/bittensor_panel/core/consumers.py new file mode 100644 index 0000000..e69de29 diff --git a/app/src/bittensor_panel/core/email.py b/app/src/bittensor_panel/core/email.py new file mode 100644 index 0000000..4632e47 --- /dev/null +++ b/app/src/bittensor_panel/core/email.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +from collections.abc import Callable +from email.mime.base import MIMEBase +from email.mime.image import MIMEImage +from functools import lru_cache +from pathlib import Path +from typing import TypeVar + +from django.conf import settings +from django.contrib.staticfiles import finders +from django.core.mail import EmailMessage +from django.template import loader + +MIMEType = TypeVar("MIMEType", bound=MIMEBase) + + +@lru_cache(maxsize=10) +def create_attachment( + path: str, + mime_type: Callable[[bytes], MIMEType] = MIMEImage, # type: ignore[assignment] # https://github.com/python/mypy/issues/3737 +) -> MIMEType: + real_path = finders.find(path) + if not real_path: + raise FileNotFoundError(f"File {path} not found") + content = Path(real_path).read_bytes() + attachment = mime_type(content) + + file_name = path.rsplit("/", maxsplit=1)[-1] + attachment.add_header("Content-ID", file_name) + return attachment + + +def send_mail( + template_name: str, + subject: str, + to: list[str], + from_: str = f"<{settings.DEFAULT_FROM_EMAIL}>", + context: dict | None = None, + attachments: list[str] | None = None, + cc: list[str] | None = None, +): + context = context or {} + attachments = attachments or [] + + html = loader.render_to_string(template_name, context) + + message = EmailMessage( + subject=subject, + body=html, + from_email=from_, + to=to, + cc=cc, + attachments=[create_attachment(file) for file in attachments], + ) + message.content_subtype = "html" + message.mixed_subtype = "related" + message.send() diff --git a/app/src/bittensor_panel/core/metrics.py b/app/src/bittensor_panel/core/metrics.py new file mode 100644 index 0000000..e69de29 diff --git a/app/src/bittensor_panel/core/models.py b/app/src/bittensor_panel/core/models.py new file mode 100644 index 0000000..2f0a416 --- /dev/null +++ b/app/src/bittensor_panel/core/models.py @@ -0,0 +1 @@ +from django.db import models # noqa diff --git a/app/src/bittensor_panel/core/tasks.py b/app/src/bittensor_panel/core/tasks.py new file mode 100644 index 0000000..c602359 --- /dev/null +++ b/app/src/bittensor_panel/core/tasks.py @@ -0,0 +1,12 @@ +import structlog +from celery.utils.log import get_task_logger + +from bittensor_panel.celery 
import app + +logger = structlog.wrap_logger(get_task_logger(__name__)) + + +@app.task +def demo_task(x, y): + logger.info("adding two numbers", x=x, y=y) + return x + y diff --git a/app/src/bittensor_panel/core/tests/__init__.py b/app/src/bittensor_panel/core/tests/__init__.py new file mode 100644 index 0000000..326fe3a --- /dev/null +++ b/app/src/bittensor_panel/core/tests/__init__.py @@ -0,0 +1,7 @@ +""" +This file is required by pytest, otherwise import errors will pop up: + +project/core/tests/conftest.py:8: in + from .models import User +E ImportError: attempted relative import with no known parent package +""" diff --git a/app/src/bittensor_panel/core/tests/conftest.py b/app/src/bittensor_panel/core/tests/conftest.py new file mode 100644 index 0000000..7149037 --- /dev/null +++ b/app/src/bittensor_panel/core/tests/conftest.py @@ -0,0 +1,10 @@ +from collections.abc import Generator + +import pytest + + +@pytest.fixture +def some() -> Generator[int, None, None]: + # setup code + yield 1 + # teardown code diff --git a/app/src/bittensor_panel/core/tests/settings.py b/app/src/bittensor_panel/core/tests/settings.py new file mode 100644 index 0000000..a105923 --- /dev/null +++ b/app/src/bittensor_panel/core/tests/settings.py @@ -0,0 +1,6 @@ +import os + +os.environ["DEBUG_TOOLBAR"] = "False" + +from bittensor_panel.settings import * # noqa: E402,F403 + diff --git a/app/src/bittensor_panel/core/tests/test_settings.py b/app/src/bittensor_panel/core/tests/test_settings.py new file mode 100644 index 0000000..d3c9d6b --- /dev/null +++ b/app/src/bittensor_panel/core/tests/test_settings.py @@ -0,0 +1,21 @@ +from importlib import import_module + +import pytest + + +def test__settings__celery_beat_schedule(settings): + """Ensure that CELERY_BEAT_SCHEDULE points to existing tasks""" + + if not hasattr(settings, "CELERY_BEAT_SCHEDULE"): + pytest.skip("CELERY_BEAT_SCHEDULE is not defined") + + paths = {task["task"] for task in settings.CELERY_BEAT_SCHEDULE.values()} + for path in paths: + module_path, task_name = path.rsplit(".", maxsplit=1) + try: + module = import_module(module_path) + except ImportError: + pytest.fail(f"The module '{module_path}' does not exist") + + if not hasattr(module, task_name): + pytest.fail(f"The task '{task_name}' does not exist in {module_path}") diff --git a/app/src/bittensor_panel/core/tests/test_setup.py b/app/src/bittensor_panel/core/tests/test_setup.py new file mode 100644 index 0000000..88e7f71 --- /dev/null +++ b/app/src/bittensor_panel/core/tests/test_setup.py @@ -0,0 +1,18 @@ +""" +This test file is here always to indicate that everything was installed and the CI was able to run tests. +It should always pass as long as all dependencies are properly installed. +""" + +from datetime import timedelta + +import pytest +from django.utils.timezone import now +from freezegun import freeze_time + + +def test__setup(db, some): + with freeze_time(now() - timedelta(days=1)): + assert some == 1 + + with pytest.raises(ZeroDivisionError): + 1 / 0 diff --git a/app/src/bittensor_panel/core/views.py b/app/src/bittensor_panel/core/views.py new file mode 100644 index 0000000..e69de29 diff --git a/app/src/bittensor_panel/settings.py b/app/src/bittensor_panel/settings.py new file mode 100644 index 0000000..61dc605 --- /dev/null +++ b/app/src/bittensor_panel/settings.py @@ -0,0 +1,307 @@ +""" +Django settings for bittensor_panel project. 
+""" + +import inspect +import logging +from datetime import timedelta +from functools import wraps + +import environ + +# from celery.schedules import crontab +import structlog + +root = environ.Path(__file__) - 2 + +env = environ.Env(DEBUG=(bool, False)) + +# .env file contents are not passed to docker image during build stage; +# this results in errors if you require some env var to be set, as if in "env('MYVAR')" - +# obviously it's not set during build stage, but you don't care and want to ignore that. +# To mitigate this, we set ENV_FILL_MISSING_VALUES=1 during build phase, and it activates +# monkey-patching of "environ" module, so that all unset variables get some default value +# and the library does not complain anymore +if env.bool("ENV_FILL_MISSING_VALUES", default=False): + + def patch(fn): + @wraps(fn) + def wrapped(*args, **kwargs): + if kwargs.get("default") is env.NOTSET: + kwargs["default"] = { + bool: False, + int: 0, + float: 0.0, + }.get(kwargs.get("cast"), None) + + return fn(*args, **kwargs) + + return wrapped + + for name, method in inspect.getmembers(env, predicate=inspect.ismethod): + setattr(env, name, patch(method)) + +# read from the .env file if hasn't been sourced already +if env("ENV", default=None) is None: + env.read_env(root("../../.env")) + +ENV = env("ENV") + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = env("SECRET_KEY") + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = env("DEBUG") + +ALLOWED_HOSTS = ["*"] + +INSTALLED_APPS = [ + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "django_extensions", + "django_probes", + "django_structlog", + "constance", + "bittensor_panel.core", +] + + +MIDDLEWARE = [ + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "django_structlog.middlewares.RequestMiddleware", +] + + +if DEBUG_TOOLBAR := env.bool("DEBUG_TOOLBAR", default=False): + INTERNAL_IPS = [ + "127.0.0.1", + ] + + DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda _request: True} + INSTALLED_APPS.append("debug_toolbar") + MIDDLEWARE = ["debug_toolbar.middleware.DebugToolbarMiddleware"] + MIDDLEWARE + +if CORS_ENABLED := env.bool("CORS_ENABLED", default=True): + INSTALLED_APPS.append("corsheaders") + MIDDLEWARE = ["corsheaders.middleware.CorsMiddleware"] + MIDDLEWARE + CORS_ALLOWED_ORIGINS = env.list("CORS_ALLOWED_ORIGINS", default=[]) + CORS_ALLOWED_ORIGIN_REGEXES = env.list("CORS_ALLOWED_ORIGIN_REGEXES", default=[]) + CORS_ALLOW_ALL_ORIGINS = env.bool("CORS_ALLOW_ALL_ORIGINS", default=False) + +SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") + +# Content Security Policy +if CSP_ENABLED := env.bool("CSP_ENABLED"): + MIDDLEWARE.append("csp.middleware.CSPMiddleware") + + CSP_REPORT_ONLY = env.bool("CSP_REPORT_ONLY", default=True) + CSP_REPORT_URL = env("CSP_REPORT_URL", default=None) or None + + CSP_DEFAULT_SRC = env.tuple("CSP_DEFAULT_SRC") + CSP_SCRIPT_SRC = env.tuple("CSP_SCRIPT_SRC") + CSP_STYLE_SRC = env.tuple("CSP_STYLE_SRC") + CSP_FONT_SRC = env.tuple("CSP_FONT_SRC") + CSP_IMG_SRC = env.tuple("CSP_IMG_SRC") + 
CSP_MEDIA_SRC = env.tuple("CSP_MEDIA_SRC") + CSP_OBJECT_SRC = env.tuple("CSP_OBJECT_SRC") + CSP_FRAME_SRC = env.tuple("CSP_FRAME_SRC") + CSP_CONNECT_SRC = env.tuple("CSP_CONNECT_SRC") + CSP_CHILD_SRC = env.tuple("CSP_CHILD_SRC") + CSP_MANIFEST_SRC = env.tuple("CSP_MANIFEST_SRC") + CSP_WORKER_SRC = env.tuple("CSP_WORKER_SRC") + + CSP_BLOCK_ALL_MIXED_CONTENT = env.bool("CSP_BLOCK_ALL_MIXED_CONTENT", default=False) + CSP_EXCLUDE_URL_PREFIXES = env.tuple("CSP_EXCLUDE_URL_PREFIXES", default=tuple()) + + +ROOT_URLCONF = "bittensor_panel.urls" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [root("bittensor_panel/templates")], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +WSGI_APPLICATION = "bittensor_panel.wsgi.application" + +DATABASES = {} +if env("DATABASE_POOL_URL"): # DB transaction-based connection pool, such as one provided PgBouncer + DATABASES["default"] = { + **env.db_url("DATABASE_POOL_URL"), + "DISABLE_SERVER_SIDE_CURSORS": True, # prevents random cursor errors with transaction-based connection pool + } +elif env("DATABASE_URL"): + DATABASES["default"] = env.db_url("DATABASE_URL") + +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" + +AUTH_PASSWORD_VALIDATORS = [ + { + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", + }, + { + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", + }, +] + +# Internationalization +LANGUAGE_CODE = "en-us" +TIME_ZONE = "UTC" +USE_I18N = True +USE_L10N = True +USE_TZ = True + +# Static files (CSS, JavaScript, Images) +STATIC_URL = env("STATIC_URL", default="/static/") +STATIC_ROOT = env("STATIC_ROOT", default=root("static")) +MEDIA_URL = env("MEDIA_URL", default="/media/") +MEDIA_ROOT = env("MEDIA_ROOT", default=root("media")) + +# Security +# redirect HTTP to HTTPS +if env.bool("HTTPS_REDIRECT", default=False) and not DEBUG: + SECURE_SSL_REDIRECT = True + SECURE_REDIRECT_EXEMPT = [] # type: ignore + SESSION_COOKIE_SECURE = True + CSRF_COOKIE_SECURE = True +else: + SECURE_SSL_REDIRECT = False + +CONSTANCE_BACKEND = "constance.backends.database.DatabaseBackend" +CONSTANCE_CONFIG = { + # "PARAMETER": (default-value, "Help text", type), +} + + +CELERY_BROKER_URL = env("CELERY_BROKER_URL", default="") +CELERY_RESULT_BACKEND = env("CELERY_BROKER_URL", default="") # store results in Redis +CELERY_RESULT_EXPIRES = int(timedelta(days=1).total_seconds()) # time until task result deletion +CELERY_COMPRESSION = "gzip" # task compression +CELERY_MESSAGE_COMPRESSION = "gzip" # result compression +CELERY_SEND_EVENTS = True # needed for worker monitoring +CELERY_BEAT_SCHEDULE = { # type: ignore + # 'task_name': { + # 'task': "bittensor_panel.core.tasks.demo_task", + # 'args': [2, 2], + # 'kwargs': {}, + # 'schedule': crontab(minute=0, hour=0), + # 'options': {"time_limit": 300}, + # }, +} +CELERY_TASK_ROUTES = ["bittensor_panel.celery.route_task"] +CELERY_TASK_TIME_LIMIT = int(timedelta(minutes=5).total_seconds()) +CELERY_TASK_ALWAYS_EAGER = env.bool("CELERY_TASK_ALWAYS_EAGER", default=False) +CELERY_ACCEPT_CONTENT = ["json"] +CELERY_TASK_SERIALIZER = "json" 
+CELERY_RESULT_SERIALIZER = "json" +CELERY_WORKER_PREFETCH_MULTIPLIER = env.int("CELERY_WORKER_PREFETCH_MULTIPLIER", default=10) +CELERY_BROKER_POOL_LIMIT = env.int("CELERY_BROKER_POOL_LIMIT", default=50) + +EMAIL_BACKEND = env("EMAIL_BACKEND") +EMAIL_FILE_PATH = env("EMAIL_FILE_PATH") +EMAIL_HOST = env("EMAIL_HOST") +EMAIL_PORT = env.int("EMAIL_PORT") +EMAIL_HOST_USER = env("EMAIL_HOST_USER") +EMAIL_HOST_PASSWORD = env("EMAIL_HOST_PASSWORD") +EMAIL_USE_TLS = env.bool("EMAIL_USE_TLS") +DEFAULT_FROM_EMAIL = env("DEFAULT_FROM_EMAIL") + +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "main": { + "()": structlog.stdlib.ProcessorFormatter, + "processor": structlog.dev.ConsoleRenderer(), + }, + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "main", + }, + }, + "root": { + "handlers": ["console"], + "level": "DEBUG", + }, + "loggers": { + "django": { + "handlers": ["console"], + "level": "INFO", + "propagate": True, + }, + }, +} +DJANGO_STRUCTLOG_CELERY_ENABLED = True + + +def configure_structlog(): + structlog.configure( + processors=[ + structlog.contextvars.merge_contextvars, + structlog.stdlib.filter_by_level, + structlog.processors.TimeStamper(fmt="iso"), + structlog.stdlib.add_logger_name, + structlog.stdlib.add_log_level, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.processors.StackInfoRenderer(), + structlog.processors.format_exc_info, + structlog.processors.UnicodeDecoder(), + structlog.stdlib.ProcessorFormatter.wrap_for_formatter, + ], + logger_factory=structlog.stdlib.LoggerFactory(), + cache_logger_on_first_use=True, + ) + + +configure_structlog() + +# Sentry +if SENTRY_DSN := env("SENTRY_DSN", default=""): + import sentry_sdk + from sentry_sdk.integrations.celery import CeleryIntegration + from sentry_sdk.integrations.django import DjangoIntegration + from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger + from sentry_sdk.integrations.redis import RedisIntegration + + sentry_sdk.init( # type: ignore + dsn=SENTRY_DSN, + environment=ENV, + integrations=[ + DjangoIntegration(), + CeleryIntegration(), + RedisIntegration(), + LoggingIntegration( + level=logging.INFO, # Capture info and above as breadcrumbs + event_level=logging.ERROR, # Send error events from log messages + ), + ], + ) + ignore_logger("django.security.DisallowedHost") diff --git a/app/src/bittensor_panel/urls.py b/app/src/bittensor_panel/urls.py new file mode 100644 index 0000000..8fce2ae --- /dev/null +++ b/app/src/bittensor_panel/urls.py @@ -0,0 +1,14 @@ +from django.conf import settings +from django.contrib.admin.sites import site +from django.urls import include, path + + +urlpatterns = [ + path("admin/", site.urls), + path("", include("django.contrib.auth.urls")), +] + +if settings.DEBUG_TOOLBAR: + urlpatterns += [ + path("__debug__/", include("debug_toolbar.urls")), + ] diff --git a/app/src/bittensor_panel/wsgi.py b/app/src/bittensor_panel/wsgi.py new file mode 100644 index 0000000..0153a67 --- /dev/null +++ b/app/src/bittensor_panel/wsgi.py @@ -0,0 +1,7 @@ +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bittensor_panel.settings") + +application = get_wsgi_application() diff --git a/app/src/manage.py b/app/src/manage.py new file mode 100755 index 0000000..2017ad1 --- /dev/null +++ b/app/src/manage.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +import os +import sys + + +def main(): + os.environ.setdefault("DJANGO_SETTINGS_MODULE", 
"bittensor_panel.settings") + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/app/src/mypy.ini b/app/src/mypy.ini new file mode 100644 index 0000000..627624c --- /dev/null +++ b/app/src/mypy.ini @@ -0,0 +1,9 @@ +[mypy] +plugins = + mypy_django_plugin.main, + mypy_drf_plugin.main +strict_optional = True +ignore_missing_imports = True + +[mypy.plugins.django-stubs] +django_settings_module = "bittensor_panel.core.tests.settings" diff --git a/app/src/pytest.ini b/app/src/pytest.ini new file mode 100644 index 0000000..3c3e44f --- /dev/null +++ b/app/src/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +python_files = tests.py test_*.py *_tests.py +DJANGO_SETTINGS_MODULE = bittensor_panel.core.tests.settings \ No newline at end of file diff --git a/bin/backup-db-to-email.sh b/bin/backup-db-to-email.sh new file mode 100755 index 0000000..060e8a7 --- /dev/null +++ b/bin/backup-db-to-email.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -eu + +if [ "$(basename "$0")" == 'bin' ]; then + cd .. +fi + +if [ ! -f "$1" ]; then + echo "Pass existing backup file name (with .backups/ directory name) as the first argument" + exit 127 +fi + +. .env + +date + +EMAIL_CREDS="${EMAIL_HOST_USER}:${EMAIL_HOST_PASSWORD}@${EMAIL_HOST}:${EMAIL_PORT}" bin/emailhelper.py --to "${EMAIL_TARGET}" --subject "Backup of ${POSTGRES_DB}" -f "$1" + +echo "Email sent successfully" diff --git a/bin/backup-db.sh b/bin/backup-db.sh new file mode 100755 index 0000000..4ab2df4 --- /dev/null +++ b/bin/backup-db.sh @@ -0,0 +1,32 @@ +#!/bin/bash +set -euo pipefail +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +source "${SCRIPT_DIR}/common.sh" + +check_env_vars DATABASE_URL + +TARGET_FILENAME="db_dump_$(date +%Y-%m-%d_%H%M%S).Fc.dump.zstd" +DOCKER_NETWORK="$(get_db_docker_network)" + +DUMP_DB_TO_STDOUT=( + docker run --rm --log-driver none --network "$DOCKER_NETWORK" postgres:16-alpine + pg_dump -Fc --compress=zstd -c --if-exists "$DATABASE_URL" +) + +if [ -n "${BACKUP_B2_BUCKET}" ]; then + "${DUMP_DB_TO_STDOUT[@]}" | "${SCRIPT_DIR}"/backup-file-to-b2.sh - "${TARGET_FILENAME}" +else + LOCAL_BACKUP_DIR=".backups" + mkdir -p "$LOCAL_BACKUP_DIR" + TARGET="$LOCAL_BACKUP_DIR/$TARGET_FILENAME" + "${DUMP_DB_TO_STDOUT[@]}" > "$TARGET" + + if [ -n "${EMAIL_HOST:-}" ] && [ -n "${EMAIL_TARGET:-}" ]; then + "${SCRIPT_DIR}"/backup-db-to-email.sh "${TARGET}" + fi +fi + +if [ -n "${BACKUP_LOCAL_ROTATE_KEEP_LAST:-}" ]; then + echo "Rotating backup files - keeping ${BACKUP_LOCAL_ROTATE_KEEP_LAST} last ones" + bin/rotate-local-backups.py "${BACKUP_LOCAL_ROTATE_KEEP_LAST}" +fi diff --git a/bin/backup-file-to-b2.sh b/bin/backup-file-to-b2.sh new file mode 100755 index 0000000..b97f1e3 --- /dev/null +++ b/bin/backup-file-to-b2.sh @@ -0,0 +1,21 @@ +#!/bin/bash +set -euo pipefail +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +source "${SCRIPT_DIR}/common.sh" + +check_env_vars BACKUP_B2_KEY_ID BACKUP_B2_KEY_SECRET BACKUP_B2_BUCKET + +if [ "$1" == "-" ]; then + BACKUP_B2_FILENAME="$2" + [ -n "$BACKUP_B2_FILENAME" ] || (echo "Pass backup file name as the second argument if stdin was provided as data source">&2; exit 2) +elif [ ! 
-f "$1" ]; then + echo "Pass existing backup file name (with .backups/ directory name) as the first argument" + exit 2 +else + BACKUP_B2_FILENAME"$(basename "$1")" +fi + +export B2_APPLICATION_KEY_ID="$BACKUP_B2_KEY_ID" +export B2_APPLICATION_KEY="$BACKUP_B2_KEY_SECRET" +docker run --rm -iq -e B2_APPLICATION_KEY -e B2_APPLICATION_KEY_ID \ + backblazeit/b2:3.13.1 upload-file "$BACKUP_B2_BUCKET" "$1" "$BACKUP_B2_FILENAME" diff --git a/bin/common.sh b/bin/common.sh new file mode 100644 index 0000000..40d475b --- /dev/null +++ b/bin/common.sh @@ -0,0 +1,47 @@ +#!/bin/bash +set -euo pipefail + +if [ -z "${_COMMON_SH_LOADED:-}" ]; then + PATH=/usr/local/sbin:/usr/local/bin:$PATH + + check_env_vars() { + local required_vars=("$@") + local missing_vars="" + for var in "${required_vars[@]}"; do + if [ -z "${!var}" ]; then + missing_vars+="$var " + fi + done + + if [ -n "$missing_vars" ]; then + echo "Error: The following required environment variables are missing: $missing_vars" >&2 + exit 2 + fi + } + + + load_project_env() { + if [ "$(basename "$0")" == 'bin' ]; then + cd .. + fi + + . .env + } + + get_db_docker_network() { + if [[ "$DATABASE_URL" =~ "@db:" ]]; then + echo bittensor-panel_default + else + echo host + fi + } + + load_project_env + + if [ -n "${SENTRY_DSN}" ]; then + export SENTRY_DSN + eval "$(sentry-cli bash-hook)" + fi + + _COMMON_SH_LOADED=true +fi \ No newline at end of file diff --git a/bin/dbshell.sh b/bin/dbshell.sh new file mode 100755 index 0000000..f88ca7d --- /dev/null +++ b/bin/dbshell.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +if [ "$(basename "$0")" == 'bin' ]; then + cd .. +fi + +. .env + +if [[ "$DATABASE_URL" =~ "@db:" ]]; then + DOCKER_NETWORK=bittensor-panel_default +else + DOCKER_NETWORK=host +fi + +# this works even if `app` container doesn't have psql installed (where `bin/run-manage-py.sh dbshell` fails) +docker run -it --rm --network "$DOCKER_NETWORK" postgres::16-alpine psql "$DATABASE_URL" diff --git a/bin/emailhelper.py b/bin/emailhelper.py new file mode 100755 index 0000000..9bc8d43 --- /dev/null +++ b/bin/emailhelper.py @@ -0,0 +1,119 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2018, Reef Technologies, BSD 3-Clause License + +import argparse +import os +import smtplib +import sys +from collections import namedtuple +from email import encoders +from email.mime.base import MIMEBase +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from urllib.parse import urlsplit + + +class GmailSender(namedtuple("SmtpAuthData", "server port user password")): + def send(self, addr_from, addr_to, subject, message, files=tuple()): + msg = MIMEMultipart("alternative") + msg["To"] = addr_to + msg["From"] = addr_from + msg["Subject"] = subject + + text = "view the html version." 
+        msg.attach(MIMEText(text, "plain"))
+        msg.attach(MIMEText(message, "html"))
+
+        for file in files:
+            part = MIMEBase("application", "octet-stream")
+            with open(file, "rb") as stream:
+                part.set_payload(stream.read())
+            encoders.encode_base64(part)
+            part.add_header(
+                "Content-Disposition",
+                'attachment; filename="%s"' % os.path.basename(file),
+            )
+            msg.attach(part)
+
+        s = smtplib.SMTP(self.server, self.port)
+        s.ehlo()
+        s.starttls()
+        if self.password:
+            s.login(self.user, self.password)
+        s.sendmail(addr_from, addr_to, msg.as_string())
+        s.quit()
+
+
+def parse_arguments():
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+        "-t",
+        "--to",
+        required=True,
+        action="store",
+        dest="to_email",
+        help="Destination address",
+    )
+
+    parser.add_argument(
+        "-f",
+        "--files",
+        action="store",
+        nargs="*",
+        dest="files",
+        help="Files to be sent as attachments",
+    )
+
+    parser.add_argument(
+        "-s",
+        "--subject",
+        action="store",
+        dest="subject",
+        help="Subject of the email",
+    )
+
+    result = parser.parse_args()
+    return result
+
+
+if __name__ == "__main__":
+    parser_result = parse_arguments()
+    email_creds = os.environ.get("EMAIL_CREDS")
+    if not email_creds:
+        sys.stderr.write("no EMAIL_CREDS environment variable!\nexport EMAIL_CREDS=user:password@server:port")
+        sys.exit(2)
+
+    try:
+        email_creds = urlsplit("//%s" % email_creds)
+        if not all([email_creds.username, email_creds.hostname, email_creds.port]):
+            raise ValueError
+    except ValueError:
+        sys.stderr.write(
+            "EMAIL_CREDS environment variable has wrong format!\nexport EMAIL_CREDS=user:password@server:port"
+        )
+        sys.exit(2)
+
+    addr_to = parser_result.to_email
+    files = parser_result.files or []
+    if "@" in email_creds.username:
+        addr_from = email_creds.username
+    else:
+        addr_from = f"{email_creds.username}@{email_creds.hostname}"
+
+    print("Enter/Paste the message for email. Ctrl-%s to save it." % (os.name == "nt" and "Z" or "D"))
+    message_lines = []
+    while True:
+        try:
+            line = input()
+        except EOFError:
+            break
+        message_lines.append(line)
+
+    subject = parser_result.subject
+    message = "\n".join(message_lines)
+
+    sender = GmailSender(email_creds.hostname, email_creds.port, email_creds.username, email_creds.password)
+    print("Sending email...")
+    sender.send(addr_from, addr_to, subject, message, files=files)
diff --git a/bin/list-b2-backups.sh b/bin/list-b2-backups.sh
new file mode 100755
index 0000000..fbf2686
--- /dev/null
+++ b/bin/list-b2-backups.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -euo pipefail
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+source "${SCRIPT_DIR}/common.sh"
+
+BACKUP_B2_KEY_ID="${1-$BACKUP_B2_KEY_ID}"
+BACKUP_B2_KEY_SECRET="${2-$BACKUP_B2_KEY_SECRET}"
+BACKUP_B2_BUCKET="${3-$BACKUP_B2_BUCKET}"
+
+export B2_APPLICATION_KEY_ID="$BACKUP_B2_KEY_ID"
+export B2_APPLICATION_KEY="$BACKUP_B2_KEY_SECRET"
+docker run --rm -iq -e B2_APPLICATION_KEY -e B2_APPLICATION_KEY_ID \
+    backblazeit/b2:3.13.1 ls --long "$BACKUP_B2_BUCKET"
diff --git a/bin/prepare-os.sh b/bin/prepare-os.sh
new file mode 100755
index 0000000..3b816c6
--- /dev/null
+++ b/bin/prepare-os.sh
@@ -0,0 +1,101 @@
+#!/bin/sh
+# Copyright 2020, Reef Technologies (reef.pl), All rights reserved.
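+# Installs the base tooling this project expects on a fresh host:
+# docker (with the compose plugin), sentry-cli, the b2 CLI, the AWS CLI and jq.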
+set -eux
+
+DOCKER_BIN="$(command -v docker || true)"
+DOCKER_COMPOSE_INSTALLED="$(docker compose version || true)"
+SENTRY_CLI="$(command -v sentry-cli || true)"
+B2_CLI="$(command -v b2 || true)"
+AWS_CLI="$(command -v aws || true)"
+JQ_BIN="$(command -v jq || true)"
+
+if [ -x "${DOCKER_BIN}" ] && [ -n "${DOCKER_COMPOSE_INSTALLED}" ] && [ -x "${SENTRY_CLI}" ] && [ -x "${B2_CLI}" ] && [ -x "${AWS_CLI}" ] && [ -x "${JQ_BIN}" ]; then
+  echo "\e[31mEverything required is already installed!\e[0m";
+  exit 0;
+fi
+
+PLATFORM="$(uname -i)"
+if [ "${PLATFORM}" != "x86_64" ] && [ "${PLATFORM}" != "aarch64" ]; then
+  echo "Unsupported hardware platform: ${PLATFORM}"
+  exit 1
+fi
+
+WORK_DIR="$(mktemp -d)"
+if [ ! "${WORK_DIR}" ] || [ ! -d "${WORK_DIR}" ]; then
+  echo "Could not create temp dir"
+  exit 1
+fi
+cd "${WORK_DIR}"
+cleanup() {
+  rm -rf "${WORK_DIR}"
+}
+trap cleanup EXIT
+
+DEBIAN_FRONTEND=noninteractive
+
+apt-get update
+apt-get install -y apt-transport-https ca-certificates curl software-properties-common python3-pip rng-tools
+
+if [ ! -x "${SENTRY_CLI}" ]; then
+  curl -sL https://sentry.io/get-cli/ | bash
+fi
+
+if [ ! -x "${B2_CLI}" ]; then
+  curl -s --output /usr/local/bin/b2 -L https://github.com/Backblaze/B2_Command_Line_Tool/releases/latest/download/b2-linux
+  chmod a+x /usr/local/bin/b2
+fi
+
+if [ ! -x "${DOCKER_BIN}" ]; then
+  curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
+  add-apt-repository -y "deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable"
+  apt-get -y install docker-ce
+  usermod -aG docker "$USER"
+fi
+
+if [ -z "${DOCKER_COMPOSE_INSTALLED}" ]; then  # this var holds `docker compose version` output, not a path
+  apt-get -y install docker-ce docker-compose-plugin
+fi
+
+if [ ! -x "${AWS_CLI}" ]; then
+  apt-get -y install gpg unzip
+  curl "https://awscli.amazonaws.com/awscli-exe-linux-${PLATFORM}.zip" -o "awscliv2.zip"
+  curl "https://awscli.amazonaws.com/awscli-exe-linux-${PLATFORM}.zip.sig" -o "awscliv2.sig"
+  gpg --import [...]
diff --git a/bin/restore-db-from-b2.sh b/bin/restore-db-from-b2.sh
new file mode 100755
--- /dev/null
+++ b/bin/restore-db-from-b2.sh
+#!/bin/bash
+set -euo pipefail
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+source "${SCRIPT_DIR}/common.sh"
+
+if [[ $# -ne 1 ]]; then
+  echo "Usage: bin/restore-db-from-b2.sh <b2_file_id>"
+  echo "All arguments are required"
+  exit 2
+fi
+
+B2_FILE_ID="$1"
+
+export B2_APPLICATION_KEY_ID="$BACKUP_B2_KEY_ID"
+export B2_APPLICATION_KEY="$BACKUP_B2_KEY_SECRET"
+docker run --rm -iq -e B2_APPLICATION_KEY -e B2_APPLICATION_KEY_ID \
+    backblazeit/b2:3.13.1 cat "b2id://$B2_FILE_ID" | "${SCRIPT_DIR}"/restore-db.sh -
diff --git a/bin/restore-db.sh b/bin/restore-db.sh
new file mode 100755
index 0000000..de8a697
--- /dev/null
+++ b/bin/restore-db.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -euo pipefail
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
+source "${SCRIPT_DIR}/common.sh"
+
+TARGET_FILEPATH="$1"
+
+DOCKER_NETWORK=$(get_db_docker_network)
+
+# shellcheck disable=SC2002
+cat "$TARGET_FILEPATH" | docker run -i --rm --network "$DOCKER_NETWORK" postgres:16-alpine pg_restore -c -d "$DATABASE_URL"
+
+echo 'restore finished'
diff --git a/bin/rotate-local-backups.py b/bin/rotate-local-backups.py
new file mode 100755
index 0000000..44eff24
--- /dev/null
+++ b/bin/rotate-local-backups.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2020, Reef Technologies, BSD 3-Clause License
+
+import argparse
+from pathlib import Path
+
+
+def parse_arguments():
+    parser = argparse.ArgumentParser(description="Remove all but the N most recent files in a directory")
+    parser.add_argument("file_count", help="How many last files to keep", type=int)
+    return parser.parse_args()
+
+
+def rotate_backups(path, file_count):
+    files = [(f.stat().st_mtime, f) for f in Path(path).iterdir() if f.is_file()]
+    files.sort()  # oldest first, by mtime
+    files = files[:-file_count]  # drop the `file_count` newest entries; the rest get deleted
+    if files:
+        print(f"Removing {len(files)} old files")
+        for mtime, f in files:
+            f.unlink()
+    else:
+        print("No old files to remove")
+
+
+if __name__ == "__main__":
+    parser_result = parse_arguments()
+    rotate_backups(".backups", parser_result.file_count)
diff --git a/bin/run-manage-py.sh b/bin/run-manage-py.sh
new file mode 100755
index 0000000..1350a23
--- /dev/null
+++ b/bin/run-manage-py.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+if [ "$(basename "$(pwd)")" == 'bin' ]; then  # if run from inside bin/, move to the project root
+    cd ..
+fi
+docker compose exec app sh -c "python manage.py $*"
diff --git a/deploy-to-aws.sh b/deploy-to-aws.sh
new file mode 100755
index 0000000..7bd35ab
--- /dev/null
+++ b/deploy-to-aws.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+set -e
+# shellcheck disable=2086
+./devops/scripts/build-backend.sh "$1"
+./devops/scripts/deploy-backend.sh "$1"
diff --git a/deploy.sh b/deploy.sh
new file mode 100755
index 0000000..712646e
--- /dev/null
+++ b/deploy.sh
@@ -0,0 +1,35 @@
+#!/bin/sh
+# Copyright 2024, Reef Technologies (reef.pl), All rights reserved.
+set -eux
+
+if [ ! -f ".env" ]; then
+  echo "\e[31mPlease set up the environment first!\e[0m";
+  exit 1;
+fi
+
+DOCKER_BUILDKIT=0 docker compose build
+
+# Tag the first image from the multi-stage app Dockerfile to mark it as not dangling
+BASE_IMAGE=$(docker images --quiet --filter="label=builder=true" | head -n1)
+docker image tag "${BASE_IMAGE}" bittensor_panel/app-builder
+
+# collect static files to external storage while old app is still running
+# docker compose run --rm app sh -c "python manage.py collectstatic --no-input"
+
+SERVICES=$(docker compose ps --services 2>&1 > /dev/stderr \
+           | grep -v -e 'is not set' -e db -e redis)
+
+# shellcheck disable=2086
+docker compose stop $SERVICES
+
+# start the app container only in order to perform migrations
+docker compose up -d db  # in case it hasn't been launched before
+docker compose run --rm app sh -c "python manage.py wait_for_database --timeout 10; python manage.py migrate"
+
+# start everything
+docker compose up -d
+
+# Clean all dangling images
+docker images --quiet --filter=dangling=true \
+  | xargs --no-run-if-empty docker rmi \
+  || true
diff --git a/devops/packer/build.sh b/devops/packer/build.sh
new file mode 100755
index 0000000..4abaa36
--- /dev/null
+++ b/devops/packer/build.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+# initialize packer and build the image
+packer init .
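+# `packer init` downloads the plugins declared in docker-optimized.pkr.hcl;
+# `packer build` below then bakes the AMI from that template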
+ +packer build docker-optimized.pkr.hcl \ No newline at end of file diff --git a/devops/packer/docker-optimized.pkr.hcl b/devops/packer/docker-optimized.pkr.hcl new file mode 100644 index 0000000..021c509 --- /dev/null +++ b/devops/packer/docker-optimized.pkr.hcl @@ -0,0 +1,76 @@ +packer { + required_plugins { + amazon = { + version = ">= 1.0.0" + source = "github.com/hashicorp/amazon" + } + } +} + +local "ts" { + expression = formatdate("YYYYMMDDhhmm", timestamp()) +} + +source "amazon-ebs" "docker-optimized" { + ami_name = "docker-optimized-${local.ts}" + + source_ami_filter { + filters = { + virtualization-type = "hvm" + name = "*ubuntu-focal-20.04-amd64-minimal-*" + root-device-type = "ebs" + } + + owners = [ + "099720109477" + ] + + most_recent = true + } + + instance_type = "t3.medium" + ssh_username = "ubuntu" + force_deregister = true + encrypt_boot = true + + launch_block_device_mappings { + device_name = "/dev/sda1" + encrypted = true + volume_size = 20 + volume_type = "gp3" + delete_on_termination = true + } +} + +build { + sources = [ + "source.amazon-ebs.docker-optimized" + ] + + provisioner "shell" { + environment_vars = [ + "DEBIAN_FRONTEND=noninteractive" + ] + + inline = [ + "sleep 15", + + "sudo apt-get clean", + "sudo apt-get update", + "sudo apt-get install -y ca-certificates curl gnupg lsb-release unzip jq rng-tools", + + "curl https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip -o awscliv2.zip", + "unzip awscliv2.zip", + "sudo ./aws/install", + "rm -rf ./aws ./awscliv2.zip", + + "curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg", + "echo \"deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable\" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null", + "sudo apt-get update", + "sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin", + "sudo gpasswd -a ubuntu docker", + "sudo mkdir -p /etc/docker/", + "sudo service docker restart", + ] + } +} diff --git a/devops/scripts/build-backend.sh b/devops/scripts/build-backend.sh new file mode 100755 index 0000000..597d371 --- /dev/null +++ b/devops/scripts/build-backend.sh @@ -0,0 +1,31 @@ +#!/bin/bash +set -xe + +THIS_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +source "$THIS_DIR"/vars.sh + +cd "$PROJECT_DIR" + +DATE_UTC=$(date -u) +TIMESTAMP_UTC=$(date +%s) +COMMIT_HASH=$(git rev-parse --short HEAD || echo -n "local") + +echo "Building Backend: ${APP_NAME}" + +./setup-prod.sh + +aws ecr get-login-password --region "${APP_REGION}" | docker login --username AWS --password-stdin "${APP_OWNER}".dkr.ecr."${APP_REGION}".amazonaws.com + +DOCKER_BUILDKIT=1 docker build \ + -f app/Dockerfile \ + --progress plain \ + --platform linux/amd64 \ + -t "${APP_NAME}" \ + --label build_date_utc="$DATE_UTC" \ + --label build_timestamp_utc="$TIMESTAMP_UTC" \ + --label git_commit_hash="$COMMIT_HASH" \ + . 
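+# tag the freshly built image for the ECR registry, both as :latest and with the commit hash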
+docker tag "${APP_NAME}":latest "${APP_OWNER}".dkr.ecr."${APP_REGION}".amazonaws.com/"${APP_NAME}":latest +docker tag "${APP_NAME}":latest "${APP_OWNER}".dkr.ecr."${APP_REGION}".amazonaws.com/"${APP_NAME}":"${COMMIT_HASH}" + +docker push "${APP_OWNER}".dkr.ecr."${APP_REGION}".amazonaws.com/"${APP_NAME}":"${COMMIT_HASH}" diff --git a/devops/scripts/deploy-backend.sh b/devops/scripts/deploy-backend.sh new file mode 100755 index 0000000..ac3a77a --- /dev/null +++ b/devops/scripts/deploy-backend.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -xe + +THIS_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +source "$THIS_DIR"/vars.sh + +cd "$PROJECT_DIR"/app + +echo "Deploying Backend: ${APP_NAME}" +docker push "${APP_OWNER}".dkr.ecr."${APP_REGION}".amazonaws.com/"${APP_NAME}":latest + +aws autoscaling start-instance-refresh --region "${APP_REGION}" --auto-scaling-group-name "${APP_NAME}" diff --git a/devops/scripts/vars.sh b/devops/scripts/vars.sh new file mode 100755 index 0000000..d240512 --- /dev/null +++ b/devops/scripts/vars.sh @@ -0,0 +1,12 @@ +#!/bin/bash +# shellcheck disable=SC2034 +[ "$1" != "staging" ] && [ "$1" != "prod" ] && echo "Please provide environment name to deploy: staging or prod" && exit 1; + +PROJECT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/../../ + +APP_SUFFIX="-$1" + +APP_OWNER=$(aws sts get-caller-identity --region us-east-1 --query "Account" --output text) +APP_REGION="us-east-1" +APP_NAME="bittensor-panel${APP_SUFFIX}" +CLOUDFRONT_BUCKET="${APP_NAME}-spa${APP_SUFFIX}" \ No newline at end of file diff --git a/devops/tf/core/backend.tf b/devops/tf/core/backend.tf new file mode 100644 index 0000000..bcb0b37 --- /dev/null +++ b/devops/tf/core/backend.tf @@ -0,0 +1,16 @@ +terraform { + backend "s3" { + bucket = "bittensor-panel-qxnlar" + key = "core.tfstate" + region = "us-east-1" + } + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 4.0" + } + } + + required_version = "~> 1.0" +} \ No newline at end of file diff --git a/devops/tf/core/main.tf b/devops/tf/core/main.tf new file mode 100644 index 0000000..99e24a4 --- /dev/null +++ b/devops/tf/core/main.tf @@ -0,0 +1,21 @@ +provider "aws" { + region = var.region +} + +resource "aws_ecr_repository" "app" { + name = "${var.name}-prod" + image_tag_mutability = "MUTABLE" + + image_scanning_configuration { + scan_on_push = true + } +} + +resource "aws_ecr_repository" "app_staging" { + name = "${var.name}-staging" + image_tag_mutability = "MUTABLE" + + image_scanning_configuration { + scan_on_push = true + } +} \ No newline at end of file diff --git a/devops/tf/core/terraform.tfvars b/devops/tf/core/terraform.tfvars new file mode 100644 index 0000000..d5e30ec --- /dev/null +++ b/devops/tf/core/terraform.tfvars @@ -0,0 +1,2 @@ +region = "us-east-1" +name = "bittensor-panel" \ No newline at end of file diff --git a/devops/tf/core/vars.tf b/devops/tf/core/vars.tf new file mode 100644 index 0000000..1b56ec7 --- /dev/null +++ b/devops/tf/core/vars.tf @@ -0,0 +1,7 @@ +variable "name" { + type = string +} + +variable "region" { + type = string +} \ No newline at end of file diff --git a/devops/tf/main/envs/common/main.tf b/devops/tf/main/envs/common/main.tf new file mode 100644 index 0000000..ea98a67 --- /dev/null +++ b/devops/tf/main/envs/common/main.tf @@ -0,0 +1,80 @@ +provider "aws" { + region = var.region +} + +data "aws_caller_identity" "env" {} + +data "aws_ami" "base_ami" { + most_recent = true + + filter { + name = "name" + values = [var.base_ami_image] + } + + filter { + name = "virtualization-type" 
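+    # AMIs for current-generation instance types (t3 etc.) are hvm;
+    # the legacy "paravirtual" alternative is not bootable on them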
+ values = ["hvm"] + } + + owners = [var.base_ami_image_owner] +} + +locals { + ecr_base_url = "${data.aws_caller_identity.env.account_id}.dkr.ecr.${var.region}.amazonaws.com" + ecr_image = "${var.name}-${var.env}:latest" +} + +module "networking" { + source = "../../modules/networking" + + name = var.name + env = var.env + azs = var.azs + vpc_cidr = var.vpc_cidr + subnet_cidrs = var.subnet_cidrs +} + +module "database" { + source = "../../modules/database" + + name = var.name + env = var.env + vpc_id = module.networking.vpc_id + vpc_cidr = module.networking.vpc_cidr_block + azs = module.networking.azs + subnets = module.networking.subnets + instance_type = var.rds_instance_type +} + +module "backend" { + source = "../../modules/backend" + + depends_on = [ + module.database + ] + + base_ami_id = data.aws_ami.base_ami.image_id + + name = var.name + region = var.region + env = var.env + + ecr_base_url = local.ecr_base_url + ecr_image = local.ecr_image + + base_domain_name = var.base_domain_name + domain_name = var.domain_name + ec2_ssh_key = var.ec2_ssh_key + + vpc_id = module.networking.vpc_id + vpc_cidr = module.networking.vpc_cidr_block + + azs = module.networking.azs + subnets = module.networking.subnets + + instance_type = var.instance_type + health_check_type = var.autoscaling_health_check_type + account_id = data.aws_caller_identity.env.account_id + database = module.database +} \ No newline at end of file diff --git a/devops/tf/main/envs/common/vars.tf b/devops/tf/main/envs/common/vars.tf new file mode 100644 index 0000000..7e5527f --- /dev/null +++ b/devops/tf/main/envs/common/vars.tf @@ -0,0 +1,58 @@ +variable "region" { + type = string +} + +variable "name" { + type = string +} + +variable "env" { + type = string +} + +variable "base_ami_image" { + type = string +} + +variable "base_ami_image_owner" { + type = string +} + +variable "vpc_cidr" { + type = string +} + +variable "subnet_cidrs" { + type = set(string) +} + +variable "azs" { + type = set(string) +} + +variable "base_domain_name" { + type = string +} + +variable "domain_name" { + type = string +} + +variable "ec2_ssh_key" { + type = string +} + +variable "instance_type" { + description = "EC2 instance type" + type = string +} + +variable "rds_instance_type" { + description = "RDS instance type" + type = string +} + +variable "autoscaling_health_check_type" { + description = "either EC2 or ELB" + type = string +} diff --git a/devops/tf/main/envs/common/versions.tf b/devops/tf/main/envs/common/versions.tf new file mode 100644 index 0000000..70cbf24 --- /dev/null +++ b/devops/tf/main/envs/common/versions.tf @@ -0,0 +1,10 @@ +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 4.0" + } + } + + required_version = "~> 1.0" +} \ No newline at end of file diff --git a/devops/tf/main/envs/prod/backend.tf b/devops/tf/main/envs/prod/backend.tf new file mode 100644 index 0000000..f13ba80 --- /dev/null +++ b/devops/tf/main/envs/prod/backend.tf @@ -0,0 +1,7 @@ +terraform { + backend "s3" { + bucket = "bittensor-panel-qxnlar" + key = "prod/main.tfstate" + region = "us-east-1" + } +} diff --git a/devops/tf/main/envs/prod/main.tf b/devops/tf/main/envs/prod/main.tf new file mode 100644 index 0000000..ea98a67 --- /dev/null +++ b/devops/tf/main/envs/prod/main.tf @@ -0,0 +1,80 @@ +provider "aws" { + region = var.region +} + +data "aws_caller_identity" "env" {} + +data "aws_ami" "base_ami" { + most_recent = true + + filter { + name = "name" + values = [var.base_ami_image] + } + + filter { + name = 
"virtualization-type" + values = ["hvm"] + } + + owners = [var.base_ami_image_owner] +} + +locals { + ecr_base_url = "${data.aws_caller_identity.env.account_id}.dkr.ecr.${var.region}.amazonaws.com" + ecr_image = "${var.name}-${var.env}:latest" +} + +module "networking" { + source = "../../modules/networking" + + name = var.name + env = var.env + azs = var.azs + vpc_cidr = var.vpc_cidr + subnet_cidrs = var.subnet_cidrs +} + +module "database" { + source = "../../modules/database" + + name = var.name + env = var.env + vpc_id = module.networking.vpc_id + vpc_cidr = module.networking.vpc_cidr_block + azs = module.networking.azs + subnets = module.networking.subnets + instance_type = var.rds_instance_type +} + +module "backend" { + source = "../../modules/backend" + + depends_on = [ + module.database + ] + + base_ami_id = data.aws_ami.base_ami.image_id + + name = var.name + region = var.region + env = var.env + + ecr_base_url = local.ecr_base_url + ecr_image = local.ecr_image + + base_domain_name = var.base_domain_name + domain_name = var.domain_name + ec2_ssh_key = var.ec2_ssh_key + + vpc_id = module.networking.vpc_id + vpc_cidr = module.networking.vpc_cidr_block + + azs = module.networking.azs + subnets = module.networking.subnets + + instance_type = var.instance_type + health_check_type = var.autoscaling_health_check_type + account_id = data.aws_caller_identity.env.account_id + database = module.database +} \ No newline at end of file diff --git a/devops/tf/main/envs/prod/terraform.tfvars b/devops/tf/main/envs/prod/terraform.tfvars new file mode 100644 index 0000000..fd6a459 --- /dev/null +++ b/devops/tf/main/envs/prod/terraform.tfvars @@ -0,0 +1,31 @@ +# each of this vars can be overridden by adding ENVIRONMENT variable with name: +# TF_VAR_var_name="value" + +name = "bittensor-panel" +region = "us-east-1" +env = "prod" + +# VPC and subnet CIDR settings, change them if you need to pair +# multiple CIDRs (i.e. with different component) +vpc_cidr = "10.2.0.0/16" +subnet_cidrs = ["10.2.1.0/24", "10.2.2.0/24"] +azs = ["us-east-1c", "us-east-1d"] + +# By default, we have an ubuntu image +base_ami_image = "*ubuntu-focal-20.04-amd64-minimal-*" +base_ami_image_owner = "099720109477" + +# domain setting +base_domain_name = "fake-domain.com" +domain_name = "api.fake-domain.com" + +# default ssh key +ec2_ssh_key = "" + +instance_type = "t3.medium" +rds_instance_type = "db.t3.small" + +# defines if we use EC2-only healthcheck or ELB healthcheck +# EC2 healthcheck reacts only on internal EC2 checks (i.e. 
if machine cannot be reached) +# recommended for staging = EC2, for prod = ELB +autoscaling_health_check_type = "ELB" diff --git a/devops/tf/main/envs/prod/vars.tf b/devops/tf/main/envs/prod/vars.tf new file mode 100644 index 0000000..7e5527f --- /dev/null +++ b/devops/tf/main/envs/prod/vars.tf @@ -0,0 +1,58 @@ +variable "region" { + type = string +} + +variable "name" { + type = string +} + +variable "env" { + type = string +} + +variable "base_ami_image" { + type = string +} + +variable "base_ami_image_owner" { + type = string +} + +variable "vpc_cidr" { + type = string +} + +variable "subnet_cidrs" { + type = set(string) +} + +variable "azs" { + type = set(string) +} + +variable "base_domain_name" { + type = string +} + +variable "domain_name" { + type = string +} + +variable "ec2_ssh_key" { + type = string +} + +variable "instance_type" { + description = "EC2 instance type" + type = string +} + +variable "rds_instance_type" { + description = "RDS instance type" + type = string +} + +variable "autoscaling_health_check_type" { + description = "either EC2 or ELB" + type = string +} diff --git a/devops/tf/main/envs/prod/versions.tf b/devops/tf/main/envs/prod/versions.tf new file mode 100644 index 0000000..70cbf24 --- /dev/null +++ b/devops/tf/main/envs/prod/versions.tf @@ -0,0 +1,10 @@ +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 4.0" + } + } + + required_version = "~> 1.0" +} \ No newline at end of file diff --git a/devops/tf/main/envs/staging/backend.tf b/devops/tf/main/envs/staging/backend.tf new file mode 100644 index 0000000..7bcb36e --- /dev/null +++ b/devops/tf/main/envs/staging/backend.tf @@ -0,0 +1,7 @@ +terraform { + backend "s3" { + bucket = "bittensor-panel-qxnlar" + key = "staging/main.tfstate" + region = "us-east-1" + } +} diff --git a/devops/tf/main/envs/staging/main.tf b/devops/tf/main/envs/staging/main.tf new file mode 100644 index 0000000..ea98a67 --- /dev/null +++ b/devops/tf/main/envs/staging/main.tf @@ -0,0 +1,80 @@ +provider "aws" { + region = var.region +} + +data "aws_caller_identity" "env" {} + +data "aws_ami" "base_ami" { + most_recent = true + + filter { + name = "name" + values = [var.base_ami_image] + } + + filter { + name = "virtualization-type" + values = ["hvm"] + } + + owners = [var.base_ami_image_owner] +} + +locals { + ecr_base_url = "${data.aws_caller_identity.env.account_id}.dkr.ecr.${var.region}.amazonaws.com" + ecr_image = "${var.name}-${var.env}:latest" +} + +module "networking" { + source = "../../modules/networking" + + name = var.name + env = var.env + azs = var.azs + vpc_cidr = var.vpc_cidr + subnet_cidrs = var.subnet_cidrs +} + +module "database" { + source = "../../modules/database" + + name = var.name + env = var.env + vpc_id = module.networking.vpc_id + vpc_cidr = module.networking.vpc_cidr_block + azs = module.networking.azs + subnets = module.networking.subnets + instance_type = var.rds_instance_type +} + +module "backend" { + source = "../../modules/backend" + + depends_on = [ + module.database + ] + + base_ami_id = data.aws_ami.base_ami.image_id + + name = var.name + region = var.region + env = var.env + + ecr_base_url = local.ecr_base_url + ecr_image = local.ecr_image + + base_domain_name = var.base_domain_name + domain_name = var.domain_name + ec2_ssh_key = var.ec2_ssh_key + + vpc_id = module.networking.vpc_id + vpc_cidr = module.networking.vpc_cidr_block + + azs = module.networking.azs + subnets = module.networking.subnets + + instance_type = var.instance_type + health_check_type = 
var.autoscaling_health_check_type + account_id = data.aws_caller_identity.env.account_id + database = module.database +} \ No newline at end of file diff --git a/devops/tf/main/envs/staging/terraform.tfvars b/devops/tf/main/envs/staging/terraform.tfvars new file mode 100644 index 0000000..304e65d --- /dev/null +++ b/devops/tf/main/envs/staging/terraform.tfvars @@ -0,0 +1,31 @@ +# each of this vars can be overridden by adding ENVIRONMENT variable with name: +# TF_VAR_var_name="value" + +name = "bittensor-panel" +region = "us-east-1" +env = "staging" + +# VPC and subnet CIDR settings, change them if you need to pair +# multiple CIDRs (i.e. with different component) +vpc_cidr = "10.20.0.0/16" +subnet_cidrs = ["10.20.1.0/24", "10.20.2.0/24"] +azs = ["us-east-1a", "us-east-1b"] + +# By default, we have an ubuntu image +base_ami_image = "*ubuntu-focal-20.04-amd64-minimal-*" +base_ami_image_owner = "099720109477" + +# domain setting +base_domain_name = "fake-domain.com" +domain_name = "staging.api.fake-domain.com" + +# default ssh key +ec2_ssh_key = "" + +instance_type = "t3.medium" +rds_instance_type = "db.t3.small" + +# defines if we use EC2-only healthcheck or ELB healthcheck +# EC2 healthcheck reacts only on internal EC2 checks (i.e. if machine cannot be reached) +# recommended for staging = EC2, for prod = ELB +autoscaling_health_check_type = "EC2" diff --git a/devops/tf/main/envs/staging/vars.tf b/devops/tf/main/envs/staging/vars.tf new file mode 100644 index 0000000..7e5527f --- /dev/null +++ b/devops/tf/main/envs/staging/vars.tf @@ -0,0 +1,58 @@ +variable "region" { + type = string +} + +variable "name" { + type = string +} + +variable "env" { + type = string +} + +variable "base_ami_image" { + type = string +} + +variable "base_ami_image_owner" { + type = string +} + +variable "vpc_cidr" { + type = string +} + +variable "subnet_cidrs" { + type = set(string) +} + +variable "azs" { + type = set(string) +} + +variable "base_domain_name" { + type = string +} + +variable "domain_name" { + type = string +} + +variable "ec2_ssh_key" { + type = string +} + +variable "instance_type" { + description = "EC2 instance type" + type = string +} + +variable "rds_instance_type" { + description = "RDS instance type" + type = string +} + +variable "autoscaling_health_check_type" { + description = "either EC2 or ELB" + type = string +} diff --git a/devops/tf/main/envs/staging/versions.tf b/devops/tf/main/envs/staging/versions.tf new file mode 100644 index 0000000..70cbf24 --- /dev/null +++ b/devops/tf/main/envs/staging/versions.tf @@ -0,0 +1,10 @@ +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 4.0" + } + } + + required_version = "~> 1.0" +} \ No newline at end of file diff --git a/devops/tf/main/files/authorized_keys b/devops/tf/main/files/authorized_keys new file mode 100644 index 0000000..737d852 --- /dev/null +++ b/devops/tf/main/files/authorized_keys @@ -0,0 +1 @@ +${ec2_ssh_key} \ No newline at end of file diff --git a/devops/tf/main/files/cloud-init.yml b/devops/tf/main/files/cloud-init.yml new file mode 100644 index 0000000..652d51a --- /dev/null +++ b/devops/tf/main/files/cloud-init.yml @@ -0,0 +1,59 @@ +#cloud-config +groups: + - docker + +system_info: + default_user: + groups: [docker] + +write_files: + - path: /home/ubuntu/installer.sh + permissions: '0755' + content: | + apt-get clean && apt-get update && apt-get install -y ca-certificates curl gnupg lsb-release unzip jq rng-tools + + curl https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip -o 
awscliv2.zip + unzip awscliv2.zip + ./aws/install + rm -rf ./aws ./awscliv2.zip + + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg + echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + apt-get update + apt-get install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin + gpasswd -a ubuntu docker + mkdir -p /etc/docker/ + service docker restart + + - path: /home/ubuntu/cloud-init.sh + permissions: '0755' + content: | + #!/bin/bash + + export APP_NAME=${name} + export APP_ENV=${env} + + aws ssm describe-parameters | jq -r '.Parameters[].Name' | grep "\/application\/$APP_NAME\/$APP_ENV" | sed "s/\/application.*$APP_ENV\///" | while read -r FILE; do + mkdir -p $(dirname "$FILE"); + aws ssm get-parameter --name "/application/$APP_NAME/$APP_ENV/$FILE" --output text --with-decryption --query 'Parameter.Value' | sed "s/###//g" > "$FILE"; + done + + source .envrc + + export INSTANCE_ID_SUBST=`wget http://169.254.169.254/latest/meta-data/instance-id -O- --timeout=5 --tries=1` + [ -z "$INSTANCE_ID_SUBST" ] && export INSTANCE_ID_SUBST='{{.FullID}}' + echo "INSTANCE_ID_SUBST=$INSTANCE_ID_SUBST" >> .env + + [ -f secret.env ] && cat secret.env >> .env + + + aws ecr get-login-password --region ${region} | docker login --username AWS --password-stdin "$AWS_ECR_BASE_URL" + docker compose up -d + +runcmd: + - chown -R ubuntu:ubuntu /home/ubuntu + - cd /home/ubuntu/ + + - "[ -f ./installer.sh ] && ./installer.sh" + + - sudo -u ubuntu ./cloud-init.sh \ No newline at end of file diff --git a/devops/tf/main/files/docker-compose.yml b/devops/tf/main/files/docker-compose.yml new file mode 100644 index 0000000..2abb071 --- /dev/null +++ b/devops/tf/main/files/docker-compose.yml @@ -0,0 +1,52 @@ +version: '3.7' + +services: + app: + image: ${ecr_base_url}/${ecr_image} + init: true + restart: always + env_file: ./.env + + volumes: + - backend-static:/root/src/static + - ./media:/root/src/media + + logging: + driver: awslogs + options: + awslogs-region: ${region} + awslogs-group: /aws/ec2/${name}-${env} + tag: '$${INSTANCE_ID_SUBST}-app' + awslogs-create-group: "true" + + + + nginx: + image: 'ghcr.io/reef-technologies/nginx-rt:v1.2.2' + restart: unless-stopped + healthcheck: + test: wget -q --spider http://0.0.0.0:8000/admin/login || exit 1 + depends_on: + - app + + command: nginx -g 'daemon off;' + ports: + + - 8000:8000 + volumes: + - ./nginx/templates:/etc/nginx/templates + - ./nginx/config_helpers:/etc/nginx/config_helpers + - backend-static:/srv/static:ro + - ./media:/srv/media:ro + - ./nginx/monitoring_certs:/etc/monitoring_certs + logging: + driver: awslogs + options: + awslogs-region: ${region} + awslogs-group: /aws/ec2/${name}-${env} + tag: '$${INSTANCE_ID_SUBST}-nginx' + awslogs-create-group: "true" + + +volumes: + backend-static: diff --git a/devops/tf/main/files/env b/devops/tf/main/files/env new file mode 100644 index 0000000..312cb79 --- /dev/null +++ b/devops/tf/main/files/env @@ -0,0 +1,37 @@ +ENV=prod +NGINX_HOST=localhost +DEBUG=0 +SECRET_KEY=${secret_key} +POSTGRES_DB=${database_name} +POSTGRES_USER=${database_user} +POSTGRES_PASSWORD=${database_password} +DATABASE_URL=${database_connection_string} + +SENTRY_DSN= +HTTPS_REDIRECT=n +HTTPS_PROXY_HEADER=X_SCHEME +CSP_ENABLED=n +CSP_REPORT_ONLY=n +CSP_REPORT_URL= +CSP_DEFAULT_SRC="'none'" +CSP_SCRIPT_SRC="'self'" 
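+# each CSP_*_SRC entry maps to the matching Content-Security-Policy fetch
+# directive; the inner single quotes are part of the CSP syntax ('self' etc.)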
+CSP_STYLE_SRC="'self'" +CSP_FONT_SRC="'self'" +CSP_IMG_SRC="'self'" +CSP_MEDIA_SRC="'self'" +CSP_OBJECT_SRC="'self'" +CSP_FRAME_SRC="'self'" +CSP_CONNECT_SRC="'self'" +CSP_CHILD_SRC="'self'" +CSP_MANIFEST_SRC="'self'" +CSP_WORKER_SRC="'self'" +CSP_BLOCK_ALL_MIXED_CONTENT=y +CSP_EXCLUDE_URL_PREFIXES= +BACKUP_B2_BUCKET= +BACKUP_B2_KEY_ID= +BACKUP_B2_KEY_SECRET= +BACKUP_LOCAL_ROTATE_KEEP_LAST= +DATABASE_POOL_URL= +CHANNELS_BACKEND_URL=redis://redis:6379/1 \ No newline at end of file diff --git a/devops/tf/main/files/envrc b/devops/tf/main/files/envrc new file mode 100644 index 0000000..8b2a386 --- /dev/null +++ b/devops/tf/main/files/envrc @@ -0,0 +1,5 @@ +export APP_NAME=${name} +export APP_ENV=${env} +export AWS_ACCOUNT_ID=${account_id} +export AWS_ECR_BASE_URL=${ecr_base_url} +export AWS_ECR_TAG=${ecr_image} diff --git a/devops/tf/main/files/nginx/config_helpers/brotli.conf b/devops/tf/main/files/nginx/config_helpers/brotli.conf new file mode 100644 index 0000000..1e4cb51 --- /dev/null +++ b/devops/tf/main/files/nginx/config_helpers/brotli.conf @@ -0,0 +1,44 @@ +brotli off; +brotli_static off; + +brotli_comp_level 6; +brotli_types + # text/html is always in brotli_types + text/richtext + text/plain + text/css + text/x-script + text/x-component + text/x-java-source + text/x-markdown + application/javascript + application/x-javascript + text/javascript + text/js + image/x-icon + application/x-perl + application/x-httpd-cgi + text/xml + application/xml + application/xml+rss + application/json + multipart/bag + multipart/mixed + application/xhtml+xml + font/ttf + font/otf + font/x-woff + image/svg+xml + application/vnd.ms-fontobject + application/ttf + application/x-ttf + application/otf + application/x-otf + application/truetype + application/opentype + application/x-opentype + application/font-woff + application/eot + application/font + application/font-sfnt + application/wasm; diff --git a/devops/tf/main/files/nginx/config_helpers/gzip.conf b/devops/tf/main/files/nginx/config_helpers/gzip.conf new file mode 100644 index 0000000..6ba8194 --- /dev/null +++ b/devops/tf/main/files/nginx/config_helpers/gzip.conf @@ -0,0 +1,48 @@ +gzip off; +gzip_static off; +gzip_proxied off; + +gzip_vary on; +gzip_comp_level 6; +gzip_buffers 16 8k; +gzip_http_version 1.1; +gzip_types + # text/html is always in gzip_types + text/richtext + text/plain + text/css + text/x-script + text/x-component + text/x-java-source + text/x-markdown + application/javascript + application/x-javascript + text/javascript + text/js + image/x-icon + application/x-perl + application/x-httpd-cgi + text/xml + application/xml + application/xml+rss + application/json + multipart/bag + multipart/mixed + application/xhtml+xml + font/ttf + font/otf + font/x-woff + image/svg+xml + application/vnd.ms-fontobject + application/ttf + application/x-ttf + application/otf + application/x-otf + application/truetype + application/opentype + application/x-opentype + application/font-woff + application/eot + application/font + application/font-sfnt + application/wasm; diff --git a/devops/tf/main/files/nginx/monitoring_certs/monitoring-ca.crt.txt b/devops/tf/main/files/nginx/monitoring_certs/monitoring-ca.crt.txt new file mode 100644 index 0000000..70dc2a2 --- /dev/null +++ b/devops/tf/main/files/nginx/monitoring_certs/monitoring-ca.crt.txt @@ -0,0 +1 @@ +"replace-me" \ No newline at end of file diff --git a/devops/tf/main/files/nginx/monitoring_certs/monitoring.crt.txt b/devops/tf/main/files/nginx/monitoring_certs/monitoring.crt.txt new file mode 
100644 index 0000000..70dc2a2 --- /dev/null +++ b/devops/tf/main/files/nginx/monitoring_certs/monitoring.crt.txt @@ -0,0 +1 @@ +"replace-me" \ No newline at end of file diff --git a/devops/tf/main/files/nginx/monitoring_certs/monitoring.key.txt b/devops/tf/main/files/nginx/monitoring_certs/monitoring.key.txt new file mode 100644 index 0000000..70dc2a2 --- /dev/null +++ b/devops/tf/main/files/nginx/monitoring_certs/monitoring.key.txt @@ -0,0 +1 @@ +"replace-me" \ No newline at end of file diff --git a/devops/tf/main/files/nginx/templates/default.conf.template b/devops/tf/main/files/nginx/templates/default.conf.template new file mode 100644 index 0000000..34b3462 --- /dev/null +++ b/devops/tf/main/files/nginx/templates/default.conf.template @@ -0,0 +1,38 @@ +server { + listen 8000 default_server; + server_name _; + server_name_in_redirect off; + + include /etc/nginx/config_helpers/brotli.conf; + include /etc/nginx/config_helpers/gzip.conf; + + access_log /dev/stdout; + error_log /dev/stderr info; + + client_max_body_size 100M; + + location /static/ { + root /srv/; + } + + location /media/ { + root /srv/; + } + + + + location / { + + proxy_pass_header Server; + proxy_redirect off; + proxy_set_header Host $http_host; + proxy_set_header X-Real-IP $remote_addr; + proxy_pass_header X-Forwarded-Proto; + + proxy_pass http://app:8000/; + } +} + + + + diff --git a/devops/tf/main/modules/backend/alb.tf b/devops/tf/main/modules/backend/alb.tf new file mode 100644 index 0000000..6c85398 --- /dev/null +++ b/devops/tf/main/modules/backend/alb.tf @@ -0,0 +1,36 @@ +resource "aws_lb" "self" { + name = "${var.name}-${var.env}" + internal = false + load_balancer_type = "application" + subnets = var.subnets + security_groups = [aws_security_group.public.id] + enable_deletion_protection = false +} + +resource "aws_lb_target_group" "self" { + name = "${var.name}-${var.env}" + port = 8000 + protocol = "HTTP" + vpc_id = var.vpc_id + target_type = "instance" + + health_check { + enabled = true + port = 8000 + path = "/admin/login" + matcher = "200-302" + } +} + +resource "aws_lb_listener" "self" { + load_balancer_arn = aws_lb.self.arn + port = "443" + protocol = "HTTPS" + ssl_policy = "ELBSecurityPolicy-2016-08" + certificate_arn = aws_acm_certificate.self.arn + + default_action { + type = "forward" + target_group_arn = aws_lb_target_group.self.arn + } +} diff --git a/devops/tf/main/modules/backend/domain.tf b/devops/tf/main/modules/backend/domain.tf new file mode 100644 index 0000000..dbd0cde --- /dev/null +++ b/devops/tf/main/modules/backend/domain.tf @@ -0,0 +1,46 @@ +data "aws_route53_zone" "self" { + name = var.base_domain_name +} + +resource "aws_route53_record" "a" { + zone_id = data.aws_route53_zone.self.zone_id + name = var.domain_name + type = "A" + + alias { + name = aws_lb.self.dns_name + zone_id = aws_lb.self.zone_id + evaluate_target_health = true + } +} + +resource "aws_acm_certificate" "self" { + domain_name = var.domain_name + validation_method = "DNS" + + tags = { + Project = var.name + Env = var.env + } + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_route53_record" "cert-validation" { + for_each = { + for dvo in aws_acm_certificate.self.domain_validation_options: dvo.domain_name => { + name = dvo.resource_record_name + record = dvo.resource_record_value + type = dvo.resource_record_type + } + } + + allow_overwrite = true + name = each.value.name + records = [each.value.record] + ttl = 60 + type = each.value.type + zone_id = data.aws_route53_zone.self.zone_id +} \ No newline 
at end of file diff --git a/devops/tf/main/modules/backend/ec2-autoscale.tf b/devops/tf/main/modules/backend/ec2-autoscale.tf new file mode 100644 index 0000000..a7784f0 --- /dev/null +++ b/devops/tf/main/modules/backend/ec2-autoscale.tf @@ -0,0 +1,66 @@ +locals { + name_env = "${var.name}-${var.env}" + cloudinit = templatefile("../../files/cloud-init.yml", { + name = var.name + env = var.env + region = var.region + }) +} + +resource "aws_launch_template" "self" { + name = local.name_env + image_id = var.base_ami_id + instance_type = var.instance_type + + iam_instance_profile { + name = aws_iam_instance_profile.self.name + } + + disable_api_termination = false + key_name = aws_key_pair.self.key_name + + user_data = base64encode(local.cloudinit) + + block_device_mappings { + device_name = "/dev/sda1" + + ebs { + delete_on_termination = true + encrypted = true + volume_size = 20 + } + } + + credit_specification { + cpu_credits = "standard" + } + + vpc_security_group_ids = [ + aws_security_group.internal.id + ] +} + +resource "aws_autoscaling_group" "self" { + name = local.name_env + desired_capacity = 1 + max_size = 1 + min_size = 1 + vpc_zone_identifier = [var.subnets[0]] + + launch_template { + id = aws_launch_template.self.id + version = "$Latest" + } + + tag { + key = "Name" + propagate_at_launch = true + value = local.name_env + } + + target_group_arns = [ + aws_lb_target_group.self.arn + ] + + health_check_type = var.health_check_type +} diff --git a/devops/tf/main/modules/backend/ec2-keys.tf b/devops/tf/main/modules/backend/ec2-keys.tf new file mode 100644 index 0000000..9a496fe --- /dev/null +++ b/devops/tf/main/modules/backend/ec2-keys.tf @@ -0,0 +1,4 @@ +resource "aws_key_pair" "self" { + key_name = "${var.name}-${var.env}-key" + public_key = var.ec2_ssh_key +} \ No newline at end of file diff --git a/devops/tf/main/modules/backend/ec2-profile.tf b/devops/tf/main/modules/backend/ec2-profile.tf new file mode 100644 index 0000000..71bf1fd --- /dev/null +++ b/devops/tf/main/modules/backend/ec2-profile.tf @@ -0,0 +1,27 @@ +resource "aws_iam_role" "self" { + name = "${var.name}-${var.env}-ec2-role" + + assume_role_policy = jsonencode({ + Version = "2012-10-17", + Statement = [ + { + Effect = "Allow", + Principal = { + Service: "ec2.amazonaws.com" + }, + Action = "sts:AssumeRole" + } + ] + }) + + managed_policy_arns = [ + "arn:aws:iam::aws:policy/AmazonSSMReadOnlyAccess", + "arn:aws:iam::aws:policy/AmazonEC2ContainerRegistryReadOnly", + "arn:aws:iam::aws:policy/CloudWatchLogsFullAccess" + ] +} + +resource "aws_iam_instance_profile" "self" { + name = "${var.name}-${var.env}-ec2-profile" + role = aws_iam_role.self.name +} diff --git a/devops/tf/main/modules/backend/parameters.docker-compose.tf b/devops/tf/main/modules/backend/parameters.docker-compose.tf new file mode 100644 index 0000000..eb3bac9 --- /dev/null +++ b/devops/tf/main/modules/backend/parameters.docker-compose.tf @@ -0,0 +1,13 @@ +data "aws_partition" "self" {} + +resource "aws_ssm_parameter" "compose" { + name = "/application/${var.name}/${var.env}/docker-compose.yml" + type = "SecureString" + value = templatefile("../../files/docker-compose.yml", { + name = var.name + env = var.env + region = var.region + ecr_base_url = var.ecr_base_url + ecr_image = var.ecr_image + }) +} \ No newline at end of file diff --git a/devops/tf/main/modules/backend/parameters.env.tf b/devops/tf/main/modules/backend/parameters.env.tf new file mode 100644 index 0000000..f865a6b --- /dev/null +++ b/devops/tf/main/modules/backend/parameters.env.tf @@ 
-0,0 +1,31 @@ +resource "random_uuid" "random_uuid" {} + +resource "aws_ssm_parameter" "envrc" { + name = "/application/${var.name}/${var.env}/.envrc" + type = "SecureString" + value = templatefile("../../files/envrc", { + name = var.name + env = var.env + region = var.region + account_id = var.account_id + ecr_base_url = var.ecr_base_url + ecr_image = var.ecr_image + }) +} + + +resource "aws_ssm_parameter" "env" { + name = "/application/${var.name}/${var.env}/.env" + type = "SecureString" + value = templatefile("../../files/env", { + name = var.name + env = var.env + region = var.region + secret_key = random_uuid.random_uuid.result + + database_name = var.database.name + database_user = var.database.user + database_password = var.database.password + database_connection_string = var.database.connection_string + }) +} \ No newline at end of file diff --git a/devops/tf/main/modules/backend/parameters.nginx.tf b/devops/tf/main/modules/backend/parameters.nginx.tf new file mode 100644 index 0000000..ab0f2f4 --- /dev/null +++ b/devops/tf/main/modules/backend/parameters.nginx.tf @@ -0,0 +1,46 @@ +locals { + cert_dir = "../../files/nginx/monitoring_certs" + cert_files = fileset(local.cert_dir, "*.txt") + + certs = length(local.cert_files) > 0 ? [for cert_file in local.cert_files : { + name: replace(cert_file, ".txt", "") + content: "${local.cert_dir}/${cert_file}" + }] : [] + + helper_dir = "../../files/nginx/config_helpers" + helper_files = fileset(local.helper_dir, "*") + + helpers = length(local.helper_files) > 0 ? [for helper_file in local.helper_files : { + name: helper_file, + content: "${local.helper_dir}/${helper_file}" + }] : [] + + template_dir = "../../files/nginx/templates" + template_files = fileset(local.template_dir, "*") + + templates = length(local.template_files) > 0 ? 
[for template_file in local.template_files : { + name: template_file, + content: "${local.template_dir}/${template_file}" + }] : [] +} + +resource "aws_ssm_parameter" "certs" { + count = length(local.certs) + name = "/application/${var.name}/${var.env}/nginx/monitoring_certs/${local.certs[count.index].name}" + type = "SecureString" + value = file(local.certs[count.index].content) +} + +resource "aws_ssm_parameter" "helpers" { + count = length(local.helpers) + name = "/application/${var.name}/${var.env}/nginx/config_helpers/${local.helpers[count.index].name}" + type = "SecureString" + value = file(local.helpers[count.index].content) +} + +resource "aws_ssm_parameter" "templates" { + count = length(local.templates) + name = "/application/${var.name}/${var.env}/nginx/templates/${local.templates[count.index].name}" + type = "SecureString" + value = file(local.templates[count.index].content) +} \ No newline at end of file diff --git a/devops/tf/main/modules/backend/parameters.ssh-keys.tf b/devops/tf/main/modules/backend/parameters.ssh-keys.tf new file mode 100644 index 0000000..fe84407 --- /dev/null +++ b/devops/tf/main/modules/backend/parameters.ssh-keys.tf @@ -0,0 +1,7 @@ +resource "aws_ssm_parameter" "ssh-keys" { + name = "/application/${var.name}/${var.env}/.ssh/authorized_keys" + type = "SecureString" + value = templatefile("../../files/authorized_keys", { + ec2_ssh_key = var.ec2_ssh_key + }) +} diff --git a/devops/tf/main/modules/backend/security.tf b/devops/tf/main/modules/backend/security.tf new file mode 100644 index 0000000..6440b4a --- /dev/null +++ b/devops/tf/main/modules/backend/security.tf @@ -0,0 +1,63 @@ +resource "aws_security_group" "public" { + name = "${var.name}-${var.env}-public-sg" + vpc_id = var.vpc_id + + ingress { + description = "allow traffic between load-balancer and EC2 instances within VPC" + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } + + ingress { + description = "allow traffic between load-balancer and EC2 instances within VPC" + from_port = 443 + to_port = 443 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } +} + +resource "aws_security_group" "internal" { + name = "${var.name}-internal-sg" + vpc_id = var.vpc_id + + ingress { + description = "allow traffic to ssh from internet" + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = ["51.254.203.61/32"] + } + + ingress { + description = "allow monitoring" + from_port = 10443 + to_port = 10443 + protocol = "tcp" + cidr_blocks = ["138.68.147.48/32", "95.179.202.73/32"] + } + + ingress { + description = "allow traffic between load-balancer and EC2 instances within VPC" + from_port = 8000 + to_port = 8000 + protocol = "tcp" + cidr_blocks = [var.vpc_cidr] + } + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } +} diff --git a/devops/tf/main/modules/backend/vars.tf b/devops/tf/main/modules/backend/vars.tf new file mode 100644 index 0000000..108c852 --- /dev/null +++ b/devops/tf/main/modules/backend/vars.tf @@ -0,0 +1,22 @@ +variable "name" {} +variable "env" {} +variable "region" {} + +variable "vpc_id" {} +variable "vpc_cidr" {} +variable "subnets" {} +variable "azs" {} + +variable "base_ami_id" {} +variable "base_domain_name" {} + +variable "domain_name" {} +variable "ec2_ssh_key" {} + +variable "ecr_base_url" {} +variable "ecr_image" {} + +variable "instance_type" {} +variable "health_check_type" {} +variable "account_id" 
{} +variable "database" {} \ No newline at end of file diff --git a/devops/tf/main/modules/database/output.tf b/devops/tf/main/modules/database/output.tf new file mode 100644 index 0000000..96f5801 --- /dev/null +++ b/devops/tf/main/modules/database/output.tf @@ -0,0 +1,25 @@ +output "connection_string" { + value = "postgres://${aws_db_instance.self.username}:${aws_db_instance.self.password}@${aws_db_instance.self.endpoint}/${aws_db_instance.self.db_name}" + sensitive = true +} + +output "user" { + value = aws_db_instance.self.username +} + +output "password" { + value = aws_db_instance.self.password + sensitive = true +} + +output "endpoint" { + value = aws_db_instance.self.endpoint +} + +output "port" { + value = aws_db_instance.self.port +} + +output "name" { + value = aws_db_instance.self.db_name +} \ No newline at end of file diff --git a/devops/tf/main/modules/database/rds.tf b/devops/tf/main/modules/database/rds.tf new file mode 100644 index 0000000..5504b93 --- /dev/null +++ b/devops/tf/main/modules/database/rds.tf @@ -0,0 +1,36 @@ +resource "random_string" "random" { + length = 20 + special = true + override_special = "$." +} + +resource "aws_db_subnet_group" "self" { + name = "${var.name}-${var.env}" + subnet_ids = var.subnets + + tags = { + Project = var.name + Env = var.env + Name = "DB subnet group" + } +} + +resource "aws_db_instance" "self" { + identifier = "${var.name}-${var.env}-db" + allocated_storage = 5 + max_allocated_storage = 20 + storage_encrypted = true + engine = "postgres" + instance_class = var.instance_type + username = "master" + db_name = "backend" + password = random_string.random.result + skip_final_snapshot = true + availability_zone = var.azs[0] + db_subnet_group_name = aws_db_subnet_group.self.name + vpc_security_group_ids = [aws_security_group.db.id] + + tags = { + Project = var.name + } +} diff --git a/devops/tf/main/modules/database/security.tf b/devops/tf/main/modules/database/security.tf new file mode 100644 index 0000000..cf34c26 --- /dev/null +++ b/devops/tf/main/modules/database/security.tf @@ -0,0 +1,19 @@ +resource "aws_security_group" "db" { + name = "${var.name}-db-sg" + vpc_id = var.vpc_id + + ingress { + description = "allow traffic to postgres port from within VPC" + from_port = 5432 + to_port = 5432 + protocol = "tcp" + cidr_blocks = [var.vpc_cidr] + } + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } +} \ No newline at end of file diff --git a/devops/tf/main/modules/database/vars.tf b/devops/tf/main/modules/database/vars.tf new file mode 100644 index 0000000..fd54160 --- /dev/null +++ b/devops/tf/main/modules/database/vars.tf @@ -0,0 +1,8 @@ +variable "name" {} +variable "env" {} + +variable "vpc_id" {} +variable "vpc_cidr" {} +variable "subnets" {} +variable "azs" {} +variable "instance_type" {} diff --git a/devops/tf/main/modules/networking/network.tf b/devops/tf/main/modules/networking/network.tf new file mode 100644 index 0000000..ded801b --- /dev/null +++ b/devops/tf/main/modules/networking/network.tf @@ -0,0 +1,12 @@ +module "vpc" { + source = "terraform-aws-modules/vpc/aws" + version = "3.19.0" + + name = "${var.name}-${var.env}-vpc" + cidr = var.vpc_cidr + + azs = var.azs + public_subnets = var.subnet_cidrs + enable_nat_gateway = false + enable_vpn_gateway = false +} diff --git a/devops/tf/main/modules/networking/output.tf b/devops/tf/main/modules/networking/output.tf new file mode 100644 index 0000000..717f7b0 --- /dev/null +++ b/devops/tf/main/modules/networking/output.tf @@ -0,0 
+1,15 @@ +output "vpc_id" { + value = module.vpc.vpc_id +} + +output "vpc_cidr_block" { + value = module.vpc.vpc_cidr_block +} + +output "subnets" { + value = module.vpc.public_subnets +} + +output "azs" { + value = module.vpc.azs +} diff --git a/devops/tf/main/modules/networking/vars.tf b/devops/tf/main/modules/networking/vars.tf new file mode 100644 index 0000000..7f5c180 --- /dev/null +++ b/devops/tf/main/modules/networking/vars.tf @@ -0,0 +1,5 @@ +variable "name" {} +variable "env" {} +variable "azs" {} +variable "vpc_cidr" {} +variable "subnet_cidrs" {} \ No newline at end of file diff --git a/devops/vultr_scripts/vultr-deploy.py b/devops/vultr_scripts/vultr-deploy.py new file mode 100644 index 0000000..002755e --- /dev/null +++ b/devops/vultr_scripts/vultr-deploy.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# deploy to list of IPs from `instances_ip.txt` (see `vultr-get-instances.py`) + +import os +import subprocess +from pathlib import Path + +pwd = Path(__file__).parent + + +with open(pwd / "instances_ip.txt") as f: + ips = f.readlines() + +errs = [] +for ip in ips: + print("deploying to", ip) + try: + # keep the parent environment so git can still find ssh; check=True + # raises CalledProcessError on a failed push (Popen never raised it) + res = subprocess.run( + ["git", "push", f"root@{ip.strip()}:~/repos/bittensor_panel-central.git"], + env={ + **os.environ, + "GIT_SSH_COMMAND": "ssh -o ConnectTimeout=10 -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", + }, + check=True, + ) + except subprocess.CalledProcessError: + errs.append(ip) + else: + print("res", res) + +for err_ip in errs: + print("error deploying to", err_ip) diff --git a/devops/vultr_scripts/vultr-get-instances.py b/devops/vultr_scripts/vultr-get-instances.py new file mode 100644 index 0000000..06bf3bf --- /dev/null +++ b/devops/vultr_scripts/vultr-get-instances.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# get list of all instances in Vultr account +# save their IDs and IPs into files which will be used by \ +# `vultr-deploy.py` and `vultr-update-cloudinit.py` + +import subprocess +from pathlib import Path + +pwd = Path(__file__).parent + +instance_id = pwd / "instances_id.txt" +instance_ip = pwd / "instances_ip.txt" + +res = subprocess.check_output(["vultr-cli", "instance", "list", "ipv4"]).decode("utf-8").split("\n") + +ids = [] +ips = [] +for line in res[1:]: # skip header + line_items = line.split("\t") + if len(line_items) != 13: + continue + ids.append(line_items[0].strip()) + ips.append(line_items[1].strip()) + +with open(instance_ip, "w") as f: + f.write("\n".join(ips)) + +with open(instance_id, "w") as f: + f.write("\n".join(ids)) diff --git a/devops/vultr_scripts/vultr-update-cloudinit.py b/devops/vultr_scripts/vultr-update-cloudinit.py new file mode 100644 index 0000000..8977e84 --- /dev/null +++ b/devops/vultr_scripts/vultr-update-cloudinit.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# update cloud-init data +# this should be used only to UPDATE the data, initialization should be done via Terraform +# see vultr_tf/core/vultr-cloud-init.tftpl +import subprocess +from pathlib import Path + +pwd = Path(__file__).parent + +# cloud-init script +# use `vultr-cli instance user-data get ` to get existing data +user_data = pwd / "userdata.txt" +assert user_data.exists() + +with open(pwd / "instances_id.txt") as f: + for instance_id in f.readlines(): + print("instance id", instance_id) + # res = subprocess.check_output(['vultr-cli', 'instance', 'user-data', 'get', instance_id.strip()]) + res = subprocess.check_output( + [ + "vultr-cli", + "instance", + "user-data", + "set", + instance_id.strip(), + "-d", + str(user_data), + ] + ) + print("res", res, type(res)) diff 
--git a/devops/vultr_tf/core/backend.tf b/devops/vultr_tf/core/backend.tf new file mode 100644 index 0000000..1785925 --- /dev/null +++ b/devops/vultr_tf/core/backend.tf @@ -0,0 +1,10 @@ +terraform { + + required_providers { + vultr = { + source = "vultr/vultr" + version = "~> 2.15.1" + } + } + +} \ No newline at end of file diff --git a/devops/vultr_tf/core/main.tf b/devops/vultr_tf/core/main.tf new file mode 100644 index 0000000..ac0fa98 --- /dev/null +++ b/devops/vultr_tf/core/main.tf @@ -0,0 +1,52 @@ +provider "vultr" { + api_key = var.vultr_api_key +} + +resource "vultr_instance" "worker" { + count = 1 + hostname = "instance-bittensor_panel-${count.index + 1}" + region = var.region + plan = "vc2-1c-1gb" // via `vultr-cli plans list` + os_id = 1743 // ubuntu 22-04, via `vultr-cli os list` + ssh_key_ids = [ + // uuid-4 of ssh keys added in Vultr + ] + enable_ipv6 = true + activation_email = false + label = "instance-bittensor_panel" + backups = "disabled" + + user_data = templatefile("vultr-cloud-init.tftpl", { + DEPLOY_SSH_KEY = var.DEPLOY_SSH_KEY + SECRET_KEY = var.DOTENV_SECRET_KEY + POSTGRES_HOST = var.DOTENV_POSTGRES_HOST + POSTGRES_USER = var.DOTENV_POSTGRES_USER + POSTGRES_PASSWORD = var.DOTENV_POSTGRES_PASSWORD + DATABASE_POOL_URL = var.DOTENV_DATABASE_POOL_URL + DATABASE_URL = var.DOTENV_DATABASE_URL + SENTRY_DSN = var.DOTENV_SENTRY_DSN + }) +} + +resource "vultr_load_balancer" "loadbalancer" { + region = var.region + + forwarding_rules { + frontend_protocol = "https" + frontend_port = 443 + backend_protocol = "https" + backend_port = 443 + } + + health_check { + path = "/admin/" + port = "443" + protocol = "https" + response_timeout = 5 + unhealthy_threshold = 2 + check_interval = 15 + healthy_threshold = 4 + } + + attached_instances = [for instance in vultr_instance.worker : instance.id] +} diff --git a/devops/vultr_tf/core/vars.tf b/devops/vultr_tf/core/vars.tf new file mode 100644 index 0000000..03055da --- /dev/null +++ b/devops/vultr_tf/core/vars.tf @@ -0,0 +1,8 @@ +variable "region" { + type = string +} + +variable "vultr_api_key" { + type = string + sensitive = true +} diff --git a/devops/vultr_tf/core/vars_cloud_init.tf b/devops/vultr_tf/core/vars_cloud_init.tf new file mode 100644 index 0000000..53075ae --- /dev/null +++ b/devops/vultr_tf/core/vars_cloud_init.tf @@ -0,0 +1,41 @@ +variable "DEPLOY_SSH_KEY" { + // private ssh key for cloning github repo + type = string + sensitive = true +} + +// variables for .env file +variable "DOTENV_SECRET_KEY" { + type = string + sensitive = true +} + +variable "DOTENV_POSTGRES_HOST" { + type = string + sensitive = true +} + +variable "DOTENV_POSTGRES_USER" { + type = string + sensitive = true +} + +variable "DOTENV_POSTGRES_PASSWORD" { + type = string + sensitive = true +} + +variable "DOTENV_DATABASE_POOL_URL" { + type = string + sensitive = true +} + +variable "DOTENV_DATABASE_URL" { + type = string + sensitive = true +} + +variable "DOTENV_SENTRY_DSN" { + type = string + sensitive = true +} diff --git a/devops/vultr_tf/core/vultr-cloud-init.tftpl b/devops/vultr_tf/core/vultr-cloud-init.tftpl new file mode 100644 index 0000000..4b127ca --- /dev/null +++ b/devops/vultr_tf/core/vultr-cloud-init.tftpl @@ -0,0 +1,44 @@ +#!/bin/bash +# shell variables have to have doubled dollar sign, otherwise Terraform will try to interpolate them +# the only variable with single dollar is `DEPLOY_SSH_KEY` in the block below +echo "starting custom cloud-init" + +# add deploy ssh key to clone repo +DEPLOY_KEY_FILE="/root/.ssh/id_ed25519" +echo 
"${DEPLOY_SSH_KEY}" > $${DEPLOY_KEY_FILE} +chmod 600 $${DEPLOY_KEY_FILE} + +DEPLOY_DIR="/root/domains/bittensor-panel/" +REPO_DIR="/root/repos/bittensor-panel.git" +REPO_ORIGIN="git@github.com:reef-technologies/bittensor_panel.git" + +mkdir -p /root/repos/ +mkdir -p $${DEPLOY_DIR} +mkdir -p /root/volumes/bittensor_panel-mount/ + +# repo init script for Vultr server +ssh-keyscan github.com >> /root/.ssh/known_hosts +apt install -y git +GIT_SSH_COMMAND="ssh -i $${DEPLOY_KEY_FILE}" git clone --depth=1 --bare --no-checkout $${REPO_ORIGIN} $${REPO_DIR} + +# 1st time deploy and setup +git --work-tree=$${DEPLOY_DIR} --git-dir=$${REPO_DIR} checkout -f main +cp $${DEPLOY_DIR}/bin/post-receive $${REPO_DIR}/hooks/post-receive + +$${DEPLOY_DIR}/bin/prepare-os.sh +$${DEPLOY_DIR}/setup-prod.sh + +# add env variables to .env file +cat <> $${DEPLOY_DIR}/.env +POSTGRES_HOST=${POSTGRES_HOST} +POSTGRES_USER=${POSTGRES_USER} +POSTGRES_PASSWORD=${POSTGRES_PASSWORD} +DATABASE_POOL_URL=${DATABASE_POOL_URL} +DATABASE_URL=${DATABASE_URL} +SENTRY_DSN=${SENTRY_DSN} +SECRET_KEY=${SECRET_KEY} + +EOF + +cd $${DEPLOY_DIR} && docker compose up --build --detach +echo "finishing custom cloud-init" diff --git a/docs/3rd_party/cookiecutter-rt-django/CHANGELOG.md b/docs/3rd_party/cookiecutter-rt-django/CHANGELOG.md new file mode 100644 index 0000000..fe56806 --- /dev/null +++ b/docs/3rd_party/cookiecutter-rt-django/CHANGELOG.md @@ -0,0 +1,24 @@ +# cookiecutter-rt-django Changelog + +Main purpose of this file is to provide a changelog for the template itself. +It is not intended to be used as a changelog for the generated project. + +This changelog will document any know **BREAKING** changes between versions of the template. +Please review this new entries carefully after applying `cruft update` before committing the changes. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +Currently, `cookiecutter-rt-django` has no explicit versioning amd we purely rely on `cruft` diff. + +## [Unreleased] + +* **BREAKING** Switched from `docker-compose` v1 script to `docker compose` v2 plugin (https://docs.docker.com/compose/cli-command/) +* **BREAKING** Added formatting with ruff. +* **BREAKING** Started using [pdm](https://github.com/pdm-project/pdm) for managing Python dependencies. +* **BREAKING** root of repository is used as docker build context instead of just `./app/`. 
+* **BREAKING** Updated django-environ from 0.4.5 to 0.10 (https://django-environ.readthedocs.io/en/latest/changelog.html) +* **BREAKING** Updated redis python package from 3.5.3 to 4.6 (breaking changes listed in https://github.com/redis/redis-py/releases/tag/v4.0.0b1) +* **BREAKING** Updated Python from 3.9 to 3.11 +* **BREAKING** Updated Django from 3.2 to 4.2 (https://docs.djangoproject.com/en/4.2/releases/4.0/#backwards-incompatible-changes-in-4-0) +* **BREAKING** Updated django-cors-headers from 3.7 to 4.0 (https://github.com/adamchainz/django-cors-headers/blob/main/CHANGELOG.rst#400-2023-05-12) +* **BREAKING** Updated django-environ from 0.7 to 0.10 (https://django-environ.readthedocs.io/en/latest/changelog.html) \ No newline at end of file diff --git a/envs/dev/.env.template b/envs/dev/.env.template new file mode 100644 index 0000000..b5a9fbb --- /dev/null +++ b/envs/dev/.env.template @@ -0,0 +1,67 @@ +ENV=backend-dev +DEBUG=on +DEBUG_TOOLBAR=on +SECRET_KEY=12345 + +POSTGRES_DB=bittensor_panel +POSTGRES_HOST=localhost +POSTGRES_PORT=8432 +POSTGRES_USER=postgres +POSTGRES_PASSWORD=12345 +DATABASE_POOL_URL= +# using transaction-based db connection pool as DATABASE_URL instead of DATABASE_POOL_URL will break production +DATABASE_URL=postgres://postgres:12345@localhost:8432/bittensor_panel + +NGINX_HOST=localhost + +CORS_ENABLED=on +CORS_ALLOWED_ORIGINS= +CORS_ALLOWED_ORIGIN_REGEXES= +CORS_ALLOW_ALL_ORIGINS=0 + +REDIS_HOST=localhost +REDIS_PORT=8379 + + +CELERY_BROKER_URL=redis://localhost:8379/0 +CELERY_TASK_ALWAYS_EAGER=1 +CELERY_MASTER_CONCURRENCY=1 +CELERY_WORKER_CONCURRENCY=1 + + + + + +EMAIL_BACKEND=django.core.mail.backends.filebased.EmailBackend +EMAIL_FILE_PATH=/tmp/email +EMAIL_HOST=smtp.sendgrid.net +EMAIL_PORT=587 +EMAIL_USE_TLS=1 +EMAIL_HOST_USER=apikey +EMAIL_HOST_PASSWORD= +DEFAULT_FROM_EMAIL= + +SENTRY_DSN= + +CSP_ENABLED=n +CSP_REPORT_ONLY=n +CSP_REPORT_URL="" +CSP_DEFAULT_SRC="'none'" +CSP_SCRIPT_SRC="'self'" +CSP_STYLE_SRC="'self'" +CSP_FONT_SRC="'self'" +CSP_IMG_SRC="'self'" +CSP_MEDIA_SRC="'self'" +CSP_OBJECT_SRC="'self'" +CSP_FRAME_SRC="'self'" +CSP_CONNECT_SRC="'self'" +CSP_CHILD_SRC="'self'" +CSP_MANIFEST_SRC="'self'" +CSP_WORKER_SRC="'self'" +CSP_BLOCK_ALL_MIXED_CONTENT=y +CSP_EXCLUDE_URL_PREFIXES= + +BACKUP_B2_BUCKET= +BACKUP_B2_KEY_ID= +BACKUP_B2_KEY_SECRET= +BACKUP_LOCAL_ROTATE_KEEP_LAST= diff --git a/envs/dev/docker-compose.yml b/envs/dev/docker-compose.yml new file mode 100644 index 0000000..262eca9 --- /dev/null +++ b/envs/dev/docker-compose.yml @@ -0,0 +1,25 @@ +version: '3.7' + +services: + redis: + image: redis:6-alpine + command: redis-server --appendonly yes + healthcheck: + test: redis-cli ping + volumes: + - ./redis/data:/data + ports: + - ${REDIS_PORT}:6379 + + db: + image: postgres:14.0-alpine + healthcheck: + test: pg_isready -U ${POSTGRES_USER} || exit 1 + environment: + - POSTGRES_DB=${POSTGRES_DB} + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + volumes: + - ./db/data:/var/lib/postgresql/data + ports: + - ${POSTGRES_PORT}:5432 diff --git a/envs/prod/.env.template b/envs/prod/.env.template new file mode 100644 index 0000000..a82893d --- /dev/null +++ b/envs/prod/.env.template @@ -0,0 +1,69 @@ +ENV=backend-prod +DEBUG=off +DEBUG_TOOLBAR=off +SECRET_KEY= + +POSTGRES_DB=bittensor_panel +POSTGRES_HOST=db +POSTGRES_PORT=5432 +POSTGRES_USER=postgres +POSTGRES_PASSWORD= +DATABASE_POOL_URL= +# using transaction-based db connection pool as DATABASE_URL instead of DATABASE_POOL_URL will break production 
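+# (transaction-mode poolers such as pgbouncer cannot run session-level
+# operations like migrations, so DATABASE_URL must stay a direct connection)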
+DATABASE_URL=postgres://postgres:@db:5432/bittensor_panel + +NGINX_HOST= + +CORS_ENABLED=on +CORS_ALLOWED_ORIGINS= +CORS_ALLOWED_ORIGIN_REGEXES= +CORS_ALLOW_ALL_ORIGINS=0 + +REDIS_HOST=redis +REDIS_PORT=6379 + + +CELERY_BROKER_URL=redis://redis:6379/0 +CELERY_TASK_ALWAYS_EAGER=0 +CELERY_MASTER_CONCURRENCY=2 +CELERY_WORKER_CONCURRENCY=2 + + + + + + + +EMAIL_BACKEND=django.core.mail.backends.smtp.EmailBackend +EMAIL_FILE_PATH=/tmp/email +EMAIL_HOST=smtp.sendgrid.net +EMAIL_PORT=587 +EMAIL_USE_TLS=1 +EMAIL_HOST_USER=apikey +EMAIL_HOST_PASSWORD= +DEFAULT_FROM_EMAIL= + +SENTRY_DSN= + +CSP_ENABLED=n +CSP_REPORT_ONLY=n +CSP_REPORT_URL="" +CSP_DEFAULT_SRC="'none'" +CSP_SCRIPT_SRC="'self'" +CSP_STYLE_SRC="'self'" +CSP_FONT_SRC="'self'" +CSP_IMG_SRC="'self'" +CSP_MEDIA_SRC="'self'" +CSP_OBJECT_SRC="'self'" +CSP_FRAME_SRC="'self'" +CSP_CONNECT_SRC="'self'" +CSP_CHILD_SRC="'self'" +CSP_MANIFEST_SRC="'self'" +CSP_WORKER_SRC="'self'" +CSP_BLOCK_ALL_MIXED_CONTENT=y +CSP_EXCLUDE_URL_PREFIXES= + +BACKUP_B2_BUCKET= +BACKUP_B2_KEY_ID= +BACKUP_B2_KEY_SECRET= +BACKUP_LOCAL_ROTATE_KEEP_LAST= diff --git a/envs/prod/docker-compose.yml b/envs/prod/docker-compose.yml new file mode 100644 index 0000000..3a8339c --- /dev/null +++ b/envs/prod/docker-compose.yml @@ -0,0 +1,125 @@ +version: '3.7' + +services: + redis: + image: redis:6-alpine + command: redis-server --appendonly yes + healthcheck: + test: redis-cli ping + restart: unless-stopped + volumes: + - ./redis/data:/data + logging: &logging + driver: journald + options: + tag: '{{.Name}}' + + db: + image: postgres:14.0-alpine + healthcheck: + test: pg_isready -U ${POSTGRES_USER} || exit 1 + restart: unless-stopped + env_file: ./.env + environment: + - POSTGRES_DB=${POSTGRES_DB} + - POSTGRES_USER=${POSTGRES_USER} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + volumes: + - ./db/data:/var/lib/postgresql/data + logging: + <<: *logging + + app: + build: + context: . 
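+      # build context is the repo root rather than ./app, so the Dockerfile
+      # can copy files from outside the app directory (see template changelog)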
+ dockerfile: app/Dockerfile + image: bittensor_panel/app + healthcheck: + test: wget -q --spider 127.0.0.1:8000/admin/login/ || exit 1 + init: true + restart: unless-stopped + env_file: ./.env + + volumes: + - backend-static:/root/src/static + - ./media:/root/src/media + + depends_on: + - redis + - db + logging: + <<: *logging + + celery-worker: + image: bittensor_panel/app + init: true + healthcheck: + test: celery -A bittensor_panel status > /dev/null || exit 1 + restart: unless-stopped + env_file: ./.env + environment: + - DEBUG=off + + command: ./celery-entrypoint.sh + + tmpfs: /run + depends_on: + - redis + logging: + <<: *logging + + celery-beat: + image: bittensor_panel/app + init: true + restart: unless-stopped + env_file: ./.env + environment: + - DEBUG=off + command: nice celery -A bittensor_panel beat -l INFO --schedule /tmp/celerybeat-schedule -f /tmp/logs/celery-beat.log + volumes: + - ./logs:/tmp/logs + depends_on: + - redis + logging: + <<: *logging + + + nginx: + image: 'ghcr.io/reef-technologies/nginx-rt:v1.2.2' + restart: unless-stopped + healthcheck: + test: [ + "CMD-SHELL", + "curl 0.0.0.0:80 -s --fail -H \"Host: $NGINX_HOST\" -H \"User-Agent: docker-compose-healthcheck\" -o /dev/null || exit 1" + ] + interval: 30s + retries: 5 + start_period: 20s + timeout: 10s + environment: + - NGINX_HOST=${NGINX_HOST} + volumes: + - ./nginx/templates:/etc/nginx/templates + - ./nginx/config_helpers:/etc/nginx/config_helpers + - backend-static:/srv/static:ro + - ./media:/srv/media:ro + - ./letsencrypt/etc:/etc/letsencrypt + - ./nginx/monitoring_certs:/etc/monitoring_certs + depends_on: + - app + + command: nginx -g 'daemon off;' + ports: + - 80:80 + - 443:443 + + logging: + <<: *logging + + + + + + +volumes: + backend-static: diff --git a/letsencrypt_setup.sh b/letsencrypt_setup.sh new file mode 100755 index 0000000..161f602 --- /dev/null +++ b/letsencrypt_setup.sh @@ -0,0 +1,22 @@ +#!/bin/bash +set -eux +RELPATH="$(dirname "$0")" +ABSPATH="$(realpath "$RELPATH")" + +cd "$ABSPATH" + +source ./.env +mkdir -p "$ABSPATH/letsencrypt/etc/dhparams" + +docker run -it --rm \ + -v "$ABSPATH/letsencrypt/etc:/etc/letsencrypt" \ + alpine/openssl \ + dhparam -out /etc/letsencrypt/dhparams/dhparam.pem 2048 + +docker run --entrypoint certbot -it --rm \ + -v "$ABSPATH/letsencrypt/etc:/etc/letsencrypt" \ + -p 80:80\ + ghcr.io/reef-technologies/nginx-rt:v1.2.2 \ + certonly \ + --standalone --preferred-challenges http\ + -d "$NGINX_HOST" -d "www.$NGINX_HOST" diff --git a/nginx/config_helpers/brotli.conf b/nginx/config_helpers/brotli.conf new file mode 100644 index 0000000..1e4cb51 --- /dev/null +++ b/nginx/config_helpers/brotli.conf @@ -0,0 +1,44 @@ +brotli off; +brotli_static off; + +brotli_comp_level 6; +brotli_types + # text/html is always in brotli_types + text/richtext + text/plain + text/css + text/x-script + text/x-component + text/x-java-source + text/x-markdown + application/javascript + application/x-javascript + text/javascript + text/js + image/x-icon + application/x-perl + application/x-httpd-cgi + text/xml + application/xml + application/xml+rss + application/json + multipart/bag + multipart/mixed + application/xhtml+xml + font/ttf + font/otf + font/x-woff + image/svg+xml + application/vnd.ms-fontobject + application/ttf + application/x-ttf + application/otf + application/x-otf + application/truetype + application/opentype + application/x-opentype + application/font-woff + application/eot + application/font + application/font-sfnt + application/wasm; diff --git 
a/nginx/config_helpers/gzip.conf b/nginx/config_helpers/gzip.conf
new file mode 100644
index 0000000..6ba8194
--- /dev/null
+++ b/nginx/config_helpers/gzip.conf
@@ -0,0 +1,48 @@
+gzip off;
+gzip_static off;
+gzip_proxied off;
+
+gzip_vary on;
+gzip_comp_level 6;
+gzip_buffers 16 8k;
+gzip_http_version 1.1;
+gzip_types
+    # text/html is always in gzip_types
+    text/richtext
+    text/plain
+    text/css
+    text/x-script
+    text/x-component
+    text/x-java-source
+    text/x-markdown
+    application/javascript
+    application/x-javascript
+    text/javascript
+    text/js
+    image/x-icon
+    application/x-perl
+    application/x-httpd-cgi
+    text/xml
+    application/xml
+    application/xml+rss
+    application/json
+    multipart/bag
+    multipart/mixed
+    application/xhtml+xml
+    font/ttf
+    font/otf
+    font/x-woff
+    image/svg+xml
+    application/vnd.ms-fontobject
+    application/ttf
+    application/x-ttf
+    application/otf
+    application/x-otf
+    application/truetype
+    application/opentype
+    application/x-opentype
+    application/font-woff
+    application/eot
+    application/font
+    application/font-sfnt
+    application/wasm;
diff --git a/nginx/monitoring_certs/README.md b/nginx/monitoring_certs/README.md
new file mode 100644
index 0000000..a3c4a4d
--- /dev/null
+++ b/nginx/monitoring_certs/README.md
@@ -0,0 +1,2 @@
+Go to [prometheus-grafana-monitoring](https://github.com/reef-technologies/prometheus-grafana-monitoring) and generate a cert-key pair for this project (see prometheus-grafana-monitoring's README to find out how to do that).
+Copy the generated cert-key pair along with `ca.crt` into this directory, named `cert.crt`, `cert.key` and `ca.crt`, respectively.
diff --git a/nginx/templates/default.conf.template b/nginx/templates/default.conf.template
new file mode 100644
index 0000000..22792ff
--- /dev/null
+++ b/nginx/templates/default.conf.template
@@ -0,0 +1,99 @@
+#
+# SSL config below is inspired by websites:
+# - https://syslink.pl/cipherlist/
+# - https://ssl-config.mozilla.org/
+# Generated for Intermediate configuration, nginx 1.20.1 or later
+#
+
+server {
+    listen 80 default_server;
+    server_name _;
+    server_name_in_redirect off;
+
+    return 444;
+}
+
+server {
+    listen 80;
+    server_name www.${NGINX_HOST} ${NGINX_HOST};
+    return 301 https://${NGINX_HOST}$request_uri;
+}
+
+server {
+    listen 443 ssl default_server;
+    server_name _;
+    server_name_in_redirect off;
+
+    # Load the Diffie-Hellman parameter.
+    ssl_dhparam /etc/letsencrypt/dhparams/dhparam.pem;
+
+    ssl_certificate /etc/letsencrypt/live/${NGINX_HOST}/fullchain.pem;
+    ssl_certificate_key /etc/letsencrypt/live/${NGINX_HOST}/privkey.pem;
+    ssl_trusted_certificate /etc/letsencrypt/live/${NGINX_HOST}/chain.pem;
+
+    return 444;
+}
+
+server {
+    listen 443 ssl;
+    server_name www.${NGINX_HOST};
+
+    # Load the Diffie-Hellman parameter.
+    ssl_dhparam /etc/letsencrypt/dhparams/dhparam.pem;
+
+    ssl_certificate /etc/letsencrypt/live/${NGINX_HOST}/fullchain.pem;
+    ssl_certificate_key /etc/letsencrypt/live/${NGINX_HOST}/privkey.pem;
+    ssl_trusted_certificate /etc/letsencrypt/live/${NGINX_HOST}/chain.pem;
+
+    return 301 https://${NGINX_HOST}$request_uri;
+}
+
+server {
+    listen 443 ssl http2;
+    server_name ${NGINX_HOST};
+
+    add_header Strict-Transport-Security "max-age=31536000" always;
+    add_header X-Content-Type-Options nosniff;
+    add_header X-XSS-Protection "1; mode=block";
+    add_header X-Frame-Options DENY;
+
+    # Load the Diffie-Hellman parameter.
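+    # dhparam.pem is generated ahead of time by letsencrypt_setup.sh
+    # (`openssl dhparam -out /etc/letsencrypt/dhparams/dhparam.pem 2048`).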
+    ssl_dhparam /etc/letsencrypt/dhparams/dhparam.pem;
+
+    ssl_certificate /etc/letsencrypt/live/${NGINX_HOST}/fullchain.pem;
+    ssl_certificate_key /etc/letsencrypt/live/${NGINX_HOST}/privkey.pem;
+    ssl_trusted_certificate /etc/letsencrypt/live/${NGINX_HOST}/chain.pem;
+
+    include /etc/nginx/config_helpers/brotli.conf;
+    include /etc/nginx/config_helpers/gzip.conf;
+
+    access_log /dev/stdout;
+    error_log /dev/stderr info;
+
+    client_max_body_size 100M;
+
+    location /static/ {
+        root /srv/;
+    }
+
+    location /media/ {
+        root /srv/;
+    }
+
+
+    location / {
+
+        proxy_pass_header Server;
+        proxy_redirect off;
+        proxy_set_header Host $http_host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-Proto $scheme;
+
+        proxy_pass http://app:8000/;
+    }
+}
+
+
+
diff --git a/noxfile.py b/noxfile.py
new file mode 100644
index 0000000..32ac165
--- /dev/null
+++ b/noxfile.py
@@ -0,0 +1,165 @@
+from __future__ import annotations
+
+import functools
+import os
+import subprocess
+import tempfile
+from pathlib import Path
+
+import nox
+
+os.environ["PDM_IGNORE_SAVED_PYTHON"] = "1"
+
+CI = os.environ.get("CI") is not None
+
+ROOT = Path(".")
+PYTHON_VERSIONS = ["3.11"]
+PYTHON_DEFAULT_VERSION = PYTHON_VERSIONS[-1]
+APP_ROOT = ROOT / "app" / "src"
+
+nox.options.default_venv_backend = "venv"
+nox.options.stop_on_first_error = True
+nox.options.reuse_existing_virtualenvs = not CI
+
+
+def install(session: nox.Session, *args):
+    groups = []
+    for group in args:
+        groups.extend(["--group", group])
+    session.run("pdm", "install", "--check", *groups, external=True)
+
+
+@functools.lru_cache
+def _list_files() -> list[Path]:
+    file_list = []
+    for cmd in (
+        ["git", "ls-files"],
+        ["git", "ls-files", "--others", "--exclude-standard"],
+    ):
+        cmd_result = subprocess.run(cmd, check=True, text=True, capture_output=True)
+        file_list.extend(cmd_result.stdout.splitlines())
+    file_paths = [Path(p) for p in file_list]
+    return file_paths
+
+
+def list_files(suffix: str | None = None) -> list[Path]:
+    """List all files not ignored by git."""
+    file_paths = _list_files()
+    if suffix is not None:
+        file_paths = [p for p in file_paths if p.suffix == suffix]
+    return file_paths
+
+
+def run_readable(session, mode="check"):
+    session.run(
+        "docker",
+        "run",
+        "--platform",
+        "linux/amd64",
+        "--rm",
+        "-v",
+        f"{ROOT.absolute()}:/data",
+        "-w",
+        "/data",
+        "ghcr.io/bobheadxi/readable:v0.5.0@sha256:423c133e7e9ca0ac20b0ab298bd5dbfa3df09b515b34cbfbbe8944310cc8d9c9",
+        mode,
+        "![.]**/*.md",
+        external=True,
+    )
+
+
+def run_shellcheck(session, mode="check"):
+    shellcheck_cmd = [
+        "docker",
+        "run",
+        "--platform",
+        "linux/amd64",  # while this image is multi-arch, we cannot use digest with multi-arch images
+        "--rm",
+        "-v",
+        f"{ROOT.absolute()}:/mnt",
+        "-w",
+        "/mnt",
+        "-q",
+        "koalaman/shellcheck:0.9.0@sha256:a527e2077f11f28c1c1ad1dc784b5bc966baeb3e34ef304a0ffa72699b01ad9c",
+    ]
+
+    files = list_files(suffix=".sh")
+    if not files:
+        session.log("No shell files found")
+        return
+    shellcheck_cmd.extend(files)
+
+    if mode == "fmt":
+        with tempfile.NamedTemporaryFile(mode="w+") as diff_file:
+            session.run(
+                *shellcheck_cmd,
+                "--format=diff",
+                external=True,
+                stdout=diff_file,
+                success_codes=[0, 1],
+            )
+            diff_file.seek(0)
+            diff = diff_file.read()
+            if len(diff.splitlines()) > 1:  # ignore single-line message
+                session.log("Applying shellcheck patch:\n%s", diff)
+                subprocess.run(
+                    ["patch", "-p1"],
+                    input=diff,
+                    text=True,
+                    check=True,
+                )
+
+    session.run(*shellcheck_cmd, external=True)
+
+
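+# Example invocations of the sessions defined below -- a sketch assuming the
+# `nox` CLI is available on your PATH; `<pattern>` is a placeholder:
+#
+#   nox -s format                # lint and apply fixes in-place
+#   nox -s lint                  # read-only variant of the above
+#   nox -s test -- -k <pattern>  # args after `--` are forwarded to pytest
+
+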
+@nox.session(name="format", python=PYTHON_DEFAULT_VERSION) +def format_(session): + """Lint the code and apply fixes in-place whenever possible.""" + install(session, "format") + session.run("ruff", "check", "--fix", ".") + run_shellcheck(session, mode="fmt") + run_readable(session, mode="fmt") + session.run("ruff", "format", ".") + + +@nox.session(python=PYTHON_DEFAULT_VERSION) +def lint(session): + """Run linters in readonly mode.""" + install(session, "lint") + session.run("ruff", "check", "--diff", ".") + session.run("codespell", ".") + run_shellcheck(session, mode="check") + run_readable(session, mode="check") + session.run("ruff", "format", "--diff", ".") + + +@nox.session(python=PYTHON_DEFAULT_VERSION) +def type_check(session): + install(session, "type_check") + with session.chdir(str(APP_ROOT)): + session.run("mypy", "--config-file", "mypy.ini", ".", *session.posargs) + + +@nox.session(python=PYTHON_DEFAULT_VERSION) +def security_check(session): + install(session, "security_check") + with session.chdir(str(APP_ROOT)): + session.run("bandit", "--ini", "bandit.ini", "-r", ".", *session.posargs) + + +@nox.session(python=PYTHON_VERSIONS) +def test(session): + install(session, "test") + with session.chdir(str(APP_ROOT)): + session.run( + "pytest", + "-W", + "ignore::DeprecationWarning", + "-s", + "-x", + "-vv", + "-n", + "auto", + "bittensor_panel", + *session.posargs, + ) diff --git a/pdm.lock b/pdm.lock new file mode 100644 index 0000000..6c96e19 --- /dev/null +++ b/pdm.lock @@ -0,0 +1,1322 @@ +# This file is @generated by PDM. +# It is not intended for manual editing. + +[metadata] +groups = ["default", "lint", "security_check", "test", "type_check"] +strategy = ["cross_platform", "inherit_metadata"] +lock_version = "4.4.1" +content_hash = "sha256:79691e9955cbcb2d7a461380daebf16c6f4167a02d6817f15ad85d855cd6101c" + +[[package]] +name = "amqp" +version = "5.2.0" +requires_python = ">=3.6" +summary = "Low-level AMQP client for Python (fork of amqplib)." 
+groups = ["default"] +dependencies = [ + "vine<6.0.0,>=5.0.0", +] +files = [ + {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, + {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, +] + +[[package]] +name = "appnope" +version = "0.1.4" +requires_python = ">=3.6" +summary = "Disable App Nap on macOS >= 10.9" +groups = ["default", "test"] +marker = "sys_platform == \"darwin\"" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "argcomplete" +version = "3.3.0" +requires_python = ">=3.8" +summary = "Bash tab completion for argparse" +groups = ["default"] +files = [ + {file = "argcomplete-3.3.0-py3-none-any.whl", hash = "sha256:c168c3723482c031df3c207d4ba8fa702717ccb9fc0bfe4117166c1f537b4a54"}, + {file = "argcomplete-3.3.0.tar.gz", hash = "sha256:fd03ff4a5b9e6580569d34b273f741e85cd9e072f3feeeee3eba4891c70eda62"}, +] + +[[package]] +name = "asgiref" +version = "3.8.1" +requires_python = ">=3.8" +summary = "ASGI specs, helper code, and adapters" +groups = ["default", "type_check"] +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +summary = "Annotate AST trees with source code positions" +groups = ["default", "test"] +dependencies = [ + "six>=1.12.0", +] +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +requires_python = ">=3.7" +summary = "Timeout context manager for asyncio programs" +groups = ["default"] +marker = "python_full_version <= \"3.11.2\"" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "backcall" +version = "0.2.0" +summary = "Specifications for callback functions passed in to an API" +groups = ["default", "test"] +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + +[[package]] +name = "bandit" +version = "1.7.8" +requires_python = ">=3.8" +summary = "Security oriented static analyser for python code." 
+groups = ["security_check"] +dependencies = [ + "PyYAML>=5.3.1", + "colorama>=0.3.9; platform_system == \"Windows\"", + "rich", + "stevedore>=1.20.0", +] +files = [ + {file = "bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"}, + {file = "bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b"}, +] + +[[package]] +name = "billiard" +version = "4.2.0" +requires_python = ">=3.7" +summary = "Python multiprocessing fork with improvements and bugfixes" +groups = ["default"] +files = [ + {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, + {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, +] + +[[package]] +name = "celery" +version = "5.3.6" +requires_python = ">=3.8" +summary = "Distributed Task Queue." +groups = ["default"] +dependencies = [ + "billiard<5.0,>=4.2.0", + "click-didyoumean>=0.3.0", + "click-plugins>=1.1.1", + "click-repl>=0.2.0", + "click<9.0,>=8.1.2", + "kombu<6.0,>=5.3.4", + "python-dateutil>=2.8.2", + "tzdata>=2022.7", + "vine<6.0,>=5.1.0", +] +files = [ + {file = "celery-5.3.6-py3-none-any.whl", hash = "sha256:9da4ea0118d232ce97dff5ed4974587fb1c0ff5c10042eb15278487cdd27d1af"}, + {file = "celery-5.3.6.tar.gz", hash = "sha256:870cc71d737c0200c397290d730344cc991d13a057534353d124c9380267aab9"}, +] + +[[package]] +name = "certifi" +version = "2024.2.2" +requires_python = ">=3.6" +summary = "Python package for providing Mozilla's CA Bundle." +groups = ["default", "type_check"] +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +requires_python = ">=3.7.0" +summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+groups = ["type_check"] +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +requires_python = ">=3.7" +summary = "Composable command line interface toolkit" +groups = ["default"] +dependencies = [ + "colorama; platform_system == \"Windows\"", +] +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[[package]] +name = "click-didyoumean" +version = "0.3.1" +requires_python = ">=3.6.2" +summary = "Enables git-like *did-you-mean* feature in click" +groups = ["default"] +dependencies = [ + "click>=7", +] +files = [ + {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = 
"sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, + {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, +] + +[[package]] +name = "click-plugins" +version = "1.1.1" +summary = "An extension module for click to enable registering CLI commands via setuptools entry-points." +groups = ["default"] +dependencies = [ + "click>=4.0", +] +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[[package]] +name = "click-repl" +version = "0.3.0" +requires_python = ">=3.6" +summary = "REPL plugin for Click" +groups = ["default"] +dependencies = [ + "click>=7.0", + "prompt-toolkit>=3.0.36", +] +files = [ + {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, + {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, +] + +[[package]] +name = "codespell" +version = "2.2.6" +requires_python = ">=3.8" +summary = "Codespell" +groups = ["lint"] +files = [ + {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"}, + {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"}, +] + +[[package]] +name = "codespell" +version = "2.2.6" +extras = ["toml"] +requires_python = ">=3.8" +summary = "Codespell" +groups = ["lint"] +dependencies = [ + "codespell==2.2.6", +] +files = [ + {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"}, + {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." +groups = ["default", "security_check", "test"] +marker = "platform_system == \"Windows\" or sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorlog" +version = "6.8.2" +requires_python = ">=3.6" +summary = "Add colours to the output of Python's logging module." 
+groups = ["default"] +dependencies = [ + "colorama; sys_platform == \"win32\"", +] +files = [ + {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, + {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +requires_python = ">=3.5" +summary = "Decorators for Humans" +groups = ["default", "test"] +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "distlib" +version = "0.3.8" +summary = "Distribution utilities" +groups = ["default"] +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "django" +version = "4.2.11" +requires_python = ">=3.8" +summary = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." +groups = ["default", "type_check"] +dependencies = [ + "asgiref<4,>=3.6.0", + "sqlparse>=0.3.1", + "tzdata; sys_platform == \"win32\"", +] +files = [ + {file = "Django-4.2.11-py3-none-any.whl", hash = "sha256:ddc24a0a8280a0430baa37aff11f28574720af05888c62b7cfe71d219f4599d3"}, + {file = "Django-4.2.11.tar.gz", hash = "sha256:6e6ff3db2d8dd0c986b4eec8554c8e4f919b5c1ff62a5b4390c17aff2ed6e5c4"}, +] + +[[package]] +name = "django-constance" +version = "3.1.0" +requires_python = ">=3.7" +summary = "Django live settings with pluggable backends, including Redis." +groups = ["default"] +dependencies = [ + "django-picklefield", +] +files = [ + {file = "django-constance-3.1.0.tar.gz", hash = "sha256:2b96e51de63751ef63f8f92f74e0f6aea30fb6453f3a736c21e1f8b3f6cf0b4f"}, + {file = "django_constance-3.1.0-py3-none-any.whl", hash = "sha256:6242486a346e396d765a9333d17f3101c8613cabc92e0b98dcb70c2a391bc53b"}, +] + +[[package]] +name = "django-constance" +version = "3.1.0" +extras = ["database"] +requires_python = ">=3.7" +summary = "Django live settings with pluggable backends, including Redis." +groups = ["default"] +dependencies = [ + "django-constance==3.1.0", +] +files = [ + {file = "django-constance-3.1.0.tar.gz", hash = "sha256:2b96e51de63751ef63f8f92f74e0f6aea30fb6453f3a736c21e1f8b3f6cf0b4f"}, + {file = "django_constance-3.1.0-py3-none-any.whl", hash = "sha256:6242486a346e396d765a9333d17f3101c8613cabc92e0b98dcb70c2a391bc53b"}, +] + +[[package]] +name = "django-cors-headers" +version = "4.2.0" +requires_python = ">=3.8" +summary = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." +groups = ["default"] +dependencies = [ + "Django>=3.2", +] +files = [ + {file = "django_cors_headers-4.2.0-py3-none-any.whl", hash = "sha256:9ada212b0e2efd4a5e339360ffc869cb21ac5605e810afe69f7308e577ea5bde"}, + {file = "django_cors_headers-4.2.0.tar.gz", hash = "sha256:f9749c6410fe738278bc2b6ef17f05195bc7b251693c035752d8257026af024f"}, +] + +[[package]] +name = "django-debug-toolbar" +version = "4.1.0" +requires_python = ">=3.8" +summary = "A configurable set of panels that display various debug information about the current request/response." 
+groups = ["default"] +dependencies = [ + "django>=3.2.4", + "sqlparse>=0.2", +] +files = [ + {file = "django_debug_toolbar-4.1.0-py3-none-any.whl", hash = "sha256:a0b532ef5d52544fd745d1dcfc0557fa75f6f0d1962a8298bd568427ef2fa436"}, + {file = "django_debug_toolbar-4.1.0.tar.gz", hash = "sha256:f57882e335593cb8e74c2bda9f1116bbb9ca8fc0d81b50a75ace0f83de5173c7"}, +] + +[[package]] +name = "django-environ" +version = "0.11.2" +requires_python = ">=3.6,<4" +summary = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." +groups = ["default"] +files = [ + {file = "django-environ-0.11.2.tar.gz", hash = "sha256:f32a87aa0899894c27d4e1776fa6b477e8164ed7f6b3e410a62a6d72caaf64be"}, + {file = "django_environ-0.11.2-py2.py3-none-any.whl", hash = "sha256:0ff95ab4344bfeff693836aa978e6840abef2e2f1145adff7735892711590c05"}, +] + +[[package]] +name = "django-extensions" +version = "3.2.3" +requires_python = ">=3.6" +summary = "Extensions for Django" +groups = ["default"] +dependencies = [ + "Django>=3.2", +] +files = [ + {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, + {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, +] + +[[package]] +name = "django-ipware" +version = "7.0.1" +requires_python = ">=3.8" +summary = "A Django application to retrieve user's IP address" +groups = ["default"] +dependencies = [ + "python-ipware>=2.0.3", +] +files = [ + {file = "django-ipware-7.0.1.tar.gz", hash = "sha256:d9ec43d2bf7cdf216fed8d494a084deb5761a54860a53b2e74346a4f384cff47"}, + {file = "django_ipware-7.0.1-py2.py3-none-any.whl", hash = "sha256:db16bbee920f661ae7f678e4270460c85850f03c6761a4eaeb489bdc91f64709"}, +] + +[[package]] +name = "django-picklefield" +version = "3.2" +requires_python = ">=3" +summary = "Pickled object field for Django" +groups = ["default"] +dependencies = [ + "Django>=3.2", +] +files = [ + {file = "django-picklefield-3.2.tar.gz", hash = "sha256:aa463f5d79d497dbe789f14b45180f00a51d0d670067d0729f352a3941cdfa4d"}, + {file = "django_picklefield-3.2-py3-none-any.whl", hash = "sha256:e9a73539d110f69825d9320db18bcb82e5189ff48dbed41821c026a20497764c"}, +] + +[[package]] +name = "django-probes" +version = "1.7.0" +summary = "Make Django wait until database is ready. Probes for Docker and Kubernetes." 
+groups = ["default"] +files = [ + {file = "django_probes-1.7.0-py3-none-any.whl", hash = "sha256:60e656b83cbd0e290fca8263b256d7ebc42452f8c55a07044e56736f986c99a5"}, + {file = "django_probes-1.7.0.tar.gz", hash = "sha256:bb54c9db54f9c6aaadb032fcf77dfb6c6b17e597df1585d667cfe7491d2747d0"}, +] + +[[package]] +name = "django-structlog" +version = "8.0.0" +requires_python = ">=3.8" +summary = "Structured Logging for Django" +groups = ["default"] +dependencies = [ + "asgiref>=3.6.0", + "django-ipware>=6.0.2", + "django>=3.2", + "structlog>=21.4.0", +] +files = [ + {file = "django-structlog-8.0.0.tar.gz", hash = "sha256:e439ae173d8d852b5f991ff7528d8ce5b0ae003cc6ea513215334b3204aee09c"}, + {file = "django_structlog-8.0.0-py3-none-any.whl", hash = "sha256:8431db72e9093850f4ec952a74768619667ff43987e1559526de39a82e5869a7"}, +] + +[[package]] +name = "django-structlog" +version = "8.0.0" +extras = ["celery"] +requires_python = ">=3.8" +summary = "Structured Logging for Django" +groups = ["default"] +dependencies = [ + "celery>=5.1", + "django-structlog==8.0.0", +] +files = [ + {file = "django-structlog-8.0.0.tar.gz", hash = "sha256:e439ae173d8d852b5f991ff7528d8ce5b0ae003cc6ea513215334b3204aee09c"}, + {file = "django_structlog-8.0.0-py3-none-any.whl", hash = "sha256:8431db72e9093850f4ec952a74768619667ff43987e1559526de39a82e5869a7"}, +] + +[[package]] +name = "django-stubs" +version = "5.0.0" +requires_python = ">=3.8" +summary = "Mypy stubs for Django" +groups = ["type_check"] +dependencies = [ + "asgiref", + "django", + "django-stubs-ext>=5.0.0", + "types-PyYAML", + "typing-extensions", +] +files = [ + {file = "django_stubs-5.0.0-py3-none-any.whl", hash = "sha256:084484cbe16a6d388e80ec687e46f529d67a232f3befaf55c936b3b476be289d"}, + {file = "django_stubs-5.0.0.tar.gz", hash = "sha256:b8a792bee526d6cab31e197cb414ee7fa218abd931a50948c66a80b3a2548621"}, +] + +[[package]] +name = "django-stubs-ext" +version = "5.0.0" +requires_python = ">=3.8" +summary = "Monkey-patching and extensions for django-stubs" +groups = ["type_check"] +dependencies = [ + "django", + "typing-extensions", +] +files = [ + {file = "django_stubs_ext-5.0.0-py3-none-any.whl", hash = "sha256:8e1334fdf0c8bff87e25d593b33d4247487338aaed943037826244ff788b56a8"}, + {file = "django_stubs_ext-5.0.0.tar.gz", hash = "sha256:5bacfbb498a206d5938454222b843d81da79ea8b6fcd1a59003f529e775bc115"}, +] + +[[package]] +name = "django-stubs" +version = "5.0.0" +extras = ["compatible-mypy"] +requires_python = ">=3.8" +summary = "Mypy stubs for Django" +groups = ["type_check"] +dependencies = [ + "django-stubs==5.0.0", + "mypy~=1.10.0", +] +files = [ + {file = "django_stubs-5.0.0-py3-none-any.whl", hash = "sha256:084484cbe16a6d388e80ec687e46f529d67a232f3befaf55c936b3b476be289d"}, + {file = "django_stubs-5.0.0.tar.gz", hash = "sha256:b8a792bee526d6cab31e197cb414ee7fa218abd931a50948c66a80b3a2548621"}, +] + +[[package]] +name = "djangorestframework-stubs" +version = "3.15.0" +requires_python = ">=3.8" +summary = "PEP-484 stubs for django-rest-framework" +groups = ["type_check"] +dependencies = [ + "django-stubs>=5.0.0", + "requests>=2.0.0", + "types-PyYAML>=5.4.3", + "types-requests>=0.1.12", + "typing-extensions>=3.10.0", +] +files = [ + {file = "djangorestframework_stubs-3.15.0-py3-none-any.whl", hash = "sha256:6c634f16fe1f9b1654cfd921eca64cd4188ce8534ab5e3ec7e44aaa0ca969d93"}, + {file = "djangorestframework_stubs-3.15.0.tar.gz", hash = "sha256:f60ee1c80abb01a77acc0169969e07c45c2739ae64667b9a0dd4a2e32697dcab"}, +] + +[[package]] +name = 
"djangorestframework-stubs" +version = "3.15.0" +extras = ["compatible-mypy"] +requires_python = ">=3.8" +summary = "PEP-484 stubs for django-rest-framework" +groups = ["type_check"] +dependencies = [ + "django-stubs[compatible-mypy]", + "djangorestframework-stubs==3.15.0", + "mypy~=1.10.0", +] +files = [ + {file = "djangorestframework_stubs-3.15.0-py3-none-any.whl", hash = "sha256:6c634f16fe1f9b1654cfd921eca64cd4188ce8534ab5e3ec7e44aaa0ca969d93"}, + {file = "djangorestframework_stubs-3.15.0.tar.gz", hash = "sha256:f60ee1c80abb01a77acc0169969e07c45c2739ae64667b9a0dd4a2e32697dcab"}, +] + +[[package]] +name = "execnet" +version = "2.1.1" +requires_python = ">=3.8" +summary = "execnet: rapid multi-Python deployment" +groups = ["test"] +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[[package]] +name = "executing" +version = "2.0.1" +requires_python = ">=3.5" +summary = "Get the currently executing AST node of a frame, and other information" +groups = ["default", "test"] +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[[package]] +name = "filelock" +version = "3.14.0" +requires_python = ">=3.8" +summary = "A platform independent file lock." +groups = ["default"] +files = [ + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, +] + +[[package]] +name = "freezegun" +version = "1.5.0" +requires_python = ">=3.7" +summary = "Let your Python tests travel through time" +groups = ["test"] +dependencies = [ + "python-dateutil>=2.7", +] +files = [ + {file = "freezegun-1.5.0-py3-none-any.whl", hash = "sha256:ec3f4ba030e34eb6cf7e1e257308aee2c60c3d038ff35996d7475760c9ff3719"}, + {file = "freezegun-1.5.0.tar.gz", hash = "sha256:200a64359b363aa3653d8aac289584078386c7c3da77339d257e46a01fb5c77c"}, +] + +[[package]] +name = "gunicorn" +version = "20.1.0" +requires_python = ">=3.5" +summary = "WSGI HTTP Server for UNIX" +groups = ["default"] +dependencies = [ + "setuptools>=3.0", +] +files = [ + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] + +[[package]] +name = "idna" +version = "3.7" +requires_python = ">=3.5" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["type_check"] +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +requires_python = ">=3.7" +summary = "brain-dead simple config-ini parsing" +groups = ["test"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = 
"sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipdb" +version = "0.13.13" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +summary = "IPython-enabled pdb" +groups = ["test"] +dependencies = [ + "decorator; python_version >= \"3.11\"", + "ipython>=7.31.1; python_version >= \"3.11\"", +] +files = [ + {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"}, + {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"}, +] + +[[package]] +name = "ipython" +version = "8.14.0" +requires_python = ">=3.9" +summary = "IPython: Productive Interactive Computing" +groups = ["default", "test"] +dependencies = [ + "appnope; sys_platform == \"darwin\"", + "backcall", + "colorama; sys_platform == \"win32\"", + "decorator", + "jedi>=0.16", + "matplotlib-inline", + "pexpect>4.3; sys_platform != \"win32\"", + "pickleshare", + "prompt-toolkit!=3.0.37,<3.1.0,>=3.0.30", + "pygments>=2.4.0", + "stack-data", + "traitlets>=5", +] +files = [ + {file = "ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, + {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, +] + +[[package]] +name = "jedi" +version = "0.19.1" +requires_python = ">=3.6" +summary = "An autocompletion tool for Python that can be used for text editors." +groups = ["default", "test"] +dependencies = [ + "parso<0.9.0,>=0.8.3", +] +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[[package]] +name = "kombu" +version = "5.3.7" +requires_python = ">=3.8" +summary = "Messaging library for Python." +groups = ["default"] +dependencies = [ + "amqp<6.0.0,>=5.1.1", + "vine", +] +files = [ + {file = "kombu-5.3.7-py3-none-any.whl", hash = "sha256:5634c511926309c7f9789f1433e9ed402616b56836ef9878f01bd59267b4c7a9"}, + {file = "kombu-5.3.7.tar.gz", hash = "sha256:011c4cd9a355c14a1de8d35d257314a1d2456d52b7140388561acac3cf1a97bf"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +requires_python = ">=3.8" +summary = "Python port of markdown-it. Markdown parsing, done right!" 
+groups = ["security_check"] +dependencies = [ + "mdurl~=0.1", +] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +requires_python = ">=3.8" +summary = "Inline Matplotlib backend for Jupyter" +groups = ["default", "test"] +dependencies = [ + "traitlets", +] +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +requires_python = ">=3.7" +summary = "Markdown URL utilities" +groups = ["security_check"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.10.0" +requires_python = ">=3.8" +summary = "Optional static typing for Python" +groups = ["type_check"] +dependencies = [ + "mypy-extensions>=1.0.0", + "typing-extensions>=4.1.0", +] +files = [ + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +requires_python = ">=3.5" +summary = "Type system extensions for programs checked with the mypy type checker." +groups = ["type_check"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nox" +version = "2023.4.22" +requires_python = ">=3.7" +summary = "Flexible test automation." 
+groups = ["default"] +dependencies = [ + "argcomplete<4.0,>=1.9.4", + "colorlog<7.0.0,>=2.6.1", + "packaging>=20.9", + "virtualenv>=14", +] +files = [ + {file = "nox-2023.4.22-py3-none-any.whl", hash = "sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891"}, + {file = "nox-2023.4.22.tar.gz", hash = "sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f"}, +] + +[[package]] +name = "packaging" +version = "24.0" +requires_python = ">=3.7" +summary = "Core utilities for Python packages" +groups = ["default", "test"] +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +requires_python = ">=3.6" +summary = "A Python Parser" +groups = ["default", "test"] +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[[package]] +name = "pbr" +version = "6.0.0" +requires_python = ">=2.6" +summary = "Python Build Reasonableness" +groups = ["security_check"] +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +summary = "Pexpect allows easy control of interactive console applications." +groups = ["default", "test"] +marker = "sys_platform != \"win32\"" +dependencies = [ + "ptyprocess>=0.5", +] +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[[package]] +name = "pickleshare" +version = "0.7.5" +summary = "Tiny 'shelve'-like database with concurrency support" +groups = ["default", "test"] +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.1" +requires_python = ">=3.8" +summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+groups = ["default"] +files = [ + {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, + {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +requires_python = ">=3.8" +summary = "plugin and hook calling mechanisms for python" +groups = ["test"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.43" +requires_python = ">=3.7.0" +summary = "Library for building powerful interactive command lines in Python" +groups = ["default", "test"] +dependencies = [ + "wcwidth", +] +files = [ + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.9" +requires_python = ">=3.7" +summary = "psycopg2 - Python-PostgreSQL Database Adapter" +groups = ["default"] +files = [ + {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +summary = "Run a subprocess in a pseudo terminal" +groups = 
["default", "test"] +marker = "sys_platform != \"win32\"" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +summary = "Safely evaluate AST nodes without side effects" +groups = ["default", "test"] +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +requires_python = ">=3.7" +summary = "Pygments is a syntax highlighting package written in Python." +groups = ["default", "security_check", "test"] +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[[package]] +name = "pytest" +version = "8.2.0" +requires_python = ">=3.8" +summary = "pytest: simple powerful testing with Python" +groups = ["test"] +dependencies = [ + "colorama; sys_platform == \"win32\"", + "iniconfig", + "packaging", + "pluggy<2.0,>=1.5", +] +files = [ + {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, + {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, +] + +[[package]] +name = "pytest-django" +version = "4.8.0" +requires_python = ">=3.8" +summary = "A Django plugin for pytest." 
+groups = ["test"] +dependencies = [ + "pytest>=7.0.0", +] +files = [ + {file = "pytest-django-4.8.0.tar.gz", hash = "sha256:5d054fe011c56f3b10f978f41a8efb2e5adfc7e680ef36fb571ada1f24779d90"}, + {file = "pytest_django-4.8.0-py3-none-any.whl", hash = "sha256:ca1ddd1e0e4c227cf9e3e40a6afc6d106b3e70868fd2ac5798a22501271cd0c7"}, +] + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +requires_python = ">=3.8" +summary = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +groups = ["test"] +dependencies = [ + "execnet>=2.1", + "pytest>=7.0.0", +] +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +summary = "Extensions to the standard Python datetime module" +groups = ["default", "test"] +dependencies = [ + "six>=1.5", +] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[[package]] +name = "python-ipware" +version = "3.0.0" +requires_python = ">=3.7" +summary = "A Python package to retrieve user's IP address" +groups = ["default"] +files = [ + {file = "python_ipware-3.0.0-py3-none-any.whl", hash = "sha256:fc936e6e7ec9fcc107f9315df40658f468ac72f739482a707181742882e36b60"}, + {file = "python_ipware-3.0.0.tar.gz", hash = "sha256:9117b1c4dddcb5d5ca49e6a9617de2fc66aec2ef35394563ac4eecabdf58c062"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +requires_python = ">=3.6" +summary = "YAML parser and emitter for Python" +groups = ["security_check"] +files = [ + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "redis" +version = "4.6.0" +requires_python = ">=3.7" +summary = "Python client for Redis database and key-value store" +groups = ["default"] +dependencies = [ + "async-timeout>=4.0.2; python_full_version <= 
\"3.11.2\"", +] +files = [ + {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, + {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +requires_python = ">=3.7" +summary = "Python HTTP for Humans." +groups = ["type_check"] +dependencies = [ + "certifi>=2017.4.17", + "charset-normalizer<4,>=2", + "idna<4,>=2.5", + "urllib3<3,>=1.21.1", +] +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[[package]] +name = "rich" +version = "13.7.1" +requires_python = ">=3.7.0" +summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +groups = ["security_check"] +dependencies = [ + "markdown-it-py>=2.2.0", + "pygments<3.0.0,>=2.13.0", +] +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[[package]] +name = "ruff" +version = "0.4.2" +requires_python = ">=3.7" +summary = "An extremely fast Python linter and code formatter, written in Rust." +groups = ["lint"] +files = [ + {file = "ruff-0.4.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d14dc8953f8af7e003a485ef560bbefa5f8cc1ad994eebb5b12136049bbccc5"}, + {file = "ruff-0.4.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:24016ed18db3dc9786af103ff49c03bdf408ea253f3cb9e3638f39ac9cf2d483"}, + {file = "ruff-0.4.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2e06459042ac841ed510196c350ba35a9b24a643e23db60d79b2db92af0c2b"}, + {file = "ruff-0.4.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3afabaf7ba8e9c485a14ad8f4122feff6b2b93cc53cd4dad2fd24ae35112d5c5"}, + {file = "ruff-0.4.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799eb468ea6bc54b95527143a4ceaf970d5aa3613050c6cff54c85fda3fde480"}, + {file = "ruff-0.4.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ec4ba9436a51527fb6931a8839af4c36a5481f8c19e8f5e42c2f7ad3a49f5069"}, + {file = "ruff-0.4.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a2243f8f434e487c2a010c7252150b1fdf019035130f41b77626f5655c9ca22"}, + {file = "ruff-0.4.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8772130a063f3eebdf7095da00c0b9898bd1774c43b336272c3e98667d4fb8fa"}, + {file = "ruff-0.4.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab165ef5d72392b4ebb85a8b0fbd321f69832a632e07a74794c0e598e7a8376"}, + {file = "ruff-0.4.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f32cadf44c2020e75e0c56c3408ed1d32c024766bd41aedef92aa3ca28eef68"}, + {file = "ruff-0.4.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:22e306bf15e09af45ca812bc42fa59b628646fa7c26072555f278994890bc7ac"}, + {file = "ruff-0.4.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:82986bb77ad83a1719c90b9528a9dd663c9206f7c0ab69282af8223566a0c34e"}, + {file = "ruff-0.4.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:652e4ba553e421a6dc2a6d4868bc3b3881311702633eb3672f9f244ded8908cd"}, + 
{file = "ruff-0.4.2-py3-none-win32.whl", hash = "sha256:7891ee376770ac094da3ad40c116258a381b86c7352552788377c6eb16d784fe"}, + {file = "ruff-0.4.2-py3-none-win_amd64.whl", hash = "sha256:5ec481661fb2fd88a5d6cf1f83403d388ec90f9daaa36e40e2c003de66751798"}, + {file = "ruff-0.4.2-py3-none-win_arm64.whl", hash = "sha256:cbd1e87c71bca14792948c4ccb51ee61c3296e164019d2d484f3eaa2d360dfaf"}, + {file = "ruff-0.4.2.tar.gz", hash = "sha256:33bcc160aee2520664bc0859cfeaebc84bb7323becff3f303b8f1f2d81cb4edc"}, +] + +[[package]] +name = "sentry-sdk" +version = "1.3.0" +summary = "Python client for Sentry (https://sentry.io)" +groups = ["default"] +dependencies = [ + "certifi", + "urllib3>=1.10.0", +] +files = [ + {file = "sentry-sdk-1.3.0.tar.gz", hash = "sha256:5210a712dd57d88d225c1fc3fe3a3626fee493637bcd54e204826cf04b8d769c"}, + {file = "sentry_sdk-1.3.0-py2.py3-none-any.whl", hash = "sha256:6864dcb6f7dec692635e5518c2a5c80010adf673c70340817f1a1b713d65bb41"}, +] + +[[package]] +name = "setuptools" +version = "69.5.1" +requires_python = ">=3.8" +summary = "Easily download, build, install, upgrade, and uninstall Python packages" +groups = ["default"] +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[[package]] +name = "six" +version = "1.16.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +summary = "Python 2 and 3 compatibility utilities" +groups = ["default", "test"] +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sqlparse" +version = "0.5.0" +requires_python = ">=3.8" +summary = "A non-validating SQL parser." 
+groups = ["default", "type_check"] +files = [ + {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"}, + {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +summary = "Extract data from python stack frames and tracebacks for informative displays" +groups = ["default", "test"] +dependencies = [ + "asttokens>=2.1.0", + "executing>=1.2.0", + "pure-eval", +] +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[[package]] +name = "stevedore" +version = "5.2.0" +requires_python = ">=3.8" +summary = "Manage dynamic plugins for Python applications" +groups = ["security_check"] +dependencies = [ + "pbr!=2.1.0,>=2.0.0", +] +files = [ + {file = "stevedore-5.2.0-py3-none-any.whl", hash = "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9"}, + {file = "stevedore-5.2.0.tar.gz", hash = "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"}, +] + +[[package]] +name = "structlog" +version = "24.1.0" +requires_python = ">=3.8" +summary = "Structured Logging for Python" +groups = ["default"] +files = [ + {file = "structlog-24.1.0-py3-none-any.whl", hash = "sha256:3f6efe7d25fab6e86f277713c218044669906537bb717c1807a09d46bca0714d"}, + {file = "structlog-24.1.0.tar.gz", hash = "sha256:41a09886e4d55df25bdcb9b5c9674bccfab723ff43e0a86a1b7b236be8e57b16"}, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +requires_python = ">=3.8" +summary = "Traitlets Python configuration system" +groups = ["default", "test"] +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[[package]] +name = "types-freezegun" +version = "1.1.10" +summary = "Typing stubs for freezegun" +groups = ["type_check"] +files = [ + {file = "types-freezegun-1.1.10.tar.gz", hash = "sha256:cb3a2d2eee950eacbaac0673ab50499823365ceb8c655babb1544a41446409ec"}, + {file = "types_freezegun-1.1.10-py3-none-any.whl", hash = "sha256:fadebe72213e0674036153366205038e1f95c8ca96deb4ef9b71ddc15413543e"}, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20240316" +requires_python = ">=3.8" +summary = "Typing stubs for python-dateutil" +groups = ["type_check"] +files = [ + {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, + {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240311" +requires_python = ">=3.8" +summary = "Typing stubs for PyYAML" +groups = ["type_check"] +files = [ + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, +] + +[[package]] +name = "types-requests" +version = "2.31.0.20240406" +requires_python = ">=3.8" 
+summary = "Typing stubs for requests" +groups = ["type_check"] +dependencies = [ + "urllib3>=2", +] +files = [ + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["type_check"] +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +requires_python = ">=2" +summary = "Provider of IANA time zone data" +groups = ["default", "type_check"] +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +requires_python = ">=3.8" +summary = "HTTP library with thread-safe connection pooling, file post, and more." +groups = ["default", "type_check"] +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[[package]] +name = "vine" +version = "5.1.0" +requires_python = ">=3.6" +summary = "Python promises." 
+groups = ["default"] +files = [ + {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, + {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, +] + +[[package]] +name = "virtualenv" +version = "20.26.1" +requires_python = ">=3.7" +summary = "Virtual Python Environment builder" +groups = ["default"] +dependencies = [ + "distlib<1,>=0.3.7", + "filelock<4,>=3.12.2", + "platformdirs<5,>=3.9.1", +] +files = [ + {file = "virtualenv-20.26.1-py3-none-any.whl", hash = "sha256:7aa9982a728ae5892558bff6a2839c00b9ed145523ece2274fad6f414690ae75"}, + {file = "virtualenv-20.26.1.tar.gz", hash = "sha256:604bfdceaeece392802e6ae48e69cec49168b9c5f4a44e483963f9242eb0e78b"}, +] + +[[package]] +name = "wcwidth" +version = "0.2.13" +summary = "Measures the displayed width of unicode strings in a terminal" +groups = ["default", "test"] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] diff --git a/promtail/config.yml b/promtail/config.yml new file mode 100644 index 0000000..85605e4 --- /dev/null +++ b/promtail/config.yml @@ -0,0 +1,27 @@ +server: + http_listen_port: 9080 + grpc_listen_port: 0 + +positions: + filename: /tmp/positions.yaml + +clients: + - url: "${LOKI_URL}/api/prom/push" + basic_auth: + username: "${LOKI_USER}" + password: "${LOKI_PASSWORD}" + external_labels: + client: "${LOKI_CLIENT}" + client_server_group: "${LOKI_CLIENT_SERVER_GROUP}" + +scrape_configs: + - job_name: containerlogs + docker_sd_configs: + - host: unix:///var/run/docker.sock + refresh_interval: ${LOKI_REFRESH_INTERVAL} + relabel_configs: + - source_labels: ["__meta_docker_container_name"] + regex: "/(.*)" + target_label: "container" + - source_labels: ["__meta_docker_container_log_stream"] + target_label: "logstream" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..734638c --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,77 @@ +[project] +name = "bittensor-panel" +requires-python = "==3.11.*" +version = "0" +dependencies = [ + "Django~=4.2.4", + "django-constance[database]==3.1.0", + "django-cors-headers~=4.2.0", + "django-environ~=0.11.2", + "django-extensions==3.2.3", + "django-probes==1.7.0", + "django-debug-toolbar==4.1.0", + "django-structlog[celery]==8.0.0", + "celery~=5.3.1", + "gunicorn==20.1.0", + "psycopg2-binary~=2.9.7", + "redis~=4.6.0", + "sentry-sdk==1.3.0", + "ipython~=8.14.0", + "nox==2023.4.22", + ] + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[tool.pdm] +distribution = false + +[tool.pdm.dev-dependencies] +test = [ + 'pytest', + 'pytest-django', + 'pytest-xdist', + 'ipdb', + 'freezegun', +] +lint = [ + "ruff", + "codespell[toml]", +] +type_check = [ + "django-stubs[compatible-mypy]", + "djangorestframework-stubs[compatible-mypy]", + "mypy", + "types-freezegun", + "types-python-dateutil", + "types-requests", +] +security_check = [ + "bandit>=1.7.7", +] + +[tool.ruff] +src = ["app/src"] +line-length = 120 + +[tool.ruff.lint] +# TODO add D +select = [ + "E", "F", "I", "UP", + "TCH005", +] +# TODO: remove E501 once docstrings are formatted +ignore = [ + "D100", "D105", "D107", "D200", "D202", "D203", "D205", "D212", "D400", "D401", "D415", + "D101", "D102","D103", "D104", # TODO remove once we have docstring for all 
public methods + "E501", # TODO: remove E501 once docstrings are formatted +] + +[tool.ruff.lint.per-file-ignores] +"__init__.py" = ["F401"] +"test/**" = ["D", "F403", "F405"] + +[tool.codespell] +skip = '*.min.js,pdm.lock,*/monitoring_certs/*' +ignore-words-list = 'datas' \ No newline at end of file diff --git a/setup-dev.sh b/setup-dev.sh new file mode 100755 index 0000000..492e735 --- /dev/null +++ b/setup-dev.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +# Copyright 2017, Reef Technologies (reef.pl), All rights reserved. + +set -euo pipefail + +PROJECT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +ENV_DIR="./envs/dev" +# shellcheck disable=SC2164 +cd "${PROJECT_DIR}" + +# Workaround for PDM which sometimes creates a 3.10 venv +# https://github.com/pdm-project/pdm/issues/2789 +if [[ ! -d ".venv" ]]; then + python3.11 -m venv .venv +fi + +# Create a lock file if doesn't exist +if [[ ! -f "pdm.lock" ]]; then + pdm lock --group :all +fi +# Install Python dependencies +pdm sync --group :all + +# Create .env from the template if doesn't exist +if [[ ! -f "${ENV_DIR}/.env" ]]; then + cp "${ENV_DIR}/.env.template" "${ENV_DIR}/.env" +fi + +# Set symlinks +ln -sf "${ENV_DIR}/.env" .env +ln -sf "${ENV_DIR}/docker-compose.yml" docker-compose.yml + +# shellcheck disable=SC2164 +cd "${PROJECT_DIR}/app/" +if [[ -L "Dockerfile" ]]; then + unlink Dockerfile +fi +if [[ -L "src/entrypoint.sh" ]]; then + unlink src/entrypoint.sh +fi diff --git a/setup-prod.sh b/setup-prod.sh new file mode 100755 index 0000000..c1461d6 --- /dev/null +++ b/setup-prod.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +# Copyright 2017, Reef Technologies (reef.pl), All rights reserved. + +set -euo pipefail + +PROJECT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +ENV_DIR="./envs/prod" +# shellcheck disable=SC2164 +cd "${PROJECT_DIR}" + +# Create .env from the template if doesn't exist +if [[ ! -f "${ENV_DIR}/.env" ]]; then + cp "${ENV_DIR}/.env.template" "${ENV_DIR}/.env" +fi + +# Set symlinks +ln -sf "${ENV_DIR}/.env" .env +ln -sf "${ENV_DIR}/docker-compose.yml" docker-compose.yml +# shellcheck disable=SC2164 +cd "${PROJECT_DIR}/app/" +ln -sf "${ENV_DIR}/Dockerfile" Dockerfile +ln -sf ".${ENV_DIR}/entrypoint.sh" src/entrypoint.sh
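+ +# Optional sanity check: warn about any symlink created above that does not resolve. +# (A minimal sketch; assumes envs/ is at the repository root and the cwd is still "${PROJECT_DIR}/app/".) +for link in "${PROJECT_DIR}/.env" "${PROJECT_DIR}/docker-compose.yml" Dockerfile src/entrypoint.sh; do +    if [[ ! -e "${link}" ]]; then +        echo "WARNING: symlink ${link} does not resolve" >&2 +    fi +done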