diff --git a/.github/workflows/account-mailer-cd.yml b/.github/workflows/account-mailer-cd.yml index 65bc3e45f6..a284f4e04a 100644 --- a/.github/workflows/account-mailer-cd.yml +++ b/.github/workflows/account-mailer-cd.yml @@ -4,6 +4,7 @@ on: push: branches: - main + - feature* paths: - "queue_services/account-mailer/**" - "auth-api/src/auth_api/models/**" @@ -12,106 +13,22 @@ on: - "auth-api/src/auth_api/utils/enums.py" workflow_dispatch: inputs: - environment: - description: "Environment (dev/test/prod)" + target: + description: "Deploy To" required: true - default: "dev" - -defaults: - run: - shell: bash - working-directory: ./queue_services/account-mailer - -env: - APP_NAME: "account-mailer" - TAG_NAME: "dev" - + type: choice + options: + - dev + - test + - sandbox + - prod jobs: - account-mailer-cd-by-push: - runs-on: ubuntu-20.04 - - if: github.event_name == 'push' && github.repository == 'bcgov/sbc-auth' - environment: - name: "dev" - - steps: - - uses: actions/checkout@v4 - - - name: Login Openshift - shell: bash - run: | - oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} - - - name: CD Flow - shell: bash - env: - OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} - OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} - OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} - OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} - OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} - TAG_NAME: ${{ env.TAG_NAME }} - run: | - make cd - - - name: Watch new rollout (trigger by image change in Openshift) - shell: bash - run: | - oc rollout status dc/${{ env.APP_NAME }}-${{ env.TAG_NAME }} -n ${{ secrets.OPENSHIFT4_REPOSITORY }}-${{ env.TAG_NAME }} -w - - - name: Rocket.Chat Notification - uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master - if: failure() - with: - type: ${{ job.status }} - job_name: "*Account Mailer Built and Deployed to ${{env.TAG_NAME}}*" - channel: "#registries-bot" 
- url: ${{ secrets.ROCKETCHAT_WEBHOOK }} - commit: true - token: ${{ secrets.GITHUB_TOKEN }} - - account-mailer-cd-by-dispatch: - runs-on: ubuntu-20.04 - - if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/sbc-auth' - environment: - name: "${{ github.event.inputs.environment }}" - - steps: - - uses: actions/checkout@v4 - - name: Set env by input - run: | - echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV - - - name: Login Openshift - shell: bash - run: | - oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} - - - name: CD Flow - shell: bash - env: - OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} - OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} - OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} - OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} - OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} - TAG_NAME: ${{ env.TAG_NAME }} - run: | - make cd - - - name: Watch new rollout (trigger by image change in Openshift) - shell: bash - run: | - oc rollout status dc/${{ env.APP_NAME }}-${{ env.TAG_NAME }} -n ${{ secrets.OPENSHIFT4_REPOSITORY }}-${{ env.TAG_NAME }} -w - - - name: Rocket.Chat Notification - uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master - if: failure() - with: - type: ${{ job.status }} - job_name: "*Account Mailer Built and Deployed to ${{env.TAG_NAME}}*" - channel: "#registries-bot" - url: ${{ secrets.ROCKETCHAT_WEBHOOK }} - commit: true - token: ${{ secrets.GITHUB_TOKEN }} + account-mailer-cd: + uses: bcgov/bcregistry-sre/.github/workflows/backend-cd.yaml@main + with: + target: ${{ inputs.target }} + app_name: "account-mailer" + working_directory: "./queue_services/account-mailer" + secrets: + WORKLOAD_IDENTIFY_POOLS_PROVIDER: ${{ secrets.WORKLOAD_IDENTIFY_POOLS_PROVIDER }} + GCP_SERVICE_ACCOUNT: ${{ secrets.GCP_SERVICE_ACCOUNT }} diff --git a/.github/workflows/account-mailer-ci.yml 
b/.github/workflows/account-mailer-ci.yml index 1fe783001e..ba6f6d59e5 100644 --- a/.github/workflows/account-mailer-ci.yml +++ b/.github/workflows/account-mailer-ci.yml @@ -10,6 +10,7 @@ on: - "auth-api/src/auth_api/services/gcp_queue/*" - "auth-api/src/auth_api/utils/account_mailer.py" - "auth-api/src/auth_api/utils/enums.py" + - "build-deps/**" defaults: run: @@ -32,12 +33,12 @@ jobs: strategy: matrix: - python-version: [3.8] + python-version: [3.12] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -69,6 +70,9 @@ jobs: BCOL_ADMIN_EMAIL: "test@test.com" runs-on: ubuntu-20.04 + strategy: + matrix: + python-version: [3.12] services: postgres: diff --git a/.github/workflows/auth-api-cd.yml b/.github/workflows/auth-api-cd.yml index d829337348..19348eba01 100644 --- a/.github/workflows/auth-api-cd.yml +++ b/.github/workflows/auth-api-cd.yml @@ -4,110 +4,28 @@ on: push: branches: - main + - feature* paths: - "auth-api/**" workflow_dispatch: inputs: - environment: - description: "Environment (dev/test/prod)" + target: + description: "Deploy To" required: true - default: "dev" - -defaults: - run: - shell: bash - working-directory: ./auth-api - -env: - APP_NAME: "auth-api" - TAG_NAME: "dev" + type: choice + options: + - dev + - test + - sandbox + - prod jobs: - auth-api-cd-by-push: - runs-on: ubuntu-20.04 - - if: github.event_name == 'push' && github.repository == 'bcgov/sbc-auth' - environment: - name: "dev" - - steps: - - uses: actions/checkout@v4 - - - name: Login Openshift - shell: bash - run: | - oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} - - - name: CD Flow - shell: bash - env: - OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} - OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} - OPENSHIFT_SA_NAME: ${{ 
secrets.OPENSHIFT4_SA_NAME }} - OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} - OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} - TAG_NAME: ${{ env.TAG_NAME }} - run: | - make cd - - - name: Watch new rollout (trigger by image change in Openshift) - shell: bash - run: | - oc rollout status dc/${{ env.APP_NAME }}-${{ env.TAG_NAME }} -n ${{ secrets.OPENSHIFT4_REPOSITORY }}-${{ env.TAG_NAME }} -w - - - name: Rocket.Chat Notification - uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master - if: failure() - with: - type: ${{ job.status }} - job_name: "*Auth API Built and Deployed to ${{env.TAG_NAME}}*" - channel: "#registries-bot" - url: ${{ secrets.ROCKETCHAT_WEBHOOK }} - commit: true - token: ${{ secrets.GITHUB_TOKEN }} - - auth-api-cd-by-dispatch: - runs-on: ubuntu-20.04 - - if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/sbc-auth' - environment: - name: "${{ github.event.inputs.environment }}" - - steps: - - uses: actions/checkout@v4 - - name: Set env by input - run: | - echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV - - - name: Login Openshift - shell: bash - run: | - oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} - - - name: CD Flow - shell: bash - env: - OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} - OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} - OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} - OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} - OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} - TAG_NAME: ${{ env.TAG_NAME }} - run: | - make cd - - - name: Watch new rollout (trigger by image change in Openshift) - shell: bash - run: | - oc rollout status dc/${{ env.APP_NAME }}-${{ env.TAG_NAME }} -n ${{ secrets.OPENSHIFT4_REPOSITORY }}-${{ env.TAG_NAME }} -w - - - name: Rocket.Chat Notification - uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master - if: failure() - with: - type: 
${{ job.status }} - job_name: "*Auth API Built and Deployed to ${{env.TAG_NAME}}*" - channel: "#registries-bot" - url: ${{ secrets.ROCKETCHAT_WEBHOOK }} - commit: true - token: ${{ secrets.GITHUB_TOKEN }} + auth-api-cd: + uses: bcgov/bcregistry-sre/.github/workflows/backend-cd.yaml@main + with: + target: ${{ inputs.target }} + app_name: "auth-api" + working_directory: "./auth-api" + secrets: + WORKLOAD_IDENTIFY_POOLS_PROVIDER: ${{ secrets.WORKLOAD_IDENTIFY_POOLS_PROVIDER }} + GCP_SERVICE_ACCOUNT: ${{ secrets.GCP_SERVICE_ACCOUNT }} \ No newline at end of file diff --git a/.github/workflows/auth-api-ci.yml b/.github/workflows/auth-api-ci.yml index caf1f14f80..99b1aff289 100644 --- a/.github/workflows/auth-api-ci.yml +++ b/.github/workflows/auth-api-ci.yml @@ -2,10 +2,10 @@ name: Auth API CI on: pull_request: - branches: - - main paths: - "auth-api/**" + - "build-deps/**" + workflow_dispatch: defaults: run: @@ -13,115 +13,9 @@ defaults: working-directory: ./auth-api jobs: - setup-job: - runs-on: ubuntu-20.04 - - if: github.repository == 'bcgov/sbc-auth' - - steps: - - uses: actions/checkout@v4 - - run: "true" - - linting: - needs: setup-job - runs-on: ubuntu-20.04 - - strategy: - matrix: - python-version: [3.8] - - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - make setup - - name: Lint with pylint - id: pylint - run: | - make pylint - - name: Lint with flake8 - id: flake8 - run: | - make flake8 - - testing: - needs: setup-job - env: - FLASK_ENV: "testing" - DATABASE_TEST_URL: "postgresql://postgres:postgres@localhost:5432/postgres" - - JWT_OIDC_TEST_ISSUER: "http://localhost:8081/auth/realms/demo" - JWT_OIDC_TEST_WELL_KNOWN_CONFIG: "http://localhost:8081/auth/realms/demo/.well-known/openid-configuration" - JWT_OIDC_TEST_ALGORITHMS: "RS256" - JWT_OIDC_TEST_AUDIENCE: "sbc-auth-web" - 
JWT_OIDC_TEST_CLIENT_SECRET: "1111111111" - JWT_OIDC_TEST_JWKS_CACHE_TIMEOUT: "6000" - - KEYCLOAK_TEST_ADMIN_CLIENTID: "sbc-auth-admin" - KEYCLOAK_TEST_ADMIN_SECRET: "2222222222" - KEYCLOAK_TEST_AUTH_AUDIENCE: "sbc-auth-web" - KEYCLOAK_TEST_AUTH_CLIENT_SECRET: "1111111111" - KEYCLOAK_TEST_BASE_URL: "http://localhost:8081" - KEYCLOAK_TEST_REALMNAME: "demo" - - TOKEN_EXPIRY_PERIOD: 7 - EMAIL_SECURITY_PASSWORD_SALT: "my_pwd_salt" - EMAIL_TOKEN_SECRET_KEY: "mySecretKey" - USE_TEST_KEYCLOAK_DOCKER: "YES" - USE_DOCKER_MOCK: "YES" - STAFF_ADMIN_EMAIL: "test@test.com" - - runs-on: ubuntu-20.04 - - services: - postgres: - image: postgres:12 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - # needed because the postgres container does not provide a healthcheck - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install docker-compose - run: | - sudo curl -L https://github.com/docker/compose/releases/latest/download/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose - sudo chmod +x /usr/local/bin/docker-compose - docker-compose version - - name: Install dependencies - run: | - make setup - - name: Test with pytest - id: test - run: | - make test - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - file: ./auth-api/coverage.xml - flags: authapi - name: codecov-auth-api - fail_ci_if_error: false - - build-check: - needs: setup-job - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v4 - - name: build to check strictness - id: build - run: | - make build-nc + auth-api-ci: + uses: bcgov/bcregistry-sre/.github/workflows/backend-ci.yaml@main + with: + app_name: "auth-api" + working_directory: "./auth-api" + codecov_flag: "authapi" 
diff --git a/.github/workflows/auth-queue-cd.yml b/.github/workflows/auth-queue-cd.yml index 7996d3a8e2..cf22c64ecf 100644 --- a/.github/workflows/auth-queue-cd.yml +++ b/.github/workflows/auth-queue-cd.yml @@ -4,6 +4,7 @@ on: push: branches: - main + - feature* paths: - "queue_services/auth-queue/**" - "auth-api/src/auth_api/models/**" @@ -12,106 +13,23 @@ on: - "auth-api/src/auth_api/utils/enums.py" workflow_dispatch: inputs: - environment: - description: "Environment (dev/test/prod)" + target: + description: "Deploy To" required: true - default: "dev" - -defaults: - run: - shell: bash - working-directory: ./queue_services/auth-queue - -env: - APP_NAME: "auth-queue" - TAG_NAME: "dev" + type: choice + options: + - dev + - test + - sandbox + - prod jobs: - auth-queue-cd-by-push: - runs-on: ubuntu-20.04 - - if: github.event_name == 'push' && github.repository == 'bcgov/sbc-auth' - environment: - name: "dev" - - steps: - - uses: actions/checkout@v4 - - - name: Login Openshift - shell: bash - run: | - oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} - - - name: CD Flow - shell: bash - env: - OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} - OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} - OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} - OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} - OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} - TAG_NAME: ${{ env.TAG_NAME }} - run: | - make cd - - - name: Watch new rollout (trigger by image change in Openshift) - shell: bash - run: | - oc rollout status dc/${{ env.APP_NAME }}-${{ env.TAG_NAME }} -n ${{ secrets.OPENSHIFT4_REPOSITORY }}-${{ env.TAG_NAME }} -w - - - name: Rocket.Chat Notification - uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master - if: failure() - with: - type: ${{ job.status }} - job_name: "*Business Events Listener Queue Built and Deployed to ${{env.TAG_NAME}}*" - channel: "#registries-bot" - url: ${{ 
secrets.ROCKETCHAT_WEBHOOK }} - commit: true - token: ${{ secrets.GITHUB_TOKEN }} - - auth-queue-cd-by-dispatch: - runs-on: ubuntu-20.04 - - if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/sbc-auth' - environment: - name: "${{ github.event.inputs.environment }}" - - steps: - - uses: actions/checkout@v4 - - name: Set env by input - run: | - echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV - - - name: Login Openshift - shell: bash - run: | - oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} - - - name: CD Flow - shell: bash - env: - OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} - OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} - OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} - OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} - OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} - TAG_NAME: ${{ env.TAG_NAME }} - run: | - make cd - - - name: Watch new rollout (trigger by image change in Openshift) - shell: bash - run: | - oc rollout status dc/${{ env.APP_NAME }}-${{ env.TAG_NAME }} -n ${{ secrets.OPENSHIFT4_REPOSITORY }}-${{ env.TAG_NAME }} -w - - - name: Rocket.Chat Notification - uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master - if: failure() - with: - type: ${{ job.status }} - job_name: "*Business Events Listener Queue Built and Deployed to ${{env.TAG_NAME}}*" - channel: "#registries-bot" - url: ${{ secrets.ROCKETCHAT_WEBHOOK }} - commit: true - token: ${{ secrets.GITHUB_TOKEN }} + auth-queue-cd: + uses: bcgov/bcregistry-sre/.github/workflows/backend-cd.yaml@main + with: + target: ${{ inputs.target }} + app_name: "auth-queue" + working_directory: "./queue_services/auth-queue" + secrets: + WORKLOAD_IDENTIFY_POOLS_PROVIDER: ${{ secrets.WORKLOAD_IDENTIFY_POOLS_PROVIDER }} + GCP_SERVICE_ACCOUNT: ${{ secrets.GCP_SERVICE_ACCOUNT }} diff --git a/.github/workflows/auth-queue-ci.yml b/.github/workflows/auth-queue-ci.yml index 
473960b8e4..6d299bb72a 100644 --- a/.github/workflows/auth-queue-ci.yml +++ b/.github/workflows/auth-queue-ci.yml @@ -10,6 +10,7 @@ on: - "auth-api/src/auth_api/services/gcp_queue/*" - "auth-api/src/auth_api/services/activity_log_publisher.py" - "auth-api/src/auth_api/utils/enums.py" + - "build-deps/**" defaults: run: @@ -25,14 +26,13 @@ jobs: steps: - uses: actions/checkout@v4 - run: "true" - linting: needs: setup-job runs-on: ubuntu-20.04 strategy: matrix: - python-version: [3.8] + python-version: [3.12] steps: - uses: actions/checkout@v4 @@ -62,7 +62,9 @@ jobs: PAY_API_VERSION: "/api/v1" runs-on: ubuntu-20.04 - + strategy: + matrix: + python-version: [3.12] services: postgres: image: postgres:12 @@ -78,7 +80,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install docker-compose diff --git a/.github/workflows/auth-web-cd.yml b/.github/workflows/auth-web-cd.yml index ca2af35e91..c7ac4270b7 100644 --- a/.github/workflows/auth-web-cd.yml +++ b/.github/workflows/auth-web-cd.yml @@ -4,12 +4,13 @@ on: push: branches: - main + - feature* paths: - "auth-web/**" workflow_dispatch: inputs: - environment: - description: "Environment" + target: + description: "Deploy To" required: true type: choice options: @@ -17,19 +18,14 @@ on: - test - sandbox - prod - tagname: - description: "Specify a previous version (git tag) to deploy" - required: false - default: "" jobs: account-ui-cd: - uses: bcgov/bcregistry-sre/.github/workflows/ui-cd-node20.yaml@main + uses: bcgov/bcregistry-sre/.github/workflows/frontend-cd.yaml@main with: - environment: ${{ inputs.environment }} - tagname: ${{ inputs.tagname }} + target: ${{ inputs.target }} + app_name: "account-ui" working_directory: "./auth-web" secrets: - APP_NAME: "account-ui" - OP_CONNECT_URL: ${{ secrets.OP_CONNECT_URL }} - OP_CONNECT_TOKEN: ${{ 
secrets.OP_CONNECT_TOKEN }} + WORKLOAD_IDENTIFY_POOLS_PROVIDER: ${{ secrets.WORKLOAD_IDENTIFY_POOLS_PROVIDER }} + GCP_SERVICE_ACCOUNT: ${{ secrets.GCP_SERVICE_ACCOUNT }} diff --git a/.github/workflows/auth-web-ci.yml b/.github/workflows/auth-web-ci.yml index 314fb75ab8..c6d62c9b64 100644 --- a/.github/workflows/auth-web-ci.yml +++ b/.github/workflows/auth-web-ci.yml @@ -4,9 +4,10 @@ on: pull_request: branches: - main - - feature-business-registry-dashboard-updates + - feature* paths: - "auth-web/**" + workflow_dispatch: defaults: run: @@ -14,71 +15,10 @@ defaults: working-directory: ./auth-web jobs: - setup-job: - runs-on: ubuntu-20.04 - - if: github.repository == 'bcgov/sbc-auth' - - steps: - - uses: actions/checkout@v4 - - run: "true" - - linting: - needs: setup-job - runs-on: ubuntu-20.04 - - strategy: - matrix: - node-version: [20.5.1] - - steps: - - uses: actions/checkout@v4 - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: Install dependencies - run: | - make setup - - name: Linting - run: | - make lint - - testing-coverage: - needs: setup-job - runs-on: ubuntu-20.04 - - strategy: - matrix: - node-version: [20.5.1] - - steps: - - uses: actions/checkout@v4 - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: Install dependencies - run: | - make setup - - name: Test with Vitest - id: test - run: | - make test - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - flags: authweb - name: codecov-auth-web - fail_ci_if_error: false - - build-check: - needs: setup-job - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v4 - - name: build to check strictness - id: build - run: | - make build-nc + account-ui-ci: + uses: bcgov/bcregistry-sre/.github/workflows/frontend-ci.yaml@main + with: + node_version: "20.5.1" + app_name: "account-ui" + working_directory: 
"./auth-web" + codecov_flag: "authweb" \ No newline at end of file diff --git a/auth-api/.gitignore b/auth-api/.gitignore index 01017b90cd..e69de29bb2 100644 --- a/auth-api/.gitignore +++ b/auth-api/.gitignore @@ -1 +0,0 @@ -**.env \ No newline at end of file diff --git a/auth-api/.s2i/environment b/auth-api/.s2i/environment deleted file mode 100644 index 55c62ce9c6..0000000000 --- a/auth-api/.s2i/environment +++ /dev/null @@ -1 +0,0 @@ -APP_CONFIG=gunicorn_config.py diff --git a/auth-api/Dockerfile b/auth-api/Dockerfile index 901477d3fc..bd7af474b0 100644 --- a/auth-api/Dockerfile +++ b/auth-api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.8.16-bullseye as development_build +FROM python:3.12.5-bullseye as development_build ARG VCS_REF="missing" ARG BUILD_DATE="missing" @@ -12,22 +12,26 @@ LABEL org.label-schema.vcs-ref=${VCS_REF} \ USER root -LABEL maintainer="BCROS" +LABEL maintainer="travissemple" LABEL vendor="BCROS" ARG APP_ENV \ + # Needed for fixing permissions of files created by Docker: UID=1000 \ GID=1000 ENV APP_ENV=${APP_ENV} \ + # python: PYTHONFAULTHANDLER=1 \ PYTHONUNBUFFERED=1 \ PYTHONHASHSEED=random \ PYTHONDONTWRITEBYTECODE=1 \ + # pip: PIP_NO_CACHE_DIR=1 \ PIP_DISABLE_PIP_VERSION_CHECK=1 \ PIP_DEFAULT_TIMEOUT=100 \ PIP_ROOT_USER_ACTION=ignore \ + # poetry: POETRY_VERSION=1.8.3 \ POETRY_NO_INTERACTION=1 \ POETRY_VIRTUALENVS_CREATE=false \ @@ -39,12 +43,17 @@ SHELL ["/bin/bash", "-eo", "pipefail", "-c"] RUN apt-get update && apt-get upgrade -y \ && apt-get install --no-install-recommends -y \ bash \ + brotli \ build-essential \ curl \ + gettext \ git \ libpq-dev \ - && curl -sSL 'https://install.python-poetry.org' | python3 - \ + wait-for-it \ + && curl -sSL 'https://install.python-poetry.org' | python - \ && poetry --version \ + && poetry config installer.max-workers 1 \ + # Cleaning cache: && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ && apt-get clean -y && rm -rf /var/lib/apt/lists/* @@ -54,21 +63,26 @@ RUN 
groupadd -g "${GID}" -r web \ && useradd -d '/code' -g web -l -r -u "${UID}" web \ && chown web:web -R '/code' +# Copy only requirements, to cache them in docker layer COPY --chown=web:web ./poetry.lock ./pyproject.toml /code/ COPY --chown=web:web ./src /code/src COPY --chown=web:web ./README.md /code +# Project initialization: RUN --mount=type=cache,target="$POETRY_CACHE_DIR" \ echo "$APP_ENV" \ && poetry version \ - && poetry run pip install -U pip \ + # Install deps: + && poetry update \ && poetry install \ $(if [ -z ${APP_ENV+x} ] || [ "$APP_ENV" = 'production' ]; then echo '--only main'; fi) \ --no-interaction --no-ansi +# Running as non-root user: USER web +# The following stage is only for production: FROM development_build AS production_build COPY --chown=web:web . /code diff --git a/auth-api/MANIFEST.in b/auth-api/MANIFEST.in deleted file mode 100644 index a26e5b9b3b..0000000000 --- a/auth-api/MANIFEST.in +++ /dev/null @@ -1,6 +0,0 @@ -include requirements/prod.txt -include config.py -include logging.conf -include LICENSE -include README.md -include src/auth_api/schemas/schemas/*.json \ No newline at end of file diff --git a/auth-api/Makefile b/auth-api/Makefile deleted file mode 100644 index aa23d9d702..0000000000 --- a/auth-api/Makefile +++ /dev/null @@ -1,139 +0,0 @@ -.PHONY: license -.PHONY: setup -.PHONY: ci cd -.PHONY: db run - -MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST))) -CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH))) - -PROJECT_NAME:=auth_api -DOCKER_NAME:=auth-api - -################################################################################# -# COMMANDS -- license # -################################################################################# -license: ## Verify source code license headers. 
- ./scripts/verify_license_headers.sh $(CURRENT_ABS_DIR)/src $(CURRENT_ABS_DIR)/tests - -################################################################################# -# COMMANDS -- Setup # -################################################################################# -setup: clean install install-dev ## Setup the project - -clean: clean-build clean-pyc clean-test ## Clean the project - rm -rf .venv/ - -clean-build: ## Clean build files - rm -fr build/ - rm -fr dist/ - rm -fr .eggs/ - find . -name '*.egg-info' -exec rm -fr {} + - find . -name '*.egg' -exec rm -fr {} + - -clean-pyc: ## Clean cache files - find . -name '*.pyc' -exec rm -f {} + - find . -name '*.pyo' -exec rm -f {} + - find . -name '*~' -exec rm -f {} + - find . -name '__pycache__' -exec rm -fr {} + - -clean-test: ## clean test files - find . -name '.pytest_cache' -exec rm -fr {} + - rm -fr .tox/ - rm -f .coverage - rm -fr htmlcov/ - - -install: clean ## Install python virtrual environment - unset HOME ## unset HOME because it's in the DEV .env file, will cause permissions issues - pip install poetry ;\ - poetry install - -install-dev: ## Instal development dependencies - poetry add --dev pylint astroid - poetry install --with dev - -################################################################################# -# COMMANDS - CI # -################################################################################# -ci: lint flake8 test ## CI flow - -pylint: ## Linting with pylint - poetry run pylint --rcfile=setup.cfg src/$(PROJECT_NAME) - -flake8: ## Linting with flake8 - poetry run flake8 src/$(PROJECT_NAME) tests - -lint: pylint flake8 ## run all lint type scripts - -test: ## Unit testing - poetry run pytest - -mac-cov: local-test ## Run the coverage report and display in a browser window (mac) - @open -a "Google Chrome" htmlcov/index.html - -################################################################################# -# COMMANDS - CD -# expects the terminal to be openshift login -# 
expects export OPENSHIFT_DOCKER_REGISTRY="" -# expects export OPENSHIFT_SA_NAME="$(oc whoami)" -# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)" -# expects export OPENSHIFT_REPOSITORY="" -# expects export TAG_NAME="dev/test/prod" -# expects export OPS_REPOSITORY="" # -################################################################################# -cd: ## CD flow -ifeq ($(TAG_NAME), test) -cd: update-env - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME) -else ifeq ($(TAG_NAME), prod) -cd: update-env - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F) - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME) -else -TAG_NAME=dev -cd: build update-env tag -endif - -build: ## Build the docker container - docker build . -t $(DOCKER_NAME) \ - --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \ - --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -build-nc: ## Build the docker container without caching - docker build --no-cache -t $(DOCKER_NAME) . 
- -REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME) -push: #build ## Push the docker container to the registry & tag latest - @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\ - docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\ - docker push $(REGISTRY_IMAGE):latest - -VAULTS=`cat devops/vaults.json` -update-env: ## Update env from 1pass - oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \ - -m "secret" \ - -e "$(TAG_NAME)" \ - -a "$(DOCKER_NAME)-$(TAG_NAME)" \ - -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \ - -v "$(VAULTS)" \ - -r "true" \ - -f "false" - -tag: push ## tag image - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME) - -################################################################################# -# COMMANDS - Local # -################################################################################# -run: ## Run the project in local - poetry run flask run -p 5000 - -################################################################################# -# Self Documenting Commands # -################################################################################# -.PHONY: help - -.DEFAULT_GOAL := help - -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/auth-api/README.md b/auth-api/README.md index 7643261475..bd638dc3f9 100644 --- a/auth-api/README.md +++ b/auth-api/README.md @@ -1,8 +1,5 @@ [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](../LICENSE) -[![codecov](https://codecov.io/gh/bcgov/sbc-auth/branch/development/graph/badge.svg?flag=authapi)](https://codecov.io/gh/bcgov/sbc-auth/tree/development/auth-api) -![Auth API CHECK CI](https://github.com/bcgov/sbc-auth/workflows/Auth%20API%20CHECK%20CI/badge.svg) -![Auth API DEV 
CD](https://github.com/bcgov/sbc-auth/workflows/Auth%20API%20DEV%20CD/badge.svg) -![Auth API TEST CD](https://github.com/bcgov/sbc-auth/workflows/Auth%20API%20TEST%20CD/badge.svg) + # AUTH API BC Registries authentication and authorization services. @@ -13,30 +10,49 @@ BC Registries authentication and authorization services. Follow the instructions of the [Development Readme](https://github.com/bcgov/entity/blob/master/docs/development.md) to setup your local development environment. -## Development Setup +## Technology Stack Used +* Python, Flask +* Postgres - SQLAlchemy, psycopg2-binary & alembic + +### setup +Fork the repo and submitted a PR with accompanning tests. + +Set to use the local repo for the virtual environment +```bash +poetry config virtualenvs.in-project true +``` +Install the dependencies +```bash +poetry install +``` -1. Follow the [instructions](https://github.com/bcgov/entity/blob/master/docs/setup-forking-workflow.md) to checkout the project from GitHub. -2. Open the auth-api directory in VS Code to treat it as a project (or WSL projec). To prevent version clashes, set up a -virtual environment to install the Python packages used by this project. -3. Run `make setup` to set up the virtual environment and install libraries. -4. Next run `pip install .` to set up the environment for running tests. +Configure the .env -You also need to set up the variables used for environment-specific settings: -1. Copy the [dotenv template file](../docs/dotenv_template) to somewhere above the source code and rename to `.env`. You will need to fill in missing values. - .github\workflows\auth-api-ci.yml could provide some hints for unit tests. +### manage the DB +```bash +poetry shell +``` + +```bash +flask db upgrade +``` + +```bash +flask db migrate +``` ## Running the Auth Database on localhost To prepare your local database: 1. In the [root project folder](../docker/docker-compose.yml): `docker-compose up -d` -2. 
In your `venv` environment: `python manage.py db upgrade` +2. In your environment: `poetry run flask db upgrade` or `flask db upgrade` Note: **[Windows Users]** If using WSL, may need to change the host from localhost -> .local -EX. in config.py and .env. +EX. in config.py and .env. **[Mac Users]** : You might get an error regarding the SSL certificate verification failed. @@ -56,8 +72,8 @@ pip install certifi /Applications/Python\ 3.7/Install\ Certificates.command ## Running AUTH-API -1. Start the flask server with `(python -m flask run -p 5000)` -2. View the [OpenAPI Docs](http://127.0.0.1:5000/api/v1). +1. Start the flask server with `(poetry run flask run)` +2. View the [OpenAPI Docs](http://127.0.0.1:8080/api/v1). ## Running Liniting @@ -82,4 +98,3 @@ pip install certifi /Applications/Python\ 3.7/Install\ Certificates.command View the [document](../docs/build-deploy.md). ## Github Actions - diff --git a/auth-api/__init__.py b/auth-api/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/auth-api/devops/gcp/clouddeploy.yaml b/auth-api/devops/gcp/clouddeploy.yaml new file mode 100644 index 0000000000..a6e53d1ba9 --- /dev/null +++ b/auth-api/devops/gcp/clouddeploy.yaml @@ -0,0 +1,86 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: deploy.cloud.google.com/v1 +kind: DeliveryPipeline +metadata: + name: auth-api-pipeline +description: Deployment pipeline +serialPipeline: + stages: + - targetId: gtksf3-dev + profiles: [dev] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "development" + deploy-project-id: "gtksf3-dev" + service-name: "auth-api-dev" + container-name: "auth-api-dev" + cloudsql-instances: "gtksf3-dev:northamerica-northeast1:auth-db-dev" + service-account: "sa-api@gtksf3-dev.iam.gserviceaccount.com" + container-concurrency: "20" + resources-cpu: "2000m" + resources-memory: "2Gi" + - targetId: gtksf3-test + profiles: [test] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "development" + deploy-project-id: "gtksf3-test" + service-name: "auth-api-test" + container-concurrency: "30" + resources-cpu: "4000m" + resources-memory: "2Gi" + container-name: "auth-api-test" + cloudsql-instances: "gtksf3-test:northamerica-northeast1:auth-db-test" + service-account: "sa-api@gtksf3-test.iam.gserviceaccount.com" + - targetId: gtksf3-sandbox + profiles: [sandbox] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "production" + deploy-project-id: "gtksf3-tools" + container-concurrency: "60" + resources-cpu: "8000m" + resources-memory: "4Gi" + service-name: "auth-api-sandbox" + container-name: "auth-api-sandbox" + cloudsql-instances: "gtksf3-tools:northamerica-northeast1:auth-db-sandbox" + service-account: "sa-api@gtksf3-tools.iam.gserviceaccount.com" + - targetId: gtksf3-prod + profiles: [prod] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "production" + deploy-project-id: "gtksf3-prod" + service-name: "auth-api-prod" + container-name: "auth-api-prod" + container-concurrency: "30" + resources-cpu: "4000m" + resources-memory: "2Gi" + cloudsql-instances: "gtksf3-prod:northamerica-northeast1:auth-db-prod" + service-account: 
"sa-api@gtksf3-prod.iam.gserviceaccount.com" + max-scale: "10" diff --git a/auth-api/devops/vaults.gcp.env b/auth-api/devops/vaults.gcp.env new file mode 100644 index 0000000000..51b74ecf7f --- /dev/null +++ b/auth-api/devops/vaults.gcp.env @@ -0,0 +1,59 @@ +DATABASE_USERNAME="op://database/$APP_ENV/auth-db-gcp/DATABASE_USERNAME" +DATABASE_PASSWORD="op://database/$APP_ENV/auth-db-gcp/DATABASE_PASSWORD" +DATABASE_PORT="op://database/$APP_ENV/auth-db-gcp/DATABASE_PORT" +DATABASE_NAME="op://database/$APP_ENV/auth-db-gcp/DATABASE_NAME" +DATABASE_UNIX_SOCKET="op://database/$APP_ENV/auth-db-gcp/DATABASE_UNIX_SOCKET" +JWT_OIDC_WELL_KNOWN_CONFIG="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_WELL_KNOWN_CONFIG" +JWT_OIDC_ISSUER="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_ISSUER" +JWT_OIDC_JWKS_CACHE_TIMEOUT="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_JWKS_CACHE_TIMEOUT" +JWT_OIDC_CACHING_ENABLED="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_CACHING_ENABLED" +JWT_OIDC_ALGORITHMS="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_ALGORITHMS" +JWT_OIDC_AUDIENCE="op://keycloak/$APP_ENV/account-services-account/ACCOUNT_SERVICES_SERVICE_ACCOUNT_CLIENT_ID" +KEYCLOAK_BASE_URL="op://keycloak/$APP_ENV/base/KEYCLOAK_BASE_URL" +KEYCLOAK_REALMNAME="op://keycloak/$APP_ENV/base/KEYCLOAK_REALMNAME" +SBC_AUTH_ADMIN_CLIENT_ID="op://keycloak/$APP_ENV/sbc-auth-admin/SBC_AUTH_ADMIN_CLIENT_ID" +SBC_AUTH_ADMIN_CLIENT_SECRET="op://keycloak/$APP_ENV/sbc-auth-admin/SBC_AUTH_ADMIN_CLIENT_SECRET" +ENTITY_SVC_CLIENT_ID="op://keycloak/$APP_ENV/entity-service-account/ENTITY_SERVICE_ACCOUNT_CLIENT_ID" +ENTITY_SVC_CLIENT_SECRET="op://keycloak/$APP_ENV/entity-service-account/ENTITY_SERVICE_ACCOUNT_CLIENT_SECRET" +API_GW_CONSUMERS_API_URL="op://API/$APP_ENV/api_gw/API_GW_CONSUMERS_API_URL" +API_GW_KEY="op://API/$APP_ENV/api_gw/API_GW_KEY" +API_GW_NON_PROD_KEY="op://API/$APP_ENV/api_gw/API_GW_NON_PROD_KEY" +API_GW_EMAIL_SUFFIX="op://API/$APP_ENV/api_gw/API_GW_EMAIL_SUFFIX" 
+API_GW_KC_CLIENT_ID_PATTERN="op://API/$APP_ENV/api_gw/API_GW_KC_CLIENT_ID_PATTERN"
+API_GW_CONSUMERS_SANDBOX_API_URL="op://API/$APP_ENV/api_gw/API_GW_CONSUMERS_SANDBOX_API_URL"
+BCOL_API_URL="op://API/$APP_ENV/bcol-api/BCOL_API_URL"
+BCOL_API_VERSION="op://API/$APP_ENV/bcol-api/BCOL_API_VERSION"
+NAMEX_API_URL="op://API/$APP_ENV/namex-api/NAMEX_API_URL"
+NAMEX_API_VERSION="op://API/$APP_ENV/namex-api/NAMEX_API_VERSION"
+NOTIFY_API_URL="op://API/$APP_ENV/notify-api/NOTIFY_API_URL"
+NOTIFY_API_VERSION="op://API/$APP_ENV/notify-api/NOTIFY_API_VERSION"
+PAY_API_URL="op://API/$APP_ENV/pay-api/PAY_API_URL"
+PAY_API_VERSION="op://API/$APP_ENV/pay-api/PAY_API_VERSION"
+LEGAL_API_URL="op://API/$APP_ENV/legal-api/LEGAL_API_URL"
+LEGAL_API_VERSION="op://API/$APP_ENV/legal-api/LEGAL_API_VERSION"
+LEGAL_API_VERSION_2="op://API/$APP_ENV/legal-api/LEGAL_API_VERSION_2"
+GCP_AUTH_KEY="op://gcp-queue/$APP_ENV/gtksf3/AUTHPAY_GCP_AUTH_KEY"
+AUDIENCE="op://gcp-queue/$APP_ENV/base/AUDIENCE"
+PUBLISHER_AUDIENCE="op://gcp-queue/$APP_ENV/base/PUBLISHER_AUDIENCE"
+ACCOUNT_MAILER_TOPIC="op://gcp-queue/$APP_ENV/topics/ACCOUNT_MAILER_TOPIC"
+AUTH_EVENT_TOPIC="op://gcp-queue/$APP_ENV/topics/AUTH_EVENT_TOPIC"
+MINIO_ENDPOINT="op://minio/$APP_ENV/base/MINIO_ENDPOINT"
+MINIO_ACCESS_KEY="op://minio/$APP_ENV/base/MINIO_ACCESS_KEY"
+MINIO_ACCESS_SECRET="op://minio/$APP_ENV/base/MINIO_ACCESS_SECRET"
+MINIO_BUCKET_ACCOUNTS="op://minio/$APP_ENV/accounts/MINIO_BUCKET_ACCOUNTS"
+MAIL_FROM_ID="op://notify/$APP_ENV/smtp/MAIL_FROM_ID"
+FLASK_ENV="op://relationship/$APP_ENV/auth-api/FLASK_ENV"
+EMAIL_SECURITY_PASSWORD_SALT="op://relationship/$APP_ENV/auth-api/EMAIL_SECURITY_PASSWORD_SALT"
+EMAIL_TOKEN_SECRET_KEY="op://relationship/$APP_ENV/auth-api/EMAIL_TOKEN_SECRET_KEY"
+TOKEN_EXPIRY_PERIOD="op://relationship/$APP_ENV/auth-api/TOKEN_EXPIRY_PERIOD"
+STAFF_ADMIN_EMAIL="op://relationship/$APP_ENV/auth-api/STAFF_ADMIN_EMAIL"
+REGISTRIES_LOGO_IMAGE_NAME="op://relationship/$APP_ENV/auth-api/REGISTRIES_LOGO_IMAGE_NAME" +WEB_APP_URL="op://relationship/$APP_ENV/auth-api/WEB_APP_URL" +MAX_NUMBER_OF_ORGS="op://relationship/$APP_ENV/auth-api/MAX_NUMBER_OF_ORGS" +BCOL_ACCOUNT_LINK_CHECK="op://relationship/$APP_ENV/auth-api/BCOL_ACCOUNT_LINK_CHECK" +NR_SUPPORTED_REQUEST_TYPES="op://relationship/$APP_ENV/auth-api/NR_SUPPORTED_REQUEST_TYPES" +PAY_API_SANDBOX_URL=op://relationship/$APP_ENV/auth-api/PAY_API_SANDBOX_URL +DIRECT_PAY_ENABLED="op://relationship/$APP_ENV/pay-api/DIRECT_PAY_ENABLED" +DISABLE_ACTIVITY_LOGS="op://relationship/$APP_ENV/pay-api/DISABLE_ACTIVITY_LOGS" +AUTH_LD_SDK_KEY="op://launchdarkly/$APP_ENV/business-api/BUSINESS_API_LD_SDK_KEY" +VPC_CONNECTOR="op://CD/$APP_ENV/auth-api/VPC_CONNECTOR" diff --git a/auth-api/devops/vaults.json b/auth-api/devops/vaults.json deleted file mode 100644 index 71057bda3a..0000000000 --- a/auth-api/devops/vaults.json +++ /dev/null @@ -1,60 +0,0 @@ -[ - { - "vault": "shared", - "application": [ - "email", - "api-endpoints" - ] - }, - { - "vault": "keycloak", - "application": [ - "base", - "jwt-base", - "sbc-auth-admin", - "bcros-keycloak", - "entity-service-account" - ] - }, - { - "vault": "minio", - "application": [ - "base", - "accounts" - ] - }, - { - "vault": "relationship", - "application": [ - "postgres-auth", - "auth-api", - "jwt" - ] - }, - { - "vault": "sentry", - "application": [ - "relationship-api" - ] - }, - { - "vault": "API", - "application": [ - "api_gw", - "legal-api" - ] - }, - { - "vault": "launchdarkly", - "application": [ - "auth" - ] - }, - { - "vault": "gcp-queue", - "application": [ - "topics", - "gtksf3" - ] - } -] diff --git a/auth-api/docs/dotenv_template b/auth-api/docs/dotenv_template index 284ea7d37f..2c0350591a 100644 --- a/auth-api/docs/dotenv_template +++ b/auth-api/docs/dotenv_template @@ -29,12 +29,6 @@ FLASK_DEBUG=True POD_TESTING=True SQLALCHEMY_ECHO=False -# The sentry.io Data Source Name for the project. 
For local development this should always be blank, to prevent the
-# logging (and emailing) of errors. However it can be temporarily set when working with sentry itself.
-#
-SENTRY_ENABLE=False
-SENTRY_DSN=
-
 # keycloak settings
 JWT_OIDC_ISSUER=
 JWT_OIDC_WELL_KNOWN_CONFIG=
diff --git a/auth-api/env.sample b/auth-api/env.sample
new file mode 100644
index 0000000000..b6adc27235
--- /dev/null
+++ b/auth-api/env.sample
@@ -0,0 +1,76 @@
+USE_TEST_KEYCLOAK_DOCKER = "YES"
+USE_DOCKER_MOCK = "YES"
+
+DATABASE_TEST_USERNAME=
+DATABASE_TEST_PASSWORD=
+DATABASE_TEST_NAME=
+DATABASE_TEST_HOST=
+DATABASE_TEST_PORT=
+
+DATABASE_USERNAME=
+DATABASE_PASSWORD=
+DATABASE_PORT=
+DATABASE_NAME=
+DATABASE_UNIX_SOCKET=
+
+JWT_OIDC_WELL_KNOWN_CONFIG=
+JWT_OIDC_ISSUER=
+JWT_OIDC_JWKS_CACHE_TIMEOUT="6000"
+JWT_OIDC_CACHING_ENABLED="True"
+JWT_OIDC_ALGORITHMS=
+JWT_OIDC_AUDIENCE=
+
+KEYCLOAK_BASE_URL=
+KEYCLOAK_REALMNAME=
+SBC_AUTH_ADMIN_CLIENT_ID=
+SBC_AUTH_ADMIN_CLIENT_SECRET=
+ENTITY_SVC_CLIENT_ID=
+ENTITY_SVC_CLIENT_SECRET=
+
+API_GW_CONSUMERS_API_URL=
+API_GW_KEY=
+API_GW_NON_PROD_KEY=
+API_GW_EMAIL_SUFFIX=
+API_GW_KC_CLIENT_ID_PATTERN=
+API_GW_CONSUMERS_SANDBOX_API_URL=
+
+BCOL_API_URL="https://bcol-api-dev.apps.silver.devops.gov.bc.ca"
+BCOL_API_VERSION="/api/v1"
+NAMEX_API_URL="https://namex-dev.apps.silver.devops.gov.bc.ca"
+NAMEX_API_VERSION="/api/v1"
+NOTIFY_API_URL="https://notify-api-dev-5qwaveuroa-nn.a.run.app"
+NOTIFY_API_VERSION="/api/v1"
+PAY_API_URL="https://pay-api-dev.apps.silver.devops.gov.bc.ca"
+PAY_API_VERSION="/api/v1"
+LEGAL_API_URL="https://legal-api-dev.apps.silver.devops.gov.bc.ca"
+LEGAL_API_VERSION="/api/v1"
+LEGAL_API_VERSION_2="/api/v2"
+
+GCP_AUTH_KEY=
+AUDIENCE="https://pubsub.googleapis.com/google.pubsub.v1.Subscriber"
+PUBLISHER_AUDIENCE="https://pubsub.googleapis.com/google.pubsub.v1.Publisher"
+ACCOUNT_MAILER_TOPIC=
+AUTH_EVENT_TOPIC=
+
+MINIO_ENDPOINT=
+MINIO_ACCESS_KEY=
+MINIO_ACCESS_SECRET=
+MINIO_BUCKET_ACCOUNTS=
+
+MAIL_FROM_ID=
+FLASK_ENV="development"
+
+EMAIL_SECURITY_PASSWORD_SALT=
+EMAIL_TOKEN_SECRET_KEY=
+TOKEN_EXPIRY_PERIOD=
+STAFF_ADMIN_EMAIL=
+REGISTRIES_LOGO_IMAGE_NAME="bc_logo_for_email.png"
+WEB_APP_URL=
+MAX_NUMBER_OF_ORGS="10000"
+BCOL_ACCOUNT_LINK_CHECK="False"
+NR_SUPPORTED_REQUEST_TYPES="BC,CR,UL,CC,CP"
+PAY_API_SANDBOX_URL=""
+DIRECT_PAY_ENABLED="true"
+DISABLE_ACTIVITY_LOGS="False"
+
+AUTH_LD_SDK_KEY=
\ No newline at end of file
diff --git a/auth-api/gunicorn_config.py b/auth-api/gunicorn_config.py
index 9cb75dd26b..5c20fcd4d3 100644
--- a/auth-api/gunicorn_config.py
+++ b/auth-api/gunicorn_config.py
@@ -12,14 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """The configuration for gunicorn, which picks up the
-   runtime options from environment variables
-"""
+   runtime options from environment variables.
+   The best practice so far is: for environments with multiple CPU cores, increase the number of workers to be equal to
+   the cores available. Timeout is set to 0 to disable the timeouts of the workers to allow Cloud Run to handle instance
+   scaling. Adjust the number of workers and threads on a per-application basis.
+""" import os +workers = int(os.environ.get("GUNICORN_PROCESSES", "1")) # pylint: disable=invalid-name +threads = int(os.environ.get("GUNICORN_THREADS", "8")) # pylint: disable=invalid-name +timeout = int(os.environ.get("GUNICORN_TIMEOUT", "0")) # pylint: disable=invalid-name -workers = int(os.environ.get('GUNICORN_PROCESSES', '1')) # pylint: disable=invalid-name -threads = int(os.environ.get('GUNICORN_THREADS', '1')) # pylint: disable=invalid-name - -forwarded_allow_ips = '*' # pylint: disable=invalid-name -secure_scheme_headers = {'X-Forwarded-Proto': 'https'} # pylint: disable=invalid-name +forwarded_allow_ips = "*" # pylint: disable=invalid-name +secure_scheme_headers = {"X-Forwarded-Proto": "https"} # pylint: disable=invalid-name diff --git a/auth-api/logging.conf b/auth-api/logging.conf deleted file mode 100644 index 8a936c32e4..0000000000 --- a/auth-api/logging.conf +++ /dev/null @@ -1,34 +0,0 @@ -[loggers] -keys=root,api,tracing - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=api -propagate=0 - -[logger_tracing] -level=ERROR -handlers=console -qualname=jaeger_tracing -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= \ No newline at end of file diff --git a/auth-api/manage.py b/auth-api/manage.py deleted file mode 100644 index bbbfa4a171..0000000000 --- a/auth-api/manage.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Manage the database and some other items required to run the API -""" -import logging - -from flask import url_for -from flask_migrate import Migrate, MigrateCommand -from flask_script import Manager # class for handling a set of commands - -# models included so that migrate can build the database migrations -from auth_api import models # pylint: disable=unused-import -from auth_api import create_app -from auth_api.models import db - - -APP = create_app() -MIGRATE = Migrate(APP, db) -MANAGER = Manager(APP) - -MANAGER.add_command('db', MigrateCommand) - - -@MANAGER.command -def list_routes(): - output = [] - for rule in APP.url_map.iter_rules(): - - options = {} - for arg in rule.arguments: - options[arg] = "[{0}]".format(arg) - - methods = ','.join(rule.methods) - url = url_for(rule.endpoint, **options) - line = ("{:50s} {:20s} {}".format(rule.endpoint, methods, url)) - output.append(line) - - for line in sorted(output): - print(line) - - -if __name__ == '__main__': - logging.log(logging.INFO, 'Running the Manager') - MANAGER.run() diff --git a/auth-api/migrations/env.py b/auth-api/migrations/env.py index 6defe53041..af16cfa5b6 100644 --- a/auth-api/migrations/env.py +++ b/auth-api/migrations/env.py @@ -1,32 +1,25 @@ from __future__ import with_statement -import logging -from logging.config import fileConfig +import re from alembic import context -from sqlalchemy import engine_from_config, pool - - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. 
-config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata from flask import current_app +from sqlalchemy import engine_from_config, pool +from structured_logging import StructuredLogging +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config -config.set_main_option('sqlalchemy.url', - current_app.config.get('SQLALCHEMY_DATABASE_URI')) -target_metadata = current_app.extensions['migrate'].db.metadata +logger = StructuredLogging.get_logger() +config.set_main_option("sqlalchemy.url", current_app.config.get("SQLALCHEMY_DATABASE_URI")) +target_metadata = current_app.extensions["migrate"].db.metadata # other values from the config, defined by the needs of env.py, # can be acquired: @@ -34,6 +27,21 @@ # ... etc. +def get_list_from_config(config, key): + arr = config.get_main_option(key, []) + if arr: + # split on newlines and commas, then trim (I mean strip) + arr = [token for a in arr.split("\n") for b in a.split(",") if (token := b.strip())] + return arr + + +exclude_tables = get_list_from_config(config, "exclude_tables") + + +def include_object(object, name, type_, reflected, compare_to): + return not (type_ == "table" and name in exclude_tables) + + def run_migrations_offline(): """Run migrations in 'offline' mode. 
@@ -48,7 +56,7 @@ def run_migrations_offline(): """ url = config.get_main_option("sqlalchemy.url") context.configure( - url=url, target_metadata=target_metadata, literal_binds=True, compare_type=True + url=url, target_metadata=target_metadata, literal_binds=True, compare_type=True, include_object=include_object ) with context.begin_transaction(): @@ -67,15 +75,15 @@ def run_migrations_online(): # when there are no changes to the schema # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): + if getattr(config.cmd_opts, "autogenerate", False): script = directives[0] if script.upgrade_ops.is_empty(): directives[:] = [] - logger.info('No changes in schema detected.') + logger.info("No changes in schema detected.") connectable = engine_from_config( config.get_section(config.config_ini_section), - prefix='sqlalchemy.', + prefix="sqlalchemy.", poolclass=pool.NullPool, ) @@ -84,27 +92,14 @@ def process_revision_directives(context, revision, directives): connection=connection, target_metadata=target_metadata, process_revision_directives=process_revision_directives, - compare_type=True, include_object=include_object, - **current_app.extensions['migrate'].configure_args + **current_app.extensions["migrate"].configure_args, ) with context.begin_transaction(): context.run_migrations() -def include_object(object, name, type_, reflected, compare_to): - """Decide whether the schema needs to be included in migration. - - If the model is in the SKIPPED_MIGRATIONS config value skip the version creation on migration. 
- """ - - if type_ == 'table' and name in current_app.config.get('SKIPPED_MIGRATIONS'): - return False - - return True - - if context.is_offline_mode(): run_migrations_offline() else: diff --git a/auth-api/migrations/versions/031a07fb0811_add_account_id_column_in_tasks_table.py b/auth-api/migrations/versions/031a07fb0811_add_account_id_column_in_tasks_table.py index 855d0ceb5c..64caf971a8 100644 --- a/auth-api/migrations/versions/031a07fb0811_add_account_id_column_in_tasks_table.py +++ b/auth-api/migrations/versions/031a07fb0811_add_account_id_column_in_tasks_table.py @@ -5,6 +5,7 @@ Create Date: 2021-04-14 17:48:33.958394 """ + from typing import List import sqlalchemy as sa @@ -16,20 +17,24 @@ # revision identifiers, used by Alembic. -revision = '031a07fb0811' -down_revision = '885632ab6357' +revision = "031a07fb0811" +down_revision = "885632ab6357" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.add_column('tasks', sa.Column('account_id', sa.Integer(), nullable=True)) - op.add_column('tasks', sa.Column('relationship_status', sa.String(length=100), nullable=True)) + op.add_column("tasks", sa.Column("account_id", sa.Integer(), nullable=True)) + op.add_column("tasks", sa.Column("relationship_status", sa.String(length=100), nullable=True)) conn = op.get_bind() - org_res = conn.execute(f"SELECT * FROM orgs WHERE status_code in ('PENDING_STAFF_REVIEW', 'REJECTED', 'ACTIVE') AND " - f"access_type in ('REGULAR_BCEID', 'EXTRA_PROVINCIAL');") + org_res = conn.execute( + text( + f"SELECT * FROM orgs WHERE status_code in ('PENDING_STAFF_REVIEW', 'REJECTED', 'ACTIVE') AND " + f"access_type in ('REGULAR_BCEID', 'EXTRA_PROVINCIAL');" + ) + ) org_list: List[Org] = org_res.fetchall() for org in org_list: @@ -55,15 +60,23 @@ def upgrade(): status = TaskStatus.COMPLETED.value # Insert into tasks - insert_sql = text("INSERT INTO tasks(created, modified, name, date_submitted, relationship_type, " - 
"relationship_id, created_by_id, modified_by_id, related_to, status, type, " - "relationship_status) " - "VALUES (:created_time, :created_time, :name, :date_submitted, :task_relationship_type, " - ":org_id, :user_id, :user_id, :user_id, :status, :task_type, :relationship_status)") \ - .params( - created_time=created_time, name=name, date_submitted=date_submitted, - task_relationship_type=task_relationship_type, org_id=org_id, user_id=user_id, status=status, - task_type=task_type, relationship_status=relationship_status) + insert_sql = text( + "INSERT INTO tasks(created, modified, name, date_submitted, relationship_type, " + "relationship_id, created_by_id, modified_by_id, related_to, status, type, " + "relationship_status) " + "VALUES (:created_time, :created_time, :name, :date_submitted, :task_relationship_type, " + ":org_id, :user_id, :user_id, :user_id, :status, :task_type, :relationship_status)" + ).params( + created_time=created_time, + name=name, + date_submitted=date_submitted, + task_relationship_type=task_relationship_type, + org_id=org_id, + user_id=user_id, + status=status, + task_type=task_type, + relationship_status=relationship_status, + ) op.execute(insert_sql) # ### end Alembic commands ### @@ -71,8 +84,8 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('tasks', 'account_id') - op.drop_column('tasks', 'relationship_status') + op.drop_column("tasks", "account_id") + op.drop_column("tasks", "relationship_status") # Delete the tasks op.execute(f"DELETE FROM tasks") # ### end Alembic commands ### diff --git a/auth-api/migrations/versions/09dd4ea64775_verified_user.py b/auth-api/migrations/versions/09dd4ea64775_verified_user.py index b8abee9c98..16334acaae 100644 --- a/auth-api/migrations/versions/09dd4ea64775_verified_user.py +++ b/auth-api/migrations/versions/09dd4ea64775_verified_user.py @@ -5,43 +5,42 @@ Create Date: 2021-11-19 13:05:57.935908 """ + from typing import List import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.sql import column, table from auth_api.models import Affidavit # revision identifiers, used by Alembic. -revision = '09dd4ea64775' -down_revision = 'd00101759be4' +revision = "09dd4ea64775" +down_revision = "d00101759be4" branch_labels = None depends_on = None def upgrade(): - op.add_column('users', sa.Column('verified', sa.Boolean(), nullable=True)) - op.add_column('users_version', sa.Column('verified', sa.Boolean(), autoincrement=False, nullable=True)) - membership_status_table = table('membership_status_codes', - column('id', sa.Integer()), - column('name', sa.String()), - column('description', sa.String())) + op.add_column("users", sa.Column("verified", sa.Boolean(), nullable=True)) + op.add_column("users_version", sa.Column("verified", sa.Boolean(), autoincrement=False, nullable=True)) + membership_status_table = table( + "membership_status_codes", + column("id", sa.Integer()), + column("name", sa.String()), + column("description", sa.String()), + ) op.bulk_insert( - membership_status_table, - [ - { - "id": 5, - "name": "PENDING_STAFF_REVIEW", - "description": "Pending Staff Review" - } - ] + membership_status_table, [{"id": 5, "name": "PENDING_STAFF_REVIEW", "description": "Pending Staff Review"}] ) # Find approved BCeID 
affidavit users. conn = op.get_bind() - affidavits: List[Affidavit] = conn.execute("select * from affidavits where status_code='APPROVED' ").fetchall() + affidavits: List[Affidavit] = conn.execute( + text("select * from affidavits where status_code='APPROVED' ") + ).fetchall() for affidavit in affidavits: op.execute(f"update users set verified=true where id = {affidavit.user_id};") @@ -50,5 +49,5 @@ def upgrade(): def downgrade(): op.execute("delete from membership_status_codes where name='PENDING_STAFF_REVIEW';") - op.drop_column('users_version', 'verified') - op.drop_column('users', 'verified') + op.drop_column("users_version", "verified") + op.drop_column("users", "verified") diff --git a/auth-api/migrations/versions/0f310e71e51d_affidavit_pending_status_permissions.py b/auth-api/migrations/versions/0f310e71e51d_affidavit_pending_status_permissions.py index 3cdcccb873..99047eca04 100644 --- a/auth-api/migrations/versions/0f310e71e51d_affidavit_pending_status_permissions.py +++ b/auth-api/migrations/versions/0f310e71e51d_affidavit_pending_status_permissions.py @@ -5,40 +5,47 @@ Create Date: 2021-01-04 14:55:28.146682 """ + import sqlalchemy as sa from alembic import op -from sqlalchemy import Boolean, String +from sqlalchemy import text from sqlalchemy.sql import column, table from auth_api.utils.custom_sql import CustomSql # revision identifiers, used by Alembic. 
-revision = '0f310e71e51d' -down_revision = 'c10f494d7e10' +revision = "0f310e71e51d" +down_revision = "c10f494d7e10" branch_labels = None depends_on = None def upgrade(): - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(text(f"select max(id) from permissions;")) latest_id = res.fetchall()[0][0] op.bulk_insert( permissions_table, [ - {'id': latest_id + 1, 'membership_type_code': 'ADMIN', 'org_status_code': 'PENDING_AFFIDAVIT_REVIEW', - 'actions': 'view_account'} - ] + { + "id": latest_id + 1, + "membership_type_code": "ADMIN", + "org_status_code": "PENDING_AFFIDAVIT_REVIEW", + "actions": "view_account", + } + ], ) def downgrade(): op.execute( - "delete from permissions where membership_type_code='ADMIN' and org_status_code='PENDING_AFFIDAVIT_REVIEW' and actions='view_account'") + "delete from permissions where membership_type_code='ADMIN' and org_status_code='PENDING_AFFIDAVIT_REVIEW' and actions='view_account'" + ) diff --git a/auth-api/migrations/versions/1a46fdc4d630_permissons_for_activity_log.py b/auth-api/migrations/versions/1a46fdc4d630_permissons_for_activity_log.py index eb061512d9..4529679c33 100644 --- a/auth-api/migrations/versions/1a46fdc4d630_permissons_for_activity_log.py +++ b/auth-api/migrations/versions/1a46fdc4d630_permissons_for_activity_log.py @@ -5,8 +5,10 @@ Create Date: 2021-05-04 10:15:37.494139 """ + import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.dialects import postgresql from sqlalchemy.sql 
import column, table @@ -14,32 +16,37 @@ # revision identifiers, used by Alembic. -revision = '1a46fdc4d630' -down_revision = 'a0f0a77dc77f' +revision = "1a46fdc4d630" +down_revision = "a0f0a77dc77f" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(text(f"select max(id) from permissions;")) latest_id = res.fetchall()[0][0] # Insert code values op.bulk_insert( permissions_table, [ - {'id': latest_id + 1, 'membership_type_code': 'ADMIN', 'org_status_code': None, - 'actions': 'view_activitylog'} - ] + { + "id": latest_id + 1, + "membership_type_code": "ADMIN", + "org_status_code": None, + "actions": "view_activitylog", + } + ], ) # ### end Alembic commands ### diff --git a/auth-api/migrations/versions/2023_03_26_b8dc42f28583_.py b/auth-api/migrations/versions/2023_03_26_b8dc42f28583_.py index 3f68b5e48b..57270ddf9e 100644 --- a/auth-api/migrations/versions/2023_03_26_b8dc42f28583_.py +++ b/auth-api/migrations/versions/2023_03_26_b8dc42f28583_.py @@ -5,7 +5,10 @@ Create Date: 2023-03-26 09:12:17.295671 """ + from alembic import op +from sqlalchemy import text + import sqlalchemy as sa from flask import current_app from auth_api.models import db @@ -15,27 +18,28 @@ # revision identifiers, used by Alembic. 
-revision = 'b8dc42f28583' -down_revision = '501d1179b2f0' +revision = "b8dc42f28583" +down_revision = "501d1179b2f0" branch_labels = None depends_on = None -def upgrade(): +def upgrade(): conn = op.get_bind() org_res = conn.execute( - "select distinct org_id from product_subscriptions where product_code in ('MHR','PPR');" + text("select distinct org_id from product_subscriptions where product_code in ('MHR','PPR');") ) - print('Updating keycloak groups retroactively.') + print("Updating keycloak groups retroactively.") orgs = org_res.fetchall() for org_id in orgs: - print('Updating keycloak groups for: ', org_id[0]) - try: + print("Updating keycloak groups for: ", org_id[0]) + try: ProductService.update_org_product_keycloak_groups(org_id[0]) except Exception as exc: - print('Error updating keycloak groups for org: ', org_id[0]) + print("Error updating keycloak groups for org: ", org_id[0]) print(exc) - print('Finished updating keycloak groups retroactively.') + print("Finished updating keycloak groups retroactively.") + def downgrade(): pass diff --git a/auth-api/migrations/versions/2023_07_04_d53a79e9cc89_add_uuid_column_to_orgs_table.py b/auth-api/migrations/versions/2023_07_04_d53a79e9cc89_add_uuid_column_to_orgs_table.py index 81497bc3be..ebc1c1e5ce 100644 --- a/auth-api/migrations/versions/2023_07_04_d53a79e9cc89_add_uuid_column_to_orgs_table.py +++ b/auth-api/migrations/versions/2023_07_04_d53a79e9cc89_add_uuid_column_to_orgs_table.py @@ -5,32 +5,39 @@ Create Date: 2023-07-04 10:52:47.771517 """ + from alembic import op import sqlalchemy as sa +from sqlalchemy import text from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. 
-revision = 'd53a79e9cc89' -down_revision = 'b8dc42f28583' +revision = "d53a79e9cc89" +down_revision = "b8dc42f28583" branch_labels = None depends_on = None def upgrade(): conn = op.get_bind() - org_res = conn.execute( - 'CREATE EXTENSION IF NOT EXISTS "uuid-ossp";' - ) + org_res = conn.execute(text('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')) # ### commands auto generated by Alembic - please adjust! ### - op.add_column('orgs', sa.Column('uuid', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False)) - op.create_unique_constraint(None, 'orgs', ['uuid']) - op.add_column('orgs_version', sa.Column('uuid', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=True)) + op.add_column( + "orgs", sa.Column("uuid", postgresql.UUID(), server_default=sa.text("uuid_generate_v4()"), nullable=False) + ) + op.create_unique_constraint(None, "orgs", ["uuid"]) + op.add_column( + "orgs_version", + sa.Column( + "uuid", postgresql.UUID(), server_default=sa.text("uuid_generate_v4()"), autoincrement=False, nullable=True + ), + ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('orgs_version', 'uuid') - op.drop_constraint(None, 'orgs', type_='unique') - op.drop_column('orgs', 'uuid') + op.drop_column("orgs_version", "uuid") + op.drop_constraint(None, "orgs", type_="unique") + op.drop_column("orgs", "uuid") # ### end Alembic commands ### diff --git a/auth-api/migrations/versions/2024_08_13_69f7b110a98c_add_version_column.py b/auth-api/migrations/versions/2024_08_13_69f7b110a98c_add_version_column.py new file mode 100644 index 0000000000..04482cd905 --- /dev/null +++ b/auth-api/migrations/versions/2024_08_13_69f7b110a98c_add_version_column.py @@ -0,0 +1,81 @@ +"""add version column + +Revision ID: 69f7b110a98c +Revises: af98933abe93 +Create Date: 2024-08-13 09:55:11.041391 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "69f7b110a98c" +down_revision = "af98933abe93" +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table("account_login_options", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("affiliations", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("affidavits", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("contact_links", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("contacts", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("memberships", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("org_settings", 
schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("orgs", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("product_subscriptions", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + with op.batch_alter_table("users", schema=None) as batch_op: + batch_op.add_column(sa.Column("version", sa.Integer(), nullable=False, server_default="1")) + + +def downgrade(): + with op.batch_alter_table("users", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("product_subscriptions", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("orgs", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("org_settings", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("memberships", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("contacts", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("contact_links", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("affidavits", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("affiliations", schema=None) as batch_op: + batch_op.drop_column("version") + + with op.batch_alter_table("account_login_options", schema=None) as batch_op: + batch_op.drop_column("version") diff --git a/auth-api/migrations/versions/2024_09_20_aa74003de9d8_.py b/auth-api/migrations/versions/2024_09_20_aa74003de9d8_.py new file mode 100644 index 0000000000..a20b4a13bc --- /dev/null +++ b/auth-api/migrations/versions/2024_09_20_aa74003de9d8_.py @@ -0,0 +1,256 @@ +"""empty message + +Revision ID: 
aa74003de9d8 +Revises: 69f7b110a98c +Create Date: 2024-09-20 11:19:42.551199 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "aa74003de9d8" +down_revision = "69f7b110a98c" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "affidavits_history", + sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("document_id", sa.String(length=60), autoincrement=False, nullable=True), + sa.Column("issuer", sa.String(length=250), autoincrement=False, nullable=True), + sa.Column("status_code", sa.String(length=15), autoincrement=False, nullable=False), + sa.Column("decision_made_by", sa.String(length=250), autoincrement=False, nullable=True), + sa.Column("decision_made_on", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("user_id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + with op.batch_alter_table("affidavits_history", schema=None) as batch_op: + batch_op.create_index(batch_op.f("ix_affidavits_history_document_id"), ["document_id"], unique=False) + + op.create_table( + "orgs_history", + sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("uuid", sa.UUID(), autoincrement=False, nullable=False), + sa.Column("type_code", sa.String(length=15), autoincrement=False, nullable=False), + 
sa.Column("status_code", sa.String(length=30), autoincrement=False, nullable=False), + sa.Column("name", sa.String(length=250), autoincrement=False, nullable=True), + sa.Column("branch_name", sa.String(length=100), autoincrement=False, nullable=True), + sa.Column("access_type", sa.String(length=250), autoincrement=False, nullable=True), + sa.Column("decision_made_by", sa.String(length=250), autoincrement=False, nullable=True), + sa.Column("decision_made_on", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("bcol_user_id", sa.String(length=20), autoincrement=False, nullable=True), + sa.Column("bcol_account_id", sa.String(length=20), autoincrement=False, nullable=True), + sa.Column("bcol_account_name", sa.String(length=250), autoincrement=False, nullable=True), + sa.Column("suspended_on", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("suspension_reason_code", sa.String(length=15), autoincrement=False, nullable=True), + sa.Column("has_api_access", sa.Boolean(), autoincrement=False, nullable=True), + sa.Column("business_type", sa.String(length=15), autoincrement=False, nullable=True), + sa.Column("business_size", sa.String(length=15), autoincrement=False, nullable=True), + sa.Column("is_business_account", sa.Boolean(), autoincrement=False, nullable=True), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + with op.batch_alter_table("orgs_history", schema=None) as batch_op: + batch_op.create_index(batch_op.f("ix_orgs_history_access_type"), ["access_type"], unique=False) + 
batch_op.create_index(batch_op.f("ix_orgs_history_name"), ["name"], unique=False) + + op.create_table( + "account_login_options_history", + sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("login_source", sa.String(length=20), autoincrement=False, nullable=False), + sa.Column("org_id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("is_active", sa.Boolean(), autoincrement=False, nullable=True), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + op.create_table( + "affiliations_history", + sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("entity_id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("org_id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("certified_by_name", sa.String(length=100), autoincrement=False, nullable=True), + sa.Column("environment", sa.String(length=20), autoincrement=False, nullable=True), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + with op.batch_alter_table("affiliations_history", schema=None) as 
batch_op: + batch_op.create_index(batch_op.f("ix_affiliations_history_entity_id"), ["entity_id"], unique=False) + batch_op.create_index(batch_op.f("ix_affiliations_history_environment"), ["environment"], unique=False) + + op.create_table( + "contacts_history", + sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("street", sa.String(length=250), autoincrement=False, nullable=True), + sa.Column("street_additional", sa.String(length=250), autoincrement=False, nullable=True), + sa.Column("city", sa.String(length=100), autoincrement=False, nullable=True), + sa.Column("region", sa.String(length=100), autoincrement=False, nullable=True), + sa.Column("country", sa.String(length=20), autoincrement=False, nullable=True), + sa.Column("postal_code", sa.String(length=15), autoincrement=False, nullable=True), + sa.Column("delivery_instructions", sa.String(length=4096), autoincrement=False, nullable=True), + sa.Column("phone", sa.String(length=15), autoincrement=False, nullable=True), + sa.Column("phone_extension", sa.String(length=10), autoincrement=False, nullable=True), + sa.Column("email", sa.String(length=100), autoincrement=False, nullable=True), + sa.Column("entity_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + with op.batch_alter_table("contacts_history", schema=None) as batch_op: + batch_op.create_index(batch_op.f("ix_contacts_history_street"), ["street"], unique=False) + + op.create_table( + "memberships_history", + 
sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("user_id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("org_id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("membership_type_code", sa.String(length=15), autoincrement=False, nullable=False), + sa.Column("status", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + with op.batch_alter_table("memberships_history", schema=None) as batch_op: + batch_op.create_index(batch_op.f("ix_memberships_history_org_id"), ["org_id"], unique=False) + batch_op.create_index(batch_op.f("ix_memberships_history_status"), ["status"], unique=False) + batch_op.create_index(batch_op.f("ix_memberships_history_user_id"), ["user_id"], unique=False) + + op.create_table( + "org_settings_history", + sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("org_id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("setting", sa.String(length=100), autoincrement=False, nullable=True), + sa.Column("enabled", sa.Boolean(), autoincrement=False, nullable=False), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, 
nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + op.create_table( + "product_subscriptions_history", + sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("org_id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("product_code", sa.String(length=15), autoincrement=False, nullable=False), + sa.Column("status_code", sa.String(length=30), autoincrement=False, nullable=False), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + with op.batch_alter_table("product_subscriptions_history", schema=None) as batch_op: + batch_op.create_index(batch_op.f("ix_product_subscriptions_history_org_id"), ["org_id"], unique=False) + + op.create_table( + "contact_links_history", + sa.Column("id", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("contact_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("entity_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("user_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("org_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("affidavit_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("created", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("modified", sa.DateTime(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.Integer(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", 
sa.Integer(), autoincrement=False, nullable=True), + sa.Column("version", sa.Integer(), autoincrement=False, nullable=False), + sa.Column("changed", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id", "version"), + sqlite_autoincrement=True, + ) + with op.batch_alter_table("contact_links_history", schema=None) as batch_op: + batch_op.create_index(batch_op.f("ix_contact_links_history_contact_id"), ["contact_id"], unique=False) + batch_op.create_index(batch_op.f("ix_contact_links_history_entity_id"), ["entity_id"], unique=False) + batch_op.create_index(batch_op.f("ix_contact_links_history_org_id"), ["org_id"], unique=False) + batch_op.create_index(batch_op.f("ix_contact_links_history_user_id"), ["user_id"], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("contact_links_history", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_contact_links_history_user_id")) + batch_op.drop_index(batch_op.f("ix_contact_links_history_org_id")) + batch_op.drop_index(batch_op.f("ix_contact_links_history_entity_id")) + batch_op.drop_index(batch_op.f("ix_contact_links_history_contact_id")) + + op.drop_table("contact_links_history") + with op.batch_alter_table("product_subscriptions_history", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_product_subscriptions_history_org_id")) + + op.drop_table("product_subscriptions_history") + op.drop_table("org_settings_history") + with op.batch_alter_table("memberships_history", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_memberships_history_user_id")) + batch_op.drop_index(batch_op.f("ix_memberships_history_status")) + batch_op.drop_index(batch_op.f("ix_memberships_history_org_id")) + + op.drop_table("memberships_history") + with op.batch_alter_table("contacts_history", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_contacts_history_street")) + + 
op.drop_table("contacts_history") + with op.batch_alter_table("affiliations_history", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_affiliations_history_environment")) + batch_op.drop_index(batch_op.f("ix_affiliations_history_entity_id")) + + op.drop_table("affiliations_history") + op.drop_table("account_login_options_history") + with op.batch_alter_table("orgs_history", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_orgs_history_name")) + batch_op.drop_index(batch_op.f("ix_orgs_history_access_type")) + + op.drop_table("orgs_history") + with op.batch_alter_table("affidavits_history", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_affidavits_history_document_id")) + + op.drop_table("affidavits_history") + # ### end Alembic commands ### diff --git a/auth-api/migrations/versions/304705f06971_tasks_multiple_reasons.py b/auth-api/migrations/versions/304705f06971_tasks_multiple_reasons.py index a1c19cab39..37619b10b6 100644 --- a/auth-api/migrations/versions/304705f06971_tasks_multiple_reasons.py +++ b/auth-api/migrations/versions/304705f06971_tasks_multiple_reasons.py @@ -5,6 +5,7 @@ Create Date: 2021-09-10 14:50:36.964423 """ + from typing import List from alembic import op import sqlalchemy as sa @@ -14,8 +15,8 @@ from auth_api.models import Task # revision identifiers, used by Alembic. 
-revision = '304705f06971' -down_revision = '41ac4080a7ed' +revision = "304705f06971" +down_revision = "41ac4080a7ed" branch_labels = None depends_on = None @@ -26,24 +27,27 @@ def upgrade(): # Once all the existing values are turned to array values, alter the column remarks type from character to ARRAY conn = op.get_bind() - task_res = conn.execute("SELECT * FROM tasks WHERE remarks IS NOT NULL;") + task_res = conn.execute(text("SELECT * FROM tasks WHERE remarks IS NOT NULL;")) task_list: List[Task] = task_res.fetchall() for task in task_list: task_id = task.id - remarks = task.remarks.split(',') # to handle edge case, where we upgrade, downgrade and then upgrade again + remarks = task.remarks.split(",") # to handle edge case, where we upgrade, downgrade and then upgrade again # Now updating the new remarks value back in the table - update_sql = text("UPDATE tasks SET remarks = :remarks where id = :task_id") \ - .params( - remarks=remarks, task_id=task_id) + update_sql = text("UPDATE tasks SET remarks = :remarks where id = :task_id").params( + remarks=remarks, task_id=task_id + ) op.execute(update_sql) # ### alter command auto generated by Alembic ### - op.alter_column('tasks', 'remarks', - existing_type=sa.VARCHAR(length=100), - type_=postgresql.ARRAY(sa.String(), dimensions=1), - existing_nullable=True, - postgresql_using='remarks::character varying(100)[]') + op.alter_column( + "tasks", + "remarks", + existing_type=sa.VARCHAR(length=100), + type_=postgresql.ARRAY(sa.String(), dimensions=1), + existing_nullable=True, + postgresql_using="remarks::character varying(100)[]", + ) # ### end Alembic commands ### @@ -53,15 +57,18 @@ def downgrade(): # we populate the values back in the table in string form conn = op.get_bind() - task_res = conn.execute("SELECT * FROM tasks WHERE remarks IS NOT NULL;") + task_res = conn.execute(text("SELECT * FROM tasks WHERE remarks IS NOT NULL;")) task_list: List[Task] = task_res.fetchall() # ### alter command auto generated by 
Alembic ### - op.alter_column('tasks', 'remarks', - existing_type=postgresql.ARRAY(sa.String(), dimensions=1), - type_=sa.VARCHAR(length=250), - existing_nullable=True, - postgresql_using='remarks::character varying(100)[]') + op.alter_column( + "tasks", + "remarks", + existing_type=postgresql.ARRAY(sa.String(), dimensions=1), + type_=sa.VARCHAR(length=250), + existing_nullable=True, + postgresql_using="remarks::character varying(100)[]", + ) # ### end Alembic command ### # Now convert values to string @@ -69,10 +76,10 @@ def downgrade(): if isinstance(task.remarks, list): task_id = task.id old_remarks = task.remarks - new_remarks = ','.join(old_remarks) + new_remarks = ",".join(old_remarks) if new_remarks is not None: # Now updating the new remarks value back in the table - update_sql = text("UPDATE tasks SET remarks = :remarks where id = :task_id") \ - .params( - remarks=new_remarks, task_id=task_id) + update_sql = text("UPDATE tasks SET remarks = :remarks where id = :task_id").params( + remarks=new_remarks, task_id=task_id + ) op.execute(update_sql) diff --git a/auth-api/migrations/versions/31ec16d4f1e9_product_migration_for_vs.py b/auth-api/migrations/versions/31ec16d4f1e9_product_migration_for_vs.py index f7aa060e86..e8240d53ec 100644 --- a/auth-api/migrations/versions/31ec16d4f1e9_product_migration_for_vs.py +++ b/auth-api/migrations/versions/31ec16d4f1e9_product_migration_for_vs.py @@ -5,9 +5,11 @@ Create Date: 2021-04-30 10:01:10.794355 """ + import sqlalchemy as sa from alembic import op from flask import current_app +from sqlalchemy import text from auth_api.models import db from auth_api.services.products import Product @@ -15,8 +17,8 @@ # revision identifiers, used by Alembic. -revision = '31ec16d4f1e9' -down_revision = 'a37f90e6802d' +revision = "31ec16d4f1e9" +down_revision = "a37f90e6802d" branch_labels = None depends_on = None @@ -25,21 +27,24 @@ def upgrade(): # Query all orgs which are linked to BCOL. 
conn = op.get_bind() org_res = conn.execute( - "select o.id, o.bcol_user_id from orgs o where bcol_user_id is not null and bcol_account_id is not null and status_code in ('ACTIVE', 'PENDING_STAFF_REVIEW');" + text( + "select o.id, o.bcol_user_id from orgs o where bcol_user_id is not null and bcol_account_id is not null and status_code in ('ACTIVE', 'PENDING_STAFF_REVIEW');" + ) ) orgs = org_res.fetchall() - print('starting migration for BCOL products') + print("starting migration for BCOL products") if len(orgs) > 0: token = RestService.get_service_account_token() for org_id in orgs: try: - print('Getting bcol profile for ', org_id[0], org_id[1]) - bcol_response = RestService.get(endpoint=current_app.config.get('BCOL_API_URL') + f'/profiles/{org_id[1]}', - token=token) - print('BCOL Response', bcol_response.json()) - Product.create_subscription_from_bcol_profile(org_id[0], bcol_response.json().get('profileFlags')) + print("Getting bcol profile for ", org_id[0], org_id[1]) + bcol_response = RestService.get( + endpoint=current_app.config.get("BCOL_API_URL") + f"/profiles/{org_id[1]}", token=token + ) + print("BCOL Response", bcol_response.json()) + Product.create_subscription_from_bcol_profile(org_id[0], bcol_response.json().get("profileFlags")) except Exception as exc: - print('Profile Error') + print("Profile Error") print(exc) raise exc db.session.commit() diff --git a/auth-api/migrations/versions/342ec8814181_update_verified_flag.py b/auth-api/migrations/versions/342ec8814181_update_verified_flag.py index 66d74d695a..63da3d5902 100644 --- a/auth-api/migrations/versions/342ec8814181_update_verified_flag.py +++ b/auth-api/migrations/versions/342ec8814181_update_verified_flag.py @@ -5,15 +5,17 @@ Create Date: 2022-01-13 16:12:10.256555 """ + from typing import List from alembic import op +from sqlalchemy import text from auth_api.models import Affidavit # revision identifiers, used by Alembic. 
-revision = '342ec8814181' -down_revision = '9f3450623765' +revision = "342ec8814181" +down_revision = "9f3450623765" branch_labels = None depends_on = None @@ -21,7 +23,9 @@ def upgrade(): # Find approved BCeID affidavit users. conn = op.get_bind() - affidavits: List[Affidavit] = conn.execute("select * from affidavits where status_code='APPROVED' ").fetchall() + affidavits: List[Affidavit] = conn.execute( + text("select * from affidavits where status_code='APPROVED' ") + ).fetchall() for affidavit in affidavits: op.execute(f"update users set verified=true where id = {affidavit.user_id};") diff --git a/auth-api/migrations/versions/4c6ca48245be_user_remove_roles.py b/auth-api/migrations/versions/4c6ca48245be_user_remove_roles.py index 5f6cc44bab..6c327ebcff 100644 --- a/auth-api/migrations/versions/4c6ca48245be_user_remove_roles.py +++ b/auth-api/migrations/versions/4c6ca48245be_user_remove_roles.py @@ -5,13 +5,14 @@ Create Date: 2021-03-08 11:37:13.600415 """ + import sqlalchemy as sa from alembic import op - +from sqlalchemy import text # revision identifiers, used by Alembic. -revision = '4c6ca48245be' -down_revision = 'd5b5eb2e8dd0' +revision = "4c6ca48245be" +down_revision = "d5b5eb2e8dd0" branch_labels = None depends_on = None @@ -20,24 +21,24 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.execute("update users set type='ANONYMOUS_USER' where type='ANONYMOUS'") conn = op.get_bind() - res = conn.execute("select id, type, login_source, username, roles from users where type is null") + res = conn.execute(text("select id, type, login_source, username, roles from users where type is null")) users = res.fetchall() for user in users: id = user[0] username = user[3] - if any(username in s for s in ['@idir', 'idir\\', 'idir@']): + if any(username in s for s in ["@idir", "idir\\", "idir@"]): op.execute(f"update users set type='STAFF' where id={id}") - elif any(username in s for s in ['@bcsc', 'bcsc\\', 'bcsc@', '@bceid', 'bceid\\', 'bceid@']): + elif any(username in s for s in ["@bcsc", "bcsc\\", "bcsc@", "@bceid", "bceid\\", "bceid@"]): op.execute(f"update users set type='PUBLIC_USER' where id={id}") - op.drop_column('users', 'roles') - op.drop_column('users_version', 'roles') + op.drop_column("users", "roles") + op.drop_column("users_version", "roles") # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('users_version', sa.Column('roles', sa.VARCHAR(length=1000), autoincrement=False, nullable=True)) - op.add_column('users', sa.Column('roles', sa.VARCHAR(length=1000), autoincrement=False, nullable=True)) + op.add_column("users_version", sa.Column("roles", sa.VARCHAR(length=1000), autoincrement=False, nullable=True)) + op.add_column("users", sa.Column("roles", sa.VARCHAR(length=1000), autoincrement=False, nullable=True)) # ### end Alembic commands ### diff --git a/auth-api/migrations/versions/68c71d35f671_product_setting_permission_and_view_.py b/auth-api/migrations/versions/68c71d35f671_product_setting_permission_and_view_.py index 1dadca11fa..4d11db2e35 100644 --- a/auth-api/migrations/versions/68c71d35f671_product_setting_permission_and_view_.py +++ b/auth-api/migrations/versions/68c71d35f671_product_setting_permission_and_view_.py @@ -5,8 +5,10 @@ Create Date: 2021-04-06 08:01:35.445983 """ + import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.dialects import postgresql from sqlalchemy.sql import column, table @@ -14,63 +16,74 @@ # revision identifiers, used by Alembic. 
-revision = '68c71d35f671' -down_revision = '19a7733fd001' +revision = "68c71d35f671" +down_revision = "19a7733fd001" branch_labels = None depends_on = None -authorizations_view = CustomSql('authorizations_view', - ' SELECT e.business_identifier,' - 'e.name AS entity_name,' - 'e.folio_number,' - 'e.corp_type_code,' - 'm.membership_type_code AS org_membership,' - 'u.keycloak_guid,' - 'u.id AS user_id,' - 'o.id AS org_id,' - 'o.name AS org_name,' - 'o.status_code,' - 'o.type_code AS org_type,' - 'ps.product_code,' - 'o.bcol_user_id,' - 'o.bcol_account_id' - ' FROM memberships m ' - 'LEFT JOIN orgs o ON m.org_id = o.id ' - 'LEFT JOIN users u ON u.id = m.user_id ' - 'LEFT JOIN affiliations a ON o.id = a.org_id ' - 'LEFT JOIN entities e ON e.id = a.entity_id ' - 'LEFT JOIN product_subscriptions ps ON ps.org_id = o.id AND ps.status_code = ' - '\'ACTIVE\' ' - 'WHERE m.status = 1;') +authorizations_view = CustomSql( + "authorizations_view", + " SELECT e.business_identifier," + "e.name AS entity_name," + "e.folio_number," + "e.corp_type_code," + "m.membership_type_code AS org_membership," + "u.keycloak_guid," + "u.id AS user_id," + "o.id AS org_id," + "o.name AS org_name," + "o.status_code," + "o.type_code AS org_type," + "ps.product_code," + "o.bcol_user_id," + "o.bcol_account_id" + " FROM memberships m " + "LEFT JOIN orgs o ON m.org_id = o.id " + "LEFT JOIN users u ON u.id = m.user_id " + "LEFT JOIN affiliations a ON o.id = a.org_id " + "LEFT JOIN entities e ON e.id = a.entity_id " + "LEFT JOIN product_subscriptions ps ON ps.org_id = o.id AND ps.status_code = " + "'ACTIVE' " + "WHERE m.status = 1;", +) def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(text(f"select max(id) from permissions;")) latest_id = res.fetchall()[0][0] # Insert code values op.bulk_insert( permissions_table, [ - {'id': latest_id + 1, 'membership_type_code': 'ADMIN', 'org_status_code': None, - 'actions': 'request_product_package'}, - {'id': latest_id + 2, 'membership_type_code': 'COORDINATOR', 'org_status_code': None, - 'actions': 'request_product_package'} - ] + { + "id": latest_id + 1, + "membership_type_code": "ADMIN", + "org_status_code": None, + "actions": "request_product_package", + }, + { + "id": latest_id + 2, + "membership_type_code": "COORDINATOR", + "org_status_code": None, + "actions": "request_product_package", + }, + ], ) - op.execute(f'DROP VIEW IF EXISTS {authorizations_view.name}') - op.execute(f'CREATE VIEW {authorizations_view.name} AS {authorizations_view.sql}') + op.execute(f"DROP VIEW IF EXISTS {authorizations_view.name}") + op.execute(f"CREATE VIEW {authorizations_view.name} AS {authorizations_view.sql}") # ### end Alembic commands ### @@ -78,5 +91,3 @@ def upgrade(): def downgrade(): op.execute('delete from permissions where action="request_product_package"') # ### end Alembic commands ### - - diff --git a/auth-api/migrations/versions/69a7e464fef3_product_role_changes.py b/auth-api/migrations/versions/69a7e464fef3_product_role_changes.py index 7fadfef944..e92ec5ea25 100644 --- a/auth-api/migrations/versions/69a7e464fef3_product_role_changes.py +++ 
b/auth-api/migrations/versions/69a7e464fef3_product_role_changes.py @@ -3,8 +3,10 @@ Revises: 5053985bdfc6 Create Date: 2021-03-02 12:16:09.152924 """ + import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.dialects import postgresql from sqlalchemy.sql import column, table @@ -12,69 +14,73 @@ # revision identifiers, used by Alembic. -revision = '69a7e464fef3' -down_revision = '5053985bdfc6' +revision = "69a7e464fef3" +down_revision = "5053985bdfc6" branch_labels = None depends_on = None -authorizations_view = CustomSql('authorizations_view', - ' SELECT e.business_identifier,' - 'e.name AS entity_name,' - 'e.folio_number,' - 'e.corp_type_code,' - 'm.membership_type_code AS org_membership,' - 'u.keycloak_guid,' - 'u.id AS user_id,' - 'o.id AS org_id,' - 'o.name AS org_name,' - 'o.status_code,' - 'o.type_code AS org_type,' - 'ps.product_code,' - 'o.bcol_user_id,' - 'o.bcol_account_id' - ' FROM memberships m ' - 'LEFT JOIN orgs o ON m.org_id = o.id ' - 'LEFT JOIN users u ON u.id = m.user_id ' - 'LEFT JOIN affiliations a ON o.id = a.org_id ' - 'LEFT JOIN entities e ON e.id = a.entity_id ' - 'LEFT JOIN product_subscriptions ps ON ps.org_id = o.id ' - 'WHERE m.status = 1;') +authorizations_view = CustomSql( + "authorizations_view", + " SELECT e.business_identifier," + "e.name AS entity_name," + "e.folio_number," + "e.corp_type_code," + "m.membership_type_code AS org_membership," + "u.keycloak_guid," + "u.id AS user_id," + "o.id AS org_id," + "o.name AS org_name," + "o.status_code," + "o.type_code AS org_type," + "ps.product_code," + "o.bcol_user_id," + "o.bcol_account_id" + " FROM memberships m " + "LEFT JOIN orgs o ON m.org_id = o.id " + "LEFT JOIN users u ON u.id = m.user_id " + "LEFT JOIN affiliations a ON o.id = a.org_id " + "LEFT JOIN entities e ON e.id = a.entity_id " + "LEFT JOIN product_subscriptions ps ON ps.org_id = o.id " + "WHERE m.status = 1;", +) def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) # Insert code values op.bulk_insert( permissions_table, [ - {'id': 68, 'membership_type_code': 'ADMIN', 'org_status_code': None, 'actions': 'ppr'}, - {'id': 69, 'membership_type_code': 'COORDINATOR', 'org_status_code': None, 'actions': 'ppr'}, - {'id': 70, 'membership_type_code': 'USER', 'org_status_code': None, 'actions': 'ppr'} - ] + {"id": 68, "membership_type_code": "ADMIN", "org_status_code": None, "actions": "ppr"}, + {"id": 69, "membership_type_code": "COORDINATOR", "org_status_code": None, "actions": "ppr"}, + {"id": 70, "membership_type_code": "USER", "org_status_code": None, "actions": "ppr"}, + ], ) - op.execute(f'DROP VIEW IF EXISTS {authorizations_view.name}') - op.execute(f'CREATE VIEW {authorizations_view.name} AS {authorizations_view.sql}') + op.execute(f"DROP VIEW IF EXISTS {authorizations_view.name}") + op.execute(f"CREATE VIEW {authorizations_view.name} AS {authorizations_view.sql}") - op.execute('delete from product_subscription_roles') - op.execute('delete from product_role_codes') - op.drop_table('product_subscription_roles') - op.drop_table('product_subscription_roles_version') + op.execute("delete from product_subscription_roles") + op.execute("delete from product_role_codes") + op.drop_table("product_subscription_roles") + op.drop_table("product_subscription_roles_version") - op.drop_table('product_role_codes') + op.drop_table("product_role_codes") # Create PPR product for PREMIUM accounts. # Delete PPR subscription from all accounts. 
op.execute("delete from product_subscriptions where product_code='PPR'") conn = op.get_bind() - res = conn.execute("select id from orgs where type_code='PREMIUM' ") + res = conn.execute(text("select id from orgs where type_code='PREMIUM' ")) orgs = res.fetchall() for org in orgs: @@ -87,59 +93,73 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('product_subscription_roles_version', - sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('product_subscription_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('product_role_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('transaction_id', sa.BIGINT(), autoincrement=False, nullable=False), - sa.Column('end_transaction_id', sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column('operation_type', sa.SMALLINT(), autoincrement=False, nullable=False), - sa.Column('created', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('modified', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id', 'transaction_id', name='product_subscription_role_version_pkey') - ) - op.create_index('ix_product_subscription_roles_version_transaction_id', 'product_subscription_roles_version', - ['transaction_id'], unique=False) - op.create_index('ix_product_subscription_roles_version_operation_type', 'product_subscription_roles_version', - ['operation_type'], unique=False) - op.create_index('ix_product_subscription_roles_version_end_transaction_id', 'product_subscription_roles_version', - ['end_transaction_id'], unique=False) - op.create_table('product_role_codes', - sa.Column('created', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('modified', postgresql.TIMESTAMP(), 
autoincrement=False, nullable=True), - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('code', sa.VARCHAR(length=15), autoincrement=False, nullable=True), - sa.Column('description', sa.VARCHAR(length=100), autoincrement=False, nullable=True), - sa.Column('product_code', sa.VARCHAR(length=15), autoincrement=False, nullable=False), - sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['created_by_id'], ['users.id'], - name='product_role_code_created_by_id_fkey'), - sa.ForeignKeyConstraint(['modified_by_id'], ['users.id'], - name='product_role_code_modified_by_id_fkey'), - sa.ForeignKeyConstraint(['product_code'], ['product_codes.code'], - name='product_role_code_product_codes_fkey'), - sa.PrimaryKeyConstraint('id', name='product_role_codes_pkey') - ) - op.create_table('product_subscription_roles', - sa.Column('created', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('modified', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column('product_subscription_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('product_role_id', sa.INTEGER(), autoincrement=False, nullable=False), - sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column('modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['created_by_id'], ['users.id'], name='product_role_created_by_id_fkey'), - sa.ForeignKeyConstraint(['modified_by_id'], ['users.id'], name='product_role_modified_by_id_fkey'), - sa.ForeignKeyConstraint(['product_role_id'], ['product_role_codes.id'], - name='product_role_product_role_id_fkey'), - sa.ForeignKeyConstraint(['product_subscription_id'], ['product_subscriptions.id'], - 
name='product_role_product_subscription_id_fkey'), - sa.PrimaryKeyConstraint('id', name='product_role_pkey') - ) - - op.create_index('ix_product_role_codes_code', 'product_role_codes', ['code'], unique=False) - op.execute('delete from permissions where id in (68, 69, 70)') + op.create_table( + "product_subscription_roles_version", + sa.Column("id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("product_subscription_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("product_role_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("created_by_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("transaction_id", sa.BIGINT(), autoincrement=False, nullable=False), + sa.Column("end_transaction_id", sa.BIGINT(), autoincrement=False, nullable=True), + sa.Column("operation_type", sa.SMALLINT(), autoincrement=False, nullable=False), + sa.Column("created", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column("modified", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id", "transaction_id", name="product_subscription_role_version_pkey"), + ) + op.create_index( + "ix_product_subscription_roles_version_transaction_id", + "product_subscription_roles_version", + ["transaction_id"], + unique=False, + ) + op.create_index( + "ix_product_subscription_roles_version_operation_type", + "product_subscription_roles_version", + ["operation_type"], + unique=False, + ) + op.create_index( + "ix_product_subscription_roles_version_end_transaction_id", + "product_subscription_roles_version", + ["end_transaction_id"], + unique=False, + ) + op.create_table( + "product_role_codes", + sa.Column("created", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column("modified", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column("id", sa.INTEGER(), 
autoincrement=True, nullable=False), + sa.Column("code", sa.VARCHAR(length=15), autoincrement=False, nullable=True), + sa.Column("description", sa.VARCHAR(length=100), autoincrement=False, nullable=True), + sa.Column("product_code", sa.VARCHAR(length=15), autoincrement=False, nullable=False), + sa.Column("created_by_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint(["created_by_id"], ["users.id"], name="product_role_code_created_by_id_fkey"), + sa.ForeignKeyConstraint(["modified_by_id"], ["users.id"], name="product_role_code_modified_by_id_fkey"), + sa.ForeignKeyConstraint(["product_code"], ["product_codes.code"], name="product_role_code_product_codes_fkey"), + sa.PrimaryKeyConstraint("id", name="product_role_codes_pkey"), + ) + op.create_table( + "product_subscription_roles", + sa.Column("created", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column("modified", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("product_subscription_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("product_role_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("created_by_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("modified_by_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint(["created_by_id"], ["users.id"], name="product_role_created_by_id_fkey"), + sa.ForeignKeyConstraint(["modified_by_id"], ["users.id"], name="product_role_modified_by_id_fkey"), + sa.ForeignKeyConstraint( + ["product_role_id"], ["product_role_codes.id"], name="product_role_product_role_id_fkey" + ), + sa.ForeignKeyConstraint( + ["product_subscription_id"], ["product_subscriptions.id"], name="product_role_product_subscription_id_fkey" + ), + sa.PrimaryKeyConstraint("id", name="product_role_pkey"), 
+ ) + + op.create_index("ix_product_role_codes_code", "product_role_codes", ["code"], unique=False) + op.execute("delete from permissions where id in (68, 69, 70)") # ### end Alembic commands ### diff --git a/auth-api/migrations/versions/9c58b78727c8_users_type.py b/auth-api/migrations/versions/9c58b78727c8_users_type.py index d7fe49a134..4b857264e9 100644 --- a/auth-api/migrations/versions/9c58b78727c8_users_type.py +++ b/auth-api/migrations/versions/9c58b78727c8_users_type.py @@ -5,7 +5,9 @@ Create Date: 2021-06-29 14:51:52.950816 """ + from alembic import op +from sqlalchemy import text import sqlalchemy as sa from auth_api.models import User from auth_api.utils.enums import LoginSource @@ -14,23 +16,22 @@ # revision identifiers, used by Alembic. -revision = '9c58b78727c8' -down_revision = '5397c5a5b0ca' +revision = "9c58b78727c8" +down_revision = "5397c5a5b0ca" branch_labels = None depends_on = None def upgrade(): conn = op.get_bind() - user_res = conn.execute( - "SELECT * FROM users WHERE coalesce(TRIM(type), '') = ''") + user_res = conn.execute(text("SELECT * FROM users WHERE coalesce(TRIM(type), '') = ''")) users: List[User] = user_res.fetchall() for user in users: login_source = user.login_source if user.login_source in [LoginSource.BCEID.value, LoginSource.BCSC.value]: user_type = Role.PUBLIC_USER.name - elif user.login_source == LoginSource.BCROS.value or user.username.startswith('bcros/'): + elif user.login_source == LoginSource.BCROS.value or user.username.startswith("bcros/"): user_type = Role.ANONYMOUS_USER.name login_source = LoginSource.BCSC.value elif user.login_source == LoginSource.STAFF.value: diff --git a/auth-api/migrations/versions/a0198b1e2c51_ppr_default.py b/auth-api/migrations/versions/a0198b1e2c51_ppr_default.py index ffed7a8ae9..72bf79a926 100644 --- a/auth-api/migrations/versions/a0198b1e2c51_ppr_default.py +++ b/auth-api/migrations/versions/a0198b1e2c51_ppr_default.py @@ -9,14 +9,15 @@ # revision identifiers, used by Alembic. 
from alembic import op from flask import current_app +from sqlalchemy import text from auth_api.models import db from auth_api.services.products import Product from auth_api.services.rest_service import RestService # revision identifiers, used by Alembic. -revision = 'a0198b1e2c51' -down_revision = 'e7f46c30d356' +revision = "a0198b1e2c51" +down_revision = "e7f46c30d356" branch_labels = None depends_on = None @@ -26,21 +27,24 @@ def upgrade(): conn = op.get_bind() org_res = conn.execute( - "select o.id, o.bcol_user_id from orgs o where bcol_user_id is not null and bcol_account_id is not null and status_code in ('ACTIVE', 'PENDING_STAFF_REVIEW');" + text( + "select o.id, o.bcol_user_id from orgs o where bcol_user_id is not null and bcol_account_id is not null and status_code in ('ACTIVE', 'PENDING_STAFF_REVIEW');" + ) ) orgs = org_res.fetchall() - print('starting migration for BCOL products') + print("starting migration for BCOL products") if len(orgs) > 0: token = RestService.get_service_account_token() for org_id in orgs: try: - print('Getting bcol profile for ', org_id[0], org_id[1]) - bcol_response = RestService.get(endpoint=current_app.config.get('BCOL_API_URL') + f'/profiles/{org_id[1]}', - token=token) - print('BCOL Response', bcol_response.json()) - Product.create_subscription_from_bcol_profile(org_id[0], bcol_response.json().get('profileFlags')) + print("Getting bcol profile for ", org_id[0], org_id[1]) + bcol_response = RestService.get( + endpoint=current_app.config.get("BCOL_API_URL") + f"/profiles/{org_id[1]}", token=token + ) + print("BCOL Response", bcol_response.json()) + Product.create_subscription_from_bcol_profile(org_id[0], bcol_response.json().get("profileFlags")) except Exception as exc: - print('Profile Error') + print("Profile Error") print(exc) raise exc db.session.commit() diff --git a/auth-api/migrations/versions/a490b2db8b13_added_pad_edit_for_nsf_accounts.py b/auth-api/migrations/versions/a490b2db8b13_added_pad_edit_for_nsf_accounts.py 
index e30455d143..2e61a0aeaa 100644 --- a/auth-api/migrations/versions/a490b2db8b13_added_pad_edit_for_nsf_accounts.py +++ b/auth-api/migrations/versions/a490b2db8b13_added_pad_edit_for_nsf_accounts.py @@ -5,38 +5,46 @@ Create Date: 2021-01-14 13:19:32.076996 """ + import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.sql import column, table # revision identifiers, used by Alembic. -revision = 'a490b2db8b13' -down_revision = '5f51253eeced' +revision = "a490b2db8b13" +down_revision = "5f51253eeced" branch_labels = None depends_on = None def upgrade(): - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(text(f"select max(id) from permissions;")) latest_id = res.fetchall()[0][0] op.bulk_insert( permissions_table, [ - {'id': latest_id + 1, 'membership_type_code': 'ADMIN', 'org_status_code': 'NSF_SUSPENDED', - 'actions': 'change_pad_info'} - ] + { + "id": latest_id + 1, + "membership_type_code": "ADMIN", + "org_status_code": "NSF_SUSPENDED", + "actions": "change_pad_info", + } + ], ) def downgrade(): op.execute( "delete from permissions where membership_type_code='ADMIN' " - "and org_status_code='NSF_SUSPENDED' and actions in ('change_pad_info')") + "and org_status_code='NSF_SUSPENDED' and actions in ('change_pad_info')" + ) diff --git a/auth-api/migrations/versions/b72d4946fb3c_added_view_edit_for_nsf_account.py b/auth-api/migrations/versions/b72d4946fb3c_added_view_edit_for_nsf_account.py index 1a0b066859..a323516a9f 100644 --- 
a/auth-api/migrations/versions/b72d4946fb3c_added_view_edit_for_nsf_account.py +++ b/auth-api/migrations/versions/b72d4946fb3c_added_view_edit_for_nsf_account.py @@ -5,41 +5,52 @@ Create Date: 2021-01-06 14:34:14.575613 """ + import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.sql import column, table # revision identifiers, used by Alembic. -revision = 'b72d4946fb3c' -down_revision = 'f7362101e761' +revision = "b72d4946fb3c" +down_revision = "f7362101e761" branch_labels = None depends_on = None def upgrade(): - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(text(f"select max(id) from permissions;")) latest_id = res.fetchall()[0][0] op.bulk_insert( permissions_table, [ - {'id': latest_id + 1, 'membership_type_code': 'ADMIN', 'org_status_code': 'NSF_SUSPENDED', - 'actions': 'edit'}, - {'id': latest_id + 2, 'membership_type_code': 'ADMIN', 'org_status_code': 'NSF_SUSPENDED', - 'actions': 'view'} - ] + { + "id": latest_id + 1, + "membership_type_code": "ADMIN", + "org_status_code": "NSF_SUSPENDED", + "actions": "edit", + }, + { + "id": latest_id + 2, + "membership_type_code": "ADMIN", + "org_status_code": "NSF_SUSPENDED", + "actions": "view", + }, + ], ) def downgrade(): op.execute( "delete from permissions where membership_type_code='ADMIN' " - "and org_status_code='NSF_SUSPENDED' and actions in ('view','edit')") - + "and org_status_code='NSF_SUSPENDED' and actions in ('view','edit')" + ) diff --git 
a/auth-api/migrations/versions/be4475027882_rename_tables.py b/auth-api/migrations/versions/be4475027882_rename_tables.py index 85886de259..a5bd8a2b0a 100644 --- a/auth-api/migrations/versions/be4475027882_rename_tables.py +++ b/auth-api/migrations/versions/be4475027882_rename_tables.py @@ -5,53 +5,56 @@ Create Date: 2021-01-18 06:27:37.050548 """ + import re from alembic import op from sqlalchemy import MetaData, create_engine from sqlalchemy.engine import reflection + # revision identifiers, used by Alembic. from sqlalchemy.engine.reflection import Inspector -VERSION = '_version' +VERSION = "_version" -revision = 'be4475027882' -down_revision = 'a490b2db8b13' +revision = "be4475027882" +down_revision = "a490b2db8b13" branch_labels = None depends_on = None ### # get_pk_constraint -table_mapping = {'org': 'orgs', - 'corp_type': 'corp_types', - 'product_code': 'product_codes', - 'affidavit': 'affidavits', - 'affidavit_status': 'affidavit_statuses', - 'affiliation': 'affiliations', - 'contact': 'contacts', - 'user': 'users', - 'entity': 'entities', - 'contact_link': 'contact_links', - 'invitation': 'invitations', - 'invitation_membership': 'invitation_memberships', - 'invitation_type': 'invitation_types', - 'invitation_status': 'invitation_statuses', - 'membership': 'memberships', - 'membership_status_code': 'membership_status_codes', - 'membership_type': 'membership_types', - 'org_status': 'org_statuses', - 'org_type': 'org_types', - 'payment_type': 'payment_types', - 'product_role_code': 'product_role_codes', - 'product_subscription': 'product_subscriptions', - 'product_subscription_role': 'product_subscription_roles', - 'product_type_code': 'product_type_codes', - 'user_status_code': 'user_status_codes', - } - -skip_table = ['alembic', 'activity', 'transaction'] +table_mapping = { + "org": "orgs", + "corp_type": "corp_types", + "product_code": "product_codes", + "affidavit": "affidavits", + "affidavit_status": "affidavit_statuses", + "affiliation": "affiliations", 
+ "contact": "contacts", + "user": "users", + "entity": "entities", + "contact_link": "contact_links", + "invitation": "invitations", + "invitation_membership": "invitation_memberships", + "invitation_type": "invitation_types", + "invitation_status": "invitation_statuses", + "membership": "memberships", + "membership_status_code": "membership_status_codes", + "membership_type": "membership_types", + "org_status": "org_statuses", + "org_type": "org_types", + "payment_type": "payment_types", + "product_role_code": "product_role_codes", + "product_subscription": "product_subscriptions", + "product_subscription_role": "product_subscription_roles", + "product_type_code": "product_type_codes", + "user_status_code": "user_status_codes", +} + +skip_table = ["alembic", "activity", "transaction"] def upgrade(): @@ -67,7 +70,8 @@ def upgrade(): conn = op.get_bind() inspector = Inspector.from_engine(conn) tables = inspector.get_table_names() - metadata = MetaData(conn, reflect=True) + metadata = MetaData() + metadata.reflect(conn) table: str for table in tables: @@ -80,7 +84,8 @@ def downgrade(): conn = op.get_bind() inspector = Inspector.from_engine(conn) tables = inspector.get_table_names() - metadata = MetaData(conn, reflect=True) + metadata = MetaData() + metadata.reflect(conn) table_mapping_reversed = {y: x for x, y in table_mapping.items()} table: str for table in tables: @@ -91,7 +96,7 @@ def downgrade(): def _rename_obj(inspector, metadata, table: str, name_dict, tables): _rename_fks(inspector, table, table_mapping) - new_table_name: str = name_dict.get(table, '') + new_table_name: str = name_dict.get(table, "") if new_table_name: _rename_indexes(inspector, new_table_name, table) _rename_pk(inspector, new_table_name, table) @@ -104,7 +109,6 @@ def _rename_obj(inspector, metadata, table: str, name_dict, tables): _rename_table(versioned_table_name, versioned_table_new_name) - def _suffix_version(table): return table + VERSION @@ -115,31 +119,31 @@ def 
_rename_table(table, new_table_name): def _rename_sequence(m, new_table_name, table): """Rename the autogenerated sequnces for the table.""" - id_column = m.tables[table].columns.get('id') + id_column = m.tables[table].columns.get("id") if id_column is not None and id_column.server_default is not None: seq = id_column.server_default.arg.text # format is 'nextval(\'org_id_seq\'::regclass)' - seq_name = re.search("nextval\(\'(.*)\'::regclass", seq).group(1) + seq_name = re.search("nextval\\('(.*)'::regclass", seq).group(1) new_seq_name = seq_name.replace(table, new_table_name, 1) if seq_name != new_seq_name: - op.execute(f'ALTER sequence {seq_name} RENAME TO {new_seq_name}') + op.execute(f"ALTER sequence {seq_name} RENAME TO {new_seq_name}") def _rename_pk(inspector, new_table_name, table): """Rename primary key.From org_pkey to orgs_pkey""" - pk_name = inspector.get_pk_constraint(table).get('name') + pk_name = inspector.get_pk_constraint(table).get("name") new_pk_name = pk_name.replace(table, new_table_name, 1) if pk_name != new_pk_name: - op.execute(f'ALTER index {pk_name} RENAME TO {new_pk_name}') + op.execute(f"ALTER index {pk_name} RENAME TO {new_pk_name}") def _rename_indexes(inspector, new_table_name, table): """Rename the indexes.ie ix_org_access_type to ix_orgs_access_type""" for index in inspector.get_indexes(table): - old_index_name = index.get('name') + old_index_name = index.get("name") new_index_name = old_index_name.replace(table, new_table_name, 1) if old_index_name != new_index_name: - op.execute(f'ALTER index {old_index_name} RENAME TO {new_index_name}') + op.execute(f"ALTER index {old_index_name} RENAME TO {new_index_name}") def _rename_fks(inspector, table: str, name_dict): @@ -151,9 +155,9 @@ def _rename_fks(inspector, table: str, name_dict): """ foreign_keys = inspector.get_foreign_keys(table) for fk in foreign_keys: - fk_name = fk.get('name') - referred_table = fk.get('referred_table') - referred_table_new_name = name_dict.get(referred_table, 
'') + fk_name = fk.get("name") + referred_table = fk.get("referred_table") + referred_table_new_name = name_dict.get(referred_table, "") if referred_table_new_name: new_fk_name = fk_name.replace(referred_table, referred_table_new_name) if fk_name != new_fk_name: diff --git a/auth-api/migrations/versions/c44fff21c830_pending_approval_view_permission.py b/auth-api/migrations/versions/c44fff21c830_pending_approval_view_permission.py index 5d795de363..d216308add 100644 --- a/auth-api/migrations/versions/c44fff21c830_pending_approval_view_permission.py +++ b/auth-api/migrations/versions/c44fff21c830_pending_approval_view_permission.py @@ -5,8 +5,10 @@ Create Date: 2021-06-25 09:30:56.448783 """ + import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.dialects import postgresql from sqlalchemy.sql import column, table @@ -14,31 +16,36 @@ # revision identifiers, used by Alembic. -revision = 'c44fff21c830' -down_revision = 'e6e295695b9a' +revision = "c44fff21c830" +down_revision = "e6e295695b9a" branch_labels = None depends_on = None def upgrade(): - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) - op.execute('delete from permissions where actions=\'request_product_package\'') + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) + op.execute("delete from permissions where actions='request_product_package'") conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(text(f"select max(id) from permissions;")) latest_id = res.fetchall()[0][0] # Insert code values op.bulk_insert( permissions_table, [ - {'id': latest_id + 1, 'membership_type_code': 
'ADMIN', 'org_status_code': 'PENDING_AFFIDAVIT_REVIEW', - 'actions': 'view'} - ] + { + "id": latest_id + 1, + "membership_type_code": "ADMIN", + "org_status_code": "PENDING_AFFIDAVIT_REVIEW", + "actions": "view", + } + ], ) diff --git a/auth-api/migrations/versions/eaef1147f25c_permissions_for_frcr_review.py b/auth-api/migrations/versions/eaef1147f25c_permissions_for_frcr_review.py index c58a65e428..615d2ac9a4 100644 --- a/auth-api/migrations/versions/eaef1147f25c_permissions_for_frcr_review.py +++ b/auth-api/migrations/versions/eaef1147f25c_permissions_for_frcr_review.py @@ -5,39 +5,45 @@ Create Date: 2021-01-09 08:11:26.337104 """ + import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.sql import column, table # revision identifiers, used by Alembic. -revision = 'eaef1147f25c' -down_revision = 'b72d4946fb3c' +revision = "eaef1147f25c" +down_revision = "b72d4946fb3c" branch_labels = None depends_on = None def upgrade(): - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(text(f"select max(id) from permissions;")) latest_id = res.fetchall()[0][0] op.bulk_insert( permissions_table, [ - {'id': (latest_id := latest_id + 1), 'membership_type_code': 'ADMIN', 'actions': 'change_payment_method'}, - {'id': (latest_id := latest_id + 1), 'membership_type_code': 'ADMIN', 'actions': 'change_pad_info'}, - {'id': (latest_id := latest_id + 1), 'membership_type_code': 'ADMIN', 'org_status_code': 'NSF_SUSPENDED', - 'actions': 'change_pad_info'}, - 
{'id': (latest_id := latest_id + 1), 'membership_type_code': 'ADMIN', 'actions': 'view_auth_options'}, - {'id': latest_id + 1, 'membership_type_code': 'ADMIN', 'actions': 'change_statement_settings'} - - ] + {"id": (latest_id := latest_id + 1), "membership_type_code": "ADMIN", "actions": "change_payment_method"}, + {"id": (latest_id := latest_id + 1), "membership_type_code": "ADMIN", "actions": "change_pad_info"}, + { + "id": (latest_id := latest_id + 1), + "membership_type_code": "ADMIN", + "org_status_code": "NSF_SUSPENDED", + "actions": "change_pad_info", + }, + {"id": (latest_id := latest_id + 1), "membership_type_code": "ADMIN", "actions": "view_auth_options"}, + {"id": latest_id + 1, "membership_type_code": "ADMIN", "actions": "change_statement_settings"}, + ], ) op.execute("update permissions set actions='change_auth_options' where actions='set_auth_options'") @@ -45,5 +51,6 @@ def upgrade(): def downgrade(): op.execute( "delete from permissions where membership_type_code='ADMIN' " - "and actions in ('change_payment_method','change_pad_info','view_auth_options','change_statement_settings')") + "and actions in ('change_payment_method','change_pad_info','view_auth_options','change_statement_settings')" + ) op.execute("update permissions set actions=set_auth_options where actions='change_auth_options'") diff --git a/auth-api/migrations/versions/f8f82c74f1fc_permissons_for_product_package.py b/auth-api/migrations/versions/f8f82c74f1fc_permissons_for_product_package.py index 2974979b88..2aa4326284 100644 --- a/auth-api/migrations/versions/f8f82c74f1fc_permissons_for_product_package.py +++ b/auth-api/migrations/versions/f8f82c74f1fc_permissons_for_product_package.py @@ -5,8 +5,10 @@ Create Date: 2021-05-04 16:04:09.108771 """ + import sqlalchemy as sa from alembic import op +from sqlalchemy import text from sqlalchemy.dialects import postgresql from sqlalchemy.sql import column, table @@ -14,64 +16,92 @@ # revision identifiers, used by Alembic. 
-revision = 'f8f82c74f1fc' -down_revision = '1a46fdc4d630' +revision = "f8f82c74f1fc" +down_revision = "1a46fdc4d630" branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) - op.execute('delete from permissions where actions=\'request_product_package\'') + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) + op.execute(text("delete from permissions where actions='request_product_package'")) conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(text(f"select max(id) from permissions;")) latest_id = res.fetchall()[0][0] # Insert code values op.bulk_insert( permissions_table, [ - {'id': latest_id + 1, 'membership_type_code': 'ADMIN', 'org_status_code': None, - 'actions': 'edit_request_product_package'}, - {'id': latest_id + 2, 'membership_type_code': 'COORDINATOR', 'org_status_code': None, - 'actions': 'edit_request_product_package'}, - {'id': latest_id + 3, 'membership_type_code': 'ADMIN', 'org_status_code': None, - 'actions': 'view_request_product_package'}, - {'id': latest_id + 4, 'membership_type_code': 'COORDINATOR', 'org_status_code': None, - 'actions': 'view_request_product_package'} - ] + { + "id": latest_id + 1, + "membership_type_code": "ADMIN", + "org_status_code": None, + "actions": "edit_request_product_package", + }, + { + "id": latest_id + 2, + "membership_type_code": "COORDINATOR", + "org_status_code": None, + "actions": "edit_request_product_package", + }, + { + "id": latest_id + 3, + "membership_type_code": "ADMIN", + "org_status_code": None, + 
"actions": "view_request_product_package", + }, + { + "id": latest_id + 4, + "membership_type_code": "COORDINATOR", + "org_status_code": None, + "actions": "view_request_product_package", + }, + ], ) # ### end Alembic commands ### def downgrade(): - op.execute('delete from permissions where actions in (\'edit_request_product_package\', \'view_request_product_package\')') + op.execute( + "delete from permissions where actions in ('edit_request_product_package', 'view_request_product_package')" + ) # ### end Alembic commands ### - permissions_table = table('permissions', - column('id', sa.Integer()), - column('membership_type_code', sa.String(length=15)), - column('org_status_code', sa.String(length=25)), - column('actions', sa.String(length=100))) + permissions_table = table( + "permissions", + column("id", sa.Integer()), + column("membership_type_code", sa.String(length=15)), + column("org_status_code", sa.String(length=25)), + column("actions", sa.String(length=100)), + ) conn = op.get_bind() - res = conn.execute( - f"select max(id) from permissions;") + res = conn.execute(f"select max(id) from permissions;") latest_id = res.fetchall()[0][0] # Insert code values op.bulk_insert( permissions_table, [ - {'id': latest_id + 1, 'membership_type_code': 'ADMIN', 'org_status_code': None, - 'actions': 'request_product_package'}, - {'id': latest_id + 2, 'membership_type_code': 'COORDINATOR', 'org_status_code': None, - 'actions': 'request_product_package'} - ] + { + "id": latest_id + 1, + "membership_type_code": "ADMIN", + "org_status_code": None, + "actions": "request_product_package", + }, + { + "id": latest_id + 2, + "membership_type_code": "COORDINATOR", + "org_status_code": None, + "actions": "request_product_package", + }, + ], ) diff --git a/auth-api/openshift/templates/bc.yaml b/auth-api/openshift/templates/bc.yaml deleted file mode 100644 index a3fbd389db..0000000000 --- a/auth-api/openshift/templates/bc.yaml +++ /dev/null @@ -1,121 +0,0 @@ -apiVersion: 
template.openshift.io/v1 -kind: Template -metadata: - labels: - name: ${NAME} - name: ${NAME}-build -objects: -- apiVersion: v1 - kind: ImageStream - metadata: - name: ${NAME} - labels: - name: ${NAME} -- apiVersion: v1 - kind: BuildConfig - metadata: - name: ${NAME} - labels: - name: ${NAME} - spec: - output: - to: - kind: ImageStreamTag - name: ${NAME}:${OUTPUT_IMAGE_TAG} - resources: - limits: - cpu: ${CPU_LIMIT} - memory: ${MEMORY_LIMIT} - requests: - cpu: ${CPU_REQUEST} - memory: ${MEMORY_REQUEST} - runPolicy: Serial - source: - contextDir: ${SOURCE_CONTEXT_DIR} - git: - ref: ${GIT_REF} - uri: ${GIT_REPO_URL} - dockerfile: | - FROM docker-remote.artifacts.developer.gov.bc.ca/python:3.8.5-buster - - USER root - - # Create working directory - RUN mkdir /opt/app-root && chmod 755 /opt/app-root - WORKDIR /opt/app-root - - # Install the requirements - COPY ./requirements.txt . - - RUN pip install --upgrade pip - RUN pip install --no-cache-dir -r requirements.txt - - COPY . . - - RUN pip install . - - USER 1001 - - # Set Python path - ENV PYTHONPATH=/opt/app-root/src - - ENTRYPOINT ["bash", "docker-entrypoint.sh"] - type: Git - strategy: - type: Docker - dockerStrategy: - pullSecret: - name: artifactory-creds - - triggers: - - type: ConfigChange -parameters: -- description: | - The name assigned to all of the objects defined in this template. - You should keep this as default unless your know what your doing. - displayName: Name - name: NAME - required: true - value: auth-api -- description: | - The URL to your GIT repo, don't use the this default unless - your just experimenting. - displayName: Git Repo URL - name: GIT_REPO_URL - required: true - value: https://github.com/bcgov/sbc-auth.git -- description: The git reference or branch. - displayName: Git Reference - name: GIT_REF - required: true - value: development -- description: The source context directory. 
- displayName: Source Context Directory - name: SOURCE_CONTEXT_DIR - required: false - value: auth-api -- description: The tag given to the built image. - displayName: Output Image Tag - name: OUTPUT_IMAGE_TAG - required: true - value: latest -- description: The resources CPU limit (in cores) for this build. - displayName: Resources CPU Limit - name: CPU_LIMIT - required: true - value: "2" -- description: The resources Memory limit (in Mi, Gi, etc) for this build. - displayName: Resources Memory Limit - name: MEMORY_LIMIT - required: true - value: 2Gi -- description: The resources CPU request (in cores) for this build. - displayName: Resources CPU Request - name: CPU_REQUEST - required: true - value: "1" -- description: The resources Memory request (in Mi, Gi, etc) for this build. - displayName: Resources Memory Request - name: MEMORY_REQUEST - required: true - value: 2Gi diff --git a/auth-api/openshift/templates/dc.yaml b/auth-api/openshift/templates/dc.yaml deleted file mode 100644 index e9a6d56d40..0000000000 --- a/auth-api/openshift/templates/dc.yaml +++ /dev/null @@ -1,216 +0,0 @@ ---- -kind: Template -apiVersion: v1 -metadata: - name: ${NAME}-${TAG}-deployment-template - annotations: - description: - Deployment template for an API application and connect to database. 
- tags: Flask - iconClass: icon-python -objects: - - kind: Service - apiVersion: v1 - metadata: - name: ${NAME}-${TAG} - labels: - name: ${NAME} - environment: ${TAG} - role: ${ROLE} - annotations: - description: Exposes and load balances the application pods - service.alpha.openshift.io/dependencies: "[{name: ${DB_NAME}-${TAG}, kind: Service}]" - spec: - ports: - - name: ${NAME}-${TAG}-tcp - port: 8080 - targetPort: 8080 - selector: - name: ${NAME} - environment: ${TAG} - - - kind: Route - apiVersion: v1 - metadata: - name: ${NAME}-${TAG} - labels: - name: ${NAME} - environment: ${TAG} - role: ${ROLE} - annotations: - haproxy.router.openshift.io/ip_whitelist: ${ALLOW_LIST} - spec: - host: ${APPLICATION_DOMAIN} - tls: - insecureEdgeTerminationPolicy: Redirect - termination: edge - port: - targetPort: ${NAME}-${TAG}-tcp - to: - kind: Service - name: ${NAME}-${TAG} - weight: 100 - - - kind: DeploymentConfig - apiVersion: v1 - metadata: - name: ${NAME}-${TAG} - labels: - name: ${NAME} - environment: ${TAG} - role: ${ROLE} - annotations: - description: Defines how to deploy the application server - spec: - strategy: - rollingParams: - intervalSeconds: 1 - maxSurge: 25% - maxUnavailable: 25% - pre: - failurePolicy: Abort - execNewPod: - command: - - /opt/app-root/pre-hook-update-db.sh - containerName: ${NAME}-${TAG} - timeoutSeconds: 600 - updatePeriodSeconds: 1 - type: Rolling - triggers: - - type: ImageChange - imageChangeParams: - automatic: true - containerNames: - - ${NAME}-${TAG} - from: - kind: ImageStreamTag - namespace: ${NAMESPACE}-${IMAGE_NAMESPACE} - name: ${NAME}:${TAG} - replicas: 1 - selector: - name: ${NAME} - environment: ${TAG} - template: - metadata: - name: ${NAME}-${TAG} - labels: - name: ${NAME} - environment: ${TAG} - role: ${ROLE} - spec: - containers: - - name: ${NAME}-${TAG} - image: ${IMAGE_REGISTRY}/${NAMESPACE}-${IMAGE_NAMESPACE}/${NAME}:${TAG} - ports: - - containerPort: 8080 - protocol: TCP - readinessProbe: - initialDelaySeconds: 3 - 
timeoutSeconds: 30 - httpGet: - path: /ops/readyz - port: 8080 - livenessProbe: - initialDelaySeconds: 120 - timeoutSeconds: 30 - httpGet: - path: /ops/healthz - port: 8080 - - - kind: HorizontalPodAutoscaler - apiVersion: autoscaling/v1 - metadata: - name: ${NAME}-${TAG} - labels: - name: ${NAME} - environment: ${TAG} - role: ${ROLE} - spec: - scaleTargetRef: - kind: DeploymentConfig - name: ${NAME}-${TAG} - minReplicas: ${{MIN_REPLICAS}} - maxReplicas: ${{MAX_REPLICAS}} - -parameters: - - name: NAME - displayName: Name - description: The name assigned to all of the OpenShift resources associated to the server instance. - required: true - value: auth-api - - - name: TAG - displayName: Environment TAG name - description: The TAG name for this environment, e.g., dev, test, prod - value: dev - required: true - - - name: ROLE - displayName: Role - description: Role - required: true - value: api - - - name: NAMESPACE - displayName: Namespace Name - description: The base namespace name for the project. - required: true - value: 6e0e49 - - - name: IMAGE_NAMESPACE - displayName: Image Namespace - required: true - description: The namespace of the OpenShift project containing the imagestream for the application. - value: tools - - - name: IMAGE_REGISTRY - displayName: Image Registry - required: true - description: The image registry of the OpenShift project. - value: image-registry.openshift-image-registry.svc:5000 - - - name: APPLICATION_DOMAIN - displayName: Application Domain - description: The endpoint used for routing access to the application. - value: auth-api-dev.apps.silver.devops.gov.bc.ca - - - name: ALLOW_LIST - displayName: Allow List - description: The allow list containing all of the trusted source IPs. - required: false - - - name: MIN_REPLICAS - displayName: Minimum Replicas - description: The minimum number of pods to have running. 
- required: true - value: "1" - - - name: MAX_REPLICAS - displayName: Maximum Replicas - description: The maximum number of pods to have running. - required: true - value: "1" - - - name: CPU_REQUEST - displayName: Resources CPU Request - description: The resources CPU request (in cores) for this build. - required: true - value: 10m - - - name: CPU_LIMIT - displayName: Resources CPU Limit - description: The resources CPU limit (in cores) for this build. - required: true - value: 500m - - - name: MEMORY_REQUEST - displayName: Resources Memory Request - description: The resources Memory request (in Mi, Gi, etc) for this build. - required: true - value: 10Mi - - - name: MEMORY_LIMIT - displayName: Resources Memory Limit - description: The resources Memory limit (in Mi, Gi, etc) for this build. - required: true - value: 1Gi diff --git a/auth-api/poetry.lock b/auth-api/poetry.lock index 838d11ad26..6bf0d853c0 100644 --- a/auth-api/poetry.lock +++ b/auth-api/poetry.lock @@ -2,108 +2,122 @@ [[package]] name = "aiohappyeyeballs" -version = "2.3.5" +version = "2.4.3" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, ] [[package]] name = "aiohttp" -version = "3.10.2" +version = "3.10.9" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:95213b3d79c7e387144e9cb7b9d2809092d6ff2c044cb59033aedc612f38fb6d"}, - {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1aa005f060aff7124cfadaa2493f00a4e28ed41b232add5869e129a2e395935a"}, - {file = "aiohttp-3.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eabe6bf4c199687592f5de4ccd383945f485779c7ffb62a9b9f1f8a3f9756df8"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e010736fc16d21125c7e2dc5c350cd43c528b85085c04bf73a77be328fe944"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99f81f9c1529fd8e03be4a7bd7df32d14b4f856e90ef6e9cbad3415dbfa9166c"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d611d1a01c25277bcdea06879afbc11472e33ce842322496b211319aa95441bb"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00191d38156e09e8c81ef3d75c0d70d4f209b8381e71622165f22ef7da6f101"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74c091a5ded6cb81785de2d7a8ab703731f26de910dbe0f3934eabef4ae417cc"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:18186a80ec5a701816adbf1d779926e1069392cf18504528d6e52e14b5920525"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5a7ceb2a0d2280f23a02c64cd0afdc922079bb950400c3dd13a1ab2988428aac"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8bd7be6ff6c162a60cb8fce65ee879a684fbb63d5466aba3fa5b9288eb04aefa"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fae962b62944eaebff4f4fddcf1a69de919e7b967136a318533d82d93c3c6bd1"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a0fde16d284efcacbe15fb0c1013f0967b6c3e379649239d783868230bf1db42"}, - {file = 
"aiohttp-3.10.2-cp310-cp310-win32.whl", hash = "sha256:f81cd85a0e76ec7b8e2b6636fe02952d35befda4196b8c88f3cec5b4fb512839"}, - {file = "aiohttp-3.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:54ba10eb5a3481c28282eb6afb5f709aedf53cf9c3a31875ffbdc9fc719ffd67"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:87fab7f948e407444c2f57088286e00e2ed0003ceaf3d8f8cc0f60544ba61d91"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6ad66ed660d46503243cbec7b2b3d8ddfa020f984209b3b8ef7d98ce69c3f2"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4be88807283bd96ae7b8e401abde4ca0bab597ba73b5e9a2d98f36d451e9aac"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01c98041f90927c2cbd72c22a164bb816fa3010a047d264969cf82e1d4bcf8d1"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54e36c67e1a9273ecafab18d6693da0fb5ac48fd48417e4548ac24a918c20998"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7de3ddb6f424af54535424082a1b5d1ae8caf8256ebd445be68c31c662354720"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dd9c7db94b4692b827ce51dcee597d61a0e4f4661162424faf65106775b40e7"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e57e21e1167705f8482ca29cc5d02702208d8bf4aff58f766d94bcd6ead838cd"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a1a50e59b720060c29e2951fd9f13c01e1ea9492e5a527b92cfe04dd64453c16"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:686c87782481fda5ee6ba572d912a5c26d9f98cc5c243ebd03f95222af3f1b0f"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:dafb4abb257c0ed56dc36f4e928a7341b34b1379bd87e5a15ce5d883c2c90574"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:494a6f77560e02bd7d1ab579fdf8192390567fc96a603f21370f6e63690b7f3d"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6fe8503b1b917508cc68bf44dae28823ac05e9f091021e0c41f806ebbb23f92f"}, - {file = "aiohttp-3.10.2-cp311-cp311-win32.whl", hash = "sha256:4ddb43d06ce786221c0dfd3c91b4892c318eaa36b903f7c4278e7e2fa0dd5102"}, - {file = "aiohttp-3.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:ca2f5abcb0a9a47e56bac173c01e9f6c6e7f27534d91451c5f22e6a35a5a2093"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:14eb6b17f6246959fb0b035d4f4ae52caa870c4edfb6170aad14c0de5bfbf478"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:465e445ec348d4e4bd349edd8b22db75f025da9d7b6dc1369c48e7935b85581e"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:341f8ece0276a828d95b70cd265d20e257f5132b46bf77d759d7f4e0443f2906"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01fbb87b5426381cd9418b3ddcf4fc107e296fa2d3446c18ce6c76642f340a3"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c474af073e1a6763e1c5522bbb2d85ff8318197e4c6c919b8d7886e16213345"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9076810a5621236e29b2204e67a68e1fe317c8727ee4c9abbfbb1083b442c38"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f515d6859e673940e08de3922b9c4a2249653b0ac181169313bd6e4b1978ac"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:655e583afc639bef06f3b2446972c1726007a21003cd0ef57116a123e44601bc"}, - {file = 
"aiohttp-3.10.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8da9449a575133828cc99985536552ea2dcd690e848f9d41b48d8853a149a959"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19073d57d0feb1865d12361e2a1f5a49cb764bf81a4024a3b608ab521568093a"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c8e98e1845805f184d91fda6f9ab93d7c7b0dddf1c07e0255924bfdb151a8d05"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:377220a5efde6f9497c5b74649b8c261d3cce8a84cb661be2ed8099a2196400a"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92f7f4a4dc9cdb5980973a74d43cdbb16286dacf8d1896b6c3023b8ba8436f8e"}, - {file = "aiohttp-3.10.2-cp312-cp312-win32.whl", hash = "sha256:9bb2834a6f11d65374ce97d366d6311a9155ef92c4f0cee543b2155d06dc921f"}, - {file = "aiohttp-3.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:518dc3cb37365255708283d1c1c54485bbacccd84f0a0fb87ed8917ba45eda5b"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7f98e70bbbf693086efe4b86d381efad8edac040b8ad02821453083d15ec315f"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f6f0b252a009e98fe84028a4ec48396a948e7a65b8be06ccfc6ef68cf1f614d"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9360e3ffc7b23565600e729e8c639c3c50d5520e05fdf94aa2bd859eef12c407"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3988044d1635c7821dd44f0edfbe47e9875427464e59d548aece447f8c22800a"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a9d59da1543a6f1478c3436fd49ec59be3868bca561a33778b4391005e499d"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f49bdb94809ac56e09a310a62f33e5f22973d6fd351aac72a39cd551e98194"}, - {file = 
"aiohttp-3.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfd2dca3f11c365d6857a07e7d12985afc59798458a2fdb2ffa4a0332a3fd43"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c1508ec97b2cd3e120bfe309a4ff8e852e8a7460f1ef1de00c2c0ed01e33c"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:49904f38667c44c041a0b44c474b3ae36948d16a0398a8f8cd84e2bb3c42a069"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:352f3a4e5f11f3241a49b6a48bc5b935fabc35d1165fa0d87f3ca99c1fcca98b"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:fc61f39b534c5d5903490478a0dd349df397d2284a939aa3cbaa2fb7a19b8397"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ad2274e707be37420d0b6c3d26a8115295fe9d8e6e530fa6a42487a8ca3ad052"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c836bf3c7512100219fe1123743fd8dd9a2b50dd7cfb0c3bb10d041309acab4b"}, - {file = "aiohttp-3.10.2-cp38-cp38-win32.whl", hash = "sha256:53e8898adda402be03ff164b0878abe2d884e3ea03a4701e6ad55399d84b92dc"}, - {file = "aiohttp-3.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:7cc8f65f5b22304693de05a245b6736b14cb5bc9c8a03da6e2ae9ef15f8b458f"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9dfc906d656e14004c5bc672399c1cccc10db38df2b62a13fb2b6e165a81c316"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:91b10208b222ddf655c3a3d5b727879d7163db12b634492df41a9182a76edaae"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fd16b5e1a7bdd14668cd6bde60a2a29b49147a535c74f50d8177d11b38433a7"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2bfdda4971bd79201f59adbad24ec2728875237e1c83bba5221284dbbf57bda"}, - {file = 
"aiohttp-3.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69d73f869cf29e8a373127fc378014e2b17bcfbe8d89134bc6fb06a2f67f3cb3"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df59f8486507c421c0620a2c3dce81fbf1d54018dc20ff4fecdb2c106d6e6abc"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df930015db36b460aa9badbf35eccbc383f00d52d4b6f3de2ccb57d064a6ade"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:562b1153ab7f766ee6b8b357ec777a302770ad017cf18505d34f1c088fccc448"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d984db6d855de58e0fde1ef908d48fe9a634cadb3cf715962722b4da1c40619d"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:14dc3fcb0d877911d775d511eb617a486a8c48afca0a887276e63db04d3ee920"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b52a27a5c97275e254704e1049f4b96a81e67d6205f52fa37a4777d55b0e98ef"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:cd33d9de8cfd006a0d0fe85f49b4183c57e91d18ffb7e9004ce855e81928f704"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1238fc979160bc03a92fff9ad021375ff1c8799c6aacb0d8ea1b357ea40932bb"}, - {file = "aiohttp-3.10.2-cp39-cp39-win32.whl", hash = "sha256:e2f43d238eae4f0b04f58d4c0df4615697d4ca3e9f9b1963d49555a94f0f5a04"}, - {file = "aiohttp-3.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:947847f07a8f81d7b39b2d0202fd73e61962ebe17ac2d8566f260679e467da7b"}, - {file = "aiohttp-3.10.2.tar.gz", hash = "sha256:4d1f694b5d6e459352e5e925a42e05bac66655bfde44d81c59992463d2897014"}, + {file = "aiohttp-3.10.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8b3fb28a9ac8f2558760d8e637dbf27aef1e8b7f1d221e8669a1074d1a266bb2"}, + {file = 
"aiohttp-3.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91aa966858593f64c8a65cdefa3d6dc8fe3c2768b159da84c1ddbbb2c01ab4ef"}, + {file = "aiohttp-3.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63649309da83277f06a15bbdc2a54fbe75efb92caa2c25bb57ca37762789c746"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e7fabedb3fe06933f47f1538df7b3a8d78e13d7167195f51ca47ee12690373"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c070430fda1a550a1c3a4c2d7281d3b8cfc0c6715f616e40e3332201a253067"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51d0a4901b27272ae54e42067bc4b9a90e619a690b4dc43ea5950eb3070afc32"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fec5fac7aea6c060f317f07494961236434928e6f4374e170ef50b3001e14581"}, + {file = "aiohttp-3.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:172ad884bb61ad31ed7beed8be776eb17e7fb423f1c1be836d5cb357a096bf12"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d646fdd74c25bbdd4a055414f0fe32896c400f38ffbdfc78c68e62812a9e0257"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e86260b76786c28acf0b5fe31c8dca4c2add95098c709b11e8c35b424ebd4f5b"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d7cafc11d70fdd8801abfc2ff276744ae4cb39d8060b6b542c7e44e5f2cfc2"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc262c3df78c8ff6020c782d9ce02e4bcffe4900ad71c0ecdad59943cba54442"}, + {file = "aiohttp-3.10.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:482c85cf3d429844396d939b22bc2a03849cb9ad33344689ad1c85697bcba33a"}, + {file = "aiohttp-3.10.9-cp310-cp310-win32.whl", hash = 
"sha256:aeebd3061f6f1747c011e1d0b0b5f04f9f54ad1a2ca183e687e7277bef2e0da2"}, + {file = "aiohttp-3.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:fa430b871220dc62572cef9c69b41e0d70fcb9d486a4a207a5de4c1f25d82593"}, + {file = "aiohttp-3.10.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:16e6a51d8bc96b77f04a6764b4ad03eeef43baa32014fce71e882bd71302c7e4"}, + {file = "aiohttp-3.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8bd9125dd0cc8ebd84bff2be64b10fdba7dc6fd7be431b5eaf67723557de3a31"}, + {file = "aiohttp-3.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dcf354661f54e6a49193d0b5653a1b011ba856e0b7a76bda2c33e4c6892f34ea"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42775de0ca04f90c10c5c46291535ec08e9bcc4756f1b48f02a0657febe89b10"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d1e4185c5d7187684d41ebb50c9aeaaaa06ca1875f4c57593071b0409d2444"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2695c61cf53a5d4345a43d689f37fc0f6d3a2dc520660aec27ec0f06288d1f9"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a3f063b41cc06e8d0b3fcbbfc9c05b7420f41287e0cd4f75ce0a1f3d80729e6"}, + {file = "aiohttp-3.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d37f4718002863b82c6f391c8efd4d3a817da37030a29e2682a94d2716209de"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2746d8994ebca1bdc55a1e998feff4e94222da709623bb18f6e5cfec8ec01baf"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6f3c6648aa123bcd73d6f26607d59967b607b0da8ffcc27d418a4b59f4c98c7c"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:558b3d223fd631ad134d89adea876e7fdb4c93c849ef195049c063ada82b7d08"}, + {file = 
"aiohttp-3.10.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4e6cb75f8ddd9c2132d00bc03c9716add57f4beff1263463724f6398b813e7eb"}, + {file = "aiohttp-3.10.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:608cecd8d58d285bfd52dbca5b6251ca8d6ea567022c8a0eaae03c2589cd9af9"}, + {file = "aiohttp-3.10.9-cp311-cp311-win32.whl", hash = "sha256:36d4fba838be5f083f5490ddd281813b44d69685db910907636bc5dca6322316"}, + {file = "aiohttp-3.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:8be1a65487bdfc285bd5e9baf3208c2132ca92a9b4020e9f27df1b16fab998a9"}, + {file = "aiohttp-3.10.9-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4fd16b30567c5b8e167923be6e027eeae0f20cf2b8a26b98a25115f28ad48ee0"}, + {file = "aiohttp-3.10.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:40ff5b7660f903dc587ed36ef08a88d46840182d9d4b5694e7607877ced698a1"}, + {file = "aiohttp-3.10.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4edc3fd701e2b9a0d605a7b23d3de4ad23137d23fc0dbab726aa71d92f11aaaf"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e525b69ee8a92c146ae5b4da9ecd15e518df4d40003b01b454ad694a27f498b5"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5002a02c17fcfd796d20bac719981d2fca9c006aac0797eb8f430a58e9d12431"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4ceeae2fb8cabdd1b71c82bfdd39662473d3433ec95b962200e9e752fb70d0"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e395c3d1f773cf0651cd3559e25182eb0c03a2777b53b4575d8adc1149c6e9"}, + {file = "aiohttp-3.10.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbdb8def5268f3f9cd753a265756f49228a20ed14a480d151df727808b4531dd"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:f82ace0ec57c94aaf5b0e118d4366cff5889097412c75aa14b4fd5fc0c44ee3e"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6ebdc3b3714afe1b134b3bbeb5f745eed3ecbcff92ab25d80e4ef299e83a5465"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f9ca09414003c0e96a735daa1f071f7d7ed06962ef4fa29ceb6c80d06696d900"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1298b854fd31d0567cbb916091be9d3278168064fca88e70b8468875ef9ff7e7"}, + {file = "aiohttp-3.10.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:60ad5b8a7452c0f5645c73d4dad7490afd6119d453d302cd5b72b678a85d6044"}, + {file = "aiohttp-3.10.9-cp312-cp312-win32.whl", hash = "sha256:1a0ee6c0d590c917f1b9629371fce5f3d3f22c317aa96fbdcce3260754d7ea21"}, + {file = "aiohttp-3.10.9-cp312-cp312-win_amd64.whl", hash = "sha256:c46131c6112b534b178d4e002abe450a0a29840b61413ac25243f1291613806a"}, + {file = "aiohttp-3.10.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2bd9f3eac515c16c4360a6a00c38119333901b8590fe93c3257a9b536026594d"}, + {file = "aiohttp-3.10.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8cc0d13b4e3b1362d424ce3f4e8c79e1f7247a00d792823ffd640878abf28e56"}, + {file = "aiohttp-3.10.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ba1a599255ad6a41022e261e31bc2f6f9355a419575b391f9655c4d9e5df5ff5"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:776e9f3c9b377fcf097c4a04b241b15691e6662d850168642ff976780609303c"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8debb45545ad95b58cc16c3c1cc19ad82cffcb106db12b437885dbee265f0ab5"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2555e4949c8d8782f18ef20e9d39730d2656e218a6f1a21a4c4c0b56546a02e"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c54dc329cd44f7f7883a9f4baaefe686e8b9662e2c6c184ea15cceee587d8d69"}, + {file = "aiohttp-3.10.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e709d6ac598c5416f879bb1bae3fd751366120ac3fa235a01de763537385d036"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:17c272cfe7b07a5bb0c6ad3f234e0c336fb53f3bf17840f66bd77b5815ab3d16"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0c21c82df33b264216abffff9f8370f303dab65d8eee3767efbbd2734363f677"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9331dd34145ff105177855017920dde140b447049cd62bb589de320fd6ddd582"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ac3196952c673822ebed8871cf8802e17254fff2a2ed4835d9c045d9b88c5ec7"}, + {file = "aiohttp-3.10.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2c33fa6e10bb7ed262e3ff03cc69d52869514f16558db0626a7c5c61dde3c29f"}, + {file = "aiohttp-3.10.9-cp313-cp313-win32.whl", hash = "sha256:a14e4b672c257a6b94fe934ee62666bacbc8e45b7876f9dd9502d0f0fe69db16"}, + {file = "aiohttp-3.10.9-cp313-cp313-win_amd64.whl", hash = "sha256:a35ed3d03910785f7d9d6f5381f0c24002b2b888b298e6f941b2fc94c5055fcd"}, + {file = "aiohttp-3.10.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f392ef50e22c31fa49b5a46af7f983fa3f118f3eccb8522063bee8bfa6755f8"}, + {file = "aiohttp-3.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d1f5c9169e26db6a61276008582d945405b8316aae2bb198220466e68114a0f5"}, + {file = "aiohttp-3.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8d9d10d10ec27c0d46ddaecc3c5598c4db9ce4e6398ca872cdde0525765caa2f"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d97273a52d7f89a75b11ec386f786d3da7723d7efae3034b4dda79f6f093edc1"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d271f770b52e32236d945911b2082f9318e90ff835d45224fa9e28374303f729"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7003f33f5f7da1eb02f0446b0f8d2ccf57d253ca6c2e7a5732d25889da82b517"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6e00c8a92e7663ed2be6fcc08a2997ff06ce73c8080cd0df10cc0321a3168d7"}, + {file = "aiohttp-3.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a61df62966ce6507aafab24e124e0c3a1cfbe23c59732987fc0fd0d71daa0b88"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:60555211a006d26e1a389222e3fab8cd379f28e0fbf7472ee55b16c6c529e3a6"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d15a29424e96fad56dc2f3abed10a89c50c099f97d2416520c7a543e8fddf066"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:a19caae0d670771ea7854ca30df76f676eb47e0fd9b2ee4392d44708f272122d"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:99f9678bf0e2b1b695e8028fedac24ab6770937932eda695815d5a6618c37e04"}, + {file = "aiohttp-3.10.9-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2914caa46054f3b5ff910468d686742ff8cff54b8a67319d75f5d5945fd0a13d"}, + {file = "aiohttp-3.10.9-cp38-cp38-win32.whl", hash = "sha256:0bc059ecbce835630e635879f5f480a742e130d9821fbe3d2f76610a6698ee25"}, + {file = "aiohttp-3.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:e883b61b75ca6efc2541fcd52a5c8ccfe288b24d97e20ac08fdf343b8ac672ea"}, + {file = "aiohttp-3.10.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fcd546782d03181b0b1d20b43d612429a90a68779659ba8045114b867971ab71"}, + {file = "aiohttp-3.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:85711eec2d875cd88c7eb40e734c4ca6d9ae477d6f26bd2b5bb4f7f60e41b156"}, + {file = "aiohttp-3.10.9-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:02d1d6610588bcd743fae827bd6f2e47e0d09b346f230824b4c6fb85c6065f9c"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3668d0c2a4d23fb136a753eba42caa2c0abbd3d9c5c87ee150a716a16c6deec1"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7c071235a47d407b0e93aa6262b49422dbe48d7d8566e1158fecc91043dd948"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac74e794e3aee92ae8f571bfeaa103a141e409863a100ab63a253b1c53b707eb"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bbf94d4a0447705b7775417ca8bb8086cc5482023a6e17cdc8f96d0b1b5aba6"}, + {file = "aiohttp-3.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb0b2d5d51f96b6cc19e6ab46a7b684be23240426ae951dcdac9639ab111b45e"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e83dfefb4f7d285c2d6a07a22268344a97d61579b3e0dce482a5be0251d672ab"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f0a44bb40b6aaa4fb9a5c1ee07880570ecda2065433a96ccff409c9c20c1624a"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c2b627d3c8982691b06d89d31093cee158c30629fdfebe705a91814d49b554f8"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:03690541e4cc866eef79626cfa1ef4dd729c5c1408600c8cb9e12e1137eed6ab"}, + {file = "aiohttp-3.10.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad3675c126f2a95bde637d162f8231cff6bc0bc9fbe31bd78075f9ff7921e322"}, + {file = "aiohttp-3.10.9-cp39-cp39-win32.whl", hash = "sha256:1321658f12b6caffafdc35cfba6c882cb014af86bef4e78c125e7e794dfb927b"}, + {file = "aiohttp-3.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:9fdf5c839bf95fc67be5794c780419edb0dbef776edcfc6c2e5e2ffd5ee755fa"}, + {file = "aiohttp-3.10.9.tar.gz", hash = 
"sha256:143b0026a9dab07a05ad2dd9e46aa859bffdd6348ddc5967b42161168c24f857"}, ] [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" +yarl = ">=1.12.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] @@ -124,18 +138,16 @@ frozenlist = ">=1.1.0" [[package]] name = "alembic" -version = "1.13.1" +version = "1.13.3" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, + {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"}, + {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"}, ] [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} Mako = "*" SQLAlchemy = ">=1.3.0" typing-extensions = ">=4" @@ -201,104 +213,154 @@ dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] [[package]] -name = "astroid" -version = "3.2.3" -description = "An abstract syntax tree for Python with inference support." 
+name = "asn1crypto" +version = "1.5.1" +description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" optional = false -python-versions = ">=3.8.0" +python-versions = "*" files = [ - {file = "astroid-3.2.3-py3-none-any.whl", hash = "sha256:3eae9ea67c11c858cdd2c91337d2e816bd019ac897ca07d7b346ac10105fceb3"}, - {file = "astroid-3.2.3.tar.gz", hash = "sha256:7099b5a60985529d8d46858befa103b82d0d05a5a5e8b816b5303ed96075e1d9"}, + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - [[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" +name = "astroid" +version = "3.3.5" +description = "An abstract syntax tree for Python with inference support." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.9.0" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8"}, + {file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"}, ] [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", 
"pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] -name = "autopep8" -version = "1.7.0" -description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" +name = "bandit" +version = "1.7.10" +description = "Security oriented static analyser for python code." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "autopep8-1.7.0-py2.py3-none-any.whl", hash = "sha256:6f09e90a2be784317e84dc1add17ebfc7abe3924239957a37e5040e27d812087"}, - {file = "autopep8-1.7.0.tar.gz", hash = "sha256:ca9b1a83e53a7fad65d731dc7a2a2d50aa48f43850407c59f6a1a306c4201142"}, + {file = "bandit-1.7.10-py3-none-any.whl", hash = "sha256:665721d7bebbb4485a339c55161ac0eedde27d51e638000d91c8c2d68343ad02"}, + {file = "bandit-1.7.10.tar.gz", hash = "sha256:59ed5caf5d92b6ada4bf65bc6437feea4a9da1093384445fed4d472acc6cff7b"}, ] [package.dependencies] -pycodestyle = ">=2.9.1" -toml = "*" +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] [[package]] name = "bcrypt" -version = "4.1.3" +version = "4.2.0" description = "Modern password hashing for your software and your 
servers" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.1.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:48429c83292b57bf4af6ab75809f8f4daf52aa5d480632e53707805cc1ce9b74"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8bea4c152b91fd8319fef4c6a790da5c07840421c2b785084989bf8bbb7455"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d3b317050a9a711a5c7214bf04e28333cf528e0ed0ec9a4e55ba628d0f07c1a"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:094fd31e08c2b102a14880ee5b3d09913ecf334cd604af27e1013c76831f7b05"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4fb253d65da30d9269e0a6f4b0de32bd657a0208a6f4e43d3e645774fb5457f3"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:193bb49eeeb9c1e2db9ba65d09dc6384edd5608d9d672b4125e9320af9153a15"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8cbb119267068c2581ae38790e0d1fbae65d0725247a930fc9900c285d95725d"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6cac78a8d42f9d120b3987f82252bdbeb7e6e900a5e1ba37f6be6fe4e3848286"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01746eb2c4299dd0ae1670234bf77704f581dd72cc180f444bfe74eb80495b64"}, - {file = "bcrypt-4.1.3-cp37-abi3-win32.whl", hash = "sha256:037c5bf7c196a63dcce75545c8874610c600809d5d82c305dd327cd4969995bf"}, - {file = "bcrypt-4.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:8a893d192dfb7c8e883c4576813bf18bb9d59e2cfd88b68b725990f033f1b978"}, - {file = "bcrypt-4.1.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d4cf6ef1525f79255ef048b3489602868c47aea61f375377f0d00514fe4a78c"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5698ce5292a4e4b9e5861f7e53b1d89242ad39d54c3da451a93cac17b61921a"}, - {file = 
"bcrypt-4.1.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec3c2e1ca3e5c4b9edb94290b356d082b721f3f50758bce7cce11d8a7c89ce84"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3a5be252fef513363fe281bafc596c31b552cf81d04c5085bc5dac29670faa08"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5f7cd3399fbc4ec290378b541b0cf3d4398e4737a65d0f938c7c0f9d5e686611"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:c4c8d9b3e97209dd7111bf726e79f638ad9224b4691d1c7cfefa571a09b1b2d6"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:31adb9cbb8737a581a843e13df22ffb7c84638342de3708a98d5c986770f2834"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:551b320396e1d05e49cc18dd77d970accd52b322441628aca04801bbd1d52a73"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6717543d2c110a155e6821ce5670c1f512f602eabb77dba95717ca76af79867d"}, - {file = "bcrypt-4.1.3-cp39-abi3-win32.whl", hash = "sha256:6004f5229b50f8493c49232b8e75726b568535fd300e5039e255d919fc3a07f2"}, - {file = "bcrypt-4.1.3-cp39-abi3-win_amd64.whl", hash = "sha256:2505b54afb074627111b5a8dc9b6ae69d0f01fea65c2fcaea403448c503d3991"}, - {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:cb9c707c10bddaf9e5ba7cdb769f3e889e60b7d4fea22834b261f51ca2b89fed"}, - {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9f8ea645eb94fb6e7bea0cf4ba121c07a3a182ac52876493870033141aa687bc"}, - {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f44a97780677e7ac0ca393bd7982b19dbbd8d7228c1afe10b128fd9550eef5f1"}, - {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d84702adb8f2798d813b17d8187d27076cca3cd52fe3686bb07a9083930ce650"}, - {file = "bcrypt-4.1.3.tar.gz", hash = 
"sha256:2ee15dd749f5952fe3f0430d0ff6b74082e159c50332a1413d51b5689cf06623"}, + {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, + {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, + {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, + {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, + {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, + {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, + {file = 
"bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, ] [package.extras] tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] +[[package]] +name = "black" +version = "24.8.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop 
(>=0.15.2)"] + [[package]] name = "blinker" version = "1.8.2" @@ -310,6 +372,60 @@ files = [ {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] +[[package]] +name = "build-deps" +version = "1.0.0" +description = "common dependencies for all auth tools" +optional = false +python-versions = "^3.12" +files = [] +develop = false + +[package.dependencies] +attrs = "24.2.0" +CacheControl = "0.14.0" +cachetools = "5.5.0" +certifi = "2024.8.30" +flask = "3.0.2" +gcp-queue = {git = "https://github.com/bcgov/sbc-connect-common.git", branch = "main", subdirectory = "python/gcp-queue"} +itsdangerous = "2.1.2" +jinja2 = "3.1.3" +jsonschema = "4.17.3" +launchdarkly-server-sdk = "9.5.0" +MarkupSafe = "2.1.1" +python-dotenv = "^1.0.1" +requests = "2.32.3" +sbc-common-components = {git = "https://github.com/bolyachevets/sbc-common-components.git", rev = "camel_case_empty_dict", subdirectory = "python"} +structured-logging = {git = "https://github.com/bcgov/sbc-connect-common.git", branch = "main", subdirectory = "python/structured-logging"} +Werkzeug = "3.0.0" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-auth.git" +reference = "feature-gcp-migration" +resolved_reference = "289c7db2ceac0b4ccd9554a638c8fa6cfe3e8a75" +subdirectory = "build-deps" + +[[package]] +name = "cachecontrol" +version = "0.14.0" +description = "httplib2 caching for requests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, + {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, +] + +[package.dependencies] +msgpack = ">=0.5.2,<2.0.0" +requests = ">=2.16.0" + +[package.extras] +dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", 
"tox", "types-redis", "types-requests"] +filecache = ["filelock (>=3.8.0)"] +redis = ["redis (>=2.10.5)"] + [[package]] name = "cachelib" version = "0.9.0" @@ -323,13 +439,13 @@ files = [ [[package]] name = "cachetools" -version = "5.4.0" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] @@ -345,8 +461,6 @@ files = [ [package.dependencies] attrs = ">=23.1.0" -exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} [package.extras] bson = ["pymongo (>=4.4.0)"] @@ -359,74 +473,89 @@ ujson = ["ujson (>=5.7.0)"] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = 
"sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = 
"cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = 
"cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = 
"sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -533,15 +662,18 @@ files = [ [[package]] name = "click" -version = "7.1.2" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = 
"sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -555,77 +687,114 @@ files = [ [[package]] name = "coverage" -version = "5.5" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -files = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = 
"coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = 
"coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", 
hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, - {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +python-versions = ">=3.8" 
+files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = 
"coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = 
"sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = 
"coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.extras] -toml = ["toml"] +toml = ["tomli"] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -650,20 +819,6 @@ six = ">=1.9.0" gmpy = ["gmpy"] gmpy2 = ["gmpy2"] -[[package]] -name = "exceptiongroup" -version = "1.2.1" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, -] - -[package.extras] -test = ["pytest (>=6)"] - [[package]] name = "expiringdict" version = "1.2.2" @@ -680,129 +835,70 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"] [[package]] name = "faker" -version = "8.16.0" +version = "25.9.2" description = "Faker is a Python package that generates fake data for you." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "Faker-8.16.0-py3-none-any.whl", hash = "sha256:bb10913b9d3ac2aa37180f816c82040e81f9e0c32cb08445533f293cec8930bf"}, - {file = "Faker-8.16.0.tar.gz", hash = "sha256:d70b375d0af0e4c3abd594003691a1055a96281a414884e623d27bccc7d781da"}, + {file = "Faker-25.9.2-py3-none-any.whl", hash = "sha256:7f8cbd179a7351648bea31f53d021a2bdfdeb59e9b830e121a635916615e0ecd"}, + {file = "Faker-25.9.2.tar.gz", hash = "sha256:ca94843600a4089a91394023fef014bb41fee509f8c4beef1530018373e770fb"}, ] [package.dependencies] python-dateutil = ">=2.4" -text-unidecode = "1.3" [[package]] name = "flake8" -version = "5.0.4" +version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.8.1" files = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" [[package]] -name = "flake8-blind-except" -version = "0.1.1" -description = "A flake8 extension that checks for blind except: statements" +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" optional = false -python-versions = "*" +python-versions = ">= 3.6" files = [ - {file = "flake8-blind-except-0.1.1.tar.gz", hash = "sha256:aca3356633825544cec51997260fe31a8f24a1a2795ce8e81696b9916745e599"}, + 
{file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, ] [package.dependencies] -setuptools = "*" - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, -] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-isort" -version = "4.2.0" -description = "flake8 plugin that integrates isort ." -optional = false -python-versions = "*" -files = [ - {file = "flake8-isort-4.2.0.tar.gz", hash = "sha256:26571500cd54976bbc0cf1006ffbcd1a68dd102f816b7a1051b219616ba9fee0"}, - {file = "flake8_isort-4.2.0-py3-none-any.whl", hash = "sha256:5b87630fb3719bf4c1833fd11e0d9534f43efdeba524863e15d8f14a7ef6adbf"}, -] - -[package.dependencies] -flake8 = ">=3.2.1,<6" -isort = ">=4.3.5,<6" +Flake8 = ">=5" [package.extras] -test = ["pytest-cov"] - -[[package]] -name = "flake8-polyfill" -version = "1.0.2" -description = "Polyfill package for Flake8 plugins" -optional = false -python-versions = "*" -files = [ - {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, - {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, -] - -[package.dependencies] -flake8 = "*" - -[[package]] -name = "flake8-quotes" -version = "3.4.0" -description = "Flake8 lint for quotes." 
-optional = false -python-versions = "*" -files = [ - {file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"}, -] - -[package.dependencies] -flake8 = "*" -setuptools = "*" +dev = ["pyTest", "pyTest-cov"] [[package]] name = "flask" -version = "1.1.4" +version = "3.0.2" description = "A simple framework for building complex web applications." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "Flask-1.1.4-py2.py3-none-any.whl", hash = "sha256:c34f04500f2cbbea882b1acb02002ad6fe6b7ffa64a6164577995657f50aed22"}, - {file = "Flask-1.1.4.tar.gz", hash = "sha256:0fbeb6180d383a9186d0d6ed954e0042ad9f18e0e8de088b2b419d526927d196"}, + {file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"}, + {file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"}, ] [package.dependencies] -click = ">=5.1,<8.0" -itsdangerous = ">=0.24,<2.0" -Jinja2 = ">=2.10.1,<3.0" -Werkzeug = ">=0.15,<2.0" +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" [package.extras] -dev = ["coverage", "pallets-sphinx-themes", "pytest", "sphinx", "sphinx-issues", "sphinxcontrib-log-cabinet", "tox"] -docs = ["pallets-sphinx-themes", "sphinx", "sphinx-issues", "sphinxcontrib-log-cabinet"] +async = ["asgiref (>=3.2)"] dotenv = ["python-dotenv"] [[package]] @@ -836,20 +932,24 @@ Flask = ">=0.9" [[package]] name = "flask-jwt-oidc" -version = "0.3.0" -description = "Flask_JWT_OIDC" +version = "0.7.0" +description = "Opinionated flask oidc client" optional = false -python-versions = "*" -files = [ - {file = "flask_jwt_oidc-0.3.0-py3-none-any.whl", hash = "sha256:c2a62ecb463a506155fabf70fd20684830213f4e48aa98cd7abd48f253e8ad18"}, - {file = "flask_jwt_oidc-0.3.0.tar.gz", hash = 
"sha256:5859015847fa44f291082555fdf6ca986294f6c57166f9ea9da2a2b3fdda5138"}, -] +python-versions = "^3.9" +files = [] +develop = false [package.dependencies] -cachelib = "*" -flask = "*" -python-jose = "*" -six = "*" +cachelib = "0.*" +Flask = ">=2" +python-jose = "^3.3.0" +six = "^1.16.0" + +[package.source] +type = "git" +url = "https://github.com/seeker25/flask-jwt-oidc.git" +reference = "main" +resolved_reference = "d208d4643e3b17358f7295bee0f955e67ba6ac88" [[package]] name = "flask-mail" @@ -868,58 +968,59 @@ flask = "*" [[package]] name = "flask-marshmallow" -version = "0.11.0" +version = "1.2.1" description = "Flask + marshmallow for beautiful APIs" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "flask-marshmallow-0.11.0.tar.gz", hash = "sha256:01520ef1851ccb64d4ffb33196cddff895cc1302ae1585bff1abf58684a8111a"}, - {file = "flask_marshmallow-0.11.0-py2.py3-none-any.whl", hash = "sha256:28b969193958d9602ab5d6add6d280e0e360c8e373d3492c2f73b024ecd36374"}, + {file = "flask_marshmallow-1.2.1-py3-none-any.whl", hash = "sha256:10b5048ecfaa26f7c8d0aed7d81083164450e6be8e81c04b3d4a586b3f7b6678"}, + {file = "flask_marshmallow-1.2.1.tar.gz", hash = "sha256:00ee96399ed664963afff3b5d6ee518640b0f91dbc2aace2b5abcf32f40ef23a"}, ] [package.dependencies] -Flask = "*" -marshmallow = ">=2.0.0" -six = ">=1.9.0" +Flask = ">=2.2" +marshmallow = ">=3.0.0" [package.extras] -dev = ["flake8 (==3.7.9)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=1.20,<3.0)", "pytest", "tox"] -docs = ["Sphinx (==2.3.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] -lint = ["flake8 (==3.7.9)", "flake8-bugbear (==20.1.4)", "pre-commit (>=1.20,<3.0)"] -sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] -tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", 
"marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] +dev = ["flask-marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["Sphinx (==7.2.6)", "marshmallow-sqlalchemy (>=0.19.0)", "sphinx-issues (==4.0.0)"] +sqlalchemy = ["flask-sqlalchemy (>=3.0.0)", "marshmallow-sqlalchemy (>=0.29.0)"] +tests = ["flask-marshmallow[sqlalchemy]", "pytest"] [[package]] name = "flask-migrate" -version = "2.7.0" -description = "SQLAlchemy database migrations for Flask applications using Alembic" +version = "4.0.7" +description = "SQLAlchemy database migrations for Flask applications using Alembic." optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "Flask-Migrate-2.7.0.tar.gz", hash = "sha256:ae2f05671588762dd83a21d8b18c51fe355e86783e24594995ff8d7380dffe38"}, - {file = "Flask_Migrate-2.7.0-py2.py3-none-any.whl", hash = "sha256:26871836a4e46d2d590cf8e558c6d60039e1c003079b240689d845726b6b57c0"}, + {file = "Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622"}, + {file = "Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617"}, ] [package.dependencies] -alembic = ">=0.7" +alembic = ">=1.9.0" Flask = ">=0.9" Flask-SQLAlchemy = ">=1.0" [[package]] name = "flask-moment" -version = "1.0.5" +version = "1.0.6" description = "Formatting of dates and times in Flask templates using moment.js." 
optional = false python-versions = ">=3.6" files = [ - {file = "Flask-Moment-1.0.5.tar.gz", hash = "sha256:33307ecd4af8290b6df6a9828ff55ac0977d0567817f9bc0f69803d22ed2b55c"}, - {file = "Flask_Moment-1.0.5-py3-none-any.whl", hash = "sha256:6e7b3eef89e2137bbbee975405f241a68a44edfa34bf052c92d84364992adca6"}, + {file = "Flask_Moment-1.0.6-py3-none-any.whl", hash = "sha256:3ae8baea20a41e99f457b9710ecd1368911dd5133f09a27583eb0dcb3491e31d"}, + {file = "flask_moment-1.0.6.tar.gz", hash = "sha256:2f8969907cbacde4a88319792e8f920ba5c9dd9d99ced2346cad563795302b88"}, ] [package.dependencies] Flask = "*" packaging = ">=14.1" +[package.extras] +docs = ["sphinx"] + [[package]] name = "flask-opentracing" version = "1.1.0" @@ -937,33 +1038,20 @@ opentracing = ">=2.0,<3" [package.extras] tests = ["flake8", "flake8-quotes", "mock", "pytest", "pytest-cov"] -[[package]] -name = "flask-script" -version = "2.0.6" -description = "Scripting support for Flask" -optional = false -python-versions = "*" -files = [ - {file = "Flask-Script-2.0.6.tar.gz", hash = "sha256:6425963d91054cfcc185807141c7314a9c5ad46325911bd24dcb489bd0161c65"}, -] - -[package.dependencies] -Flask = "*" - [[package]] name = "flask-sqlalchemy" -version = "2.5.1" -description = "Adds SQLAlchemy support to your Flask application." +version = "3.1.1" +description = "Add SQLAlchemy support to your Flask application." 
optional = false -python-versions = ">= 2.7, != 3.0.*, != 3.1.*, != 3.2.*, != 3.3.*" +python-versions = ">=3.8" files = [ - {file = "Flask-SQLAlchemy-2.5.1.tar.gz", hash = "sha256:2bda44b43e7cacb15d4e05ff3cc1f8bc97936cc464623424102bfc2c35e95912"}, - {file = "Flask_SQLAlchemy-2.5.1-py2.py3-none-any.whl", hash = "sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390"}, + {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, + {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, ] [package.dependencies] -Flask = ">=0.10" -SQLAlchemy = ">=0.8.0" +flask = ">=2.2.5" +sqlalchemy = ">=2.0.16" [[package]] name = "freezegun" @@ -1082,51 +1170,27 @@ simple-cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py.git"} [package.source] type = "git" -url = "https://github.com/seeker25/sbc-connect-common.git" -reference = "small_tweaks" -resolved_reference = "cf0d1d76e37ecb9b051393f6e9f4a419177ee2b4" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" subdirectory = "python/gcp-queue" [[package]] name = "google-api-core" -version = "1.34.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-1.34.1.tar.gz", hash = "sha256:3399c92887a97d33038baa4bfd3bf07acc05d474b0171f333e1f641c1364e552"}, - {file = "google_api_core-1.34.1-py3-none-any.whl", hash = "sha256:52bcc9d9937735f8a3986fa0bbf9135ae9cf5393a722387e5eced520e39c774a"}, -] - -[package.dependencies] -google-auth = ">=1.25.0,<3.0dev" -googleapis-common-protos = ">=1.56.2,<2.0dev" -grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} -grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} -protobuf = 
">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.0.0dev" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] - -[[package]] -name = "google-api-core" -version = "2.19.1" +version = "2.20.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, - {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, + {file = "google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a"}, + {file = "google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f"}, ] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""} -grpcio-status = {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""} +grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} +grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -1138,13 +1202,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.32.0" +version = "2.35.0" description = 
"Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, - {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, + {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, + {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, ] [package.dependencies] @@ -1154,20 +1218,20 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-cloud-pubsub" -version = "2.21.4" +version = "2.25.2" description = "Google Cloud Pub/Sub API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-pubsub-2.21.4.tar.gz", hash = "sha256:2cb8c7698adbfea0448db6c89b78aa7217fce07c1e37b46d3e535fa1633f3ae6"}, - {file = "google_cloud_pubsub-2.21.4-py2.py3-none-any.whl", hash = "sha256:e80a4c37c3cd45b68ee3c75eea45af228beeebc3f03915e8ee0b2fccd72e9926"}, + {file = "google_cloud_pubsub-2.25.2-py2.py3-none-any.whl", hash = "sha256:1e8470586136804b2a49c290f0e75082d949f778e1aaae2ebb9759741d829014"}, + {file = "google_cloud_pubsub-2.25.2.tar.gz", hash = "sha256:e0db6bfa52f04bf17347c4afbfea3dc094d31fc54259a1581407b4cd784da433"}, ] [package.dependencies] @@ -1176,122 +1240,215 @@ google-auth = ">=2.14.1,<3.0.0dev" grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" grpcio = ">=1.51.3,<2.0dev" grpcio-status = ">=1.33.2" -proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", 
markers = "python_version < \"3.11\""}, - {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, -] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +opentelemetry-api = {version = ">=1.27.0", markers = "python_version >= \"3.8\""} +opentelemetry-sdk = {version = ">=1.27.0", markers = "python_version >= \"3.8\""} +proto-plus = {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""} +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" [package.extras] libcst = ["libcst (>=0.3.10)"] [[package]] name = "googleapis-common-protos" -version = "1.63.1" +version = "1.65.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.1.tar.gz", hash = "sha256:c6442f7a0a6b2a80369457d79e6672bb7dcbaab88e0848302497e3ec80780a6a"}, - {file = "googleapis_common_protos-1.63.1-py2.py3-none-any.whl", hash = "sha256:0e1c2cdfcbc354b76e4a211a35ea35d6926a835cba1377073c4861db904a1877"}, + {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, + {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, ] [package.dependencies] grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +[[package]] 
+name = "greenlet" +version = "3.1.1" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = 
"greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = 
"greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = 
"sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + [[package]] name = "grpc-google-iam-v1" -version = "0.13.0" +version = "0.13.1" description = "IAM API client library" optional = false python-versions = 
">=3.7" files = [ - {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, - {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, + {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, + {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, ] [package.dependencies] googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" [[package]] name = "grpcio" -version = "1.65.1" +version = "1.66.2" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.65.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062"}, - {file = "grpcio-1.65.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8"}, - {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:7af64838b6e615fff0ec711960ed9b6ee83086edfa8c32670eafb736f169d719"}, - {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb64b4166362d9326f7efbf75b1c72106c1aa87f13a8c8b56a1224fac152f5c"}, - {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8422dc13ad93ec8caa2612b5032a2b9cd6421c13ed87f54db4a3a2c93afaf77"}, - {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:4effc0562b6c65d4add6a873ca132e46ba5e5a46f07c93502c37a9ae7f043857"}, - {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a6c71575a2fedf259724981fd73a18906513d2f306169c46262a5bae956e6364"}, - {file = "grpcio-1.65.1-cp310-cp310-win32.whl", hash = "sha256:34966cf526ef0ea616e008d40d989463e3db157abb213b2f20c6ce0ae7928875"}, - {file = "grpcio-1.65.1-cp310-cp310-win_amd64.whl", hash = "sha256:ca931de5dd6d9eb94ff19a2c9434b23923bce6f767179fef04dfa991f282eaad"}, - {file = "grpcio-1.65.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:bbb46330cc643ecf10bd9bd4ca8e7419a14b6b9dedd05f671c90fb2c813c6037"}, - {file = "grpcio-1.65.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d827a6fb9215b961eb73459ad7977edb9e748b23e3407d21c845d1d8ef6597e5"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:6e71aed8835f8d9fbcb84babc93a9da95955d1685021cceb7089f4f1e717d719"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a1c84560b3b2d34695c9ba53ab0264e2802721c530678a8f0a227951f453462"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27adee2338d697e71143ed147fe286c05810965d5d30ec14dd09c22479bfe48a"}, - {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f62652ddcadc75d0e7aa629e96bb61658f85a993e748333715b4ab667192e4e8"}, - {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:71a05fd814700dd9cb7d9a507f2f6a1ef85866733ccaf557eedacec32d65e4c2"}, - {file = "grpcio-1.65.1-cp311-cp311-win32.whl", hash = "sha256:b590f1ad056294dfaeac0b7e1b71d3d5ace638d8dd1f1147ce4bd13458783ba8"}, - {file = "grpcio-1.65.1-cp311-cp311-win_amd64.whl", hash = "sha256:12e9bdf3b5fd48e5fbe5b3da382ad8f97c08b47969f3cca81dd9b36b86ed39e2"}, - {file = "grpcio-1.65.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:54cb822e177374b318b233e54b6856c692c24cdbd5a3ba5335f18a47396bac8f"}, - {file = 
"grpcio-1.65.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:aaf3c54419a28d45bd1681372029f40e5bfb58e5265e3882eaf21e4a5f81a119"}, - {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:557de35bdfbe8bafea0a003dbd0f4da6d89223ac6c4c7549d78e20f92ead95d9"}, - {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bfd95ef3b097f0cc86ade54eafefa1c8ed623aa01a26fbbdcd1a3650494dd11"}, - {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e6a8f3d6c41e6b642870afe6cafbaf7b61c57317f9ec66d0efdaf19db992b90"}, - {file = "grpcio-1.65.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1faaf7355ceed07ceaef0b9dcefa4c98daf1dd8840ed75c2de128c3f4a4d859d"}, - {file = "grpcio-1.65.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:60f1f38eed830488ad2a1b11579ef0f345ff16fffdad1d24d9fbc97ba31804ff"}, - {file = "grpcio-1.65.1-cp312-cp312-win32.whl", hash = "sha256:e75acfa52daf5ea0712e8aa82f0003bba964de7ae22c26d208cbd7bc08500177"}, - {file = "grpcio-1.65.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff5a84907e51924973aa05ed8759210d8cdae7ffcf9e44fd17646cf4a902df59"}, - {file = "grpcio-1.65.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:1fbd6331f18c3acd7e09d17fd840c096f56eaf0ef830fbd50af45ae9dc8dfd83"}, - {file = "grpcio-1.65.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:de5b6be29116e094c5ef9d9e4252e7eb143e3d5f6bd6d50a78075553ab4930b0"}, - {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:e4a3cdba62b2d6aeae6027ae65f350de6dc082b72e6215eccf82628e79efe9ba"}, - {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941c4869aa229d88706b78187d60d66aca77fe5c32518b79e3c3e03fc26109a2"}, - {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f40cebe5edb518d78b8131e87cb83b3ee688984de38a232024b9b44e74ee53d3"}, - {file = 
"grpcio-1.65.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2ca684ba331fb249d8a1ce88db5394e70dbcd96e58d8c4b7e0d7b141a453dce9"}, - {file = "grpcio-1.65.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8558f0083ddaf5de64a59c790bffd7568e353914c0c551eae2955f54ee4b857f"}, - {file = "grpcio-1.65.1-cp38-cp38-win32.whl", hash = "sha256:8d8143a3e3966f85dce6c5cc45387ec36552174ba5712c5dc6fcc0898fb324c0"}, - {file = "grpcio-1.65.1-cp38-cp38-win_amd64.whl", hash = "sha256:76e81a86424d6ca1ce7c16b15bdd6a964a42b40544bf796a48da241fdaf61153"}, - {file = "grpcio-1.65.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb5175f45c980ff418998723ea1b3869cce3766d2ab4e4916fbd3cedbc9d0ed3"}, - {file = "grpcio-1.65.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b12c1aa7b95abe73b3e04e052c8b362655b41c7798da69f1eaf8d186c7d204df"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:3019fb50128b21a5e018d89569ffaaaa361680e1346c2f261bb84a91082eb3d3"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ae15275ed98ea267f64ee9ddedf8ecd5306a5b5bb87972a48bfe24af24153e8"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f096ffb881f37e8d4f958b63c74bfc400c7cebd7a944b027357cd2fb8d91a57"}, - {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2f56b5a68fdcf17a0a1d524bf177218c3c69b3947cb239ea222c6f1867c3ab68"}, - {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:941596d419b9736ab548aa0feb5bbba922f98872668847bf0720b42d1d227b9e"}, - {file = "grpcio-1.65.1-cp39-cp39-win32.whl", hash = "sha256:5fd7337a823b890215f07d429f4f193d24b80d62a5485cf88ee06648591a0c57"}, - {file = "grpcio-1.65.1-cp39-cp39-win_amd64.whl", hash = "sha256:1bceeec568372cbebf554eae1b436b06c2ff24cfaf04afade729fb9035408c6c"}, - {file = "grpcio-1.65.1.tar.gz", hash = "sha256:3c492301988cd720cd145d84e17318d45af342e29ef93141228f9cd73222368b"}, + {file = 
"grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, + {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, + {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, + {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, + {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, + {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, + {file = 
"grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, + {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, + {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, + {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, + {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, + {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, + {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, + {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, + {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, + {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, + {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, + {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, + {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, + {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, + {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, + {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, + {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, + {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, + {file = 
"grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, + {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.65.1)"] +protobuf = ["grpcio-tools (>=1.66.2)"] [[package]] name = "grpcio-status" -version = "1.48.2" +version = "1.66.2" description = "Status proto mapping for gRPC" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "grpcio-status-1.48.2.tar.gz", hash = "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"}, - {file = "grpcio_status-1.48.2-py3-none-any.whl", hash = "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf"}, + {file = "grpcio_status-1.66.2-py3-none-any.whl", hash = "sha256:e5fe189f6897d12aa9cd74408a17ca41e44fad30871cf84f5cbd17bd713d2455"}, + {file = "grpcio_status-1.66.2.tar.gz", hash = "sha256:fb55cbb5c2e67062f7a4d5c99e489d074fb57e98678d5c3c6692a2d74d89e9ae"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.48.2" -protobuf = ">=3.12.0" +grpcio = ">=1.66.2" +protobuf = ">=5.26.1,<6.0dev" [[package]] name = "gunicorn" @@ -1316,51 +1473,36 @@ tornado = ["tornado (>=0.2)"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] 
+[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "importlib-resources" -version = "6.4.0" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging 
(>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -1389,13 +1531,13 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "itsdangerous" -version = "1.1.0" -description = "Various helpers to pass data to untrusted environments and back." +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" files = [ - {file = "itsdangerous-1.1.0-py2.py3-none-any.whl", hash = "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"}, - {file = "itsdangerous-1.1.0.tar.gz", hash = "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19"}, + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, ] [[package]] @@ -1419,20 +1561,20 @@ tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-import [[package]] name = "jinja2" -version = "2.11.3" +version = "3.1.3" description = "A very fast and expressive template engine." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, - {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] -MarkupSafe = ">=0.23" +MarkupSafe = ">=2.0" [package.extras] -i18n = ["Babel (>=0.8)"] +i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" @@ -1447,36 +1589,50 @@ files = [ [package.dependencies] attrs = ">=17.4.0" -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +[[package]] +name = "launchdarkly-eventsource" +version = "1.2.0" +description = "LaunchDarkly SSE Client" +optional = false +python-versions = ">=3.8" +files = [ + {file = "launchdarkly_eventsource-1.2.0-py3-none-any.whl", hash = "sha256:9b5ec7149e2ad9995be22ad5361deb480c229701e6b0cc799e94aa14f067b77b"}, + {file = "launchdarkly_eventsource-1.2.0.tar.gz", hash = "sha256:8cb3301ec0daeb5e17eaa37b3b65f6660fab851b317e69271185ef2fb42c2fde"}, +] + +[package.dependencies] +urllib3 = ">=1.26.0,<3" + [[package]] name = "launchdarkly-server-sdk" -version = "8.2.1" +version = 
"9.5.0" description = "LaunchDarkly SDK for Python" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "launchdarkly-server-sdk-8.2.1.tar.gz", hash = "sha256:94adbd52f635ad2f1a8b4a835cbbe4ce77919a6915136b303eaca3e2a54903be"}, - {file = "launchdarkly_server_sdk-8.2.1-py3-none-any.whl", hash = "sha256:b7680a4d5856da133b0dad8eca820e48bb5f2fb6dc34ebbf7f1a3a681033b426"}, + {file = "launchdarkly_server_sdk-9.5.0-py3-none-any.whl", hash = "sha256:bf2cf213f9eb71cd43d5f20f2ac9ec9235c693036459e5038a69015a6648c035"}, + {file = "launchdarkly_server_sdk-9.5.0.tar.gz", hash = "sha256:af64d985621a03257107210266c563c5e268ca8320d1d71b5c18d9592d14fef7"}, ] [package.dependencies] certifi = ">=2018.4.16" expiringdict = ">=1.1.4" +launchdarkly-eventsource = ">=1.1.0,<2.0.0" pyRFC3339 = ">=1.0" semver = ">=2.10.2" -urllib3 = ">=1.22.0,<3" +urllib3 = ">=1.26.0,<3" [package.extras] consul = ["python-consul (>=1.0.1)"] dynamodb = ["boto3 (>=1.9.71)"] redis = ["redis (>=2.10.5)"] +test-filesource = ["pyyaml (>=5.3.1)", "watchdog (>=3.0.0)"] [[package]] name = "lovely-pytest-docker" @@ -1511,93 +1667,88 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = 
"MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = 
"MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = 
"MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] [[package]] name = "marshmallow" -version = "3.21.2" +version = "3.22.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.2-py3-none-any.whl", hash = "sha256:70b54a6282f4704d12c0a41599682c5c5450e843b9ec406308653b47c59648a1"}, - {file = "marshmallow-3.21.2.tar.gz", hash = "sha256:82408deadd8b33d56338d2182d455db632c6313aa2af61916672146bb32edc56"}, + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, ] [package.dependencies] @@ -1605,29 +1756,28 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] name = "marshmallow-sqlalchemy" -version = "0.25.0" +version = "1.1.0" description = "SQLAlchemy integration with the marshmallow (de)serialization library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "marshmallow-sqlalchemy-0.25.0.tar.gz", hash = "sha256:f1491f83833ac9c8406ba603458b1447fdfd904194833aab4b3cc01ef3646944"}, - {file = "marshmallow_sqlalchemy-0.25.0-py2.py3-none-any.whl", hash = "sha256:f861888ae3299f2c1f18cd94f02147ced70cd1b4986b2c5077e4a1036018d2a2"}, + {file = "marshmallow_sqlalchemy-1.1.0-py3-none-any.whl", hash = "sha256:cce261148e4c6ec4ee275f3d29352933380a1afa2fd3933f5e9ecd02fdc16ade"}, + {file = "marshmallow_sqlalchemy-1.1.0.tar.gz", hash = "sha256:2ab092da269dafa8a05d51a58409af71a8d2183958ba47143127dd239e0359d8"}, ] [package.dependencies] -marshmallow = ">=3.0.0" -SQLAlchemy = ">=1.2.0" +marshmallow = ">=3.18.0" +SQLAlchemy = ">=1.4.40,<3.0" [package.extras] -dev = 
["flake8 (==3.9.1)", "flake8-bugbear (==21.4.3)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture", "tox"] -docs = ["alabaster (==0.7.12)", "sphinx (==3.5.4)", "sphinx-issues (==1.2.0)"] -lint = ["flake8 (==3.9.1)", "flake8-bugbear (==21.4.3)", "pre-commit (>=2.0,<3.0)"] -tests = ["pytest", "pytest-lazy-fixture"] +dev = ["marshmallow-sqlalchemy[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==1.0.0)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)"] +tests = ["pytest (<9)", "pytest-lazy-fixtures"] [[package]] name = "mccabe" @@ -1640,15 +1790,26 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "minio" -version = "7.2.7" +version = "7.2.9" description = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" optional = false -python-versions = "*" +python-versions = ">3.8" files = [ - {file = "minio-7.2.7-py3-none-any.whl", hash = "sha256:59d1f255d852fe7104018db75b3bebbd987e538690e680f7c5de835e422de837"}, - {file = "minio-7.2.7.tar.gz", hash = "sha256:473d5d53d79f340f3cd632054d0c82d2f93177ce1af2eac34a235bea55708d98"}, + {file = "minio-7.2.9-py3-none-any.whl", hash = "sha256:fe5523d9c4a4d6cfc07e96905852841bccdb22b22770e1efca4bf5ae8b65774b"}, + {file = "minio-7.2.9.tar.gz", hash = "sha256:a83c2fcd981944602a8dc11e8e07543ed9cda0a9462264e3f46a13171c56bccb"}, ] [package.dependencies] @@ -1659,120 +1820,236 @@ typing-extensions = "*" urllib3 = "*" [[package]] -name = "mock" -version = "4.0.3" -description = "Rolling backport of unittest.mock for all Pythons" +name = 
"msgpack" +version = "1.1.0" +description = "MessagePack serializer" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, - {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = 
"msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, ] -[package.extras] -build = ["blurb", "twine", "wheel"] -docs = ["sphinx"] -test = ["pytest (<5.4)", "pytest-cov"] - [[package]] name = "multidict" -version = "6.0.5" +version = "6.1.0" description = "multidict implementation" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = 
"multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = 
"multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + 
{file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = 
"multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = 
"multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "opentelemetry-api" +version = "1.27.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = 
"multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = 
"multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - 
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = 
"multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, + {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"}, + {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"}, ] +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=8.4.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.27.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"}, + {file = "opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"}, +] + +[package.dependencies] +opentelemetry-api = "1.27.0" +opentelemetry-semantic-conventions = "0.48b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.48b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"}, + {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"}, 
+] + +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.27.0" + [[package]] name = "opentracing" version = "2.4.0" @@ -1788,110 +2065,133 @@ tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pyte [[package]] name = "orjson" -version = "3.10.3" +version = "3.10.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"}, - {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"}, - {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"}, - {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"}, - {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"}, - {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"}, - {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"}, - {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"}, - {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"}, - {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"}, - {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"}, - {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"}, - {file = "orjson-3.10.3-cp38-none-win32.whl", 
hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"}, - {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"}, - {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"}, - {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"}, - {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"}, - {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"}, + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = 
"orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = 
"orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = 
"orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + 
{file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash 
= "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, ] [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] -name = "pep8-naming" -version = "0.11.1" -description = "Check PEP-8 naming conventions, plugin for flake8" +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "pep8-naming-0.11.1.tar.gz", hash = "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724"}, - {file = "pep8_naming-0.11.1-py2.py3-none-any.whl", hash = "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] -[package.dependencies] -flake8-polyfill = ">=1.0.2,<2" +[[package]] +name = "pbr" +version = "6.1.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, + {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, +] [[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." 
+name = "pg8000" +version = "1.31.2" +description = "PostgreSQL interface library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, + {file = "pg8000-1.31.2-py3-none-any.whl", hash = "sha256:436c771ede71af4d4c22ba867a30add0bc5c942d7ab27fadbb6934a487ecc8f6"}, + {file = "pg8000-1.31.2.tar.gz", hash = "sha256:1ea46cf09d8eca07fe7eaadefd7951e37bee7fabe675df164f1a572ffb300876"}, ] +[package.dependencies] +python-dateutil = ">=2.8.2" +scramp = ">=1.4.5" + [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock 
(>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -1927,139 +2227,66 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "3.19.6" -description = "Protocol Buffers" +version = "5.28.2" +description = "" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "protobuf-3.19.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:010be24d5a44be7b0613750ab40bc8b8cedc796db468eae6c779b395f50d1fa1"}, - {file = "protobuf-3.19.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11478547958c2dfea921920617eb457bc26867b0d1aa065ab05f35080c5d9eb6"}, - {file = "protobuf-3.19.6-cp310-cp310-win32.whl", hash = "sha256:559670e006e3173308c9254d63facb2c03865818f22204037ab76f7a0ff70b5f"}, - {file = "protobuf-3.19.6-cp310-cp310-win_amd64.whl", hash = "sha256:347b393d4dd06fb93a77620781e11c058b3b0a5289262f094379ada2920a3730"}, - {file = "protobuf-3.19.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a8ce5ae0de28b51dff886fb922012dad885e66176663950cb2344c0439ecb473"}, - {file = "protobuf-3.19.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90b0d02163c4e67279ddb6dc25e063db0130fc299aefabb5d481053509fae5c8"}, - {file = "protobuf-3.19.6-cp36-cp36m-win32.whl", hash = "sha256:30f5370d50295b246eaa0296533403961f7e64b03ea12265d6dfce3a391d8992"}, - {file = "protobuf-3.19.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0c0714b025ec057b5a7600cb66ce7c693815f897cfda6d6efb58201c472e3437"}, - {file = "protobuf-3.19.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5057c64052a1f1dd7d4450e9aac25af6bf36cfbfb3a1cd89d16393a036c49157"}, - {file = "protobuf-3.19.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:bb6776bd18f01ffe9920e78e03a8676530a5d6c5911934c6a1ac6eb78973ecb6"}, - {file = "protobuf-3.19.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a04134866861b11556a82dd91ea6daf1f4925746b992f277b84013a7cc1229"}, - {file = 
"protobuf-3.19.6-cp37-cp37m-win32.whl", hash = "sha256:4bc98de3cdccfb5cd769620d5785b92c662b6bfad03a202b83799b6ed3fa1fa7"}, - {file = "protobuf-3.19.6-cp37-cp37m-win_amd64.whl", hash = "sha256:aa3b82ca1f24ab5326dcf4ea00fcbda703e986b22f3d27541654f749564d778b"}, - {file = "protobuf-3.19.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2b2d2913bcda0e0ec9a784d194bc490f5dc3d9d71d322d070b11a0ade32ff6ba"}, - {file = "protobuf-3.19.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d0b635cefebd7a8a0f92020562dead912f81f401af7e71f16bf9506ff3bdbb38"}, - {file = "protobuf-3.19.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a552af4dc34793803f4e735aabe97ffc45962dfd3a237bdde242bff5a3de684"}, - {file = "protobuf-3.19.6-cp38-cp38-win32.whl", hash = "sha256:0469bc66160180165e4e29de7f445e57a34ab68f49357392c5b2f54c656ab25e"}, - {file = "protobuf-3.19.6-cp38-cp38-win_amd64.whl", hash = "sha256:91d5f1e139ff92c37e0ff07f391101df77e55ebb97f46bbc1535298d72019462"}, - {file = "protobuf-3.19.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c0ccd3f940fe7f3b35a261b1dd1b4fc850c8fde9f74207015431f174be5976b3"}, - {file = "protobuf-3.19.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:30a15015d86b9c3b8d6bf78d5b8c7749f2512c29f168ca259c9d7727604d0e39"}, - {file = "protobuf-3.19.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:878b4cd080a21ddda6ac6d1e163403ec6eea2e206cf225982ae04567d39be7b0"}, - {file = "protobuf-3.19.6-cp39-cp39-win32.whl", hash = "sha256:5a0d7539a1b1fb7e76bf5faa0b44b30f812758e989e59c40f77a7dab320e79b9"}, - {file = "protobuf-3.19.6-cp39-cp39-win_amd64.whl", hash = "sha256:bbf5cea5048272e1c60d235c7bd12ce1b14b8a16e76917f371c718bd3005f045"}, - {file = "protobuf-3.19.6-py2.py3-none-any.whl", hash = "sha256:14082457dc02be946f60b15aad35e9f5c69e738f80ebbc0900a19bc83734a5a4"}, - {file = "protobuf-3.19.6.tar.gz", hash = "sha256:5f5540d57a43042389e87661c6eaa50f47c19c6176e8cf1c4f287aeefeccb5c4"}, -] - -[[package]] -name = 
"psycopg2-binary" + {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, + {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, + {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"}, + {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"}, + {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"}, + {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"}, + {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"}, + {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"}, + {file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"}, +] + +[[package]] +name = "psycopg2" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = 
"psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - 
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = 
"psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = 
"psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = 
"sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, ] [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" -version = "0.4.0" +version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = 
"sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] @@ -2067,13 +2294,13 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycodestyle" -version = "2.9.1" +version = "2.12.1" description = "Python style guide checker" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -2089,70 +2316,70 @@ files = [ [[package]] name = "pycryptodome" -version = "3.20.0" +version = "3.21.0" description = "Cryptographic library for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, - {file = 
"pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, - {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, - {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, - {file = 
"pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, - {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, - {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, - {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, - {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, - {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, -] - -[[package]] -name = "pydocstyle" -version = "5.1.1" -description = "Python docstring style checker" -optional = false -python-versions = ">=3.5" -files = [ - {file = "pydocstyle-5.1.1-py3-none-any.whl", hash = "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678"}, - {file = "pydocstyle-5.1.1.tar.gz", hash = "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325"}, +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = 
"sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a"}, + 
{file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = "sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b"}, + {file = 
"pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c"}, + {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"}, ] -[package.dependencies] -snowballstemmer = "*" - [[package]] name = "pyflakes" -version = "2.5.0" +version = "3.2.0" description = "passive checker of Python programs" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pyhumps" version = "3.8.0" @@ -2166,61 +2393,28 @@ files = [ [[package]] name = "pylint" -version = "3.2.3" +version = "3.3.1" description = "python code static checker" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"}, - {file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"}, + {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, + {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, ] [package.dependencies] -astroid = ">=3.2.2,<=3.3.0-dev0" +astroid = ">=3.3.4,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, -] +dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""} isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] spelling = ["pyenchant (>=3.2,<4.0)"] testutils = ["gitpython (>3)"] -[[package]] -name = "pylint-flask" -version = 
"0.6" -description = "pylint-flask is a Pylint plugin to aid Pylint in recognizing and understanding errors caused when using Flask" -optional = false -python-versions = "*" -files = [ - {file = "pylint-flask-0.6.tar.gz", hash = "sha256:f4d97de2216bf7bfce07c9c08b166e978fe9f2725de2a50a9845a97de7e31517"}, -] - -[package.dependencies] -pylint-plugin-utils = ">=0.2.1" - -[[package]] -name = "pylint-plugin-utils" -version = "0.8.2" -description = "Utilities and helpers for writing Pylint plugins" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "pylint_plugin_utils-0.8.2-py3-none-any.whl", hash = "sha256:ae11664737aa2effbf26f973a9e0b6779ab7106ec0adc5fe104b0907ca04e507"}, - {file = "pylint_plugin_utils-0.8.2.tar.gz", hash = "sha256:d3cebf68a38ba3fba23a873809155562571386d4c1b03e5b4c4cc26c3eee93e4"}, -] - -[package.dependencies] -pylint = ">=1.7" - [[package]] name = "pyrfc3339" version = "1.1" @@ -2278,90 +2472,59 @@ files = [ [[package]] name = "pytest" -version = "8.1.1" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.4,<2.0" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} +pluggy = ">=1.5,<2" [package.extras] -testing = ["argcomplete", "attrs (>=19.2)", 
"hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.21.0" +version = "0.23.8" description = "Pytest support for asyncio" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, - {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, ] [package.dependencies] -pytest = ">=7.0.0" +pytest = ">=7.0.0,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "2.12.1" +version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, - {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, ] [package.dependencies] -coverage = ">=5.2.1" +coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" -toml = "*" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "pytest-dotenv" -version = "0.5.2" -description = "A py.test plugin that parses environment files before running tests" -optional = false -python-versions = "*" -files = [ - {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, - {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, -] - -[package.dependencies] -pytest = ">=5.0.0" -python-dotenv = ">=0.9.1" - -[[package]] -name = "pytest-env" -version = "0.6.2" -description = "py.test plugin that allows you to add environment variables." 
-optional = false -python-versions = "*" -files = [ - {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, -] - -[package.dependencies] -pytest = ">=2.6.0" +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-mock" @@ -2429,55 +2592,88 @@ cryptography = ["cryptography (>=3.4.0)"] pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] -[[package]] -name = "python-memcached" -version = "1.62" -description = "Pure python memcached client" -optional = false -python-versions = "*" -files = [ - {file = "python-memcached-1.62.tar.gz", hash = "sha256:0285470599b7f593fbf3bec084daa1f483221e68c1db2cf1d846a9f7c2655103"}, - {file = "python_memcached-1.62-py2.py3-none-any.whl", hash = "sha256:1bdd8d2393ff53e80cd5e9442d750e658e0b35c3eebb3211af137303e3b729d1"}, -] - [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] -name = "redis" -version = "5.0.4" -description = "Python client for Redis database and key-value store" +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "redis-5.0.4-py3-none-any.whl", hash = 
"sha256:7adc2835c7a9b5033b7ad8f8918d09b7344188228809c98df07af226d39dec91"}, - {file = "redis-5.0.4.tar.gz", hash = "sha256:ec31f2ed9675cc54c21ba854cfe0462e6faf1d83c8ce5944709db8a4700b9c61"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + 
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = 
"PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = 
"pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - [[package]] name = "requests" -version = "2.32.2" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -2490,6 +2686,41 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-mock" +version = "1.12.1" +description = "Mock out responses from the requests package" +optional = false +python-versions = ">=3.5" +files = [ + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, +] + +[package.dependencies] +requests = ">=2.22,<3" + +[package.extras] +fixture = ["fixtures"] + +[[package]] +name = "rich" +version = "13.9.2" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "rich-13.9.2-py3-none-any.whl", hash = 
"sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1"}, + {file = "rich-13.9.2.tar.gz", hash = "sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "rsa" version = "4.9" @@ -2522,91 +2753,36 @@ jaeger-client = "*" [package.source] type = "git" -url = "https://github.com/bcgov/sbc-common-components.git" -reference = "master" -resolved_reference = "22978d810dc4e85c51c3129936686b0a17124e64" +url = "https://github.com/bolyachevets/sbc-common-components.git" +reference = "camel_case_empty_dict" +resolved_reference = "20ce13be6d59946583385c857a5aca1c4c517ad0" subdirectory = "python" [[package]] -name = "semver" -version = "3.0.2" -description = "Python helper for Semantic Versioning (https://semver.org)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, - {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, -] - -[[package]] -name = "sentry-sdk" -version = "2.10.0" -description = "Python client for Sentry (https://sentry.io)" +name = "scramp" +version = "1.4.5" +description = "An implementation of the SCRAM protocol." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "sentry_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:87b3d413c87d8e7f816cc9334bff255a83d8b577db2b22042651c30c19c09190"}, - {file = "sentry_sdk-2.10.0.tar.gz", hash = "sha256:545fcc6e36c335faa6d6cda84669b6e17025f31efbf3b2211ec14efe008b75d1"}, + {file = "scramp-1.4.5-py3-none-any.whl", hash = "sha256:50e37c464fc67f37994e35bee4151e3d8f9320e9c204fca83a5d313c121bbbe7"}, + {file = "scramp-1.4.5.tar.gz", hash = "sha256:be3fbe774ca577a7a658117dca014e5d254d158cecae3dd60332dfe33ce6d78e"}, ] [package.dependencies] -blinker = {version = ">=1.1", optional = true, markers = "extra == \"flask\""} -certifi = "*" -flask = {version = ">=0.11", optional = true, markers = "extra == \"flask\""} -markupsafe = {version = "*", optional = true, markers = "extra == \"flask\""} -urllib3 = ">=1.26.11" +asn1crypto = ">=1.5.1" -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -anthropic = ["anthropic (>=0.16)"] -arq = ["arq (>=0.23)"] -asyncpg = ["asyncpg (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -celery-redbeat = ["celery-redbeat (>=2)"] -chalice = ["chalice (>=1.16.0)"] -clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -huggingface-hub = ["huggingface-hub (>=0.22)"] -langchain = ["langchain (>=0.0.210)"] -loguru = ["loguru (>=0.5)"] -openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-instrumentation-aio-pika (==0.46b0)", "opentelemetry-instrumentation-aiohttp-client (==0.46b0)", "opentelemetry-instrumentation-aiopg (==0.46b0)", "opentelemetry-instrumentation-asgi (==0.46b0)", 
"opentelemetry-instrumentation-asyncio (==0.46b0)", "opentelemetry-instrumentation-asyncpg (==0.46b0)", "opentelemetry-instrumentation-aws-lambda (==0.46b0)", "opentelemetry-instrumentation-boto (==0.46b0)", "opentelemetry-instrumentation-boto3sqs (==0.46b0)", "opentelemetry-instrumentation-botocore (==0.46b0)", "opentelemetry-instrumentation-cassandra (==0.46b0)", "opentelemetry-instrumentation-celery (==0.46b0)", "opentelemetry-instrumentation-confluent-kafka (==0.46b0)", "opentelemetry-instrumentation-dbapi (==0.46b0)", "opentelemetry-instrumentation-django (==0.46b0)", "opentelemetry-instrumentation-elasticsearch (==0.46b0)", "opentelemetry-instrumentation-falcon (==0.46b0)", "opentelemetry-instrumentation-fastapi (==0.46b0)", "opentelemetry-instrumentation-flask (==0.46b0)", "opentelemetry-instrumentation-grpc (==0.46b0)", "opentelemetry-instrumentation-httpx (==0.46b0)", "opentelemetry-instrumentation-jinja2 (==0.46b0)", "opentelemetry-instrumentation-kafka-python (==0.46b0)", "opentelemetry-instrumentation-logging (==0.46b0)", "opentelemetry-instrumentation-mysql (==0.46b0)", "opentelemetry-instrumentation-mysqlclient (==0.46b0)", "opentelemetry-instrumentation-pika (==0.46b0)", "opentelemetry-instrumentation-psycopg (==0.46b0)", "opentelemetry-instrumentation-psycopg2 (==0.46b0)", "opentelemetry-instrumentation-pymemcache (==0.46b0)", "opentelemetry-instrumentation-pymongo (==0.46b0)", "opentelemetry-instrumentation-pymysql (==0.46b0)", "opentelemetry-instrumentation-pyramid (==0.46b0)", "opentelemetry-instrumentation-redis (==0.46b0)", "opentelemetry-instrumentation-remoulade (==0.46b0)", "opentelemetry-instrumentation-requests (==0.46b0)", "opentelemetry-instrumentation-sklearn (==0.46b0)", "opentelemetry-instrumentation-sqlalchemy (==0.46b0)", "opentelemetry-instrumentation-sqlite3 (==0.46b0)", "opentelemetry-instrumentation-starlette (==0.46b0)", "opentelemetry-instrumentation-system-metrics (==0.46b0)", "opentelemetry-instrumentation-threading 
(==0.46b0)", "opentelemetry-instrumentation-tornado (==0.46b0)", "opentelemetry-instrumentation-tortoiseorm (==0.46b0)", "opentelemetry-instrumentation-urllib (==0.46b0)", "opentelemetry-instrumentation-urllib3 (==0.46b0)", "opentelemetry-instrumentation-wsgi (==0.46b0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=6)"] - -[[package]] -name = "setuptools" -version = "71.0.3" -description = "Easily download, build, install, upgrade, and uninstall Python packages" +[[package]] +name = "semver" +version = "3.0.2" +description = "Python helper for Semantic Versioning (https://semver.org)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "setuptools-71.0.3-py3-none-any.whl", hash = "sha256:f501b6e6db709818dc76882582d9c516bf3b67b948864c5fa1d1624c09a49207"}, - {file = "setuptools-71.0.3.tar.gz", hash = "sha256:3d8531791a27056f4a38cd3e54084d8b1c4228ff9cf3f2d7dd075ec99f9fd70d"}, + {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, + {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, ] -[package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (<7.4)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", 
"sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "simple-cloudevent" version = "0.0.2" @@ -2637,94 +2813,107 @@ files = [ ] [[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +name = "sql-versioning" +version = "0.1.0" +description = "" optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] +python-versions = "^3.10" +files = [] +develop = false + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/sql-versioning" [[package]] name = "sqlalchemy" -version = "1.3.24" +version = "2.0.35" description = "Database Abstraction Library" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-1.3.24-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:87a2725ad7d41cd7376373c15fd8bf674e9c33ca56d0b8036add2d634dba372e"}, - {file = 
"SQLAlchemy-1.3.24-cp27-cp27m-win32.whl", hash = "sha256:f597a243b8550a3a0b15122b14e49d8a7e622ba1c9d29776af741f1845478d79"}, - {file = "SQLAlchemy-1.3.24-cp27-cp27m-win_amd64.whl", hash = "sha256:fc4cddb0b474b12ed7bdce6be1b9edc65352e8ce66bc10ff8cbbfb3d4047dbf4"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f1149d6e5c49d069163e58a3196865e4321bad1803d7886e07d8710de392c548"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:14f0eb5db872c231b20c18b1e5806352723a3a89fb4254af3b3e14f22eaaec75"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:e98d09f487267f1e8d1179bf3b9d7709b30a916491997137dd24d6ae44d18d79"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:fc1f2a5a5963e2e73bac4926bdaf7790c4d7d77e8fc0590817880e22dd9d0b8b"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-win32.whl", hash = "sha256:f3c5c52f7cb8b84bfaaf22d82cb9e6e9a8297f7c2ed14d806a0f5e4d22e83fb7"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-win_amd64.whl", hash = "sha256:0352db1befcbed2f9282e72843f1963860bf0e0472a4fa5cf8ee084318e0e6ab"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2ed6343b625b16bcb63c5b10523fd15ed8934e1ed0f772c534985e9f5e73d894"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:34fcec18f6e4b24b4a5f6185205a04f1eab1e56f8f1d028a2a03694ebcc2ddd4"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e47e257ba5934550d7235665eee6c911dc7178419b614ba9e1fbb1ce6325b14f"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:816de75418ea0953b5eb7b8a74933ee5a46719491cd2b16f718afc4b291a9658"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-win32.whl", hash = "sha256:26155ea7a243cbf23287f390dba13d7927ffa1586d3208e0e8d615d0c506f996"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-win_amd64.whl", hash = 
"sha256:f03bd97650d2e42710fbe4cf8a59fae657f191df851fc9fc683ecef10746a375"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a006d05d9aa052657ee3e4dc92544faae5fcbaafc6128217310945610d862d39"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1e2f89d2e5e3c7a88e25a3b0e43626dba8db2aa700253023b82e630d12b37109"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0d5d862b1cfbec5028ce1ecac06a3b42bc7703eb80e4b53fceb2738724311443"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0172423a27fbcae3751ef016663b72e1a516777de324a76e30efa170dbd3dd2d"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-win32.whl", hash = "sha256:d37843fb8df90376e9e91336724d78a32b988d3d20ab6656da4eb8ee3a45b63c"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-win_amd64.whl", hash = "sha256:c10ff6112d119f82b1618b6dc28126798481b9355d8748b64b9b55051eb4f01b"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:861e459b0e97673af6cc5e7f597035c2e3acdfb2608132665406cded25ba64c7"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5de2464c254380d8a6c20a2746614d5a436260be1507491442cf1088e59430d2"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d375d8ccd3cebae8d90270f7aa8532fe05908f79e78ae489068f3b4eee5994e8"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:014ea143572fee1c18322b7908140ad23b3994036ef4c0d630110faf942652f8"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-win32.whl", hash = "sha256:6607ae6cd3a07f8a4c3198ffbf256c261661965742e2b5265a77cd5c679c9bba"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-win_amd64.whl", hash = "sha256:fcb251305fa24a490b6a9ee2180e5f8252915fb778d3dafc70f9cc3f863827b9"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01aa5f803db724447c1d423ed583e42bf5264c597fd55e4add4301f163b0be48"}, - {file = 
"SQLAlchemy-1.3.24-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4d0e3515ef98aa4f0dc289ff2eebb0ece6260bbf37c2ea2022aad63797eacf60"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:bce28277f308db43a6b4965734366f533b3ff009571ec7ffa583cb77539b84d6"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8110e6c414d3efc574543109ee618fe2c1f96fa31833a1ff36cc34e968c4f233"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-win32.whl", hash = "sha256:ee5f5188edb20a29c1cc4a039b074fdc5575337c9a68f3063449ab47757bb064"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-win_amd64.whl", hash = "sha256:09083c2487ca3c0865dc588e07aeaa25416da3d95f7482c07e92f47e080aa17b"}, - {file = "SQLAlchemy-1.3.24.tar.gz", hash = "sha256:ebbb777cbf9312359b897bf81ba00dae0f5cb69fba2a18265dcc18a6f5ef7519"}, -] + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = 
"sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = 
"SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = 
"SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = 
"SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" [package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mysql = ["mysqlclient"] -oracle = ["cx-oracle"] -postgresql = ["psycopg2"] -postgresql-pg8000 = ["pg8000 (<1.16.6)"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] - -[[package]] -name = "sqlalchemy-continuum" -version = "1.3.15" -description = "Versioning and auditing extension for SQLAlchemy." 
-optional = false -python-versions = "*" -files = [ - {file = "SQLAlchemy-Continuum-1.3.15.tar.gz", hash = "sha256:a52689d1580daabc496ca8420fa742d62b371bc43921d6c06374192e52e0c409"}, - {file = "SQLAlchemy_Continuum-1.3.15-py3-none-any.whl", hash = "sha256:d2ecc798500f5d5195ecd2143f7d1eabb0aa46e0f213e38bfeb4331d25cc08d3"}, -] - -[package.dependencies] -six = "*" -SQLAlchemy = ">=1.0.8" -SQLAlchemy-Utils = ">=0.30.12" - -[package.extras] -flask = ["Flask (>=0.9)"] -flask-login = ["Flask-Login (>=0.2.9)"] -flask-sqlalchemy = ["Flask-SQLAlchemy (>=1.0,<3.0.0)"] -flexmock = ["flexmock (>=0.9.7)"] -i18n = ["SQLAlchemy-i18n (>=0.8.4,!=1.1.0)"] -test = ["Flask (>=0.9)", "Flask-Login (>=0.2.9)", "Flask-SQLAlchemy (>=1.0,<3.0.0)", "PyMySQL (>=0.8.0)", "SQLAlchemy-i18n (>=0.8.4,!=1.1.0)", "flexmock (>=0.9.7)", "psycopg2 (>=2.4.6)", "pytest (>=2.3.5)", "six (>=1.4.0)"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy-utils" @@ -2754,6 +2943,20 @@ test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3 timezone = ["python-dateutil"] url = ["furl (>=0.4.1)"] +[[package]] +name = "stevedore" +version = "5.3.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.3.0-py3-none-any.whl", hash = "sha256:1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78"}, + {file = "stevedore-5.3.0.tar.gz", hash = "sha256:9a64265f4060312828151c204efbe9b7a9852a0d9228756344dbc7e4023e375a"}, +] + +[package.dependencies] +pbr = ">=2.0.0" + [[package]] name = "strict-rfc3339" version = "0.7" @@ -2765,16 +2968,42 @@ files = [ ] [[package]] -name = "text-unidecode" -version = "1.3" -description = "The most basic Text::Unidecode port" +name = "structlog" +version = "24.4.0" +description = "Structured Logging for Python" optional = false -python-versions = "*" +python-versions = 
">=3.8" files = [ - {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, - {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, + {file = "structlog-24.4.0-py3-none-any.whl", hash = "sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610"}, + {file = "structlog-24.4.0.tar.gz", hash = "sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4"}, ] +[package.extras] +dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "rich", "simplejson", "twisted"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"] +tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy (>=1.4)", "rich", "twisted"] + +[[package]] +name = "structured-logging" +version = "0.4.0" +description = "" +optional = false +python-versions = "^3.9" +files = [] +develop = false + +[package.dependencies] +flask = ">= 1" +structlog = "^24.1.0" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/structured-logging" + [[package]] name = "threadloop" version = "1.0.2" @@ -2791,12 +3020,12 @@ tornado = "*" [[package]] name = "thrift" -version = "0.20.0" +version = "0.21.0" description = "Python bindings for the Apache Thrift RPC system" optional = false python-versions = "*" files = [ - {file = "thrift-0.20.0.tar.gz", hash = "sha256:4dd662eadf6b8aebe8a41729527bd69adf6ceaa2a8681cbef64d1273b3e8feba"}, + {file = "thrift-0.21.0.tar.gz", hash = "sha256:5e6f7c50f936ebfa23e924229afc95eb219f8c8e5a83202dd4a391244803e402"}, ] [package.dependencies] @@ -2807,37 +3036,15 @@ all = ["tornado (>=4.0)", "twisted"] 
tornado = ["tornado (>=4.0)"] twisted = ["twisted"] -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "tomlkit" -version = "0.13.0" +version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" files = [ - {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, - {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] @@ -2862,24 +3069,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, - {file = "typing_extensions-4.12.0.tar.gz", hash = 
"sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -2890,116 +3097,199 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "werkzeug" -version = "1.0.1" +version = "3.0.0" description = "The comprehensive WSGI web application library." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" files = [ - {file = "Werkzeug-1.0.1-py2.py3-none-any.whl", hash = "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43"}, - {file = "Werkzeug-1.0.1.tar.gz", hash = "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c"}, + {file = "werkzeug-3.0.0-py3-none-any.whl", hash = "sha256:cbb2600f7eabe51dbc0502f58be0b3e1b96b893b05695ea2b35b43d4de2d9962"}, + {file = "werkzeug-3.0.0.tar.gz", hash = "sha256:3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0"}, ] +[package.dependencies] +MarkupSafe = ">=2.1.1" + [package.extras] -dev = ["coverage", "pallets-sphinx-themes", "pytest", "pytest-timeout", "sphinx", "sphinx-issues", "tox"] -watchdog = ["watchdog"] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = 
"sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = 
"wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = 
"wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = 
"sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] [[package]] name = "yarl" -version = "1.9.4" +version = "1.13.1" description = "Yet another URL library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = 
"yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = 
"yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = 
"yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash 
= "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - 
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:82e692fb325013a18a5b73a4fed5a1edaa7c58144dc67ad9ef3d604eccd451ad"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df4e82e68f43a07735ae70a2d84c0353e58e20add20ec0af611f32cd5ba43fb4"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec9dd328016d8d25702a24ee274932aebf6be9787ed1c28d021945d264235b3c"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5820bd4178e6a639b3ef1db8b18500a82ceab6d8b89309e121a6859f56585b05"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86c438ce920e089c8c2388c7dcc8ab30dfe13c09b8af3d306bcabb46a053d6f7"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3de86547c820e4f4da4606d1c8ab5765dd633189791f15247706a2eeabc783ae"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca53632007c69ddcdefe1e8cbc3920dd88825e618153795b57e6ebcc92e752a"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4ee1d240b84e2f213565f0ec08caef27a0e657d4c42859809155cf3a29d1735"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c49f3e379177f4477f929097f7ed4b0622a586b0aa40c07ac8c0f8e40659a1ac"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5c5e32fef09ce101fe14acd0f498232b5710effe13abac14cd95de9c274e689e"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab9524e45ee809a083338a749af3b53cc7efec458c3ad084361c1dbf7aaf82a2"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b1481c048fe787f65e34cb06f7d6824376d5d99f1231eae4778bbe5c3831076d"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:31497aefd68036d8e31bfbacef915826ca2e741dbb97a8d6c7eac66deda3b606"}, + {file = "yarl-1.13.1-cp310-cp310-win32.whl", 
hash = "sha256:1fa56f34b2236f5192cb5fceba7bbb09620e5337e0b6dfe2ea0ddbd19dd5b154"}, + {file = "yarl-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:1bbb418f46c7f7355084833051701b2301092e4611d9e392360c3ba2e3e69f88"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:216a6785f296169ed52cd7dcdc2612f82c20f8c9634bf7446327f50398732a51"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40c6e73c03a6befb85b72da213638b8aaa80fe4136ec8691560cf98b11b8ae6e"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2430cf996113abe5aee387d39ee19529327205cda975d2b82c0e7e96e5fdabdc"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fb4134cc6e005b99fa29dbc86f1ea0a298440ab6b07c6b3ee09232a3b48f495"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309c104ecf67626c033845b860d31594a41343766a46fa58c3309c538a1e22b2"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f90575e9fe3aae2c1e686393a9689c724cd00045275407f71771ae5d690ccf38"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d2e1626be8712333a9f71270366f4a132f476ffbe83b689dd6dc0d114796c74"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b66c87da3c6da8f8e8b648878903ca54589038a0b1e08dde2c86d9cd92d4ac9"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf1ad338620249f8dd6d4b6a91a69d1f265387df3697ad5dc996305cf6c26fb2"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9915300fe5a0aa663c01363db37e4ae8e7c15996ebe2c6cce995e7033ff6457f"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:703b0f584fcf157ef87816a3c0ff868e8c9f3c370009a8b23b56255885528f10"}, + {file = 
"yarl-1.13.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1d8e3ca29f643dd121f264a7c89f329f0fcb2e4461833f02de6e39fef80f89da"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7055bbade838d68af73aea13f8c86588e4bcc00c2235b4b6d6edb0dbd174e246"}, + {file = "yarl-1.13.1-cp311-cp311-win32.whl", hash = "sha256:a3442c31c11088e462d44a644a454d48110f0588de830921fd201060ff19612a"}, + {file = "yarl-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:81bad32c8f8b5897c909bf3468bf601f1b855d12f53b6af0271963ee67fff0d2"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f452cc1436151387d3d50533523291d5f77c6bc7913c116eb985304abdbd9ec9"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9cec42a20eae8bebf81e9ce23fb0d0c729fc54cf00643eb251ce7c0215ad49fe"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d959fe96e5c2712c1876d69af0507d98f0b0e8d81bee14cfb3f6737470205419"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c837ab90c455f3ea8e68bee143472ee87828bff19ba19776e16ff961425b57"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94a993f976cdcb2dc1b855d8b89b792893220db8862d1a619efa7451817c836b"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2442a415a5f4c55ced0fade7b72123210d579f7d950e0b5527fc598866e62c"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fdbf0418489525231723cdb6c79e7738b3cbacbaed2b750cb033e4ea208f220"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b7f6e699304717fdc265a7e1922561b02a93ceffdaefdc877acaf9b9f3080b8"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bcd5bf4132e6a8d3eb54b8d56885f3d3a38ecd7ecae8426ecf7d9673b270de43"}, + {file 
= "yarl-1.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2a93a4557f7fc74a38ca5a404abb443a242217b91cd0c4840b1ebedaad8919d4"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:22b739f99c7e4787922903f27a892744189482125cc7b95b747f04dd5c83aa9f"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2db874dd1d22d4c2c657807562411ffdfabec38ce4c5ce48b4c654be552759dc"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4feaaa4742517eaceafcbe74595ed335a494c84634d33961214b278126ec1485"}, + {file = "yarl-1.13.1-cp312-cp312-win32.whl", hash = "sha256:bbf9c2a589be7414ac4a534d54e4517d03f1cbb142c0041191b729c2fa23f320"}, + {file = "yarl-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:d07b52c8c450f9366c34aa205754355e933922c79135125541daae6cbf31c799"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:95c6737f28069153c399d875317f226bbdea939fd48a6349a3b03da6829fb550"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cd66152561632ed4b2a9192e7f8e5a1d41e28f58120b4761622e0355f0fe034c"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6a2acde25be0cf9be23a8f6cbd31734536a264723fca860af3ae5e89d771cd71"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18595e6a2ee0826bf7dfdee823b6ab55c9b70e8f80f8b77c37e694288f5de1"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a31d21089894942f7d9a8df166b495101b7258ff11ae0abec58e32daf8088813"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45f209fb4bbfe8630e3d2e2052535ca5b53d4ce2d2026bed4d0637b0416830da"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f722f30366474a99745533cc4015b1781ee54b08de73260b2bbe13316079851"}, + {file = 
"yarl-1.13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3bf60444269345d712838bb11cc4eadaf51ff1a364ae39ce87a5ca8ad3bb2c8"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:942c80a832a79c3707cca46bd12ab8aa58fddb34b1626d42b05aa8f0bcefc206"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:44b07e1690f010c3c01d353b5790ec73b2f59b4eae5b0000593199766b3f7a5c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:396e59b8de7e4d59ff5507fb4322d2329865b909f29a7ed7ca37e63ade7f835c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3bb83a0f12701c0b91112a11148b5217617982e1e466069d0555be9b372f2734"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c92b89bffc660f1274779cb6fbb290ec1f90d6dfe14492523a0667f10170de26"}, + {file = "yarl-1.13.1-cp313-cp313-win32.whl", hash = "sha256:269c201bbc01d2cbba5b86997a1e0f73ba5e2f471cfa6e226bcaa7fd664b598d"}, + {file = "yarl-1.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:1d0828e17fa701b557c6eaed5edbd9098eb62d8838344486248489ff233998b8"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8be8cdfe20787e6a5fcbd010f8066227e2bb9058331a4eccddec6c0db2bb85b2"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08d7148ff11cb8e886d86dadbfd2e466a76d5dd38c7ea8ebd9b0e07946e76e4b"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4afdf84610ca44dcffe8b6c22c68f309aff96be55f5ea2fa31c0c225d6b83e23"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0d12fe78dcf60efa205e9a63f395b5d343e801cf31e5e1dda0d2c1fb618073d"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298c1eecfd3257aa16c0cb0bdffb54411e3e831351cd69e6b0739be16b1bdaa8"}, + {file = 
"yarl-1.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c14c16831b565707149c742d87a6203eb5597f4329278446d5c0ae7a1a43928e"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9bacedbb99685a75ad033fd4de37129449e69808e50e08034034c0bf063f99"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658e8449b84b92a4373f99305de042b6bd0d19bf2080c093881e0516557474a5"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:373f16f38721c680316a6a00ae21cc178e3a8ef43c0227f88356a24c5193abd6"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:45d23c4668d4925688e2ea251b53f36a498e9ea860913ce43b52d9605d3d8177"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f7917697bcaa3bc3e83db91aa3a0e448bf5cde43c84b7fc1ae2427d2417c0224"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5989a38ba1281e43e4663931a53fbf356f78a0325251fd6af09dd03b1d676a09"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11b3ca8b42a024513adce810385fcabdd682772411d95bbbda3b9ed1a4257644"}, + {file = "yarl-1.13.1-cp38-cp38-win32.whl", hash = "sha256:dcaef817e13eafa547cdfdc5284fe77970b891f731266545aae08d6cce52161e"}, + {file = "yarl-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:7addd26594e588503bdef03908fc207206adac5bd90b6d4bc3e3cf33a829f57d"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a0ae6637b173d0c40b9c1462e12a7a2000a71a3258fa88756a34c7d38926911c"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:576365c9f7469e1f6124d67b001639b77113cfd05e85ce0310f5f318fd02fe85"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78f271722423b2d4851cf1f4fa1a1c4833a128d020062721ba35e1a87154a049"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:9d74f3c335cfe9c21ea78988e67f18eb9822f5d31f88b41aec3a1ec5ecd32da5"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1891d69a6ba16e89473909665cd355d783a8a31bc84720902c5911dbb6373465"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb382fd7b4377363cc9f13ba7c819c3c78ed97c36a82f16f3f92f108c787cbbf"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8854b9f80693d20cec797d8e48a848c2fb273eb6f2587b57763ccba3f3bd4b"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbf2c3f04ff50f16404ce70f822cdc59760e5e2d7965905f0e700270feb2bbfc"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb9f59f3848edf186a76446eb8bcf4c900fe147cb756fbbd730ef43b2e67c6a7"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ef9b85fa1bc91c4db24407e7c4da93a5822a73dd4513d67b454ca7064e8dc6a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:098b870c18f1341786f290b4d699504e18f1cd050ed179af8123fd8232513424"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8c723c91c94a3bc8033dd2696a0f53e5d5f8496186013167bddc3fb5d9df46a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44a4c40a6f84e4d5955b63462a0e2a988f8982fba245cf885ce3be7618f6aa7d"}, + {file = "yarl-1.13.1-cp39-cp39-win32.whl", hash = "sha256:84bbcdcf393139f0abc9f642bf03f00cac31010f3034faa03224a9ef0bb74323"}, + {file = "yarl-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:fc2931ac9ce9c61c9968989ec831d3a5e6fcaaff9474e7cfa8de80b7aff5a093"}, + {file = "yarl-1.13.1-py3-none-any.whl", hash = "sha256:6a5185ad722ab4dd52d5fb1f30dcc73282eb1ed494906a92d1a228d3f89607b0"}, + {file = "yarl-1.13.1.tar.gz", hash = "sha256:ec8cfe2295f3e5e44c51f57272afbd69414ae629ec7c6b27f5a410efc78b70a0"}, ] 
[package.dependencies] @@ -3008,20 +3298,24 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.19.1" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, - {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" -python-versions = "^3.8" -content-hash = "014a3d925813db9035baec665a62c96284d4b7023523d2b194daad46a2c09023" +python-versions = "^3.12" +content-hash = "825b0312c3e9765cc9fd00497d83cb481981a01fef671928ecdf00dd34c66a1a" diff --git a/auth-api/pre-hook-update-db.sh b/auth-api/pre-hook-update-db.sh deleted file mode 100755 index 5624bf62f3..0000000000 --- a/auth-api/pre-hook-update-db.sh +++ /dev/null @@ -1,4 +0,0 @@ -#! 
/bin/sh -cd /opt/app-root -echo 'starting upgrade' -python3 manage.py db upgrade diff --git a/auth-api/pre_hook_create_database.py b/auth-api/pre_hook_create_database.py deleted file mode 100644 index bb4be1e9a9..0000000000 --- a/auth-api/pre_hook_create_database.py +++ /dev/null @@ -1,30 +0,0 @@ -import contextlib -import os -import sys - -import sqlalchemy -import sqlalchemy.exc - -from auth_api.config import ProdConfig - - -DB_ADMIN_PASSWORD = os.getenv('DB_ADMIN_PASSWORD', None) - -if not hasattr(ProdConfig, 'DB_NAME') or not DB_ADMIN_PASSWORD: - print("Unable to create database.", sys.stdout) - sys.exit(-1) - -DATABASE_URI = 'postgresql://postgres:{password}@{host}:{port}/{name}'.format( - password=DB_ADMIN_PASSWORD, - host=ProdConfig.DB_HOST, - port=int(ProdConfig.DB_PORT), - name='postgres', -) - -with contextlib.suppress(sqlalchemy.exc.ProgrammingError): - with sqlalchemy.create_engine( - DATABASE_URI, - isolation_level='AUTOCOMMIT' - ).connect() as connection: - database = ProdConfig.DB_NAME - connection.execute(f'CREATE DATABASE {database}') diff --git a/auth-api/pyproject.toml b/auth-api/pyproject.toml index 8f20db4f73..ccac3a2160 100644 --- a/auth-api/pyproject.toml +++ b/auth-api/pyproject.toml @@ -1,120 +1,180 @@ [tool.poetry] name = "auth-api" -version = "0.1.0" +version = "3.0.6" description = "" authors = ["\"BC Registries and Online Services\""] readme = "README.md" [tool.poetry.dependencies] -python = "^3.8" +python = "^3.12" +flask-cors = "^5.0.0" +flask-migrate = "^4.0.7" +flask-moment = "^1.0.6" +flask-sqlalchemy = "^3.1.1" +flask-marshmallow = "^1.2.1" +gunicorn = "^22.0.0" +pg8000 = "^1.31.2" +flask-mail = "^0.10.0" +bcrypt = "^4.2.0" +pyhumps = "^3.8.0" +marshmallow-sqlalchemy = "^1.0.0" flask-caching = "2.3.0" -flask-cors = "5.0.0" -flask-mail = "0.10.0" -flask-migrate = "<3" -flask-moment = "1.0.5" -flask-sqlalchemy = "2.5.1" -flask-script = "2.0.6" -flask = "1.1.4" -jinja2 = "2.11.3" -mako = "1.3.5" -markupsafe = "2.0.1" 
-sqlalchemy-continuum = "1.3.15" -sqlalchemy-utils = "0.41.2" -sqlalchemy = "<1.4" -werkzeug = "<2" -aiohttp = "3.10.2" -aiosignal = "1.3.1" -alembic = "1.13.1" -argon2-cffi-bindings = "21.2.0" -argon2-cffi = "23.1.0" -async-timeout = "4.0.3" -attrs = "23.2.0" -bcrypt = "4.1.3" -blinker = "1.8.2" cachelib = "0.9.0" -cattrs = "23.2.3" -certifi = "2024.7.4" -cffi = "1.16.0" -charset-normalizer = "3.3.2" -click = "7.1.2" -ecdsa = "0.19.0" -exceptiongroup = "1.2.1" -expiringdict = "1.2.2" -flask-jwt-oidc = "0.3.0" -flask-marshmallow = "0.11.0" -frozenlist = "1.4.1" -gunicorn = "22.0.0" -idna = "3.7" -importlib-metadata = "7.1.0" -importlib-resources = "6.4.0" -itsdangerous = "1.1.0" -jaeger-client = "4.8.0" -jsonschema = "4.17.3" -launchdarkly-server-sdk = "8.2.1" -marshmallow-sqlalchemy = "0.25.0" -marshmallow = "3.21.2" -minio = "7.2.7" -multidict = "6.0.5" -opentracing = "2.4.0" -orjson = "3.10.3" -packaging = "24.0" -pkgutil-resolve-name = "1.3.10" -protobuf = ">=3.19.5,<3.20.0" -psycopg2-binary = "2.9.9" -pyrfc3339 = "1.1" -pyasn1 = "0.6.0" -pycparser = "2.22" -pycryptodome = "3.20.0" -pyhumps = "3.8.0" -pyrsistent = "0.20.0" -python-dotenv = "1.0.1" -python-jose = "3.3.0" -python-memcached = "1.62" -pytz = "2024.1" -redis = "5.0.4" -requests = "2.32.2" -rsa = "4.9" -semver = "3.0.2" -sentry-sdk = {extras = ["flask"], version = "^2.10.0"} -six = "1.16.0" -threadloop = "1.0.2" -thrift = "0.20.0" -tornado = "6.4.1" -typing-extensions = "4.12.0" -urllib3 = "2.2.2" -yarl = "1.9.4" -zipp = "3.19.1" +minio = "^7.2.7" +aiohttp = "^3.10.2" +cattrs = "^23.2.3" +orjson = "^3.10.7" +sqlalchemy-utils = "^0.41.2" +psycopg2 = "^2.9.9" # VCS dependencies -sbc-common-components = {git = "https://github.com/bcgov/sbc-common-components.git", rev = "master", subdirectory = "python"} -gcp-queue = {git = "https://github.com/seeker25/sbc-connect-common.git", branch = "small_tweaks", subdirectory = "python/gcp-queue"} -simple-cloudevent = {git = 
"https://github.com/daxiom/simple-cloudevent.py.git"} +sql-versioning = { git = "https://github.com/bcgov/sbc-connect-common.git", subdirectory = "python/sql-versioning", branch = "main" } +flask-jwt-oidc = {git = "https://github.com/seeker25/flask-jwt-oidc.git", branch = "main" } +build-deps = { git = "https://github.com/bcgov/sbc-auth.git", rev = "feature-gcp-migration", subdirectory = "build-deps" } -[tool.poetry.group.dev.dependencies] -pylint = "3.2.3" -coverage = "^5.5" -pylint-flask = "^0.6.0" -pytest = "8.1.1" -pytest-cov = "^2.11.1" -pytest-env = "^0.6.2" -pytest-dotenv = "^0.5.2" -pytest-mock = "^3.5.1" -requests = "^2.25.1" -flake8 = "5.0.4" -flake8-blind-except = "^0.1.1" -flake8-docstrings = "^1.6.0" -flake8-isort = "^4.0.0" -flake8-quotes = "^3.3.0" -pep8-naming = "^0.11.1" -autopep8 = "^1.5.6" -pydocstyle = "^5.1.1" -freezegun = "^1.1.0" -faker = "^8.1.1" -pytest-asyncio = "0.21.0" -mock = "^4.0.3" +[tool.poetry.group.test.dependencies] +psycopg2 = "^2.9.9" +pytest = "^8.3.2" +pytest-cov = "^5.0.0" +pytest-mock = "^3.14.0" +requests-mock = "^1.12.1" +faker = "^25.9.2" +freezegun = "^1.5.1" +pytest-asyncio = "^0.23.8" lovely-pytest-docker = "^0.3.1" -astroid = "^3.2.3" + +[tool.poetry.group.dev.dependencies] +black = "^24.8.0" +pylint = "^3.2.6" +bandit = "^1.7.9" +flake8-pyproject = "^1.2.3" +isort = "^5.13.2" + +[tool.bandit] +exclude_dirs = [".venv","tests"] +skips = ["B104"] + +[tool.flake8] +ignore = ["F401","E402", "Q000", "E203", "W503"] +exclude = [ + ".venv", + "./venv", + ".git", + ".history", + "devops", + "*migrations*", +] +per-file-ignores = [ + "__init__.py:F401", + "*.py:B902" +] +max-line-length = 120 +docstring-min-length=10 +count = true + +[tool.zimports] +black-line-length = 120 +keep-unused-type-checking = true + +[tool.black] +target-version = ["py310", "py311", "py312"] +line-length = 120 +include = '\.pyi?$' +extend-exclude = ''' +/( + # The following are specific to Black, you probably don't want those. 
+ migrations + | devops + | .history +)/ +''' + +[tool.isort] +atomic = true +profile = "black" +line_length = 120 +skip_gitignore = true +skip_glob = ["migrations", "devops"] + +[tool.pylint.main] +fail-under = 10 +max-line-length = 120 +ignore = [ "migrations", "devops", "tests"] +ignore-patterns = ["^\\.#"] +ignored-modules= ["flask_sqlalchemy", "sqlalchemy", "SQLAlchemy" , "alembic", "scoped_session"] +ignored-classes= "scoped_session" +ignore-long-lines = "^\\s*(# )??$" +extension-pkg-whitelist = "pydantic" +notes = ["FIXME","XXX","TODO"] +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] +disable = "C0209,C0301,W0511,W0613,W0703,W1514,W1203,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101" +argument-naming-style = "snake_case" +attr-naming-style = "snake_case" +class-attribute-naming-style = "any" +class-const-naming-style = "UPPER_CASE" +class-naming-style = "PascalCase" +const-naming-style = "UPPER_CASE" +function-naming-style = "snake_case" +inlinevar-naming-style = "any" +method-naming-style = "snake_case" +module-naming-style = "any" +variable-naming-style = "snake_case" +docstring-min-length = -1 +good-names = ["i", "j", "k", "ex", "Run", "_"] +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] +valid-classmethod-first-arg = ["cls"] +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +minversion = "2.0" +testpaths = [ + "tests", +] +addopts = "--verbose --strict -p no:warnings --cov=src --cov-report html:htmlcov --cov-report xml:coverage.xml" +python_files = [ + "test*.py" +] +norecursedirs = [ + ".git", ".tox", "venv*", "requirements*", "build", +] +log_cli = true +log_cli_level = "1" +filterwarnings = [ + 
"ignore::UserWarning" +] +markers = [ + "slow", + "serial", +] + +[tool.coverage.run] +branch = true +source = [ + "src/auth_api", +] +omit = [ + "wsgi.py", + "gunicorn_config.py" +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "from", + "import", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + 'if __name__ == "__main__":', +] [build-system] -requires = ["poetry-core"] +requires = ["poetry-core>=1.5.0"] build-backend = "poetry.core.masonry.api" diff --git a/auth-api/setup.cfg b/auth-api/setup.cfg deleted file mode 100644 index 40689abe1b..0000000000 --- a/auth-api/setup.cfg +++ /dev/null @@ -1,90 +0,0 @@ -[metadata] -name = auth_api -url = https://github.com/thorwolpert/auth_api -author = Relationships Team -author_email = -classifiers = - Development Status :: Beta - Intended Audience :: Developers / QA - Topic :: Authentication - License :: OSI Approved :: Apache Software License - Natural Language :: English - Programming Language :: Python :: 3.7 -license = Apache Software License Version 2.0 -description = A short description of the project -long_description = file: README.md -keywords = - -[options] -zip_safe = True -python_requires = >=3.6 -include_package_data = True -packages = find: - -[options.package_data] -auth_api = - -[wheel] -universal = 1 - -[bdist_wheel] -universal = 1 - -[aliases] -test = pytest - -[flake8] -ignore = I001, I003, I004, E126, W504 -exclude = .git,*migrations* -max-line-length = 120 -docstring-min-length=10 -per-file-ignores = - */__init__.py:F401 - -[pycodestyle] -max_line_length = 120 -ignore = E501 -docstring-min-length=10 -notes=FIXME,XXX # TODO is ignored -match_dir = src/auth_api -ignored-modules=flask_sqlalchemy - sqlalchemy -per-file-ignores = - */__init__.py:F401 -good-names= - b, - d, - i, - e, - f, - u, - rv, - logger, - id - -[pylint] -ignore=migrations,test -notes=FIXME,XXX,TODO 
-ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session -ignored-classes=scoped_session -generated-members=Error # allows dynamically generated member references -min-similarity-lines=15 -disable=C0301,W0511 -load-plugins=pylint_flask -good-names= - i, - e, - logger, - id - -[isort] -line_length = 120 -indent = 4 -multi_line_output = 4 -lines_after_imports = 2 - -[tool:pytest] -addopts = --cov=src --cov-report html:htmlcov --cov-report xml:coverage.xml -testpaths = tests/unit -filterwarnings = - ignore::UserWarning diff --git a/auth-api/setup.py b/auth-api/setup.py deleted file mode 100644 index 009b98e7ad..0000000000 --- a/auth-api/setup.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright © 2019 Province of British Columbia. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Installer and setup for this module -""" -from glob import glob -from os.path import basename, splitext - -from setuptools import find_packages, setup - - -def read_requirements(filename): - """ - Get application requirements from - the requirements.txt file. - :return: Python requirements - :rtype: list - """ - with open(filename, 'r') as req: - requirements = req.readlines() - install_requires = [r.strip() for r in requirements if r.find('git+') != 0] - return install_requires - - -def read(filepath): - """ - Read the contents from a file. 
- :param str filepath: path to the file to be read - :return: file contents - :rtype: str - """ - with open(filepath, 'r') as file_handle: - content = file_handle.read() - return content - - -REQUIREMENTS = read_requirements('requirements/prod.txt') - -setup( - name="auth_api", - packages=find_packages('src'), - package_dir={'': 'src'}, - py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')], - include_package_data=True, - license=read('LICENSE'), - long_description=read('README.md'), - zip_safe=False, - install_requires=REQUIREMENTS, - setup_requires=["pytest-runner"], - tests_require=["pytest"], -) diff --git a/auth-api/src/auth_api/__init__.py b/auth-api/src/auth_api/__init__.py index 3680b12c6d..d4ec31ca45 100644 --- a/auth-api/src/auth_api/__init__.py +++ b/auth-api/src/auth_api/__init__.py @@ -15,104 +15,87 @@ This module is the API for the Authroization system. """ - -import json import os -import sentry_sdk # noqa: I001; pylint: disable=ungrouped-imports,wrong-import-order; conflicts with Flake8 -from flask import Flask, g, request -from humps.main import camelize -from sbc_common_components.exception_handling.exception_handler import ExceptionHandler # noqa: I001 -from sentry_sdk.integrations.flask import FlaskIntegration # noqa: I001 +from flask import Flask +from flask_cors import CORS +from flask_migrate import Migrate, upgrade +from sbc_common_components.utils.camel_case_response import convert_to_camel +from structured_logging import StructuredLogging import auth_api.config as config # pylint:disable=consider-using-from-import -from auth_api import models -from auth_api.auth import jwt -from auth_api.config import _Config +from auth_api.exceptions import ExceptionHandler from auth_api.extensions import mail from auth_api.models import db, ma from auth_api.resources import endpoints from auth_api.services.flags import flags from auth_api.services.gcp_queue import queue +from auth_api.utils.auth import jwt from auth_api.utils.cache 
import cache -from auth_api.utils.run_version import get_run_version -from auth_api.utils.util_logging import setup_logging -setup_logging(os.path.join(_Config.PROJECT_ROOT, 'logging.conf')) # important to do this first +logger = StructuredLogging.get_logger() -def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): +def create_app(run_mode=os.getenv("DEPLOYMENT_ENV", "production")): """Return a configured Flask App using the Factory method.""" app = Flask(__name__) + app.config["ENV"] = run_mode app.config.from_object(config.CONFIGURATION[run_mode]) - if str(app.config.get('SENTRY_ENABLE')).lower() == 'true': - if app.config.get('SENTRY_DSN', None): - sentry_sdk.init( # pylint: disable=abstract-class-instantiated - dsn=app.config.get('SENTRY_DSN'), - integrations=[FlaskIntegration()] - ) - - flags.init_app(app) + CORS(app, resources="*") db.init_app(app) - ma.init_app(app) - mail.init_app(app) - queue.init_app(app) - endpoints.init_app(app) - if os.getenv('FLASK_ENV', 'production') != 'testing': + if run_mode == "migration": + Migrate(app, db) + app.logger.info("Running migration upgrade.") + with app.app_context(): + execute_migrations(app) + app.logger.info("Finished migration upgrade.") + else: + flags.init_app(app) + ma.init_app(app) + queue.init_app(app) + mail.init_app(app) + endpoints.init_app(app) + + app.after_request(convert_to_camel) + + ExceptionHandler(app) setup_jwt_manager(app, jwt) + register_shellcontext(app) + build_cache(app) - ExceptionHandler(app) - - @app.before_request - def set_origin(): - g.origin_url = request.environ.get('HTTP_ORIGIN', 'localhost') - - @app.after_request - def handle_after_request(response): # pylint: disable=unused-variable - add_version(response) - camelize_json(response) - set_access_control_header(response) - return response - - def set_access_control_header(response): - response.headers['Access-Control-Allow-Origin'] = '*' - response.headers['Access-Control-Allow-Headers'] = 'Authorization, Content-Type, 
registries-trace-id, ' \ - 'invitation_token, account-id' - - def add_version(response): - version = get_run_version() - response.headers['API'] = f'auth_api/{version}' + return app - def camelize_json(response): - if (response.headers['Content-Type'] == 'application/json' and - 'swagger.json' not in request.base_url): - response.set_data(json.dumps(camelize(json.loads(response.get_data())))) - register_shellcontext(app) - build_cache(app) - - return app +def execute_migrations(app): + """Execute the database migrations.""" + try: + upgrade(directory="migrations", revision="head", sql=False, tag=None) + except Exception as e: # NOQA pylint: disable=broad-except + app.logger.disabled = False + app.logger.error("Error processing migrations:", exc_info=True) + raise e def setup_jwt_manager(app, jwt_manager): """Use flask app to configure the JWTManager to work for a particular Realm.""" def get_roles(a_dict): - return a_dict['realm_access']['roles'] # pragma: no cover + return a_dict["realm_access"]["roles"] # pragma: no cover - app.config['JWT_ROLE_CALLBACK'] = get_roles + app.config["JWT_ROLE_CALLBACK"] = get_roles jwt_manager.init_app(app) def register_shellcontext(app): """Register shell context objects.""" + from auth_api import models # pylint: disable=import-outside-toplevel def shell_context(): """Shell context objects.""" - return {'app': app, 'jwt': jwt, 'db': db, 'models': models} # pragma: no cover + return {"app": app, "jwt": jwt, "db": db, "models": models} # pragma: no cover app.shell_context_processor(shell_context) @@ -122,14 +105,14 @@ def build_cache(app): cache.init_app(app) with app.app_context(): cache.clear() - if not app.config.get('TESTING', False): + if not app.config.get("TESTING", False): try: - from auth_api.services.permissions import \ - Permissions as PermissionService # pylint: disable=import-outside-toplevel - from auth_api.services.products import \ - Product as ProductService # pylint: disable=import-outside-toplevel + # pylint: 
disable=import-outside-toplevel + from auth_api.services.permissions import Permissions as PermissionService + from auth_api.services.products import Product as ProductService + PermissionService.build_all_permission_cache() ProductService.build_all_products_cache() except Exception as e: # NOQA # pylint:disable=broad-except - app.logger.error('Error on caching ') - app.logger.error(e) + error_msg = f"Error on caching {e}" + logger.error(error_msg) diff --git a/auth-api/src/auth_api/config.py b/auth-api/src/auth_api/config.py index f5ea7f85df..3aa3cfb3d8 100644 --- a/auth-api/src/auth_api/config.py +++ b/auth-api/src/auth_api/config.py @@ -19,7 +19,6 @@ rather than reading environment variables directly or by accessing this configuration directly. """ -import logging import os import sys from typing import List @@ -30,23 +29,24 @@ load_dotenv(find_dotenv()) CONFIGURATION = { - 'development': 'auth_api.config.DevConfig', - 'testing': 'auth_api.config.TestConfig', - 'production': 'auth_api.config.ProdConfig', - 'default': 'auth_api.config.ProdConfig' + "development": "auth_api.config.DevConfig", + "testing": "auth_api.config.TestConfig", + "production": "auth_api.config.ProdConfig", + "default": "auth_api.config.ProdConfig", + "migration": "auth_api.config.MigrationConfig", } -def get_named_config(config_name: str = 'production'): +def get_named_config(config_name: str = "production"): """Return the configuration object based on the name. 
:raise: KeyError: if an unknown configuration is requested """ - if config_name in ['production', 'staging', 'default']: + if config_name in ["production", "staging", "default"]: config = ProdConfig() - elif config_name == 'testing': + elif config_name == "testing": config = TestConfig() - elif config_name == 'development': + elif config_name == "development": config = DevConfig() else: raise KeyError("Unknown configuration '{config_name}'") @@ -58,148 +58,147 @@ class _Config: # pylint: disable=too-few-public-methods PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - SECRET_KEY = 'a secret' - AUTH_LD_SDK_KEY = os.getenv('AUTH_LD_SDK_KEY', None) - + SECRET_KEY = "a secret" TESTING = False DEBUG = False - ALEMBIC_INI = 'migrations/alembic.ini' + ALEMBIC_INI = "migrations/alembic.ini" # Config to skip migrations when alembic migrate is used - SKIPPED_MIGRATIONS = ['authorizations_view'] + SKIPPED_MIGRATIONS = ["authorizations_view"] - # POSTGRESQL - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') - if DB_UNIX_SOCKET := os.getenv('DATABASE_UNIX_SOCKET', None): - SQLALCHEMY_DATABASE_URI = f'postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?host={DB_UNIX_SOCKET}' - else: - SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' SQLALCHEMY_ECHO = False SQLALCHEMY_TRACK_MODIFICATIONS = False + # POSTGRESQL + DB_USER = os.getenv("DATABASE_USERNAME", "") + DB_PASSWORD = os.getenv("DATABASE_PASSWORD", "") + DB_NAME = os.getenv("DATABASE_NAME", "") + DB_HOST = os.getenv("DATABASE_HOST", "") + DB_PORT = int(os.getenv("DATABASE_PORT", "5432")) # POSTGRESQL + if DB_UNIX_SOCKET := os.getenv("DATABASE_UNIX_SOCKET", None): + SQLALCHEMY_DATABASE_URI = ( + 
f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" + ) + else: + SQLALCHEMY_DATABASE_URI = f"postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" + # JWT_OIDC Settings - JWT_OIDC_WELL_KNOWN_CONFIG = os.getenv('JWT_OIDC_WELL_KNOWN_CONFIG') - JWT_OIDC_ALGORITHMS = os.getenv('JWT_OIDC_ALGORITHMS') - JWT_OIDC_JWKS_URI = os.getenv('JWT_OIDC_JWKS_URI') - JWT_OIDC_ISSUER = os.getenv('JWT_OIDC_ISSUER') - JWT_OIDC_AUDIENCE = os.getenv('JWT_OIDC_AUDIENCE') - JWT_OIDC_CLIENT_SECRET = os.getenv('JWT_OIDC_CLIENT_SECRET') - JWT_OIDC_CACHING_ENABLED = os.getenv('JWT_OIDC_CACHING_ENABLED') + JWT_OIDC_WELL_KNOWN_CONFIG = os.getenv("JWT_OIDC_WELL_KNOWN_CONFIG") + JWT_OIDC_ALGORITHMS = os.getenv("JWT_OIDC_ALGORITHMS") + JWT_OIDC_JWKS_URI = os.getenv("JWT_OIDC_JWKS_URI") + JWT_OIDC_ISSUER = os.getenv("JWT_OIDC_ISSUER") + JWT_OIDC_AUDIENCE = os.getenv("JWT_OIDC_AUDIENCE") + JWT_OIDC_CLIENT_SECRET = os.getenv("JWT_OIDC_CLIENT_SECRET") + JWT_OIDC_CACHING_ENABLED = os.getenv("JWT_OIDC_CACHING_ENABLED") try: - JWT_OIDC_JWKS_CACHE_TIMEOUT = int(os.getenv('JWT_OIDC_JWKS_CACHE_TIMEOUT')) + JWT_OIDC_JWKS_CACHE_TIMEOUT = int(os.getenv("JWT_OIDC_JWKS_CACHE_TIMEOUT")) except: # pylint:disable=bare-except # noqa: B901, E722 JWT_OIDC_JWKS_CACHE_TIMEOUT = 300 # Keycloak auth config baseurl - KEYCLOAK_BASE_URL = os.getenv('KEYCLOAK_BASE_URL') - KEYCLOAK_REALMNAME = os.getenv('KEYCLOAK_REALMNAME') - KEYCLOAK_ADMIN_USERNAME = os.getenv('SBC_AUTH_ADMIN_CLIENT_ID') - KEYCLOAK_ADMIN_SECRET = os.getenv('SBC_AUTH_ADMIN_CLIENT_SECRET') + KEYCLOAK_BASE_URL = os.getenv("KEYCLOAK_BASE_URL") + KEYCLOAK_REALMNAME = os.getenv("KEYCLOAK_REALMNAME") + KEYCLOAK_ADMIN_USERNAME = os.getenv("SBC_AUTH_ADMIN_CLIENT_ID") + KEYCLOAK_ADMIN_SECRET = os.getenv("SBC_AUTH_ADMIN_CLIENT_SECRET") # keycloak service account token lifepan try: - CACHE_DEFAULT_TIMEOUT = int(os.getenv('ACCESS_TOKEN_LIFESPAN')) + CACHE_DEFAULT_TIMEOUT = 
int(os.getenv("ACCESS_TOKEN_LIFESPAN")) except: # pylint:disable=bare-except # noqa: B901, E722 CACHE_DEFAULT_TIMEOUT = 300 - CACHE_MEMCACHED_SERVERS = os.getenv('CACHE_MEMCACHED_SERVERS') - - CACHE_REDIS_HOST = os.getenv('CACHE_REDIS_HOST') - CACHE_REDIS_PORT = os.getenv('CACHE_REDIS_PORT') + CACHE_MEMCACHED_SERVERS = os.getenv("CACHE_MEMCACHED_SERVERS") + CACHE_REDIS_HOST = os.getenv("CACHE_REDIS_HOST") + CACHE_REDIS_PORT = os.getenv("CACHE_REDIS_PORT") # Service account details - KEYCLOAK_SERVICE_ACCOUNT_ID = os.getenv('SBC_AUTH_ADMIN_CLIENT_ID') - KEYCLOAK_SERVICE_ACCOUNT_SECRET = os.getenv('SBC_AUTH_ADMIN_CLIENT_SECRET') + KEYCLOAK_SERVICE_ACCOUNT_ID = os.getenv("SBC_AUTH_ADMIN_CLIENT_ID") + KEYCLOAK_SERVICE_ACCOUNT_SECRET = os.getenv("SBC_AUTH_ADMIN_CLIENT_SECRET") - ENTITY_SVC_CLIENT_ID = os.getenv('ENTITY_SERVICE_ACCOUNT_CLIENT_ID') - ENTITY_SVC_CLIENT_SECRET = os.getenv('ENTITY_SERVICE_ACCOUNT_CLIENT_SECRET') + ENTITY_SVC_CLIENT_ID = os.getenv("ENTITY_SVC_CLIENT_ID") + ENTITY_SVC_CLIENT_SECRET = os.getenv("ENTITY_SVC_CLIENT_SECRET") - # Upstream Keycloak settings - KEYCLOAK_BCROS_BASE_URL = os.getenv('KEYCLOAK_BCROS_BASE_URL') - KEYCLOAK_BCROS_REALMNAME = os.getenv('KEYCLOAK_BCROS_REALMNAME') - KEYCLOAK_BCROS_ADMIN_CLIENTID = os.getenv('KEYCLOAK_BCROS_ADMIN_CLIENTID') - KEYCLOAK_BCROS_ADMIN_SECRET = os.getenv('KEYCLOAK_BCROS_ADMIN_SECRET') + # Upstream Keycloak setting - should be removed + KEYCLOAK_BCROS_BASE_URL = os.getenv("KEYCLOAK_BCROS_BASE_URL") + KEYCLOAK_BCROS_REALMNAME = os.getenv("KEYCLOAK_BCROS_REALMNAME") + KEYCLOAK_BCROS_ADMIN_CLIENTID = os.getenv("KEYCLOAK_BCROS_ADMIN_CLIENTID") + KEYCLOAK_BCROS_ADMIN_SECRET = os.getenv("KEYCLOAK_BCROS_ADMIN_SECRET") # API Endpoints - BCOL_API_URL = os.getenv('BCOL_API_URL') - LEGAL_API_URL = os.getenv('LEGAL_API_URL', '') - NAMEX_API_URL = os.getenv('NAMEX_API_URL', '') - NOTIFY_API_URL = os.getenv('NOTIFY_API_URL') - PAY_API_SANDBOX_URL = os.getenv('PAY_API_SANDBOX_URL') - PAY_API_URL = 
os.getenv('PAY_API_URL') + BCOL_API_URL = os.getenv("BCOL_API_URL", "") + os.getenv("BCOL_API_VERSION", "") + NAMEX_API_URL = os.getenv("NAMEX_API_URL", "") + os.getenv("NAMEX_API_VERSION", "") + NOTIFY_API_URL = os.getenv("NOTIFY_API_URL", "") + os.getenv("NOTIFY_API_VERSION", "") + PAY_API_URL = os.getenv("PAY_API_URL", "") + os.getenv("PAY_API_VERSION", "") - LEGAL_API_VERSION = os.getenv('LEGAL_API_VERSION') - LEGAL_API_VERSION_2 = os.getenv('LEGAL_API_VERSION_2', '') + LEGAL_API_URL = os.getenv("LEGAL_API_URL", "") + LEGAL_API_VERSION = os.getenv("LEGAL_API_VERSION") + LEGAL_API_VERSION_2 = os.getenv("LEGAL_API_VERSION_2", "") - LEAR_AFFILIATION_DETAILS_URL = f'{LEGAL_API_URL + LEGAL_API_VERSION_2}/businesses/search' - NAMEX_AFFILIATION_DETAILS_URL = f'{NAMEX_API_URL}/requests/search' + LEAR_AFFILIATION_DETAILS_URL = f"{LEGAL_API_URL + LEGAL_API_VERSION_2}/businesses/search" + NAMEX_AFFILIATION_DETAILS_URL = f"{NAMEX_API_URL}/requests/search" + PAY_API_SANDBOX_URL = os.getenv("PAY_API_SANDBOX_URL") # PUB/SUB - PUB: account-mailer-dev, auth-event-dev - ACCOUNT_MAILER_TOPIC = os.getenv('ACCOUNT_MAILER_TOPIC', 'account-mailer-dev') - AUTH_EVENT_TOPIC = os.getenv('AUTH_EVENT_TOPIC', 'auth-event-dev') - GCP_AUTH_KEY = os.getenv('AUTHPAY_GCP_AUTH_KEY', None) + ACCOUNT_MAILER_TOPIC = os.getenv("ACCOUNT_MAILER_TOPIC", "account-mailer-dev") + AUTH_EVENT_TOPIC = os.getenv("AUTH_EVENT_TOPIC", "auth-event-dev") + GCP_AUTH_KEY = os.getenv("AUTHPAY_GCP_AUTH_KEY", None) # Minio configuration values - MINIO_ENDPOINT = os.getenv('MINIO_ENDPOINT') - MINIO_ACCESS_KEY = os.getenv('MINIO_ACCESS_KEY') - MINIO_ACCESS_SECRET = os.getenv('MINIO_ACCESS_SECRET') - MINIO_BUCKET_ACCOUNTS = os.getenv('MINIO_BUCKET_ACCOUNTS', 'accounts') + MINIO_ENDPOINT = os.getenv("MINIO_ENDPOINT") + MINIO_ACCESS_KEY = os.getenv("MINIO_ACCESS_KEY") + MINIO_ACCESS_SECRET = os.getenv("MINIO_ACCESS_SECRET") + MINIO_BUCKET_ACCOUNTS = os.getenv("MINIO_BUCKET_ACCOUNTS", "accounts") MINIO_SECURE = True # email - 
MAIL_FROM_ID = os.getenv('MAIL_FROM_ID') + MAIL_FROM_ID = os.getenv("MAIL_FROM_ID") - # mail token configuration - EMAIL_SECURITY_PASSWORD_SALT = os.getenv('EMAIL_SECURITY_PASSWORD_SALT') - EMAIL_TOKEN_SECRET_KEY = os.getenv('EMAIL_TOKEN_SECRET_KEY') - TOKEN_EXPIRY_PERIOD = os.getenv('TOKEN_EXPIRY_PERIOD') - AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS = os.getenv('AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS', '15') - STAFF_ADMIN_EMAIL = os.getenv('STAFF_ADMIN_EMAIL') - - # Sentry Config - SENTRY_ENABLE = os.getenv('SENTRY_ENABLE', 'False') - SENTRY_DSN = os.getenv('SENTRY_DSN', None) + # mail token configuration + EMAIL_SECURITY_PASSWORD_SALT = os.getenv("EMAIL_SECURITY_PASSWORD_SALT") + EMAIL_TOKEN_SECRET_KEY = os.getenv("EMAIL_TOKEN_SECRET_KEY") + TOKEN_EXPIRY_PERIOD = os.getenv("TOKEN_EXPIRY_PERIOD") + AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS = os.getenv("AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS", "15") + STAFF_ADMIN_EMAIL = os.getenv("STAFF_ADMIN_EMAIL") # front end serves this image in this name.can be moved to openshift config as well.. 
- REGISTRIES_LOGO_IMAGE_NAME = 'bc_logo_for_email.png' + REGISTRIES_LOGO_IMAGE_NAME = "bc_logo_for_email.png" # url for the front end app - WEB_APP_URL = os.getenv('WEB_APP_URL') + WEB_APP_URL = os.getenv("WEB_APP_URL") # url for the front end app - BCEID_SIGNIN_ROUTE = os.getenv('BCEID_SIGNIN_ROUTE', 'signin/bceid') + BCEID_SIGNIN_ROUTE = os.getenv("BCEID_SIGNIN_ROUTE", "signin/bceid") # url for the front end app - BCEID_ACCOUNT_SETUP_ROUTE = os.getenv('BCEID_ACCOUNT_SETUP_ROUTE', 'setup-non-bcsc-account') - BCEID_ADMIN_SETUP_ROUTE = os.getenv('BCEID_ADMIN_SETUP_ROUTE', 're-upload-affidavit') + BCEID_ACCOUNT_SETUP_ROUTE = os.getenv("BCEID_ACCOUNT_SETUP_ROUTE", "setup-non-bcsc-account") + BCEID_ADMIN_SETUP_ROUTE = os.getenv("BCEID_ADMIN_SETUP_ROUTE", "re-upload-affidavit") try: - MAX_NUMBER_OF_ORGS = int(os.getenv('MAX_NUMBER_OF_ORGS')) + MAX_NUMBER_OF_ORGS = int(os.getenv("MAX_NUMBER_OF_ORGS")) except: # pylint:disable=bare-except # noqa: B901, E722 MAX_NUMBER_OF_ORGS = 3 - BCOL_ACCOUNT_LINK_CHECK = os.getenv('BCOL_ACCOUNT_LINK_CHECK', 'True').lower() == 'true' + BCOL_ACCOUNT_LINK_CHECK = os.getenv("BCOL_ACCOUNT_LINK_CHECK", "True").lower() == "true" # Till direct pay is fully ready , keep this value false - DIRECT_PAY_ENABLED = os.getenv('DIRECT_PAY_ENABLED', 'False').lower() == 'true' + DIRECT_PAY_ENABLED = os.getenv("DIRECT_PAY_ENABLED", "False").lower() == "true" # Config value to disable activity logs - DISABLE_ACTIVITY_LOGS = os.getenv('DISABLE_ACTIVITY_LOGS', 'False').lower() == 'true' + DISABLE_ACTIVITY_LOGS = os.getenv("DISABLE_ACTIVITY_LOGS", "False").lower() == "true" # API gateway config - API_GW_CONSUMERS_API_URL = os.getenv('API_GW_CONSUMERS_API_URL', None) - API_GW_KEY = os.getenv('API_GW_KEY', None) - API_GW_CONSUMERS_SANDBOX_API_URL = os.getenv('API_GW_CONSUMERS_SANDBOX_API_URL', None) - API_GW_NON_PROD_KEY = os.getenv('API_GW_NON_PROD_KEY', None) - API_GW_EMAIL_SUFFIX = os.getenv('API_GW_EMAIL_SUFFIX', None) - API_GW_KC_CLIENT_ID_PATTERN = 
os.getenv('API_GW_KC_CLIENT_ID_PATTERN', 'api-key-account-{account_id}') + API_GW_CONSUMERS_API_URL = os.getenv("API_GW_CONSUMERS_API_URL", None) + API_GW_KEY = os.getenv("API_GW_KEY", None) + API_GW_CONSUMERS_SANDBOX_API_URL = os.getenv("API_GW_CONSUMERS_SANDBOX_API_URL", None) + API_GW_NON_PROD_KEY = os.getenv("API_GW_NON_PROD_KEY", None) + API_GW_EMAIL_SUFFIX = os.getenv("API_GW_EMAIL_SUFFIX", None) + API_GW_KC_CLIENT_ID_PATTERN = os.getenv("API_GW_KC_CLIENT_ID_PATTERN", "api-key-account-{account_id}") # NR Supported Request types. - NR_SUPPORTED_REQUEST_TYPES: List[str] = os.getenv('NR_SUPPORTED_REQUEST_TYPES', 'BC').replace(' ', '').split(',') - AUTH_WEB_SANDBOX_HOST = os.getenv('AUTH_WEB_SANDBOX_HOST', 'localhost') + NR_SUPPORTED_REQUEST_TYPES: List[str] = os.getenv("NR_SUPPORTED_REQUEST_TYPES", "BC").replace(" ", "").split(",") + AUTH_WEB_SANDBOX_HOST = os.getenv("AUTH_WEB_SANDBOX_HOST", "localhost") + + # LaunchDarkly SDK key + AUTH_LD_SDK_KEY = os.getenv("AUTH_LD_SDK_KEY", None) class DevConfig(_Config): # pylint: disable=too-few-public-methods @@ -207,8 +206,6 @@ class DevConfig(_Config): # pylint: disable=too-few-public-methods TESTING = False DEBUG = True - if os.getenv('DISABLE_JAEGER_TRACING', 'False').lower() == 'true': - logging.getLogger('jaeger_tracing').disabled = True class TestConfig(_Config): # pylint: disable=too-few-public-methods @@ -217,53 +214,54 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods DEBUG = True TESTING = True # POSTGRESQL - DB_USER = os.getenv('DATABASE_TEST_USERNAME', 'postgres') - DB_PASSWORD = os.getenv('DATABASE_TEST_PASSWORD', 'postgres') - DB_NAME = os.getenv('DATABASE_TEST_NAME', 'postgres') - DB_HOST = os.getenv('DATABASE_TEST_HOST', 'localhost') - DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432') - SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_TEST_URL', - f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}') + DB_USER = os.getenv("DATABASE_TEST_USERNAME", "postgres") + 
DB_PASSWORD = os.getenv("DATABASE_TEST_PASSWORD", "postgres") + DB_NAME = os.getenv("DATABASE_TEST_NAME", "postgres") + DB_HOST = os.getenv("DATABASE_TEST_HOST", "localhost") + DB_PORT = os.getenv("DATABASE_TEST_PORT", "5432") + SQLALCHEMY_DATABASE_URI = os.getenv( + "DATABASE_TEST_URL", f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}" + ) # JWT OIDC settings # JWT_OIDC_TEST_MODE will set jwt_manager to use JWT_OIDC_TEST_MODE = True - JWT_OIDC_TEST_AUDIENCE = os.getenv('JWT_OIDC_TEST_AUDIENCE') - JWT_OIDC_TEST_CLIENT_SECRET = os.getenv('JWT_OIDC_TEST_CLIENT_SECRET') - JWT_OIDC_TEST_ISSUER = os.getenv('JWT_OIDC_TEST_ISSUER') - JWT_OIDC_TEST_ALGORITHMS = os.getenv('JWT_OIDC_TEST_ALGORITHMS') + JWT_OIDC_TEST_AUDIENCE = os.getenv("JWT_OIDC_TEST_AUDIENCE") + JWT_OIDC_TEST_CLIENT_SECRET = os.getenv("JWT_OIDC_TEST_CLIENT_SECRET") + JWT_OIDC_TEST_ISSUER = os.getenv("JWT_OIDC_TEST_ISSUER") + JWT_OIDC_TEST_ALGORITHMS = os.getenv("JWT_OIDC_TEST_ALGORITHMS") JWT_OIDC_TEST_KEYS = { - 'keys': [ + "keys": [ { - 'kid': 'sbc-auth-web', - 'kty': 'RSA', - 'alg': 'RS256', - 'use': 'sig', - 'n': 'AN-fWcpCyE5KPzHDjigLaSUVZI0uYrcGcc40InVtl-rQRDmAh-C2W8H4_Hxhr5VLc6crsJ2LiJTV_E72S03pzpOOaaYV6-' - 'TzAjCou2GYJIXev7f6Hh512PuG5wyxda_TlBSsI-gvphRTPsKCnPutrbiukCYrnPuWxX5_cES9eStR', - 'e': 'AQAB' + "kid": "sbc-auth-web", + "kty": "RSA", + "alg": "RS256", + "use": "sig", + "n": "AN-fWcpCyE5KPzHDjigLaSUVZI0uYrcGcc40InVtl-rQRDmAh-C2W8H4_Hxhr5VLc6crsJ2LiJTV_E72S03pzpOOaaYV6-" + "TzAjCou2GYJIXev7f6Hh512PuG5wyxda_TlBSsI-gvphRTPsKCnPutrbiukCYrnPuWxX5_cES9eStR", + "e": "AQAB", } ] } JWT_OIDC_TEST_PRIVATE_KEY_JWKS = { - 'keys': [ + "keys": [ { - 'kid': 'sbc-auth-web', - 'kty': 'RSA', - 'alg': 'RS256', - 'use': 'sig', - 'n': 'AN-fWcpCyE5KPzHDjigLaSUVZI0uYrcGcc40InVtl-rQRDmAh-C2W8H4_Hxhr5VLc6crsJ2LiJTV_E72S03pzpOOaaYV6-' - 'TzAjCou2GYJIXev7f6Hh512PuG5wyxda_TlBSsI-gvphRTPsKCnPutrbiukCYrnPuWxX5_cES9eStR', - 'e': 'AQAB', - 'd': 
'C0G3QGI6OQ6tvbCNYGCqq043YI_8MiBl7C5dqbGZmx1ewdJBhMNJPStuckhskURaDwk4-' - '8VBW9SlvcfSJJrnZhgFMjOYSSsBtPGBIMIdM5eSKbenCCjO8Tg0BUh_' - 'xa3CHST1W4RQ5rFXadZ9AeNtaGcWj2acmXNO3DVETXAX3x0', - 'p': 'APXcusFMQNHjh6KVD_hOUIw87lvK13WkDEeeuqAydai9Ig9JKEAAfV94W6Aftka7tGgE7ulg1vo3eJoLWJ1zvKM', - 'q': 'AOjX3OnPJnk0ZFUQBwhduCweRi37I6DAdLTnhDvcPTrrNWuKPg9uGwHjzFCJgKd8KBaDQ0X1rZTZLTqi3peT43s', - 'dp': 'AN9kBoA5o6_Rl9zeqdsIdWFmv4DB5lEqlEnC7HlAP-3oo3jWFO9KQqArQL1V8w2D4aCd0uJULiC9pCP7aTHvBhc', - 'dq': 'ANtbSY6njfpPploQsF9sU26U0s7MsuLljM1E8uml8bVJE1mNsiu9MgpUvg39jEu9BtM2tDD7Y51AAIEmIQex1nM', - 'qi': 'XLE5O360x-MhsdFXx8Vwz4304-MJg-oGSJXCK_ZWYOB_FGXFRTfebxCsSYi0YwJo-oNu96bvZCuMplzRI1liZw' + "kid": "sbc-auth-web", + "kty": "RSA", + "alg": "RS256", + "use": "sig", + "n": "AN-fWcpCyE5KPzHDjigLaSUVZI0uYrcGcc40InVtl-rQRDmAh-C2W8H4_Hxhr5VLc6crsJ2LiJTV_E72S03pzpOOaaYV6-" + "TzAjCou2GYJIXev7f6Hh512PuG5wyxda_TlBSsI-gvphRTPsKCnPutrbiukCYrnPuWxX5_cES9eStR", + "e": "AQAB", + "d": "C0G3QGI6OQ6tvbCNYGCqq043YI_8MiBl7C5dqbGZmx1ewdJBhMNJPStuckhskURaDwk4-" + "8VBW9SlvcfSJJrnZhgFMjOYSSsBtPGBIMIdM5eSKbenCCjO8Tg0BUh_" + "xa3CHST1W4RQ5rFXadZ9AeNtaGcWj2acmXNO3DVETXAX3x0", + "p": "APXcusFMQNHjh6KVD_hOUIw87lvK13WkDEeeuqAydai9Ig9JKEAAfV94W6Aftka7tGgE7ulg1vo3eJoLWJ1zvKM", + "q": "AOjX3OnPJnk0ZFUQBwhduCweRi37I6DAdLTnhDvcPTrrNWuKPg9uGwHjzFCJgKd8KBaDQ0X1rZTZLTqi3peT43s", + "dp": "AN9kBoA5o6_Rl9zeqdsIdWFmv4DB5lEqlEnC7HlAP-3oo3jWFO9KQqArQL1V8w2D4aCd0uJULiC9pCP7aTHvBhc", + "dq": "ANtbSY6njfpPploQsF9sU26U0s7MsuLljM1E8uml8bVJE1mNsiu9MgpUvg39jEu9BtM2tDD7Y51AAIEmIQex1nM", + "qi": "XLE5O360x-MhsdFXx8Vwz4304-MJg-oGSJXCK_ZWYOB_FGXFRTfebxCsSYi0YwJo-oNu96bvZCuMplzRI1liZw", } ] } @@ -285,59 +283,79 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods 4H8UZcVFN95vEKxJiLRjAmj6g273pu9kK4ymXNEjWWJn -----END RSA PRIVATE KEY-----""" - KEYCLOAK_ADMIN_USERNAME = KEYCLOAK_BCROS_ADMIN_CLIENTID = os.getenv('KEYCLOAK_TEST_ADMIN_CLIENTID') - KEYCLOAK_ADMIN_SECRET = KEYCLOAK_BCROS_ADMIN_SECRET = 
os.getenv('KEYCLOAK_TEST_ADMIN_SECRET') - KEYCLOAK_BASE_URL = KEYCLOAK_BCROS_BASE_URL = os.getenv('KEYCLOAK_TEST_BASE_URL') - KEYCLOAK_REALMNAME = KEYCLOAK_BCROS_REALMNAME = os.getenv('KEYCLOAK_TEST_REALMNAME') - JWT_OIDC_AUDIENCE = os.getenv('JWT_OIDC_TEST_AUDIENCE') - JWT_OIDC_CLIENT_SECRET = os.getenv('JWT_OIDC_TEST_CLIENT_SECRET') - JWT_OIDC_ISSUER = os.getenv('JWT_OIDC_TEST_ISSUER') + KEYCLOAK_ADMIN_USERNAME = KEYCLOAK_BCROS_ADMIN_CLIENTID = os.getenv("KEYCLOAK_TEST_ADMIN_CLIENTID") + KEYCLOAK_ADMIN_SECRET = KEYCLOAK_BCROS_ADMIN_SECRET = os.getenv("KEYCLOAK_TEST_ADMIN_SECRET") + KEYCLOAK_BASE_URL = KEYCLOAK_BCROS_BASE_URL = os.getenv("KEYCLOAK_TEST_BASE_URL") + KEYCLOAK_REALMNAME = KEYCLOAK_BCROS_REALMNAME = os.getenv("KEYCLOAK_TEST_REALMNAME") + JWT_OIDC_AUDIENCE = os.getenv("JWT_OIDC_TEST_AUDIENCE") + JWT_OIDC_CLIENT_SECRET = os.getenv("JWT_OIDC_TEST_CLIENT_SECRET") + JWT_OIDC_ISSUER = os.getenv("JWT_OIDC_TEST_ISSUER") # Service account details - KEYCLOAK_SERVICE_ACCOUNT_ID = os.getenv('KEYCLOAK_TEST_ADMIN_CLIENTID') - KEYCLOAK_SERVICE_ACCOUNT_SECRET = os.getenv('KEYCLOAK_TEST_ADMIN_SECRET') + KEYCLOAK_SERVICE_ACCOUNT_ID = os.getenv("KEYCLOAK_TEST_ADMIN_CLIENTID") + KEYCLOAK_SERVICE_ACCOUNT_SECRET = os.getenv("KEYCLOAK_TEST_ADMIN_SECRET") # Legal-API URL - ENTITY_SVC_CLIENT_ID = os.getenv('KEYCLOAK_TEST_ADMIN_CLIENTID') - ENTITY_SVC_CLIENT_SECRET = os.getenv('KEYCLOAK_TEST_ADMIN_SECRET') + ENTITY_SVC_CLIENT_ID = os.getenv("KEYCLOAK_TEST_ADMIN_CLIENTID") + ENTITY_SVC_CLIENT_SECRET = os.getenv("KEYCLOAK_TEST_ADMIN_SECRET") - LEGAL_API_URL = 'https://mock-auth-tools.pathfinder.gov.bc.ca/rest/legal-api/2.7' - LEGAL_API_VERSION_2 = '/api/v1' + LEGAL_API_URL = "https://mock-auth-tools.pathfinder.gov.bc.ca/rest/legal-api/2.7" + LEGAL_API_VERSION_2 = "/api/v1" - NOTIFY_API_URL = 'http://localhost:8080/notify-api/api/v1' - BCOL_API_URL = 'http://localhost:8080/bcol-api/api/v1' - PAY_API_URL = 'http://localhost:8080/pay-api/api/v1' - PAY_API_SANDBOX_URL = 
'http://localhost:8080/pay-api/api/v1' + NOTIFY_API_URL = "http://localhost:8080/notify-api/api/v1" + BCOL_API_URL = "http://localhost:8080/bcol-api/api/v1" + PAY_API_URL = "http://localhost:8080/pay-api/api/v1" + PAY_API_SANDBOX_URL = "http://localhost:8080/pay-api/api/v1" # If any value is present in this flag, starts up a keycloak docker - USE_TEST_KEYCLOAK_DOCKER = os.getenv('USE_TEST_KEYCLOAK_DOCKER', None) - USE_DOCKER_MOCK = os.getenv('USE_DOCKER_MOCK', None) + USE_TEST_KEYCLOAK_DOCKER = os.getenv("USE_TEST_KEYCLOAK_DOCKER", None) + USE_DOCKER_MOCK = os.getenv("USE_DOCKER_MOCK", None) MAX_NUMBER_OF_ORGS = 3 BCOL_ACCOUNT_LINK_CHECK = True # Minio variables - MINIO_ENDPOINT = 'localhost:9000' - MINIO_ACCESS_KEY = 'minio' - MINIO_ACCESS_SECRET = 'minio123' - MINIO_BUCKET_ACCOUNTS = 'accounts' + MINIO_ENDPOINT = "localhost:9000" + MINIO_ACCESS_KEY = "minio" + MINIO_ACCESS_SECRET = "minio123" + MINIO_BUCKET_ACCOUNTS = "accounts" MINIO_SECURE = False - STAFF_ADMIN_EMAIL = 'test@test.com' - ACCOUNT_MAILER_TOPIC = os.getenv('ACCOUNT_MAILER_TOPIC', 'account-mailer-dev') + STAFF_ADMIN_EMAIL = "test@test.com" + ACCOUNT_MAILER_TOPIC = os.getenv("ACCOUNT_MAILER_TOPIC", "account-mailer-dev") - API_GW_CONSUMERS_API_URL = 'https://bcregistry-bcregistry-mock.apigee.net/mockTarget' - API_GW_CONSUMERS_SANDBOX_API_URL = 'https://bcregistry-bcregistry-mock.apigee.net/mockTarget' - API_GW_CONSUMER_EMAIL = 'test.all.mc@gov.bc.ca' + API_GW_CONSUMERS_API_URL = "https://bcregistry-bcregistry-mock.apigee.net/mockTarget" + API_GW_CONSUMERS_SANDBOX_API_URL = "https://bcregistry-bcregistry-mock.apigee.net/mockTarget" + API_GW_CONSUMER_EMAIL = "test.all.mc@gov.bc.ca" class ProdConfig(_Config): # pylint: disable=too-few-public-methods """Production environment configuration.""" - SECRET_KEY = os.getenv('SECRET_KEY', None) + SECRET_KEY = os.getenv("SECRET_KEY", None) if not SECRET_KEY: SECRET_KEY = os.urandom(24) - print('WARNING: SECRET_KEY being set as a one-shot', file=sys.stderr) + 
print("WARNING: SECRET_KEY being set as a one-shot", file=sys.stderr) TESTING = False DEBUG = False + + +class MigrationConfig: # pylint: disable=too-few-public-methods + """Config for db migration.""" + + TESTING = False + DEBUG = True + + # POSTGRESQL + DB_USER = os.getenv("DATABASE_USERNAME", "") + DB_PASSWORD = os.getenv("DATABASE_PASSWORD", "") + DB_NAME = os.getenv("DATABASE_NAME", "") + DB_HOST = os.getenv("DATABASE_HOST", "") + DB_PORT = int(os.getenv("DATABASE_PORT", "5432")) # POSTGRESQL + if DB_UNIX_SOCKET := os.getenv("DATABASE_UNIX_SOCKET", None): + SQLALCHEMY_DATABASE_URI = ( + f"postgresql+pg8000://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?unix_sock={DB_UNIX_SOCKET}/.s.PGSQL.5432" + ) + else: + SQLALCHEMY_DATABASE_URI = f"postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}" diff --git a/auth-api/src/auth_api/exceptions/__init__.py b/auth-api/src/auth_api/exceptions/__init__.py index fd2fc32700..ac7bdb4fc2 100644 --- a/auth-api/src/auth_api/exceptions/__init__.py +++ b/auth-api/src/auth_api/exceptions/__init__.py @@ -6,47 +6,7 @@ error - a description of the error {code / description: classname / full text} status_code - where possible use HTTP Error Codes """ -from http import HTTPStatus - -import traceback - -from sbc_common_components.tracing.exception_tracing import ExceptionTracing # noqa: I001, I003 from auth_api.exceptions.errors import Error # noqa: I001, I003 - - -class BusinessException(Exception): # noqa: N818 - """Exception that adds error code and error name, that can be used for i18n support.""" - - def __init__(self, error, exception, *args, **kwargs): - """Return a valid BusinessException.""" - super().__init__(*args, **kwargs) - - self.message = error.message - self.error = error.message - self.code = error.name - self.status_code = error.status_code - self.detail = exception - - # log/tracing exception - ExceptionTracing.trace(self, traceback.format_exc()) - - -class ServiceUnavailableException(Exception): # noqa: 
N818 - """Exception to be raised if third party service is unavailable.""" - - def __init__(self, error, *args, **kwargs): - """Return a valid ServiceUnavailableException.""" - super().__init__(*args, **kwargs) - self.error = error - self.status_code = Error.SERVICE_UNAVAILABLE.name - - -class CustomException: - """A custom exception object to be used propagate errors.""" - - def __init__(self, message, status_code, name=None): - """Return a Custom exception when enum cant be used.""" - self.message = message - self.status_code = status_code - self.name = name +from auth_api.exceptions.exception_handler import ExceptionHandler +from auth_api.exceptions.exceptions import BusinessException, CustomException, ServiceUnavailableException diff --git a/auth-api/src/auth_api/exceptions/errors.py b/auth-api/src/auth_api/exceptions/errors.py index 4c2ccf1092..8e349a187e 100644 --- a/auth-api/src/auth_api/exceptions/errors.py +++ b/auth-api/src/auth_api/exceptions/errors.py @@ -17,96 +17,119 @@ """ from enum import Enum - -from auth_api import status as http_status +from http import HTTPStatus class Error(Enum): """Error Codes.""" - INVALID_INPUT = 'Invalid input, please check.', http_status.HTTP_400_BAD_REQUEST - DATA_NOT_FOUND = 'No matching record found.', http_status.HTTP_404_NOT_FOUND - DATA_ALREADY_EXISTS = 'The data you want to insert already exists.', http_status.HTTP_400_BAD_REQUEST - INVALID_USER_CREDENTIALS = 'Invalid user credentials.', http_status.HTTP_401_UNAUTHORIZED - INVALID_REFRESH_TOKEN = 'Invalid refresh token.', http_status.HTTP_400_BAD_REQUEST - UNDEFINED_ERROR = 'Undefined error.', http_status.HTTP_400_BAD_REQUEST - DATA_CONFLICT = 'New data conflict with existing data.', http_status.HTTP_409_CONFLICT - ACTIONED_INVITATION = 'The invitation has already been accepted.', http_status.HTTP_400_BAD_REQUEST - ACTIONED_AFFILIATION_INVITATION = 'The affiliation invitation has already been accepted.', \ - http_status.HTTP_400_BAD_REQUEST - INVALID_BUSINESS_EMAIL 
= 'Business contact email not valid.', http_status.HTTP_400_BAD_REQUEST - EXPIRED_INVITATION = 'The invitation has expired.', http_status.HTTP_400_BAD_REQUEST - EXPIRED_AFFILIATION_INVITATION = 'The affiliation invitation has expired.', http_status.HTTP_400_BAD_REQUEST - INVALID_AFFILIATION_INVITATION_STATE = 'The affiliation invitation is in an invalid state for this action.', \ - http_status.HTTP_400_BAD_REQUEST - INVALID_AFFILIATION_INVITATION_TOKEN = 'The affiliation invitation token is invalid.', \ - http_status.HTTP_400_BAD_REQUEST - FAILED_AFFILIATION_INVITATION = 'Failed to dispatch the affiliation invitation', \ - http_status.HTTP_500_INTERNAL_SERVER_ERROR - AFFILIATION_INVITATION_BUSINESS_NOT_FOUND = 'The business specified for the affiliation ' \ - 'invitation could not be found.', \ - http_status.HTTP_400_BAD_REQUEST - FAILED_INVITATION = 'Failed to dispatch the invitation', http_status.HTTP_500_INTERNAL_SERVER_ERROR - FAILED_NOTIFICATION = 'Failed to dispatch the notification', http_status.HTTP_500_INTERNAL_SERVER_ERROR - DELETE_FAILED_ONLY_OWNER = 'Cannot delete as user is the only Account Administrator of some teams', \ - http_status.HTTP_400_BAD_REQUEST - DELETE_FAILED_INACTIVE_USER = 'User is already inactive', http_status.HTTP_400_BAD_REQUEST - CHANGE_ROLE_FAILED_ONLY_OWNER = 'User is only Account Administrator in org', http_status.HTTP_400_BAD_REQUEST - OWNER_CANNOT_BE_REMOVED = 'Account Administrator cannot be removed by anyone', http_status.HTTP_400_BAD_REQUEST - MAX_NUMBER_OF_ORGS_LIMIT = 'Maximum number of organisations reached', http_status.HTTP_400_BAD_REQUEST - ALREADY_CLAIMED_PASSCODE = 'Passcode you entered has already been claimed', http_status.HTTP_406_NOT_ACCEPTABLE - ORG_CANNOT_BE_DISSOLVED = 'Organization cannot be dissolved', http_status.HTTP_406_NOT_ACCEPTABLE - FAILED_ADDING_USER_IN_KEYCLOAK = 'Error adding user to keycloak', http_status.HTTP_500_INTERNAL_SERVER_ERROR - ACCCESS_TYPE_MANDATORY = 'staff created orgs needs access 
type', http_status.HTTP_400_BAD_REQUEST - USER_CANT_CREATE_ANONYMOUS_ORG = 'Only staff can create anonymous org', http_status.HTTP_401_UNAUTHORIZED - USER_CANT_CREATE_GOVM_ORG = 'Only staff can create govt ministy org', http_status.HTTP_401_UNAUTHORIZED + INVALID_INPUT = "Invalid input, please check.", HTTPStatus.BAD_REQUEST + DATA_NOT_FOUND = "No matching record found.", HTTPStatus.NOT_FOUND + DATA_ALREADY_EXISTS = "The data you want to insert already exists.", HTTPStatus.BAD_REQUEST + INVALID_USER_CREDENTIALS = "Invalid user credentials.", HTTPStatus.UNAUTHORIZED + INVALID_REFRESH_TOKEN = "Invalid refresh token.", HTTPStatus.BAD_REQUEST + UNDEFINED_ERROR = "Undefined error.", HTTPStatus.BAD_REQUEST + DATA_CONFLICT = "New data conflict with existing data.", HTTPStatus.CONFLICT + ACTIONED_INVITATION = "The invitation has already been accepted.", HTTPStatus.BAD_REQUEST + ACTIONED_AFFILIATION_INVITATION = ( + "The affiliation invitation has already been accepted.", + HTTPStatus.BAD_REQUEST, + ) + INVALID_BUSINESS_EMAIL = "Business contact email not valid.", HTTPStatus.BAD_REQUEST + EXPIRED_INVITATION = "The invitation has expired.", HTTPStatus.BAD_REQUEST + EXPIRED_AFFILIATION_INVITATION = "The affiliation invitation has expired.", HTTPStatus.BAD_REQUEST + INVALID_AFFILIATION_INVITATION_STATE = ( + "The affiliation invitation is in an invalid state for this action.", + HTTPStatus.BAD_REQUEST, + ) + INVALID_AFFILIATION_INVITATION_TOKEN = ( + "The affiliation invitation token is invalid.", + HTTPStatus.BAD_REQUEST, + ) + FAILED_AFFILIATION_INVITATION = ( + "Failed to dispatch the affiliation invitation", + HTTPStatus.INTERNAL_SERVER_ERROR, + ) + AFFILIATION_INVITATION_BUSINESS_NOT_FOUND = ( + "The business specified for the affiliation invitation could not be found.", + HTTPStatus.BAD_REQUEST, + ) + FAILED_INVITATION = "Failed to dispatch the invitation", HTTPStatus.INTERNAL_SERVER_ERROR + FAILED_NOTIFICATION = "Failed to dispatch the notification", 
HTTPStatus.INTERNAL_SERVER_ERROR + DELETE_FAILED_ONLY_OWNER = ( + "Cannot delete as user is the only Account Administrator of some teams", + HTTPStatus.BAD_REQUEST, + ) + DELETE_FAILED_INACTIVE_USER = "User is already inactive", HTTPStatus.BAD_REQUEST + CHANGE_ROLE_FAILED_ONLY_OWNER = "User is only Account Administrator in org", HTTPStatus.BAD_REQUEST + OWNER_CANNOT_BE_REMOVED = "Account Administrator cannot be removed by anyone", HTTPStatus.BAD_REQUEST + MAX_NUMBER_OF_ORGS_LIMIT = "Maximum number of organisations reached", HTTPStatus.BAD_REQUEST + ALREADY_CLAIMED_PASSCODE = "Passcode you entered has already been claimed", HTTPStatus.NOT_ACCEPTABLE + ORG_CANNOT_BE_DISSOLVED = "Organization cannot be dissolved", HTTPStatus.NOT_ACCEPTABLE + FAILED_ADDING_USER_IN_KEYCLOAK = "Error adding user to keycloak", HTTPStatus.INTERNAL_SERVER_ERROR + ACCCESS_TYPE_MANDATORY = "staff created orgs needs access type", HTTPStatus.BAD_REQUEST + USER_CANT_CREATE_ANONYMOUS_ORG = "Only staff can create anonymous org", HTTPStatus.UNAUTHORIZED + USER_CANT_CREATE_GOVM_ORG = "Only staff can create govt ministy org", HTTPStatus.UNAUTHORIZED - USER_CANT_CREATE_EXTRA_PROVINCIAL_ORG = 'Only out of province users can create extra provincial org', \ - http_status.HTTP_401_UNAUTHORIZED - USER_CANT_CREATE_REGULAR_ORG = 'Only out of province users cannot create regular org', \ - http_status.HTTP_401_UNAUTHORIZED - USER_ALREADY_EXISTS_IN_KEYCLOAK = 'User Already exists in keycloak', http_status.HTTP_409_CONFLICT - USER_ALREADY_EXISTS = 'The username is already taken', http_status.HTTP_409_CONFLICT - FAILED_ADDING_USER_ERROR = 'Adding User Failed', http_status.HTTP_500_INTERNAL_SERVER_ERROR - BCOL_ACCOUNT_ALREADY_LINKED = 'The BC Online account you have requested to link is already taken.', \ - http_status.HTTP_409_CONFLICT - BCOL_INVALID_USERNAME_PASSWORD = 'Invalid User Id or Password', http_status.HTTP_400_BAD_REQUEST + USER_CANT_CREATE_EXTRA_PROVINCIAL_ORG = ( + "Only out of province users can 
create extra provincial org", + HTTPStatus.UNAUTHORIZED, + ) + USER_CANT_CREATE_REGULAR_ORG = ( + "Only out of province users cannot create regular org", + HTTPStatus.UNAUTHORIZED, + ) + USER_ALREADY_EXISTS_IN_KEYCLOAK = "User Already exists in keycloak", HTTPStatus.CONFLICT + USER_ALREADY_EXISTS = "The username is already taken", HTTPStatus.CONFLICT + FAILED_ADDING_USER_ERROR = "Adding User Failed", HTTPStatus.INTERNAL_SERVER_ERROR + BCOL_ACCOUNT_ALREADY_LINKED = ( + "The BC Online account you have requested to link is already taken.", + HTTPStatus.CONFLICT, + ) + BCOL_INVALID_USERNAME_PASSWORD = "Invalid User Id or Password", HTTPStatus.BAD_REQUEST - # NR_EXPIRED = 'The specified name request has expired', http_status.HTTP_400_BAD_REQUEST - NR_CONSUMED = 'The specified name request has already been consumed.', http_status.HTTP_400_BAD_REQUEST - NR_NOT_APPROVED = 'The specified name request has not been approved.', http_status.HTTP_400_BAD_REQUEST - NR_INVALID_STATUS = 'The specified name request cannot be used.', http_status.HTTP_400_BAD_REQUEST - NR_NOT_FOUND = 'The specified name request number could not be found.', http_status.HTTP_400_BAD_REQUEST - NR_NOT_PAID = 'The payment for the specified name request number is not complete.', http_status.HTTP_400_BAD_REQUEST - NR_INVALID_CONTACT = 'Invalid email or phone number.', http_status.HTTP_400_BAD_REQUEST - NR_INVALID_CORP_TYPE = 'The business type associated with this name request is not yet supported.', \ - http_status.HTTP_400_BAD_REQUEST - NR_INVALID_APPLICANTS = 'The specified name request must have at least one applicant. 
Please contact staff ' + \ - 'to fix this name request.', \ - http_status.HTTP_400_BAD_REQUEST + # NR_EXPIRED = 'The specified name request has expired', HTTPStatus.BAD_REQUEST + NR_CONSUMED = "The specified name request has already been consumed.", HTTPStatus.BAD_REQUEST + NR_NOT_APPROVED = "The specified name request has not been approved.", HTTPStatus.BAD_REQUEST + NR_INVALID_STATUS = "The specified name request cannot be used.", HTTPStatus.BAD_REQUEST + NR_NOT_FOUND = "The specified name request number could not be found.", HTTPStatus.BAD_REQUEST + NR_NOT_PAID = "The payment for the specified name request number is not complete.", HTTPStatus.BAD_REQUEST + NR_INVALID_CONTACT = "Invalid email or phone number.", HTTPStatus.BAD_REQUEST + NR_INVALID_CORP_TYPE = ( + "The business type associated with this name request is not yet supported.", + HTTPStatus.BAD_REQUEST, + ) + NR_INVALID_APPLICANTS = ( + "The specified name request must have at least one applicant. Please contact staff to fix this name request.", + HTTPStatus.BAD_REQUEST, + ) - ENTITY_DELETE_FAILED = 'Cannot delete entity due to related records.', http_status.HTTP_400_BAD_REQUEST + ENTITY_DELETE_FAILED = "Cannot delete entity due to related records.", HTTPStatus.BAD_REQUEST - ACTIVE_AFFIDAVIT_EXISTS = 'Cannot upload new affidavit as a Pending affidavit is present.', \ - http_status.HTTP_400_BAD_REQUEST - BCEID_USERS_CANT_BE_OWNERS = 'BCEID Users cant be owners', http_status.HTTP_400_BAD_REQUEST - ACCOUNT_CREATION_FAILED_IN_PAY = 'Account creation failed in Pay', http_status.HTTP_500_INTERNAL_SERVER_ERROR - GOVM_ACCOUNT_DATA_MISSING = 'GOVM account creation needs payment info , gl code and mailing address', \ - http_status.HTTP_400_BAD_REQUEST - PRODUCT_SUBSCRIPTION_EXISTS = 'Org has subscription to the product exists.', http_status.HTTP_409_CONFLICT - INVALID_PRODUCT_RESUB_STATE = 'Product is not in a valid state for re-submission.', \ - http_status.HTTP_400_BAD_REQUEST - INVALID_PRODUCT_RESUBMISSION = 
'Product is not valid for re-submission.', \ - http_status.HTTP_400_BAD_REQUEST + ACTIVE_AFFIDAVIT_EXISTS = ( + "Cannot upload new affidavit as a Pending affidavit is present.", + HTTPStatus.BAD_REQUEST, + ) + BCEID_USERS_CANT_BE_OWNERS = "BCEID Users cant be owners", HTTPStatus.BAD_REQUEST + ACCOUNT_CREATION_FAILED_IN_PAY = "Account creation failed in Pay", HTTPStatus.INTERNAL_SERVER_ERROR + GOVM_ACCOUNT_DATA_MISSING = ( + "GOVM account creation needs payment info , gl code and mailing address", + HTTPStatus.BAD_REQUEST, + ) + PRODUCT_SUBSCRIPTION_EXISTS = "Org has subscription to the product exists.", HTTPStatus.CONFLICT + INVALID_PRODUCT_RESUB_STATE = "Product is not in a valid state for re-submission.", HTTPStatus.BAD_REQUEST + INVALID_PRODUCT_RESUBMISSION = "Product is not valid for re-submission.", HTTPStatus.BAD_REQUEST - OUTSTANDING_CREDIT = 'Account have credits remaining on account.', http_status.HTTP_400_BAD_REQUEST - TRANSACTIONS_IN_PROGRESS = 'Account have payment transactions in progress.', http_status.HTTP_400_BAD_REQUEST - NOT_ACTIVE_ACCOUNT = 'Account is not active.', http_status.HTTP_400_BAD_REQUEST - PAY_ACCOUNT_DEACTIVATE_ERROR = 'An error occurred while attempting to deactivate your account.Please try again', \ - http_status.HTTP_400_BAD_REQUEST - PATCH_INVALID_ACTION = 'PATCH_INVALID_ACTION', http_status.HTTP_400_BAD_REQUEST - SERVICE_UNAVAILABLE = '3rd party application unavailable', http_status.HTTP_503_SERVICE_UNAVAILABLE - NOT_AUTHORIZED_TO_PERFORM_THIS_ACTION = 'Not authorized to perform this action', http_status.HTTP_403_FORBIDDEN + OUTSTANDING_CREDIT = "Account have credits remaining on account.", HTTPStatus.BAD_REQUEST + TRANSACTIONS_IN_PROGRESS = "Account have payment transactions in progress.", HTTPStatus.BAD_REQUEST + NOT_ACTIVE_ACCOUNT = "Account is not active.", HTTPStatus.BAD_REQUEST + PAY_ACCOUNT_DEACTIVATE_ERROR = ( + "An error occurred while attempting to deactivate your account.Please try again", + HTTPStatus.BAD_REQUEST, + ) 
+ PATCH_INVALID_ACTION = "PATCH_INVALID_ACTION", HTTPStatus.BAD_REQUEST + SERVICE_UNAVAILABLE = "3rd party application unavailable", HTTPStatus.SERVICE_UNAVAILABLE + NOT_AUTHORIZED_TO_PERFORM_THIS_ACTION = "Not authorized to perform this action", HTTPStatus.FORBIDDEN def __new__(cls, message, status_code): """Attributes for the enum.""" diff --git a/auth-api/src/auth_api/exceptions/exception_handler.py b/auth-api/src/auth_api/exceptions/exception_handler.py new file mode 100644 index 0000000000..930e89dd6f --- /dev/null +++ b/auth-api/src/auth_api/exceptions/exception_handler.py @@ -0,0 +1,100 @@ +# Copyright © 2019 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Function to handle all exceptions.""" +import traceback + +from flask import request +from flask_jwt_oidc import AuthError +from sqlalchemy.exc import SQLAlchemyError +from structured_logging import StructuredLogging +from werkzeug.exceptions import HTTPException, default_exceptions + +logger = StructuredLogging.get_logger() + +RESPONSE_HEADERS = {"Content-Type": "application/json", "Access-Control-Allow-Origin": "*"} + + +class ExceptionHandler: + """Class to handle exceptions.""" + + def __init__(self, app=None): + """Private constructor.""" + if app: + self.init_app(app) + + def auth_handler(self, error): # pylint: disable=useless-option-value + """Handle AuthError.""" + logger.warning(error.error) + return error.error, error.status_code, RESPONSE_HEADERS + + def db_handler(self, error): # pylint: disable=useless-option-value + """Handle Database error.""" + stack_trace = traceback.format_exc() + message_text = str(error.__dict__["orig"]) if "orig" in error.__dict__ else "Internal server error" + error_message = f"{{error: {message_text}, stack_trace: {stack_trace}}}" + logger.exception(error_message) + error_text = error.__dict__["code"] if hasattr(error.__dict__, "code") else "" + status_code = error.status_code if hasattr(error, "status_code") else 500 + return {"error": "{}".format(error_text), "message": "{}".format(message_text)}, status_code, RESPONSE_HEADERS + + def std_handler(self, error): # pylint: disable=useless-option-value + """Handle standard exception.""" + if isinstance(error, HTTPException): + error_message = ( + f"{{error code: {error.code}, " + f"method: {request.method}, " + f"path: {request.path}, " + f"params: {request.query_string}, " + f"origin: {request.remote_addr}, " + f"headers: {request.headers} }}" + ) + logger.warning(error_message) + message = {"message": error.description, "path": request.path} + else: + stack_trace = traceback.format_exc() + error_message = f"{{error: {error}, stack_trace: {stack_trace}}}" + 
logger.exception(error_message) + message = {"message": "Internal server error"} + + return message, error.code if isinstance(error, HTTPException) else 500, RESPONSE_HEADERS + + def init_app(self, app): + """Register common exceptons or errors.""" + self.app = app + self.register(AuthError, self.auth_handler) + self.register(SQLAlchemyError, self.db_handler) + self.register(Exception) + for exception in default_exceptions: + self.register(self._get_exc_class_and_code(exception)) + + def register(self, exception_or_code, handler=None): + """Register exception with handler.""" + self.app.errorhandler(exception_or_code)(handler or self.std_handler) + + @staticmethod + def _get_exc_class_and_code(exc_class_or_code): + """Get the exception class being handled. + + For HTTP status codes or ``HTTPException`` subclasses, return both the exception and status code. + + :param exc_class_or_code: Any exception class, or an HTTP status code as an integer. + """ + if isinstance(exc_class_or_code, int): + exc_class = default_exceptions[exc_class_or_code] + else: + exc_class = exc_class_or_code + + assert issubclass(exc_class, Exception) + + return exc_class diff --git a/auth-api/src/auth_api/exceptions/exceptions.py b/auth-api/src/auth_api/exceptions/exceptions.py new file mode 100644 index 0000000000..599cf9848e --- /dev/null +++ b/auth-api/src/auth_api/exceptions/exceptions.py @@ -0,0 +1,44 @@ +"""Application Specific Exceptions, to manage the user errors. 
+ +@log_error - a decorator to automatically log the exception to the logger provided + +UserException - error, status_code - user rules error +error - a description of the error {code / description: classname / full text} +status_code - where possible use HTTP Error Codes +""" + +from auth_api.exceptions.errors import Error # noqa: I001, I003 + + +class BusinessException(Exception): # noqa: N818 + """Exception that adds error code and error name, that can be used for i18n support.""" + + def __init__(self, error, exception, *args, **kwargs): + """Return a valid BusinessException.""" + super().__init__(*args, **kwargs) + + self.message = error.message + self.error = error.message + self.code = error.name + self.status_code = error.status_code + self.detail = exception + + +class ServiceUnavailableException(Exception): # noqa: N818 + """Exception to be raised if third party service is unavailable.""" + + def __init__(self, error, *args, **kwargs): + """Return a valid ServiceUnavailableException.""" + super().__init__(*args, **kwargs) + self.error = error + self.status_code = Error.SERVICE_UNAVAILABLE.name + + +class CustomException: + """A custom exception object to be used propagate errors.""" + + def __init__(self, message, status_code, name=None): + """Return a Custom exception when enum cant be used.""" + self.message = message + self.status_code = status_code + self.name = name diff --git a/auth-api/src/auth_api/extensions/mail.py b/auth-api/src/auth_api/extensions/mail.py index 8cdb4832da..3596ce522d 100644 --- a/auth-api/src/auth_api/extensions/mail.py +++ b/auth-api/src/auth_api/extensions/mail.py @@ -18,5 +18,4 @@ from flask_mail import Mail - mail = Mail() # pylint: disable=invalid-name diff --git a/auth-api/src/auth_api/version.py b/auth-api/src/auth_api/metadata.py similarity index 62% rename from auth-api/src/auth_api/version.py rename to auth-api/src/auth_api/metadata.py index 4a006897bb..b730aab821 100644 --- a/auth-api/src/auth_api/version.py +++ 
b/auth-api/src/auth_api/metadata.py @@ -11,15 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -"""Version of this service in PEP440. - -[N!]N(.N)*[{a|b|rc}N][.postN][.devN] -Epoch segment: N! -Release segment: N(.N)* -Pre-release segment: {a|b|rc}N -Post-release segment: .postN -Development release segment: .devN +"""Get metadata information from pyproject.toml """ +import os +from importlib.metadata import metadata, version -__version__ = '2.10.5' # pylint: disable=invalid-name +meta = metadata(__package__ or __name__) +APP_NAME = meta["Name"] +APP_VERSION = meta["Version"] +APP_RUNNING_PROJECT = os.getenv("DEPLOYMENT_PROJECT", "local") +APP_RUNNING_ENVIRONMENT = os.getenv("DEPLOYMENT_ENV", "production") +FLASK_VERSION = version("flask") diff --git a/auth-api/src/auth_api/models/__init__.py b/auth-api/src/auth_api/models/__init__.py index 7710f712dc..6c6de14dce 100644 --- a/auth-api/src/auth_api/models/__init__.py +++ b/auth-api/src/auth_api/models/__init__.py @@ -15,7 +15,7 @@ """This exports all of the models and schemas used by the application.""" # noqa: I004 # noqa: I001, I003, I004 -from sbc_common_components.tracing.db_tracing import DBTracing # noqa: I001, I004 + from sqlalchemy import event # noqa: I001 from sqlalchemy.engine import Engine # noqa: I001, I003, I004 @@ -51,12 +51,9 @@ from .product_subscriptions_status import ProductSubscriptionsStatus from .product_type_code import ProductTypeCode from .pubsub_message_processing import PubSubMessageProcessing +from .staff_remark_code import StaffRemarkCode from .suspension_reason_code import SuspensionReasonCode from .task import Task from .user import User from .user_settings import UserSettings from .user_status_code import UserStatusCode -from .staff_remark_code import StaffRemarkCode - - -event.listen(Engine, 'before_cursor_execute', DBTracing.query_tracing) diff 
--git a/auth-api/src/auth_api/models/account_login_options.py b/auth-api/src/auth_api/models/account_login_options.py index 9008d2d9f2..d748c78935 100644 --- a/auth-api/src/auth_api/models/account_login_options.py +++ b/auth-api/src/auth_api/models/account_login_options.py @@ -13,25 +13,26 @@ # limitations under the License. """This manages the login options for an account/org.""" +from sql_versioning import Versioned from sqlalchemy import Boolean, Column, ForeignKey, Integer, String from sqlalchemy.orm import relationship -from .base_model import VersionedModel +from .base_model import BaseModel -class AccountLoginOptions(VersionedModel): # pylint: disable=too-few-public-methods +class AccountLoginOptions(Versioned, BaseModel): # pylint: disable=too-few-public-methods """Model for Account login options.""" - __tablename__ = 'account_login_options' + __tablename__ = "account_login_options" id = Column(Integer, primary_key=True) login_source = Column(String(20), nullable=False) - org_id = Column(ForeignKey('orgs.id'), nullable=False) + org_id = Column(ForeignKey("orgs.id"), nullable=False) is_active = Column(Boolean(), default=True) - org = relationship('Org', foreign_keys=[org_id], lazy='select') + org = relationship("Org", foreign_keys=[org_id], lazy="select") @classmethod - def find_active_by_org_id(cls, account_id): + def find_active_by_org_id(cls, account_id: int): """Find an account setting instance that matches the provided org_id.""" - return cls.query.filter_by(org_id=account_id).filter_by(is_active=True).first() + return cls.query.filter_by(org_id=int(account_id or -1)).filter_by(is_active=True).first() diff --git a/auth-api/src/auth_api/models/activity_log.py b/auth-api/src/auth_api/models/activity_log.py index 4054d30e38..c94842c1fd 100644 --- a/auth-api/src/auth_api/models/activity_log.py +++ b/auth-api/src/auth_api/models/activity_log.py @@ -21,7 +21,7 @@ class ActivityLog(BaseModel): # pylint: 
disable=too-few-public-methods,too-many-instance-attributes """Model for ActivityLog Org record.""" - __tablename__ = 'activity_logs' + __tablename__ = "activity_logs" id = Column(Integer, primary_key=True) actor_id = Column(Integer, nullable=True, index=True) # who did the activity, refers to user id in the user table. @@ -34,16 +34,24 @@ class ActivityLog(BaseModel): # pylint: disable=too-few-public-methods,too-many org_id = Column(Integer, nullable=True, index=True) @classmethod - def fetch_activity_logs_for_account(cls, org_id: int, item_name: str, # pylint:disable=too-many-arguments - item_type: str, - action: str, - page: int, limit: int): + def fetch_activity_logs_for_account( # pylint: disable=too-many-positional-arguments,too-many-arguments + cls, + org_id: int, + item_name: str, + item_type: str, + action: str, + page: int, + limit: int, + ): """Fetch all activity logs.""" from . import User # pylint:disable=cyclic-import, import-outside-toplevel - query = db.session.query(ActivityLog, User). \ - outerjoin(User, User.id == ActivityLog.actor_id). \ - filter(ActivityLog.org_id == org_id). \ - order_by(desc(ActivityLog.created)) + + query = ( + db.session.query(ActivityLog, User) + .outerjoin(User, User.id == ActivityLog.actor_id) + .filter(ActivityLog.org_id == int(org_id or -1)) + .order_by(desc(ActivityLog.created)) + ) if item_name: query = query.filter(ActivityLog.item_name == item_name) diff --git a/auth-api/src/auth_api/models/affidavit.py b/auth-api/src/auth_api/models/affidavit.py index afa8d5de04..999231e687 100644 --- a/auth-api/src/auth_api/models/affidavit.py +++ b/auth-api/src/auth_api/models/affidavit.py @@ -12,56 +12,61 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""This manages an Affidavit record in the Auth service.""" +from sql_versioning import Versioned from sqlalchemy import Column, DateTime, ForeignKey, Integer, String from sqlalchemy.orm import relationship from auth_api.utils.enums import AffidavitStatus -from .base_model import VersionedModel +from .base_model import BaseModel from .db import db from .membership import Membership from .org import Org from .user import User -class Affidavit(VersionedModel): +class Affidavit(Versioned, BaseModel): """This is the model for a Affidavit.""" - __tablename__ = 'affidavits' + __tablename__ = "affidavits" id = Column(Integer, primary_key=True, autoincrement=True) document_id = Column(String(60), index=True) issuer = Column(String(250)) - status_code = Column(ForeignKey('affidavit_statuses.code'), nullable=False) + status_code = Column(ForeignKey("affidavit_statuses.code"), nullable=False) decision_made_by = Column(String(250)) decision_made_on = Column(DateTime, nullable=True) - user_id = Column(ForeignKey('users.id'), nullable=False) + user_id = Column(ForeignKey("users.id"), nullable=False) - contacts = relationship('ContactLink', primaryjoin='Affidavit.id == ContactLink.affidavit_id', lazy='select') - status = relationship('AffidavitStatus', foreign_keys=[status_code], lazy='select') - user = relationship('User', foreign_keys=[user_id], lazy='select') + contacts = relationship( + "ContactLink", primaryjoin="Affidavit.id == ContactLink.affidavit_id", lazy="select", back_populates="affidavit" + ) + status = relationship("AffidavitStatus", foreign_keys=[status_code], lazy="select") + user = relationship("User", foreign_keys=[user_id], lazy="select") @classmethod def find_by_org_id(cls, org_id: int, filtered_affidavit_statuses=None): """Find an affidavit by org id.""" if filtered_affidavit_statuses is None: filtered_affidavit_statuses = [AffidavitStatus.INACTIVE.value] - return db.session.query(Affidavit) \ - .join(Membership, Membership.user_id == Affidavit.user_id) \ - 
.join(Org, Org.id == Membership.org_id) \ - .filter(Org.id == org_id) \ - .filter(Affidavit.status_code.notin_(filtered_affidavit_statuses)) \ - .one_or_none() # There should be only one record at most, else throw error + return ( + db.session.query(Affidavit) + .join(Membership, Membership.user_id == Affidavit.user_id) + .join(Org, Org.id == Membership.org_id) + .filter(Org.id == org_id) + .filter(Affidavit.status_code.notin_(filtered_affidavit_statuses)) + .one_or_none() + ) # There should be only one record at most, else throw error @classmethod def find_pending_by_user_id(cls, user_id: int): """Find pending affidavit by user id.""" - return cls.query.filter_by(user_id=user_id, status_code=AffidavitStatus.PENDING.value).one_or_none() + return cls.query.filter_by(user_id=int(user_id or -1), status_code=AffidavitStatus.PENDING.value).one_or_none() @classmethod def find_approved_by_user_id(cls, user_id: int): """Find pending affidavit by user id.""" - return cls.query.filter_by(user_id=user_id, status_code=AffidavitStatus.APPROVED.value).one_or_none() + return cls.query.filter_by(user_id=int(user_id or -1), status_code=AffidavitStatus.APPROVED.value).one_or_none() @classmethod def find_effective_by_user_guid(cls, user_guid: str, status: str = None): @@ -70,8 +75,10 @@ def find_effective_by_user_guid(cls, user_guid: str, status: str = None): affidavit_status = [status] else: affidavit_status = [AffidavitStatus.PENDING.value, AffidavitStatus.APPROVED.value] - return db.session.query(Affidavit) \ - .join(User, User.id == Affidavit.user_id) \ - .filter(Affidavit.status_code.in_(affidavit_status)) \ - .filter(User.keycloak_guid == user_guid) \ + return ( + db.session.query(Affidavit) + .join(User, User.id == Affidavit.user_id) + .filter(Affidavit.status_code.in_(affidavit_status)) + .filter(User.keycloak_guid == user_guid) .one_or_none() + ) diff --git a/auth-api/src/auth_api/models/affidavit_status.py b/auth-api/src/auth_api/models/affidavit_status.py index 
eac76d3e31..636dae7270 100644 --- a/auth-api/src/auth_api/models/affidavit_status.py +++ b/auth-api/src/auth_api/models/affidavit_status.py @@ -20,10 +20,11 @@ class AffidavitStatus( - BaseCodeModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined + BaseCodeModel +): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """This is the model for an Affidavit Status record.""" - __tablename__ = 'affidavit_statuses' + __tablename__ = "affidavit_statuses" @classmethod def get_default_status(cls): diff --git a/auth-api/src/auth_api/models/affiliation.py b/auth-api/src/auth_api/models/affiliation.py index 37f2f53add..8e9d0f7334 100644 --- a/auth-api/src/auth_api/models/affiliation.py +++ b/auth-api/src/auth_api/models/affiliation.py @@ -16,29 +16,33 @@ An Affiliation is between an Org and an Entity. """ from __future__ import annotations + from typing import List +from sql_versioning import Versioned from sqlalchemy import Column, ForeignKey, Integer, String from sqlalchemy.orm import contains_eager, relationship -from .base_model import VersionedModel +from .base_model import BaseModel from .db import db from .entity import Entity as EntityModel -class Affiliation(VersionedModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined +class Affiliation( + Versioned, BaseModel +): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """This is the model for an Affiliation.""" - __tablename__ = 'affiliations' + __tablename__ = "affiliations" id = Column(Integer, primary_key=True, autoincrement=True) - entity_id = Column(ForeignKey('entities.id'), nullable=False, index=True) - org_id = Column(ForeignKey('orgs.id'), nullable=False) + entity_id = Column(ForeignKey("entities.id"), nullable=False, index=True) + org_id = Column(ForeignKey("orgs.id"), nullable=False) certified_by_name = Column(String(100), nullable=True) environment = 
Column(String(20), nullable=True, index=True) - entity = relationship('Entity', foreign_keys=[entity_id], lazy='select') - org = relationship('Org', foreign_keys=[org_id], lazy='select') + entity = relationship("Entity", foreign_keys=[entity_id], lazy="select") + org = relationship("Org", foreign_keys=[org_id], lazy="select") @classmethod def filter_environment(cls, environment: str): @@ -51,28 +55,34 @@ def filter_environment(cls, environment: str): return query @classmethod - def find_affiliation_by_org_and_entity_ids(cls, org_id, entity_id, environment) -> Affiliation: + def find_affiliation_by_org_and_entity_ids(cls, org_id: int, entity_id: int, environment) -> Affiliation: """Return an affiliation for the provided org and entity ids.""" - query = cls.filter_environment(environment).filter_by(org_id=org_id, entity_id=entity_id) + query = cls.filter_environment(environment).filter_by(org_id=int(org_id or -1), entity_id=int(entity_id or -1)) return query.one_or_none() @classmethod - def find_affiliations_by_entity_id(cls, entity_id, environment) -> List[Affiliation]: + def find_affiliations_by_entity_id(cls, entity_id: int, environment) -> List[Affiliation]: """Return affiliations for the provided entity id.""" - return cls.filter_environment(environment).filter_by(entity_id=entity_id).all() + return cls.filter_environment(environment).filter_by(entity_id=int(entity_id or -1)).all() @classmethod def find_affiliation_by_ids(cls, org_id: int, affiliation_id: int) -> Affiliation: """Return the first Affiliation with the provided ids.""" - return cls.query.filter_by(org_id=org_id).filter_by(id=affiliation_id).one_or_none() + return cls.query.filter_by(org_id=int(org_id or -1)).filter_by(id=int(affiliation_id or -1)).one_or_none() @classmethod def find_affiliations_by_org_id(cls, org_id: int, environment: str) -> List[Affiliation]: """Return the affiliations with the provided org id.""" - query = db.session.query(Affiliation).join(EntityModel) \ - 
.options(contains_eager(Affiliation.entity).load_only( - EntityModel.business_identifier, EntityModel.corp_type_code)) \ - .filter(Affiliation.org_id == org_id) + query = ( + db.session.query(Affiliation) + .join(EntityModel) + .options( + contains_eager(Affiliation.entity).load_only( + EntityModel.business_identifier, EntityModel.corp_type_code + ) + ) + .filter(Affiliation.org_id == int(org_id or -1)) + ) if environment: query = query.filter(Affiliation.environment == environment) else: @@ -82,20 +92,29 @@ def find_affiliations_by_org_id(cls, org_id: int, environment: str) -> List[Affi @classmethod def find_affiliations_by_business_identifier(cls, business_identifier: str, environment: str): """Return the affiliations with the provided business identifier.""" - return cls.filter_environment(environment).\ - join(EntityModel).filter(EntityModel.business_identifier == business_identifier).all() + return ( + cls.filter_environment(environment) + .join(EntityModel) + .filter(EntityModel.business_identifier == business_identifier) + .all() + ) @classmethod - def find_affiliation_by_org_id_and_business_identifier(cls, - org_id: int, - business_identifier: str, - environment: str) -> Affiliation: + def find_affiliation_by_org_id_and_business_identifier( + cls, org_id: int, business_identifier: str, environment: str + ) -> Affiliation: """Return the affiliations with the provided org id and business identifier.""" - query = db.session.query(Affiliation).join(EntityModel) \ - .options(contains_eager(Affiliation.entity).load_only( - EntityModel.business_identifier, EntityModel.corp_type_code)) \ - .filter(Affiliation.org_id == org_id) \ + query = ( + db.session.query(Affiliation) + .join(EntityModel) + .options( + contains_eager(Affiliation.entity).load_only( + EntityModel.business_identifier, EntityModel.corp_type_code + ) + ) + .filter(Affiliation.org_id == int(org_id or -1)) .filter(EntityModel.business_identifier == business_identifier) + ) if environment: query = 
query.filter(Affiliation.environment == environment) else: diff --git a/auth-api/src/auth_api/models/affiliation_invitation.py b/auth-api/src/auth_api/models/affiliation_invitation.py index d80043a0f1..168588a073 100644 --- a/auth-api/src/auth_api/models/affiliation_invitation.py +++ b/auth-api/src/auth_api/models/affiliation_invitation.py @@ -20,10 +20,10 @@ from sqlalchemy.orm import relationship from auth_api.config import get_named_config -from auth_api.utils.enums import InvitationStatus as InvitationStatuses from auth_api.utils.enums import AffiliationInvitationType as AffiliationInvitationTypeEnum -from .affiliation_invitation_type import AffiliationInvitationType +from auth_api.utils.enums import InvitationStatus as InvitationStatuses +from .affiliation_invitation_type import AffiliationInvitationType from .base_model import BaseModel from .dataclass import AffiliationInvitationSearch from .db import db @@ -33,31 +33,31 @@ class AffiliationInvitation(BaseModel): # pylint: disable=too-many-instance-attributes """Model for an Affiliation Invitation record.""" - __tablename__ = 'affiliation_invitations' + __tablename__ = "affiliation_invitations" id = Column(Integer, primary_key=True) - from_org_id = Column(ForeignKey('orgs.id'), nullable=False, index=True) - to_org_id = Column(ForeignKey('orgs.id'), nullable=True, index=True) - entity_id = Column(ForeignKey('entities.id'), nullable=False, index=True) - affiliation_id = Column(ForeignKey('affiliations.id'), nullable=True, index=True) - sender_id = Column(ForeignKey('users.id'), nullable=False) - approver_id = Column(ForeignKey('users.id'), nullable=True) + from_org_id = Column(ForeignKey("orgs.id"), nullable=False, index=True) + to_org_id = Column(ForeignKey("orgs.id"), nullable=True, index=True) + entity_id = Column(ForeignKey("entities.id"), nullable=False, index=True) + affiliation_id = Column(ForeignKey("affiliations.id"), nullable=True, index=True) + sender_id = Column(ForeignKey("users.id"), 
nullable=False) + approver_id = Column(ForeignKey("users.id"), nullable=True) recipient_email = Column(String(8000), nullable=True) sent_date = Column(DateTime, nullable=False) accepted_date = Column(DateTime, nullable=True) token = Column(String(100), nullable=True) # stores the one time affiliation invitation token login_source = Column(String(20), nullable=True) - invitation_status_code = Column(ForeignKey('invitation_statuses.code'), nullable=False, default='PENDING') - type = Column(ForeignKey('affiliation_invitation_types.code'), nullable=False, default='EMAIL') + invitation_status_code = Column(ForeignKey("invitation_statuses.code"), nullable=False, default="PENDING") + type = Column(ForeignKey("affiliation_invitation_types.code"), nullable=False, default="EMAIL") additional_message = Column(String(4000), nullable=True) is_deleted = Column(Boolean(), default=False) - invitation_status = relationship('InvitationStatus', foreign_keys=[invitation_status_code]) - sender = relationship('User', foreign_keys=[sender_id]) - entity = relationship('Entity', foreign_keys=[entity_id], lazy='select') - from_org = relationship('Org', foreign_keys=[from_org_id], lazy='select') - to_org = relationship('Org', foreign_keys=[to_org_id], lazy='select') - affiliation = relationship('Affiliation', foreign_keys=[affiliation_id], lazy='select') + invitation_status = relationship("InvitationStatus", foreign_keys=[invitation_status_code]) + sender = relationship("User", foreign_keys=[sender_id]) + entity = relationship("Entity", foreign_keys=[entity_id], lazy="select") + from_org = relationship("Org", foreign_keys=[from_org_id], lazy="select") + to_org = relationship("Org", foreign_keys=[to_org_id], lazy="select") + affiliation = relationship("Affiliation", foreign_keys=[affiliation_id], lazy="select") @hybrid_property def expires_on(self): @@ -76,7 +76,8 @@ def status(self): if self.invitation_status_code == InvitationStatuses.PENDING.value: expiry_time = self.sent_date + timedelta( 
- minutes=int(get_named_config().AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS)) + minutes=int(get_named_config().AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS) + ) if current_time >= expiry_time: return InvitationStatuses.EXPIRED.value return self.invitation_status_code @@ -90,14 +91,14 @@ def create_from_dict(cls, invitation_info: dict, user_id, affiliation_id=None): affiliation_invitation = AffiliationInvitation() affiliation_invitation.sender_id = user_id affiliation_invitation.affiliation_id = affiliation_id - affiliation_invitation.from_org_id = invitation_info['fromOrgId'] - affiliation_invitation.to_org_id = invitation_info['toOrgId'] - affiliation_invitation.entity_id = invitation_info['entityId'] - affiliation_invitation.recipient_email = invitation_info.get('recipientEmail') + affiliation_invitation.from_org_id = invitation_info["fromOrgId"] + affiliation_invitation.to_org_id = invitation_info["toOrgId"] + affiliation_invitation.entity_id = invitation_info["entityId"] + affiliation_invitation.recipient_email = invitation_info.get("recipientEmail") affiliation_invitation.sent_date = datetime.now() - affiliation_invitation.type = invitation_info.get('type') + affiliation_invitation.type = invitation_info.get("type") affiliation_invitation.invitation_status = InvitationStatus.get_default_status() - affiliation_invitation.additional_message = invitation_info.get('additionalMessage', None) + affiliation_invitation.additional_message = invitation_info.get("additionalMessage", None) if affiliation_invitation.type is None: affiliation_invitation.type = AffiliationInvitationType.get_default_type().code @@ -117,32 +118,34 @@ def filter_by(cls, search_filter: AffiliationInvitationSearch, query=None) -> li results = db.session.query(AffiliationInvitation) if search_filter.from_org_id: - results = results.filter(AffiliationInvitation.from_org_id == search_filter.from_org_id) + results = results.filter(AffiliationInvitation.from_org_id == int(search_filter.from_org_id)) filter_set = 
True if search_filter.to_org_id: - results = results.filter(AffiliationInvitation.to_org_id == search_filter.to_org_id) + to_org_id = int(search_filter.to_org_id) if search_filter.to_org_id not in ["NaN", ""] else -1 + results = results.filter(AffiliationInvitation.to_org_id == to_org_id) filter_set = True if search_filter.sender_id: - results = results.filter(AffiliationInvitation.sender_id == search_filter.sender_id) + results = results.filter(AffiliationInvitation.sender_id == int(search_filter.sender_id)) filter_set = True if search_filter.approver_id: - results = results.filter(AffiliationInvitation.approver_id == search_filter.approver_id) + results = results.filter(AffiliationInvitation.approver_id == int(search_filter.approver_id)) filter_set = True if search_filter.entity_id: - results = results.filter(AffiliationInvitation.entity_id == search_filter.entity_id) + results = results.filter(AffiliationInvitation.entity_id == int(search_filter.entity_id)) filter_set = True if search_filter.affiliation_id: - results = results.filter(AffiliationInvitation.affiliation_id == search_filter.affiliation_id) + results = results.filter(AffiliationInvitation.affiliation_id == int(search_filter.affiliation_id)) filter_set = True if search_filter.status_codes: results = results.filter( - AffiliationInvitation.status.in_(search_filter.status_codes)) # pylint: disable=no-member + AffiliationInvitation.invitation_status_code.in_(search_filter.status_codes) + ) # pylint: disable=no-member filter_set = True if search_filter.invitation_types: @@ -152,51 +155,57 @@ def filter_by(cls, search_filter: AffiliationInvitationSearch, query=None) -> li results = results.filter(AffiliationInvitation.is_deleted == search_filter.is_deleted) if not filter_set: - raise ValueError('At least one filter has to be set!') + raise ValueError("At least one filter has to be set!") return results.all() @classmethod - def find_invitation_by_id(cls, invitation_id): + def find_invitation_by_id(cls, 
invitation_id: int): """Find an affiliation invitation record that matches the id.""" - return cls.query.filter_by(id=invitation_id).first() + return cls.query.filter_by(id=int(invitation_id or -1)).first() @classmethod - def find_invitations_from_org(cls, org_id, status=None): + def find_invitations_from_org(cls, org_id: int, status=None): """Find all affiliation invitations sent from a specific org filtered by status.""" - results = db.session.query(AffiliationInvitation) \ - .filter(AffiliationInvitation.from_org_id == org_id) + results = db.session.query(AffiliationInvitation).filter(AffiliationInvitation.from_org_id == int(org_id or -1)) return results.filter(AffiliationInvitation.status == status.value).all() if status else results.all() @classmethod - def find_invitations_to_org(cls, org_id, status=None): + def find_invitations_to_org(cls, org_id: int, status=None): """Find all affiliation invitations sent to a specific org filtered by status.""" - results = db.session.query(AffiliationInvitation) \ - .filter(AffiliationInvitation.to_org_id == org_id) + results = db.session.query(AffiliationInvitation).filter(AffiliationInvitation.to_org_id == int(org_id or -1)) return results.filter(AffiliationInvitation.status == status.value).all() if status else results.all() @classmethod - def find_invitations_by_entity(cls, entity_id, status=None): + def find_invitations_by_entity(cls, entity_id: int, status=None): """Find all affiliation invitations sent for specific entity filtered by status.""" - results = db.session.query(AffiliationInvitation) \ - .filter(AffiliationInvitation.entity_id == entity_id) + results = db.session.query(AffiliationInvitation).filter( + AffiliationInvitation.entity_id == int(entity_id or -1) + ) return results.filter(AffiliationInvitation.status == status.value).all() if status else results.all() @classmethod - def find_invitations_by_affiliation(cls, affiliation_id): + def find_invitations_by_affiliation(cls, affiliation_id: int): """Find 
all affiliation invitations associated to an affiliation.""" - return cls.query.filter_by(affiliation_id=affiliation_id).all() + return cls.query.filter_by(affiliation_id=int(affiliation_id or -1)).all() @staticmethod def find_invitations_by_org_entity_ids(from_org_id: int, entity_id: int): """Find all affiliation invitation for org and entity ids.""" - return db.session.query(AffiliationInvitation) \ - .filter(AffiliationInvitation.from_org_id == from_org_id) \ - .filter(AffiliationInvitation.entity_id == entity_id) \ - .filter(or_(AffiliationInvitation.invitation_status_code == InvitationStatuses.PENDING.value, - AffiliationInvitation.invitation_status_code == InvitationStatuses.ACCEPTED.value)).all() + return ( + db.session.query(AffiliationInvitation) + .filter(AffiliationInvitation.from_org_id == int(from_org_id or -1)) + .filter(AffiliationInvitation.entity_id == int(entity_id or -1)) + .filter( + or_( + AffiliationInvitation.invitation_status_code == InvitationStatuses.PENDING.value, + AffiliationInvitation.invitation_status_code == InvitationStatuses.ACCEPTED.value, + ) + ) + .all() + ) - def update_invitation_as_retried(self, sender_id): + def update_invitation_as_retried(self, sender_id: int): """Update this affiliation invitation with the new data.""" self.sender_id = sender_id self.sent_date = datetime.now() @@ -211,11 +220,13 @@ def set_status(self, new_status_code: str): return self @classmethod - def find_all_related_to_org(cls, org_id, search_filter=AffiliationInvitationSearch()): + def find_all_related_to_org(cls, org_id: int, search_filter=AffiliationInvitationSearch()): """Return all affiliation invitations that are related to the org (from org or to org) filtered by statuses.""" - query = db.session.query(AffiliationInvitation) \ - .filter( - or_(AffiliationInvitation.to_org_id == org_id, AffiliationInvitation.from_org_id == org_id) + query = db.session.query(AffiliationInvitation).filter( + or_( + AffiliationInvitation.to_org_id == int(org_id 
or -1), + AffiliationInvitation.from_org_id == int(org_id or -1), + ) ) return cls.filter_by(search_filter=search_filter, query=query) diff --git a/auth-api/src/auth_api/models/affiliation_invitation_type.py b/auth-api/src/auth_api/models/affiliation_invitation_type.py index 57a7b9acea..236f09d8f4 100644 --- a/auth-api/src/auth_api/models/affiliation_invitation_type.py +++ b/auth-api/src/auth_api/models/affiliation_invitation_type.py @@ -22,7 +22,7 @@ class AffiliationInvitationType(BaseCodeModel): # pylint: disable=too-few-public-methods """This is the Affiliation Invitation Type model for the Auth service.""" - __tablename__ = 'affiliation_invitation_types' + __tablename__ = "affiliation_invitation_types" @classmethod def get_default_type(cls): diff --git a/auth-api/src/auth_api/models/base_model.py b/auth-api/src/auth_api/models/base_model.py index 58749a0da9..869416dd5e 100644 --- a/auth-api/src/auth_api/models/base_model.py +++ b/auth-api/src/auth_api/models/base_model.py @@ -15,13 +15,11 @@ import datetime -from flask import current_app, g from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.orm import relationship -from sqlalchemy_continuum.plugins.flask import fetch_remote_addr -from .db import activity_plugin, db +from .db import db class BaseModel(db.Model): @@ -32,22 +30,22 @@ class BaseModel(db.Model): @declared_attr def created_by_id(cls): # pylint:disable=no-self-argument, # noqa: N805 """Return foreign key for created by.""" - return Column(ForeignKey('users.id'), default=cls._get_current_user) + return Column(ForeignKey("users.id"), default=cls._get_current_user) @declared_attr def modified_by_id(cls): # pylint:disable=no-self-argument, # noqa: N805 """Return foreign key for modified by.""" - return Column(ForeignKey('users.id'), onupdate=cls._get_current_user) + return Column(ForeignKey("users.id"), onupdate=cls._get_current_user) @declared_attr def 
created_by(cls): # pylint:disable=no-self-argument, # noqa: N805 """Return relationship for created by.""" - return relationship('User', foreign_keys=[cls.created_by_id], post_update=True, uselist=False) + return relationship("User", foreign_keys=[cls.created_by_id], post_update=True, uselist=False) @declared_attr def modified_by(cls): # pylint:disable=no-self-argument, # noqa: N805 """Return relationship for modified by.""" - return relationship('User', foreign_keys=[cls.modified_by_id], post_update=True, uselist=False) + return relationship("User", foreign_keys=[cls.modified_by_id], post_update=True, uselist=False) @classmethod def find_by_id(cls, identifier: int): @@ -62,6 +60,7 @@ def _get_current_user(): """ try: from .user import User as UserModel # pylint:disable=cyclic-import, import-outside-toplevel + user = UserModel.find_by_jwt_token() if not user: return None @@ -77,16 +76,16 @@ def update_from_dict(self, **kwargs): Will not update readonly, private fields, or relationship fields. 
""" - readonly = ['id', 'created', 'modified', 'created_by_id', 'modified_by_id'] + readonly = ["id", "created", "modified", "created_by_id", "modified_by_id"] columns = self.__table__.columns.keys() relationships = self.__mapper__.relationships.keys() - _excluded_fields = kwargs.pop('_exclude', ()) + _excluded_fields = kwargs.pop("_exclude", ()) changes = {} for key in columns: # don't update private/protected - if key.startswith('_'): + if key.startswith("_"): continue # only update if allowed, exists, and is not a relationship @@ -97,7 +96,7 @@ def update_from_dict(self, **kwargs): if allowed and exists and not is_relationship: val = getattr(self, key) if val != kwargs[key]: - changes[key] = {'old': val, 'new': kwargs[key]} + changes[key] = {"old": val, "new": kwargs[key]} setattr(self, key, kwargs[key]) return changes @@ -110,7 +109,6 @@ def flush(self): """Save and flush.""" db.session.add(self) db.session.flush() - self.create_activity(self) return self def add_to_session(self): @@ -121,7 +119,6 @@ def save(self): """Save and commit.""" db.session.add(self) db.session.flush() - self.create_activity(self) db.session.commit() return self @@ -130,7 +127,6 @@ def delete(self): """Delete and commit.""" db.session.delete(self) db.session.flush() - self.create_activity(self, is_delete=True) db.session.commit() @staticmethod @@ -144,23 +140,6 @@ def reset(self): db.session.delete(self) db.session.commit() - @classmethod - def create_activity(cls, obj, is_delete=False): - """Create activity records if the model is versioned.""" - if isinstance(obj, VersionedModel) and not current_app.config.get('DISABLE_ACTIVITY_LOGS'): - if is_delete: - verb = 'delete' - else: - verb = 'update' if obj.modified_by is not None else 'create' - - activity = activity_plugin.activity_cls(verb=verb, object=obj, data={ - 'user_name': g.jwt_oidc_token_info.get('preferred_username', - None) if g and 'jwt_oidc_token_info' in g else None, - 'remote_addr': fetch_remote_addr() - }) - - 
db.session.add(activity) - class BaseCodeModel(db.Model): """This class manages all of the base code, type or status model functions.""" @@ -187,13 +166,3 @@ def save(self): db.session.add(self) db.session.commit() return self - - -class VersionedModel(BaseModel): - """This class manages all of the base code, type or status model functions.""" - - __abstract__ = True - - __versioned__ = { - 'exclude': [] - } diff --git a/auth-api/src/auth_api/models/business_size_code.py b/auth-api/src/auth_api/models/business_size_code.py index 3b2243cf0e..198b28df29 100644 --- a/auth-api/src/auth_api/models/business_size_code.py +++ b/auth-api/src/auth_api/models/business_size_code.py @@ -25,4 +25,4 @@ class BusinessSizeCode(BaseCodeModel): # pylint: disable=too-few-public-methods That is supported by auth system. """ - __tablename__ = 'business_size_codes' + __tablename__ = "business_size_codes" diff --git a/auth-api/src/auth_api/models/business_type_code.py b/auth-api/src/auth_api/models/business_type_code.py index 2b0b91e5df..449c8558db 100644 --- a/auth-api/src/auth_api/models/business_type_code.py +++ b/auth-api/src/auth_api/models/business_type_code.py @@ -17,6 +17,7 @@ """ from sqlalchemy import Boolean, Column + from .base_model import BaseCodeModel @@ -26,7 +27,7 @@ class BusinessTypeCode(BaseCodeModel): # pylint: disable=too-few-public-methods That is supported by auth system. """ - __tablename__ = 'business_type_codes' + __tablename__ = "business_type_codes" is_government_agency = Column(Boolean(), nullable=True) is_business = Column(Boolean(), nullable=True) diff --git a/auth-api/src/auth_api/models/contact.py b/auth-api/src/auth_api/models/contact.py index 22c9cbe441..4fad87fdda 100644 --- a/auth-api/src/auth_api/models/contact.py +++ b/auth-api/src/auth_api/models/contact.py @@ -17,29 +17,30 @@ physical addresses, emails, and phone numbers. 
""" +from sql_versioning import Versioned from sqlalchemy import Column, ForeignKey, Integer, String from sqlalchemy.orm import relationship -from .base_model import VersionedModel +from .base_model import BaseModel -class Contact(VersionedModel): # pylint: disable=too-few-public-methods +class Contact(Versioned, BaseModel): # pylint: disable=too-few-public-methods """This class manages contact information for orgs and entities.""" - __tablename__ = 'contacts' + __tablename__ = "contacts" id = Column(Integer, primary_key=True) - street = Column('street', String(250), index=True) - street_additional = Column('street_additional', String(250)) - city = Column('city', String(100)) - region = Column('region', String(100)) - country = Column('country', String(20)) - postal_code = Column('postal_code', String(15)) - delivery_instructions = Column('delivery_instructions', String(4096)) - phone = Column('phone', String(15)) - phone_extension = Column('phone_extension', String(10)) - email = Column('email', String(100)) + street = Column("street", String(250), index=True) + street_additional = Column("street_additional", String(250)) + city = Column("city", String(100)) + region = Column("region", String(100)) + country = Column("country", String(20)) + postal_code = Column("postal_code", String(15)) + delivery_instructions = Column("delivery_instructions", String(4096)) + phone = Column("phone", String(15)) + phone_extension = Column("phone_extension", String(10)) + email = Column("email", String(100)) # MVP contact has been migrated over to the contact linking table (revised data model) - entity_id = Column(Integer, ForeignKey('entities.id')) + entity_id = Column(Integer, ForeignKey("entities.id")) - links = relationship('ContactLink', cascade='all, delete-orphan') + links = relationship("ContactLink", cascade="all, delete-orphan", back_populates="contact") diff --git a/auth-api/src/auth_api/models/contact_link.py b/auth-api/src/auth_api/models/contact_link.py index 
7bb255b8ad..7f90409c61 100644 --- a/auth-api/src/auth_api/models/contact_link.py +++ b/auth-api/src/auth_api/models/contact_link.py @@ -18,49 +18,50 @@ which requires this type of linkage to avoid duplication. """ +from sql_versioning import Versioned from sqlalchemy import Column, ForeignKey, Integer from sqlalchemy.orm import relationship -from .base_model import VersionedModel +from .base_model import BaseModel -class ContactLink(VersionedModel): # pylint: disable=too-few-public-methods +class ContactLink(Versioned, BaseModel): # pylint: disable=too-few-public-methods """This class manages linkages between contacts and other data entities.""" - __tablename__ = 'contact_links' + __tablename__ = "contact_links" id = Column(Integer, primary_key=True) - contact_id = Column(Integer, ForeignKey('contacts.id'), index=True) - entity_id = Column(Integer, ForeignKey('entities.id'), index=True) - user_id = Column(Integer, ForeignKey('users.id'), index=True) - org_id = Column(Integer, ForeignKey('orgs.id'), index=True) - affidavit_id = Column(Integer, ForeignKey('affidavits.id')) + contact_id = Column(Integer, ForeignKey("contacts.id"), index=True) + entity_id = Column(Integer, ForeignKey("entities.id"), index=True) + user_id = Column(Integer, ForeignKey("users.id"), index=True) + org_id = Column(Integer, ForeignKey("orgs.id"), index=True) + affidavit_id = Column(Integer, ForeignKey("affidavits.id")) - contact = relationship('Contact', foreign_keys=[contact_id]) - entity = relationship('Entity', back_populates='contacts', foreign_keys=[entity_id]) - user = relationship('User', foreign_keys=[user_id], lazy='select') - org = relationship('Org', foreign_keys=[org_id], lazy='select') - affidavit = relationship('Affidavit', foreign_keys=[affidavit_id], lazy='select') + contact = relationship("Contact", foreign_keys=[contact_id]) + entity = relationship("Entity", foreign_keys=[entity_id]) + user = relationship("User", foreign_keys=[user_id], lazy="select") + org = 
relationship("Org", foreign_keys=[org_id], lazy="select") + affidavit = relationship("Affidavit", foreign_keys=[affidavit_id], lazy="select") @classmethod - def find_by_entity_id(cls, entity_id): + def find_by_entity_id(cls, entity_id: int): """Return the first contact link with the provided entity id.""" - return cls.query.filter_by(entity_id=entity_id).first() + return cls.query.filter_by(entity_id=int(entity_id or -1)).first() @classmethod - def find_by_user_id(cls, user_id): + def find_by_user_id(cls, user_id: int): """Return the first contact link with the provided user id.""" - return cls.query.filter_by(user_id=user_id).first() + return cls.query.filter_by(user_id=int(user_id or -1)).first() @classmethod - def find_by_org_id(cls, org_id): + def find_by_org_id(cls, org_id: int): """Return the first contact link with the provided org id.""" - return cls.query.filter_by(org_id=org_id).first() + return cls.query.filter_by(org_id=int(org_id or -1)).first() @classmethod - def find_by_affidavit_id(cls, affidavit_id): + def find_by_affidavit_id(cls, affidavit_id: int): """Return the first contact link with the provided affidavit id.""" - return cls.query.filter_by(affidavit_id=affidavit_id).one_or_none() + return cls.query.filter_by(affidavit_id=int(affidavit_id or -1)).one_or_none() def has_links(self): """Check whether there are any remaining links for this contact.""" diff --git a/auth-api/src/auth_api/models/corp_type.py b/auth-api/src/auth_api/models/corp_type.py index 75a1882ec4..e9e181d60d 100644 --- a/auth-api/src/auth_api/models/corp_type.py +++ b/auth-api/src/auth_api/models/corp_type.py @@ -19,7 +19,7 @@ class CorpType(BaseCodeModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """This class manages all of the base data about a Corp Type.""" - __tablename__ = 'corp_types' + __tablename__ = "corp_types" @classmethod def get_default_corp_type(cls): diff --git a/auth-api/src/auth_api/utils/custom_query.py 
b/auth-api/src/auth_api/models/custom_query.py similarity index 51% rename from auth-api/src/auth_api/utils/custom_query.py rename to auth-api/src/auth_api/models/custom_query.py index e4356e2b71..3545194567 100644 --- a/auth-api/src/auth_api/utils/custom_query.py +++ b/auth-api/src/auth_api/models/custom_query.py @@ -1,4 +1,4 @@ -# Copyright © 2024 Province of British Columbia +# Copyright © 2023 Province of British Columbia # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,16 +11,25 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# pylint: disable=W0223 """Custom Query class to extend BaseQuery class functionality.""" from datetime import date, datetime -from flask_sqlalchemy import BaseQuery -from sqlalchemy import String, func +from flask_sqlalchemy.query import Query +from sqlalchemy import and_, func -class CustomQuery(BaseQuery): # pylint: disable=too-few-public-methods +class CustomQuery(Query): # pylint: disable=too-many-ancestors """Custom Query class to extend the base query class for helper functionality.""" + def filter_boolean(self, search_criteria, model_attribute): + """Add query filter for boolean value.""" + if search_criteria is False: + return self + if search_criteria is None: + raise ValueError("Invalid search criteria None, not True or False") + return self.filter(model_attribute == search_criteria) + def filter_conditionally(self, search_criteria, model_attribute, is_like: bool = False): """Add query filter if present.""" if search_criteria is None: @@ -33,6 +42,23 @@ def filter_conditionally(self, search_criteria, model_attribute, is_like: bool = if is_like: # Ensure any updates for this kind of LIKE searches are using SQL Alchemy functions as it uses # bind variables to mitigate SQL Injection - return 
self.filter(func.cast(model_attribute, String).ilike(f'%{search_criteria}%')) + return self.filter(func.lower(model_attribute).ilike(f"%{search_criteria}%")) return self.filter(model_attribute == search_criteria) + + def filter_conditional_date_range(self, start_date: date, end_date: date, model_attribute): + """Add query filter for a date range if present.""" + # Dates in DB are stored as UTC, you may need to take into account timezones and adjust the input dates + # depending on the needs + query = self + + if start_date and end_date: + return query.filter(and_(func.DATE(model_attribute) >= start_date, func.DATE(model_attribute) <= end_date)) + + if start_date: + query = query.filter(func.DATE(model_attribute) >= start_date) + + if end_date: + query = query.filter(func.DATE(model_attribute) <= end_date) + + return query diff --git a/auth-api/src/auth_api/models/dataclass.py b/auth-api/src/auth_api/models/dataclass.py index b28d3009c8..3dfddabca3 100644 --- a/auth-api/src/auth_api/models/dataclass.py +++ b/auth-api/src/auth_api/models/dataclass.py @@ -13,8 +13,8 @@ # limitations under the License. 
"""This module holds data classes.""" -from typing import List, Optional from dataclasses import dataclass, field +from typing import List, Optional from auth_api.utils.enums import KeycloakGroupActions @@ -123,7 +123,7 @@ class OrgSearch: # pylint: disable=too-many-instance-attributes @dataclass -class SimpleOrgSearch: # pylint: disable=too-many-instance-attributes +class SimpleOrgSearch: # pylint: disable=too-many-instance-attributes """Used for searching organizations.""" id: str @@ -137,17 +137,17 @@ class SimpleOrgSearch: # pylint: disable=too-many-instance-attributes @dataclass -class TaskSearch: # pylint: disable=too-many-instance-attributes +class TaskSearch: # pylint: disable=too-many-instance-attributes """Used for searching tasks.""" status: List[str] = field() - relationship_status: str = '' - name: str = '' - start_date: str = '' - end_date: str = '' - type: str = '' - modified_by: str = '' - submitted_sort_order: str = 'asc' + relationship_status: str = "" + name: str = "" + start_date: str = "" + end_date: str = "" + type: str = "" + modified_by: str = "" + submitted_sort_order: str = "asc" page: int = 1 limit: int = 10 diff --git a/auth-api/src/auth_api/models/db.py b/auth-api/src/auth_api/models/db.py index 885c537188..91466e13f4 100644 --- a/auth-api/src/auth_api/models/db.py +++ b/auth-api/src/auth_api/models/db.py @@ -17,16 +17,12 @@ """ from flask_marshmallow import Marshmallow from flask_sqlalchemy import SQLAlchemy -from sqlalchemy_continuum import make_versioned -from sqlalchemy_continuum.plugins import ActivityPlugin +from sql_versioning import versioned_session -from auth_api.utils.custom_query import CustomQuery +from .custom_query import CustomQuery # by convention in the Flask community these are lower case, # whereas pylint wants them upper case ma = Marshmallow() # pylint: disable=invalid-name db = SQLAlchemy(query_class=CustomQuery) # pylint: disable=invalid-name - -activity_plugin = ActivityPlugin() # pylint: disable=invalid-name - 
-make_versioned(user_cls=None, plugins=[activity_plugin]) +versioned_session(db.session) diff --git a/auth-api/src/auth_api/models/documents.py b/auth-api/src/auth_api/models/documents.py index 2f551b8f94..7dd1bf37c7 100644 --- a/auth-api/src/auth_api/models/documents.py +++ b/auth-api/src/auth_api/models/documents.py @@ -27,22 +27,32 @@ class Documents(BaseModel): """This is the model for a documents.""" - __tablename__ = 'documents' + __tablename__ = "documents" # TODO version concept is not well refined..this is the first version..refine it version_id = Column(String(10), primary_key=True, autoincrement=False) - type = Column('type', String(30), nullable=False) - content_type = Column('content_type', String(20), nullable=False) - content = Column('content', Text) + type = Column("type", String(30), nullable=False) + content_type = Column("content_type", String(20), nullable=False) + content = Column("content", Text) @classmethod def fetch_latest_document_by_type(cls, file_type) -> Documents: """Fetch latest document of any time.""" - return db.session.query(Documents).filter( - Documents.type == file_type).order_by(desc(Documents.version_id)).limit(1).one_or_none() + return ( + db.session.query(Documents) + .filter(Documents.type == file_type) + .order_by(desc(Documents.version_id)) + .limit(1) + .one_or_none() + ) @classmethod def find_latest_version_by_type(cls, file_type) -> Documents: """Fetch latest document of any time.""" - return db.session.query(Documents.version_id).filter( - Documents.type == file_type).order_by(desc(Documents.version_id)).limit(1).scalar() + return ( + db.session.query(Documents.version_id) + .filter(Documents.type == file_type) + .order_by(desc(Documents.version_id)) + .limit(1) + .scalar() + ) diff --git a/auth-api/src/auth_api/models/entity.py b/auth-api/src/auth_api/models/entity.py index af75978029..a3f2777ed8 100644 --- a/auth-api/src/auth_api/models/entity.py +++ b/auth-api/src/auth_api/models/entity.py @@ -15,38 +15,39 @@ The 
class and schema are both present in this module. """ - -from flask import current_app from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String from sqlalchemy.orm import relationship from auth_api.utils.passcode import passcode_hash from auth_api.utils.util import camelback2snake + from .base_model import BaseModel class Entity(BaseModel): # pylint: disable=too-few-public-methods, too-many-instance-attributes """This is the Entity model for the Auth service.""" - __tablename__ = 'entities' + __tablename__ = "entities" id = Column(Integer, primary_key=True) - business_identifier = Column('business_identifier', String(75), unique=True, nullable=False) - pass_code = Column('pass_code', String(75), unique=False, nullable=True) - pass_code_claimed = Column('pass_code_claimed', Boolean(), default=False) - business_number = Column('business_number', String(100), nullable=True) - name = Column('name', String(250), nullable=True) - corp_type_code = Column(String(15), ForeignKey('corp_types.code'), nullable=False) - corp_sub_type_code = Column(String(15), ForeignKey('corp_types.code')) - folio_number = Column('folio_number', String(50), nullable=True, index=True) + business_identifier = Column("business_identifier", String(75), unique=True, nullable=False) + pass_code = Column("pass_code", String(75), unique=False, nullable=True) + pass_code_claimed = Column("pass_code_claimed", Boolean(), default=False) + business_number = Column("business_number", String(100), nullable=True) + name = Column("name", String(250), nullable=True) + corp_type_code = Column(String(15), ForeignKey("corp_types.code"), nullable=False) + corp_sub_type_code = Column(String(15), ForeignKey("corp_types.code")) + folio_number = Column("folio_number", String(50), nullable=True, index=True) status = Column(String(), nullable=True) last_modified_by = Column(String(), nullable=True) last_modified = Column(DateTime, default=None, nullable=True) - contacts = relationship('ContactLink', 
back_populates='entity') - corp_type = relationship('CorpType', foreign_keys=[corp_type_code], lazy='joined', innerjoin=True) - corp_sub_type = relationship('CorpType', foreign_keys=[corp_sub_type_code]) - affiliations = relationship('Affiliation', cascade='all,delete,delete-orphan', lazy='joined') + contacts = relationship("ContactLink", back_populates="entity") + corp_type = relationship("CorpType", foreign_keys=[corp_type_code], lazy="joined", innerjoin=True) + corp_sub_type = relationship("CorpType", foreign_keys=[corp_sub_type_code]) + affiliations = relationship( + "Affiliation", cascade="all,delete,delete-orphan", lazy="joined", back_populates="entity" + ) @classmethod def find_by_business_identifier(cls, business_identifier): @@ -59,21 +60,20 @@ def create_from_dict(cls, entity_info: dict): if entity_info: entity = Entity(**camelback2snake(entity_info)) entity.pass_code = passcode_hash(entity.pass_code) - current_app.logger.debug(f'Creating entity from dictionary {entity_info}') entity.save() return entity return None @classmethod - def find_by_entity_id(cls, entity_id): + def find_by_entity_id(cls, entity_id: int): """Find an Entity instance that matches the provided id.""" - return cls.query.filter_by(id=entity_id).first() + return cls.query.filter_by(id=int(entity_id or -1)).first() def reset(self): """Reset an Entity back to init state.""" self.pass_code_claimed = False self.folio_number = None - self.name = 'Test ' + self.business_identifier + ' Name' + self.name = "Test " + self.business_identifier + " Name" self.created_by_id = None self.created = None self.modified_by_id = None diff --git a/auth-api/src/auth_api/models/invitation.py b/auth-api/src/auth_api/models/invitation.py index cc32e2992b..19183d15e2 100644 --- a/auth-api/src/auth_api/models/invitation.py +++ b/auth-api/src/auth_api/models/invitation.py @@ -30,26 +30,26 @@ class Invitation(BaseModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """Model 
for a Invitation record.""" - __tablename__ = 'invitations' + __tablename__ = "invitations" id = Column(Integer, primary_key=True) - sender_id = Column(ForeignKey('users.id'), nullable=False) + sender_id = Column(ForeignKey("users.id"), nullable=False) recipient_email = Column(String(100), nullable=False) sent_date = Column(DateTime, nullable=False) accepted_date = Column(DateTime, nullable=True) token = Column(String(100), nullable=True) # stores the one time invitation token - invitation_status_code = Column(ForeignKey('invitation_statuses.code'), nullable=False, default='PENDING') - type = Column(ForeignKey('invitation_types.code'), nullable=False, default='STANDARD') + invitation_status_code = Column(ForeignKey("invitation_statuses.code"), nullable=False, default="PENDING") + type = Column(ForeignKey("invitation_types.code"), nullable=False, default="STANDARD") - invitation_status = relationship('InvitationStatus', foreign_keys=[invitation_status_code]) - sender = relationship('User', foreign_keys=[sender_id]) - membership = relationship('InvitationMembership', cascade='all,delete') + invitation_status = relationship("InvitationStatus", foreign_keys=[invitation_status_code]) + sender = relationship("User", foreign_keys=[sender_id]) + membership = relationship("InvitationMembership", cascade="all,delete", back_populates="invitation") login_source = Column(String(20), nullable=True) @hybrid_property def expires_on(self): """Calculate the expiry date based on the config value.""" - if self.invitation_status_code == 'PENDING': + if self.invitation_status_code == "PENDING": return self.sent_date + timedelta(days=int(get_named_config().TOKEN_EXPIRY_PERIOD)) return None @@ -57,27 +57,27 @@ def expires_on(self): def status(self): """Calculate the status based on the config value.""" current_time = datetime.now() - if self.invitation_status_code == 'PENDING': + if self.invitation_status_code == "PENDING": expiry_time = self.sent_date + 
timedelta(days=int(get_named_config().TOKEN_EXPIRY_PERIOD)) if current_time >= expiry_time: - return 'EXPIRED' + return "EXPIRED" return self.invitation_status_code @classmethod - def create_from_dict(cls, invitation_info: dict, user_id, invitation_type): + def create_from_dict(cls, invitation_info: dict, user_id: int, invitation_type): """Create a new Invitation from the provided dictionary.""" if invitation_info: invitation = Invitation() invitation.sender_id = user_id invitation.type = invitation_type - invitation.recipient_email = invitation_info['recipientEmail'] + invitation.recipient_email = invitation_info["recipientEmail"] invitation.sent_date = datetime.now() invitation.invitation_status = InvitationStatus.get_default_status() - for member in invitation_info['membership']: + for member in invitation_info["membership"]: invitation_membership = InvitationMembership() - invitation_membership.org_id = member['orgId'] - invitation_membership.membership_type_code = member['membershipType'] + invitation_membership.org_id = member["orgId"] + invitation_membership.membership_type_code = member["membershipType"] invitation.membership.append(invitation_membership) invitation.save() @@ -85,41 +85,50 @@ def create_from_dict(cls, invitation_info: dict, user_id, invitation_type): return None @classmethod - def find_invitations_by_user(cls, user_id): + def find_invitations_by_user(cls, user_id: int): """Find all invitation sent by the given user.""" - return cls.query.filter_by(sender_id=user_id).all() + return cls.query.filter_by(sender_id=int(user_id or -1)).all() @classmethod - def find_invitation_by_id(cls, invitation_id): + def find_invitation_by_id(cls, invitation_id: int): """Find an invitation record that matches the id.""" - return cls.query.filter_by(id=invitation_id).first() + return cls.query.filter_by(id=int(invitation_id or -1)).first() @classmethod - def find_invitations_by_org(cls, org_id, status=None): + def find_invitations_by_org(cls, org_id: int, 
status=None): """Find all invitations sent for specific org filtered by status.""" - results = cls.query.filter(Invitation.membership.any(InvitationMembership.org_id == org_id)) + results = cls.query.filter(Invitation.membership.any(InvitationMembership.org_id == int(org_id or -1))) return results.filter(Invitation.status == status.value).all() if status else results.all() @staticmethod - def find_pending_invitations_by_user(user_id): + def find_pending_invitations_by_user(user_id: int): """Find all invitations that are not in accepted state.""" - return db.session.query(Invitation). \ - filter(Invitation.sender_id == user_id). \ - filter(Invitation.invitation_status_code != 'ACCEPTED').all() + return ( + db.session.query(Invitation) + .filter(Invitation.sender_id == user_id) + .filter(Invitation.invitation_status_code != "ACCEPTED") + .all() + ) @staticmethod - def find_pending_invitations_by_org(org_id): + def find_pending_invitations_by_org(org_id: int): """Find all invitations that are not in accepted state.""" - return db.session.query(Invitation) \ - .filter(Invitation.membership.any(InvitationMembership.org_id == org_id)) \ - .filter(Invitation.invitation_status_code != 'ACCEPTED').all() + return ( + db.session.query(Invitation) + .filter(Invitation.membership.any(InvitationMembership.org_id == int(org_id or -1))) + .filter(Invitation.invitation_status_code != "ACCEPTED") + .all() + ) @staticmethod - def find_invitations_by_status(user_id, status): + def find_invitations_by_status(user_id: int, status): """Find all invitations that are not in accepted state.""" - return db.session.query(Invitation). \ - filter(Invitation.sender_id == user_id). 
\ - filter(Invitation.invitation_status_code == status).all() + return ( + db.session.query(Invitation) + .filter(Invitation.sender_id == int(user_id or -1)) + .filter(Invitation.invitation_status_code == status) + .all() + ) def update_invitation_as_retried(self): """Update this invitation with the new data.""" diff --git a/auth-api/src/auth_api/models/invitation_membership.py b/auth-api/src/auth_api/models/invitation_membership.py index da9704c72c..62898c514f 100644 --- a/auth-api/src/auth_api/models/invitation_membership.py +++ b/auth-api/src/auth_api/models/invitation_membership.py @@ -25,13 +25,13 @@ class InvitationMembership(BaseModel): # pylint: disable=too-few-public-methods """Model for Invitation Membership. Associates Invitation, Orgs and Membership type.""" - __tablename__ = 'invitation_memberships' + __tablename__ = "invitation_memberships" id = Column(Integer, primary_key=True) - invitation_id = Column(ForeignKey('invitations.id'), nullable=False, index=True) - org_id = Column(ForeignKey('orgs.id'), nullable=False) - membership_type_code = Column(ForeignKey('membership_types.code'), nullable=False) + invitation_id = Column(ForeignKey("invitations.id"), nullable=False, index=True) + org_id = Column(ForeignKey("orgs.id"), nullable=False) + membership_type_code = Column(ForeignKey("membership_types.code"), nullable=False) - membership_type = relationship('MembershipType', foreign_keys=[membership_type_code]) - org = relationship('Org', foreign_keys=[org_id]) - invitation = relationship('Invitation', foreign_keys=[invitation_id]) + membership_type = relationship("MembershipType", foreign_keys=[membership_type_code]) + org = relationship("Org", foreign_keys=[org_id]) + invitation = relationship("Invitation", foreign_keys=[invitation_id]) diff --git a/auth-api/src/auth_api/models/invitation_type.py b/auth-api/src/auth_api/models/invitation_type.py index 837ffe1d1c..7540606658 100644 --- a/auth-api/src/auth_api/models/invitation_type.py +++ 
b/auth-api/src/auth_api/models/invitation_type.py @@ -22,7 +22,7 @@ class InvitationType(BaseCodeModel): # pylint: disable=too-few-public-methods """This is the Invitation Type model for the Auth service.""" - __tablename__ = 'invitation_types' + __tablename__ = "invitation_types" @classmethod def get_default_type(cls): diff --git a/auth-api/src/auth_api/models/invite_status.py b/auth-api/src/auth_api/models/invite_status.py index 9c1f77570c..a0dd9bdd4c 100644 --- a/auth-api/src/auth_api/models/invite_status.py +++ b/auth-api/src/auth_api/models/invite_status.py @@ -22,7 +22,7 @@ class InvitationStatus(BaseCodeModel): # pylint: disable=too-few-public-methods """This is the Invitation Status model for the Auth service.""" - __tablename__ = 'invitation_statuses' + __tablename__ = "invitation_statuses" @classmethod def get_default_status(cls): diff --git a/auth-api/src/auth_api/models/membership.py b/auth-api/src/auth_api/models/membership.py index ce049b0aaa..1d9b956f3d 100644 --- a/auth-api/src/auth_api/models/membership.py +++ b/auth-api/src/auth_api/models/membership.py @@ -20,107 +20,119 @@ from typing import List +from sql_versioning import Versioned from sqlalchemy import Column, ForeignKey, Integer, and_, desc, func from sqlalchemy.orm import relationship from auth_api.utils.enums import OrgType, Status from auth_api.utils.roles import ADMIN, COORDINATOR, USER, VALID_ORG_STATUSES, VALID_STATUSES -from .base_model import VersionedModel + +from .base_model import BaseModel from .db import db from .membership_status_code import MembershipStatusCode from .membership_type import MembershipType from .org import Org as OrgModel -class Membership(VersionedModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined +class Membership( + Versioned, BaseModel +): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """Model for a Membership model. 
Associates Users and Orgs.""" - __tablename__ = 'memberships' + __tablename__ = "memberships" id = Column(Integer, primary_key=True) - user_id = Column(ForeignKey('users.id'), nullable=False, index=True) - org_id = Column(ForeignKey('orgs.id'), nullable=False, index=True) - membership_type_code = Column( - ForeignKey('membership_types.code'), nullable=False - ) - status = Column( - ForeignKey('membership_status_codes.id'), index=True - ) - membership_type = relationship('MembershipType', foreign_keys=[membership_type_code], lazy='select') - membership_status = relationship('MembershipStatusCode', foreign_keys=[status], lazy='select') - user = relationship('User', foreign_keys=[user_id], lazy='select') - org = relationship('Org', foreign_keys=[org_id], lazy='select') + user_id = Column(ForeignKey("users.id"), nullable=False, index=True) + org_id = Column(ForeignKey("orgs.id"), nullable=False, index=True) + membership_type_code = Column(ForeignKey("membership_types.code"), nullable=False) + status = Column(ForeignKey("membership_status_codes.id"), index=True) + membership_type = relationship("MembershipType", foreign_keys=[membership_type_code], lazy="select") + membership_status = relationship("MembershipStatusCode", foreign_keys=[status], lazy="select") + user = relationship("User", foreign_keys=[user_id], lazy="select") + org = relationship("Org", foreign_keys=[org_id], lazy="select") def __init__(self, **kwargs): # pylint: disable=super-init-not-called """Initialize a new membership.""" - self.org_id = kwargs.get('org_id') - self.user_id = kwargs.get('user_id') + self.org_id = kwargs.get("org_id") + self.user_id = kwargs.get("user_id") - self.membership_type_code = kwargs.get('membership_type_code') + self.membership_type_code = kwargs.get("membership_type_code") if self.membership_type_code is None: self.membership_type = MembershipType.get_default_type() - self.status = kwargs.get('membership_type_status') + self.status = kwargs.get("membership_type_status") if 
self.status is None: self.status = MembershipStatusCode.get_default_type() else: - self.status = kwargs.get('membership_type_status') + self.status = kwargs.get("membership_type_status") @classmethod - def find_membership_by_id(cls, membership_id) -> Membership: + def find_membership_by_id(cls, membership_id: int) -> Membership: """Find the first membership with the given id and return it.""" - return cls.query.filter_by(id=membership_id).first() + return cls.query.filter_by(id=int(membership_id or -1)).first() @classmethod - def find_members_by_org_id(cls, org_id) -> List[Membership]: + def find_members_by_org_id(cls, org_id: int) -> List[Membership]: """Return all members of the org with a status.""" - return cls.query.filter_by(org_id=org_id).all() + return cls.query.filter_by(org_id=int(org_id or -1)).all() @classmethod - def get_pending_members_count_by_org_id(cls, org_id) -> int: + def get_pending_members_count_by_org_id(cls, org_id: int) -> int: """Return the count of pending members.""" - query = db.session.query(Membership).filter( - and_(Membership.status == Status.PENDING_APPROVAL.value)). 
\ - join(OrgModel).filter(OrgModel.id == org_id) - count_q = query.statement.with_only_columns([func.count()]).order_by(None) + query = ( + db.session.query(Membership) + .filter(and_(Membership.status == Status.PENDING_APPROVAL.value)) + .join(OrgModel) + .filter(OrgModel.id == int(org_id or -1)) + ) + # pylint:disable=not-callable + count_q = query.statement.with_only_columns(func.count(), maintain_column_froms=True).order_by(None) count = query.session.execute(count_q).scalar() return count @classmethod - def find_members_by_org_id_by_status_by_roles(cls, org_id, roles, status=Status.ACTIVE.value) -> List[Membership]: + def find_members_by_org_id_by_status_by_roles( + cls, org_id: int, roles, status=Status.ACTIVE.value + ) -> List[Membership]: """Return all members of the org with a status.""" - return db.session.query(Membership).filter( - and_(Membership.status == status, Membership.membership_type_code.in_(roles))). \ - join(OrgModel).filter(OrgModel.id == org_id).all() + return ( + db.session.query(Membership) + .filter(and_(Membership.status == status, Membership.membership_type_code.in_(roles))) + .join(OrgModel) + .filter(OrgModel.id == int(org_id or -1)) + .all() + ) @classmethod - def find_orgs_for_user(cls, user_id, valid_statuses=VALID_STATUSES) -> List[OrgModel]: + def find_orgs_for_user(cls, user_id: int, valid_statuses=VALID_STATUSES) -> List[OrgModel]: """Find the orgs for a user.""" - records = cls.query \ - .join(OrgModel) \ - .filter(cls.user_id == user_id) \ - .filter(cls.status.in_(valid_statuses)) \ - .filter(OrgModel.status_code.in_(VALID_ORG_STATUSES)) \ + records = ( + cls.query.join(OrgModel) + .filter(cls.user_id == int(user_id or -1)) + .filter(cls.status.in_(valid_statuses)) + .filter(OrgModel.status_code.in_(VALID_ORG_STATUSES)) .all() + ) return list(map(lambda x: x.org, records)) @classmethod - def find_active_staff_org_memberships_for_user(cls, user_id) -> List[Membership]: + def find_active_staff_org_memberships_for_user(cls, 
user_id: int) -> List[Membership]: """Find staff orgs memberships for a user.""" - return cls.query \ - .join(OrgModel) \ - .filter(cls.user_id == user_id) \ - .filter(cls.status == Status.ACTIVE.value) \ - .filter(OrgModel.status_code.in_(VALID_ORG_STATUSES)) \ - .filter(OrgModel.type_code == OrgType.STAFF.value) \ + return ( + cls.query.join(OrgModel) + .filter(cls.user_id == int(user_id or -1)) + .filter(cls.status == Status.ACTIVE.value) + .filter(OrgModel.status_code.in_(VALID_ORG_STATUSES)) + .filter(OrgModel.type_code == OrgType.STAFF.value) .all() + ) @classmethod - def add_membership_for_staff(cls, user_id): + def add_membership_for_staff(cls, user_id: int): """Add staff membership.""" - if (staff_orgs := OrgModel.find_by_org_type(OrgType.STAFF.value)): + if staff_orgs := OrgModel.find_by_org_type(OrgType.STAFF.value): membership = cls.find_membership_by_user_and_org(user_id, staff_orgs[0].id) if not membership: membership = Membership(org_id=staff_orgs[0].id, user_id=user_id, membership_type_code=USER) @@ -128,7 +140,7 @@ def add_membership_for_staff(cls, user_id): membership.save() @classmethod - def remove_membership_for_staff(cls, user_id): + def remove_membership_for_staff(cls, user_id: int): """Remove staff membership.""" staff_memberships = cls.find_active_staff_org_memberships_for_user(user_id) for staff_membership in staff_memberships: @@ -136,87 +148,112 @@ def remove_membership_for_staff(cls, user_id): staff_membership.save() @classmethod - def find_membership_by_user_and_org(cls, user_id, org_id) -> Membership: + def find_membership_by_user_and_org(cls, user_id: int, org_id: int) -> Membership: """Get the membership for the specified user and org.""" - records = cls.query \ - .filter(cls.user_id == user_id) \ - .filter(cls.org_id == org_id) \ - .filter(cls.status.in_(VALID_STATUSES)) \ - .order_by(desc(Membership.created)) \ + records = ( + cls.query.filter(cls.user_id == int(user_id or -1)) + .filter(cls.org_id == int(org_id or -1)) + 
.filter(cls.status.in_(VALID_STATUSES)) + .order_by(desc(Membership.created)) .first() + ) return records @classmethod - def find_membership_by_userid(cls, user_id) -> Membership: + def find_membership_by_userid(cls, user_id: int) -> Membership: """Get the membership for the specified user.""" - records = cls.query \ - .filter(cls.user_id == user_id) \ - .order_by(desc(Membership.created)) \ - .first() + records = cls.query.filter(cls.user_id == int(user_id or -1)).order_by(desc(Membership.created)).first() return records @classmethod - def find_memberships_by_user_ids(cls, user_id) -> List[Membership]: + def find_memberships_by_user_ids(cls, user_id: int) -> List[Membership]: """Get the memberships for the specified user ids.""" - records = cls.query \ - .filter(cls.user_id == user_id) \ - .order_by(desc(Membership.created)) \ - .all() + records = cls.query.filter(cls.user_id == int(user_id or -1)).order_by(desc(Membership.created)).all() return records @classmethod - def find_membership_by_user_and_org_all_status(cls, user_id, org_id) -> Membership: + def find_membership_by_user_and_org_all_status(cls, user_id: int, org_id: int) -> Membership: """Get the membership for the specified user and org with all membership statuses.""" - records = cls.query \ - .filter(cls.user_id == user_id) \ - .filter(cls.org_id == org_id) \ - .order_by(desc(Membership.created)) \ + records = ( + cls.query.filter(cls.user_id == int(user_id or -1)) + .filter(cls.org_id == int(org_id or -1)) + .order_by(desc(Membership.created)) .first() + ) return records @classmethod - def get_count_active_owner_org_id(cls, org_id) -> int: + def get_count_active_owner_org_id(cls, org_id: int) -> int: """Return the count of pending members.""" - query = db.session.query(Membership).filter( - and_(Membership.org_id == org_id, Membership.status == Status.ACTIVE.value, - Membership.membership_type_code == ADMIN)). 
\ - join(OrgModel).filter(OrgModel.id == org_id) - count_q = query.statement.with_only_columns([func.count()]).order_by(None) + query = ( + db.session.query(Membership) + .filter( + and_( + Membership.org_id == int(org_id or -1), + Membership.status == Status.ACTIVE.value, + Membership.membership_type_code == ADMIN, + ) + ) + .join(OrgModel) + .filter(OrgModel.id == int(org_id or -1)) + ) + + # pylint:disable=not-callable + count_q = query.statement.with_only_columns(func.count(), maintain_column_froms=True).order_by(None) count = query.session.execute(count_q).scalar() return count @classmethod - def check_if_active_admin_or_owner_org_id(cls, org_id, user_id) -> int: + def check_if_active_admin_or_owner_org_id(cls, org_id: int, user_id: int) -> int: """Return the count of pending members.""" - query = db.session.query(Membership).filter( - and_(Membership.user_id == user_id, Membership.org_id == org_id, Membership.status == Status.ACTIVE.value, - Membership.membership_type_code.in_((ADMIN, COORDINATOR)))). 
\ - join(OrgModel).filter(OrgModel.id == org_id) - count_q = query.statement.with_only_columns([func.count()]).order_by(None) + query = ( + db.session.query(Membership) + .filter( + and_( + Membership.user_id == int(user_id or -1), + Membership.org_id == int(org_id or -1), + Membership.status == Status.ACTIVE.value, + Membership.membership_type_code.in_((ADMIN, COORDINATOR)), + ) + ) + .join(OrgModel) + .filter(OrgModel.id == int(org_id or -1)) + ) + # pylint:disable=not-callable + count_q = query.statement.with_only_columns(func.count(), maintain_column_froms=True).order_by(None) count = query.session.execute(count_q).scalar() return count @classmethod def check_if_sbc_staff(cls, user_id: int) -> bool: """Return True if the use has membership to sbc staff organization.""" - return db.session.query(Membership).filter( - and_(Membership.user_id == user_id, Membership.status == Status.ACTIVE.value, - Membership.org.has(OrgModel.type_code == OrgType.SBC_STAFF.value))).count() > 0 + return ( + db.session.query(Membership) + .filter( + and_( + Membership.user_id == int(user_id or -1), + Membership.status == Status.ACTIVE.value, + Membership.org.has(OrgModel.type_code == OrgType.SBC_STAFF.value), + ) + ) + .count() + > 0 + ) def reset(self): """Reset member.""" - if self.membership_type_code == 'ADMIN': + if self.membership_type_code == "ADMIN": # if an org only have one admin, we need to prompt a coordiantor or user to admin to avoid failure. 
members = self.find_members_by_org_id_by_status_by_roles(self.org_id, (ADMIN, ADMIN)) count_members = len(members) if count_members == 1: members = self.find_members_by_org_id_by_status_by_roles(self.org_id, (COORDINATOR, USER)) for member in members: - member.membership_type_code = 'ADMIN' + member.membership_type_code = "ADMIN" db.session.add(member) db.session.commit() member.modified_by = None diff --git a/auth-api/src/auth_api/models/membership_status_code.py b/auth-api/src/auth_api/models/membership_status_code.py index 3f02d9ba80..e935427dcf 100644 --- a/auth-api/src/auth_api/models/membership_status_code.py +++ b/auth-api/src/auth_api/models/membership_status_code.py @@ -24,7 +24,7 @@ class MembershipStatusCode(BaseModel): # pylint: disable=too-few-public-methods """This is the Membership Status model for the Auth service.""" - __tablename__ = 'membership_status_codes' + __tablename__ = "membership_status_codes" id = Column(Integer, primary_key=True, autoincrement=False) name = Column(String(25)) diff --git a/auth-api/src/auth_api/models/membership_type.py b/auth-api/src/auth_api/models/membership_type.py index a9b4359210..b657d0bbce 100644 --- a/auth-api/src/auth_api/models/membership_type.py +++ b/auth-api/src/auth_api/models/membership_type.py @@ -24,7 +24,7 @@ class MembershipType(BaseCodeModel): # pylint: disable=too-few-public-methods """This is the Membership Type model for the Auth service.""" - __tablename__ = 'membership_types' + __tablename__ = "membership_types" label = Column(String(200)) icon = Column(String(100)) diff --git a/auth-api/src/auth_api/models/org.py b/auth-api/src/auth_api/models/org.py index ccb502ef19..ba9c7f7269 100644 --- a/auth-api/src/auth_api/models/org.py +++ b/auth-api/src/auth_api/models/org.py @@ -16,10 +16,12 @@ Basic users will have an internal Org that is not created explicitly, but implicitly upon User account creation. 
""" from typing import List -from flask import current_app + +from sql_versioning import Versioned from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, and_, cast, desc, event, func, text -from sqlalchemy.orm import contains_eager, relationship from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import contains_eager, relationship +from structured_logging import StructuredLogging from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error @@ -30,7 +32,7 @@ from auth_api.utils.enums import OrgType as OrgTypeEnum from auth_api.utils.roles import EXCLUDED_FIELDS, VALID_STATUSES -from .base_model import VersionedModel +from .base_model import BaseModel from .contact import Contact from .contact_link import ContactLink from .db import db @@ -38,18 +40,20 @@ from .org_status import OrgStatus from .org_type import OrgType +logger = StructuredLogging.get_logger() + -class Org(VersionedModel): # pylint: disable=too-few-public-methods,too-many-instance-attributes +class Org(Versioned, BaseModel): # pylint: disable=too-few-public-methods,too-many-instance-attributes """Model for an Org record.""" - __tablename__ = 'orgs' + __tablename__ = "orgs" id = Column(Integer, primary_key=True) - uuid = Column(UUID, nullable=False, server_default=text('uuid_generate_v4()'), unique=True) - type_code = Column(ForeignKey('org_types.code'), nullable=False) - status_code = Column(ForeignKey('org_statuses.code'), nullable=False) + uuid = Column(UUID, nullable=False, server_default=text("uuid_generate_v4()"), unique=True) + type_code = Column(ForeignKey("org_types.code"), nullable=False) + status_code = Column(ForeignKey("org_statuses.code"), nullable=False) name = Column(String(250), index=True) - branch_name = Column(String(100), nullable=True, default='') # used for any additional info as branch name + branch_name = Column(String(100), nullable=True, default="") # used for any additional info as branch name 
access_type = Column(String(250), index=True, nullable=True) # for ANONYMOUS ACCESS decision_made_by = Column(String(250)) decision_made_on = Column(DateTime, nullable=True) @@ -57,29 +61,41 @@ class Org(VersionedModel): # pylint: disable=too-few-public-methods,too-many-in bcol_account_id = Column(String(20)) bcol_account_name = Column(String(250)) suspended_on = Column(DateTime, nullable=True) - suspension_reason_code = Column(String(15), ForeignKey('suspension_reason_codes.code', - ondelete='SET NULL', - name='orgs_suspension_reason_code_fkey'), nullable=True) - has_api_access = Column('has_api_access', Boolean(), default=False, nullable=True) - business_type = Column(String(15), ForeignKey('business_type_codes.code', - ondelete='SET NULL', - name='orgs_business_type_fkey'), nullable=True) - business_size = Column(String(15), ForeignKey('business_size_codes.code', - ondelete='SET NULL', - name='orgs_business_size_fkey'), nullable=True) - is_business_account = Column('is_business_account', Boolean(), default=False) - - contacts = relationship('ContactLink', lazy='select') - org_type = relationship('OrgType') - org_status = relationship('OrgStatus') - members = relationship('Membership', cascade='all,delete,delete-orphan', lazy='select') - affiliated_entities = relationship('Affiliation', lazy='select') - invitations = relationship('InvitationMembership', cascade='all,delete,delete-orphan', lazy='select') - products = relationship('ProductSubscription', cascade='all,delete,delete-orphan', lazy='select') - login_options = relationship('AccountLoginOptions', cascade='all,delete,delete-orphan', - primaryjoin='and_(Org.id == AccountLoginOptions.org_id, ' - 'AccountLoginOptions.is_active == True)', lazy='select') - suspension_reason = relationship('SuspensionReasonCode') + suspension_reason_code = Column( + String(15), + ForeignKey("suspension_reason_codes.code", ondelete="SET NULL", name="orgs_suspension_reason_code_fkey"), + nullable=True, + ) + has_api_access = 
Column("has_api_access", Boolean(), default=False, nullable=True) + business_type = Column( + String(15), + ForeignKey("business_type_codes.code", ondelete="SET NULL", name="orgs_business_type_fkey"), + nullable=True, + ) + business_size = Column( + String(15), + ForeignKey("business_size_codes.code", ondelete="SET NULL", name="orgs_business_size_fkey"), + nullable=True, + ) + is_business_account = Column("is_business_account", Boolean(), default=False) + + contacts = relationship("ContactLink", lazy="select", back_populates="org") + org_type = relationship("OrgType") + org_status = relationship("OrgStatus") + members = relationship("Membership", cascade="all,delete,delete-orphan", lazy="select", back_populates="org") + affiliated_entities = relationship("Affiliation", lazy="select", back_populates="org") + invitations = relationship( + "InvitationMembership", cascade="all,delete,delete-orphan", lazy="select", back_populates="org" + ) + products = relationship("ProductSubscription", cascade="all,delete,delete-orphan", lazy="select") + login_options = relationship( + "AccountLoginOptions", + cascade="all,delete,delete-orphan", + primaryjoin="and_(Org.id == AccountLoginOptions.org_id, " "AccountLoginOptions.is_active == True)", + lazy="select", + back_populates="org", + ) + suspension_reason = relationship("SuspensionReasonCode") @classmethod def create_from_dict(cls, org_info: dict): @@ -87,7 +103,7 @@ def create_from_dict(cls, org_info: dict): if org_info: org = Org(**org_info) - current_app.logger.debug(f'Creating org from dictionary {org_info}') + logger.debug(f"Creating org from dictionary {org_info}") if org.type_code: org.org_type = OrgType.get_type_for_code(org.type_code) else: @@ -104,15 +120,18 @@ def find_by_org_uuid(cls, org_uuid): return cls.query.filter_by(uuid=org_uuid).first() @classmethod - def find_by_org_id(cls, org_id): + def find_by_org_id(cls, org_id: int): """Find an Org instance that matches the provided id.""" - return 
cls.query.filter_by(id=org_id).first() + return cls.query.filter_by(id=int(org_id or -1)).first() @classmethod def find_by_bcol_id(cls, bcol_account_id): """Find an Org instance that matches the provided id and not in INACTIVE status.""" - return cls.query.filter(Org.bcol_account_id == bcol_account_id).filter( - ~Org.status_code.in_([OrgStatusEnum.INACTIVE.value, OrgStatusEnum.REJECTED.value])).first() + return ( + cls.query.filter(Org.bcol_account_id == bcol_account_id) + .filter(~Org.status_code.in_([OrgStatusEnum.INACTIVE.value, OrgStatusEnum.REJECTED.value])) + .first() + ) @classmethod def find_by_org_name(cls, org_name): @@ -123,32 +142,35 @@ def find_by_org_name(cls, org_name): @classmethod def find_by_org_type(cls, org_type): """Find Orgs that match the provided org type and not in INACTIVE status.""" - query = db.session.query(Org).filter(Org.status_code != OrgStatusEnum.INACTIVE.value, - Org.org_type == OrgType.get_type_for_code(org_type)) + query = db.session.query(Org).filter( + Org.status_code != OrgStatusEnum.INACTIVE.value, Org.org_type == OrgType.get_type_for_code(org_type) + ) return query.all() @classmethod def search_org(cls, search: OrgSearch, environment: str): """Find all orgs with the given type.""" - query = db.session.query(Org) \ - .outerjoin(ContactLink) \ - .outerjoin(Contact) \ - .options(contains_eager('contacts').contains_eager('contact')) + query = ( + db.session.query(Org) + .outerjoin(ContactLink) + .outerjoin(Contact) + .options(contains_eager(Org.contacts).load_only(ContactLink.org_id)) + ) if search.access_type: query = query.filter(Org.access_type.in_(search.access_type)) if search.id: - query = query.filter(cast(Org.id, String).like(f'%{search.id}%')) + query = query.filter(cast(Org.id, String).like(f"%{search.id}%")) if search.org_type: query = query.filter(Org.org_type == OrgType.get_type_for_code(search.org_type)) if search.decision_made_by: - query = query.filter(Org.decision_made_by.ilike(f'%{search.decision_made_by}%')) 
+ query = query.filter(Org.decision_made_by.ilike(f"%{search.decision_made_by}%")) if search.bcol_account_id: - query = query.filter(Org.bcol_account_id.ilike(f'%{search.bcol_account_id}%')) + query = query.filter(Org.bcol_account_id.ilike(f"%{search.bcol_account_id}%")) if search.branch_name: - query = query.filter(Org.branch_name.ilike(f'%{search.branch_name}%')) + query = query.filter(Org.branch_name.ilike(f"%{search.branch_name}%")) if search.name: - query = query.filter(Org.name.ilike(f'%{search.name}%')) + query = query.filter(Org.name.ilike(f"%{search.name}%")) query = cls._search_by_business_identifier(query, search.business_identifier, environment) query = cls._search_for_statuses(query, search.statuses) @@ -163,16 +185,12 @@ def get_order_by(cls, search, query): """Handle search query order by.""" # If searching by id, surface the perfect matches to the top if search.id: - return query.order_by(desc(Org.id == search.id), Org.created.desc()) + return query.order_by(desc(Org.id == int(search.id or -1)), Org.created.desc()) return query.order_by(Org.created.desc()) @classmethod - def search_orgs_by_business_identifier(cls, - business_identifier, - environment, - excluded_org_types: List[str] = None - ): + def search_orgs_by_business_identifier(cls, business_identifier, environment, excluded_org_types: List[str] = None): """Find all orgs affiliated with provided business identifier.""" query = db.session.query(Org) @@ -200,39 +218,51 @@ def _search_for_statuses(cls, query, statuses): query = query.filter(Org.status_code.in_(statuses)) # If status is active, need to exclude the dir search orgs who haven't accepted the invitation yet if not statuses or OrgStatusEnum.ACTIVE.value in statuses: - pending_inv_subquery = db.session.query(Org.id) \ - .outerjoin(InvitationMembership, InvitationMembership.org_id == Org.id) \ - .outerjoin(Invitation, Invitation.id == InvitationMembership.invitation_id) \ - .filter(Invitation.invitation_status_code == 
InvitationStatus.PENDING.value) \ + pending_inv_subquery = ( + db.session.query(Org.id) + .outerjoin(InvitationMembership, InvitationMembership.org_id == Org.id) + .outerjoin(Invitation, Invitation.id == InvitationMembership.invitation_id) + .filter(Invitation.invitation_status_code == InvitationStatus.PENDING.value) .filter( - ((Invitation.type == InvitationType.DIRECTOR_SEARCH.value) & - (Org.status_code == OrgStatusEnum.ACTIVE.value) & - (Org.access_type == AccessType.ANONYMOUS.value)) | - ((Invitation.type == InvitationType.GOVM.value) & - (Org.status_code == OrgStatusEnum.PENDING_INVITE_ACCEPT.value) & - (Org.access_type == AccessType.GOVM.value)) - ) + ( + (Invitation.type == InvitationType.DIRECTOR_SEARCH.value) + & (Org.status_code == OrgStatusEnum.ACTIVE.value) + & (Org.access_type == AccessType.ANONYMOUS.value) + ) + | ( + (Invitation.type == InvitationType.GOVM.value) + & (Org.status_code == OrgStatusEnum.PENDING_INVITE_ACCEPT.value) + & (Org.access_type == AccessType.GOVM.value) + ) + ) + ) query = query.filter(Org.id.notin_(pending_inv_subquery)) return query @classmethod - def search_pending_activation_orgs(cls, name): + def search_pending_activation_orgs(cls, name: str): """Find all orgs with the given type.""" - query = db.session.query(Org) \ - .outerjoin(InvitationMembership, InvitationMembership.org_id == Org.id) \ - .outerjoin(Invitation, Invitation.id == InvitationMembership.invitation_id) \ - .options(contains_eager('invitations').contains_eager('invitation')) \ - .filter(Invitation.invitation_status_code == InvitationStatus.PENDING.value) \ + query = ( + db.session.query(Org) + .outerjoin(InvitationMembership, InvitationMembership.org_id == Org.id) + .outerjoin(Invitation, Invitation.id == InvitationMembership.invitation_id) + .options(contains_eager(Org.invitations).load_only(InvitationMembership.invitation_id)) + .filter(Invitation.invitation_status_code == InvitationStatus.PENDING.value) .filter( - ((Invitation.type == 
InvitationType.DIRECTOR_SEARCH.value) & - (Org.status_code == OrgStatusEnum.ACTIVE.value) & - (Org.access_type == AccessType.ANONYMOUS.value)) | - ((Invitation.type == InvitationType.GOVM.value) & - (Org.status_code == OrgStatusEnum.PENDING_INVITE_ACCEPT.value) & - (Org.access_type == AccessType.GOVM.value)) - ) + ( + (Invitation.type == InvitationType.DIRECTOR_SEARCH.value) + & (Org.status_code == OrgStatusEnum.ACTIVE.value) + & (Org.access_type == AccessType.ANONYMOUS.value) + ) + | ( + (Invitation.type == InvitationType.GOVM.value) + & (Org.status_code == OrgStatusEnum.PENDING_INVITE_ACCEPT.value) + & (Org.access_type == AccessType.GOVM.value) + ) + ) + ) if name: - query = query.filter(Org.name.ilike(f'%{name}%')) + query = query.filter(Org.name.ilike(f"%{name}%")) orgs = query.order_by(Org.created.desc()).all() return orgs, len(orgs) @@ -243,11 +273,13 @@ def find_by_org_access_type(cls, org_type): return cls.query.filter_by(access_type=org_type).all() @classmethod - def find_similar_org_by_name(cls, name, org_id=None, branch_name=None): + def find_similar_org_by_name(cls, name, org_id: int = None, branch_name=None): """Find an Org instance that matches the provided name.""" - query = cls.query.filter(and_( - func.upper(Org.name) == name.upper(), - (func.upper(func.coalesce(Org.branch_name, '')) == ((branch_name or '').upper()))) + query = cls.query.filter( + and_( + func.upper(Org.name) == name.upper(), + (func.upper(func.coalesce(Org.branch_name, "")) == ((branch_name or "").upper())), + ) ).filter(~Org.status_code.in_([OrgStatusEnum.INACTIVE.value, OrgStatusEnum.REJECTED.value])) if org_id: @@ -257,9 +289,16 @@ def find_similar_org_by_name(cls, name, org_id=None, branch_name=None): @classmethod def get_count_of_org_created_by_user_id(cls, user_id: int): """Find the count of the organisations created by the user.""" - return cls.query.filter(and_( - Org.created_by_id == user_id, Org.status_code == 'ACTIVE' # pylint: disable=comparison-with-callable - 
)).with_entities(func.count()).scalar() + return ( + cls.query.filter( + and_( + Org.created_by_id == user_id, + Org.status_code == "ACTIVE", # pylint: disable=comparison-with-callable + ) + ) + .with_entities(func.count()) + .scalar() + ) def update_org_from_dict(self, org_info: dict, exclude=EXCLUDED_FIELDS): """Update this org with the provided dictionary.""" @@ -289,23 +328,17 @@ def reset(self): super().reset() -@event.listens_for(Org, 'before_insert') +@event.listens_for(Org, "before_insert") def receive_before_insert(mapper, connection, target): # pylint: disable=unused-argument; SQLAlchemy callback signature """Rejects invalid type_codes on insert.""" org = target if org.type_code in (OrgTypeEnum.SBC_STAFF.value, OrgTypeEnum.STAFF.value): - raise BusinessException( - Error.INVALID_INPUT, - None - ) + raise BusinessException(Error.INVALID_INPUT, None) -@event.listens_for(Org, 'before_update', raw=True) +@event.listens_for(Org, "before_update", raw=True) def receive_before_update(mapper, connection, state): # pylint: disable=unused-argument; SQLAlchemy callback signature """Rejects invalid type_codes on update.""" if Org.type_code.key in state.unmodified: return - raise BusinessException( - Error.INVALID_INPUT, - None - ) + raise BusinessException(Error.INVALID_INPUT, None) diff --git a/auth-api/src/auth_api/models/org_settings.py b/auth-api/src/auth_api/models/org_settings.py index fb640724a7..e3e6dac58d 100644 --- a/auth-api/src/auth_api/models/org_settings.py +++ b/auth-api/src/auth_api/models/org_settings.py @@ -16,32 +16,37 @@ This is a mapping between status codes and descriptions for Org objects. 
""" +from sql_versioning import Versioned from sqlalchemy import Boolean, Column, ForeignKey, Integer, String from sqlalchemy.orm import relationship -from .base_model import VersionedModel +from .base_model import BaseModel -class OrgSettings(VersionedModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined +class OrgSettings( + Versioned, BaseModel +): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """This is the model for an Org Settings record.""" - __tablename__ = 'org_settings' + __tablename__ = "org_settings" id = Column(Integer, primary_key=True) - org_id = Column(ForeignKey('orgs.id'), nullable=False) + org_id = Column(ForeignKey("orgs.id"), nullable=False) setting = Column(String(100)) enabled = Column(Boolean(), default=False, nullable=False) - org = relationship('Org') + org = relationship("Org") @classmethod - def get_org_settings(cls, org_id): + def get_org_settings(cls, org_id: int): """Return the default status code for an Org.""" - return cls.query.filter_by(org_id=org_id).all() + return cls.query.filter_by(org_id=int(org_id or -1)).all() @classmethod - def is_admin_auto_approved_invitees(cls, org_id): + def is_admin_auto_approved_invitees(cls, org_id: int): """Return the default status code for an Org.""" - org_model = cls.query.filter_by(org_id=org_id, setting='ADMIN_AUTO_APPROVAL_FOR_MEMBER_ACCEPTANCE').first() + org_model = cls.query.filter_by( + org_id=int(org_id or -1), setting="ADMIN_AUTO_APPROVAL_FOR_MEMBER_ACCEPTANCE" + ).first() if org_model is not None: return org_model.enabled return False diff --git a/auth-api/src/auth_api/models/org_status.py b/auth-api/src/auth_api/models/org_status.py index 6a81a76148..59161c595a 100644 --- a/auth-api/src/auth_api/models/org_status.py +++ b/auth-api/src/auth_api/models/org_status.py @@ -25,7 +25,7 @@ class OrgStatus(BaseCodeModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """This is 
the model for an Org Status record.""" - __tablename__ = 'org_statuses' + __tablename__ = "org_statuses" @declared_attr def code(cls): # pylint:disable=no-self-argument, # noqa: N805 diff --git a/auth-api/src/auth_api/models/org_type.py b/auth-api/src/auth_api/models/org_type.py index 641ee9aae7..3e32ba4b87 100644 --- a/auth-api/src/auth_api/models/org_type.py +++ b/auth-api/src/auth_api/models/org_type.py @@ -22,7 +22,7 @@ class OrgType(BaseCodeModel): # pylint: disable=too-few-public-methods """This is the model for an Org Type record.""" - __tablename__ = 'org_types' + __tablename__ = "org_types" @classmethod def get_default_type(cls): diff --git a/auth-api/src/auth_api/models/payment_type.py b/auth-api/src/auth_api/models/payment_type.py index e0d037d5a9..88cd8bb643 100644 --- a/auth-api/src/auth_api/models/payment_type.py +++ b/auth-api/src/auth_api/models/payment_type.py @@ -22,7 +22,7 @@ class PaymentType(BaseCodeModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """This is the model for a Payment Type.""" - __tablename__ = 'payment_types' + __tablename__ = "payment_types" @classmethod def get_default_payment_type(cls): diff --git a/auth-api/src/auth_api/models/permissions.py b/auth-api/src/auth_api/models/permissions.py index 777a2d4a76..fd9d8ca942 100644 --- a/auth-api/src/auth_api/models/permissions.py +++ b/auth-api/src/auth_api/models/permissions.py @@ -24,12 +24,10 @@ class Permissions(BaseModel): # pylint: disable=too-few-public-methods # Temporarily disable until methods defined """Model for a Permissions model. 
Associates Roles and Actions.""" - __tablename__ = 'permissions' + __tablename__ = "permissions" id = Column(Integer, primary_key=True) - membership_type_code = Column( - String(25), nullable=True - ) + membership_type_code = Column(String(25), nullable=True) org_status_code = Column(String(25), nullable=True) actions = Column(String(100), primary_key=True, autoincrement=False) diff --git a/auth-api/src/auth_api/models/product_code.py b/auth-api/src/auth_api/models/product_code.py index 703bb9ccf0..ce2305501f 100644 --- a/auth-api/src/auth_api/models/product_code.py +++ b/auth-api/src/auth_api/models/product_code.py @@ -27,7 +27,7 @@ class ProductCode(BaseCodeModel): # pylint: disable=too-few-public-methods """Product code table to store all the products supported by auth system.""" - __tablename__ = 'product_codes' + __tablename__ = "product_codes" # this mapper is used so that new and old versions of the service can be run simultaneously, # making rolling upgrades easier # This is used by SQLAlchemy to explicitly define which fields we're interested @@ -39,32 +39,33 @@ class ProductCode(BaseCodeModel): # pylint: disable=too-few-public-methods # NOTE: please keep mapper names in alpha-order, easier to track that way # Exception, id is always first, _fields first __mapper_args__ = { - 'include_properties': [ - 'can_resubmit', - 'code', - 'default', - 'description', - 'hidden', - 'keycloak_group', - 'linked_product_code', - 'need_review', - 'need_system_admin', - 'parent_code', - 'premium_only', - 'type_code', - 'url' + "include_properties": [ + "can_resubmit", + "code", + "default", + "description", + "hidden", + "keycloak_group", + "linked_product_code", + "need_review", + "need_system_admin", + "parent_code", + "premium_only", + "type_code", + "url", ] } - type_code = Column(ForeignKey('product_type_codes.code'), default='INTERNAL', nullable=False) + type_code = Column(ForeignKey("product_type_codes.code"), default="INTERNAL", nullable=False) parent_code = 
Column(String(75), nullable=True) # Used for sub products to define a parent product code premium_only = Column(Boolean(), default=False, nullable=True) # Available only for premium accounts can_resubmit = Column(Boolean(), default=False, nullable=False) # Allows resubmission of subscription request need_review = Column(Boolean(), default=False, nullable=True) # Need a review from staff for activating product need_system_admin = Column(Boolean(), default=False, nullable=True) # Needs system admin for activating product hidden = Column(Boolean(), default=False, nullable=True) # Flag to hide from the UI - linked_product_code = Column(String(100), - nullable=True) # Product linked to to another product, like business and NR + linked_product_code = Column( + String(100), nullable=True + ) # Product linked to to another product, like business and NR keycloak_group = Column(String(100), nullable=True) url = Column(String(100), nullable=True) @@ -76,19 +77,26 @@ def find_by_code(cls, code): @classmethod def get_all_products(cls): """Get all of the products codes.""" - linked_code_subquery = db.session.query(ProductCode.linked_product_code) \ - .filter(ProductCode.linked_product_code.isnot(None)) \ + linked_code_subquery = ( + db.session.query(ProductCode.linked_product_code) + .filter(ProductCode.linked_product_code.isnot(None)) .subquery() + ) - return cls.query.filter(ProductCode.code.notin_(linked_code_subquery)).order_by( # pylint: disable=no-member - ProductCode.type_code.asc(), ProductCode.description.asc()).all() # pylint: disable=no-member + return ( + cls.query.filter(ProductCode.type_code.notin_(linked_code_subquery.select())) + .order_by(ProductCode.type_code.asc(), ProductCode.description.asc()) # pylint: disable=no-member + .all() + ) # pylint: disable=no-member @classmethod def get_visible_products(cls): # pylint: disable=no-member """Get all of the products with hidden false.""" - return cls.query.filter_by(hidden=False).order_by( - 
ProductCode.type_code.asc(), ProductCode.description.asc() # pylint: disable=no-member - ).all() + return ( + cls.query.filter_by(hidden=False) + .order_by(ProductCode.type_code.asc(), ProductCode.description.asc()) # pylint: disable=no-member + .all() + ) @classmethod def find_by_type_code(cls, type_code: str) -> List[ProductCode]: diff --git a/auth-api/src/auth_api/models/product_subscription.py b/auth-api/src/auth_api/models/product_subscription.py index a1ae8afcc6..64482456f0 100644 --- a/auth-api/src/auth_api/models/product_subscription.py +++ b/auth-api/src/auth_api/models/product_subscription.py @@ -16,35 +16,41 @@ The ProductSubscription object connects Org models to one or more ProductSubscription models. """ +from sql_versioning import Versioned from sqlalchemy import Column, ForeignKey, Integer, and_ from sqlalchemy.orm import relationship from ..utils.roles import VALID_SUBSCRIPTION_STATUSES -from .base_model import VersionedModel +from .base_model import BaseModel -class ProductSubscription(VersionedModel): # pylint: disable=too-few-public-methods +class ProductSubscription(Versioned, BaseModel): # pylint: disable=too-few-public-methods """Model for a Product Subscription model.""" - __tablename__ = 'product_subscriptions' + __tablename__ = "product_subscriptions" id = Column(Integer, primary_key=True) - org_id = Column(ForeignKey('orgs.id'), nullable=False, index=True) - product_code = Column(ForeignKey('product_codes.code'), nullable=False) + org_id = Column(ForeignKey("orgs.id"), nullable=False, index=True) + product_code = Column(ForeignKey("product_codes.code"), nullable=False) - product = relationship('ProductCode', foreign_keys=[product_code], lazy='select') - status_code = Column(ForeignKey('product_subscriptions_statuses.code'), nullable=False) - product_subscriptions_status = relationship('ProductSubscriptionsStatus') + product = relationship("ProductCode", foreign_keys=[product_code], lazy="select") + status_code = 
Column(ForeignKey("product_subscriptions_statuses.code"), nullable=False) + product_subscriptions_status = relationship("ProductSubscriptionsStatus") @classmethod def find_by_org_ids(cls, org_ids, valid_statuses=VALID_SUBSCRIPTION_STATUSES): """Find an product subscription instance that matches the provided org ids.""" return cls.query.filter( - and_(ProductSubscription.org_id.in_(org_ids), ProductSubscription.status_code.in_(valid_statuses))).all() + and_(ProductSubscription.org_id.in_(org_ids), ProductSubscription.status_code.in_(valid_statuses)) + ).all() @classmethod - def find_by_org_id_product_code(cls, org_id, product_code, valid_statuses=VALID_SUBSCRIPTION_STATUSES): + def find_by_org_id_product_code(cls, org_id: int, product_code, valid_statuses=VALID_SUBSCRIPTION_STATUSES): """Find an product subscription instance that matches the provided id.""" return cls.query.filter( - and_(ProductSubscription.org_id == org_id, ProductSubscription.product_code == product_code, - ProductSubscription.status_code.in_(valid_statuses))).first() + and_( + ProductSubscription.org_id == int(org_id or -1), + ProductSubscription.product_code == product_code, + ProductSubscription.status_code.in_(valid_statuses), + ) + ).first() diff --git a/auth-api/src/auth_api/models/product_subscriptions_status.py b/auth-api/src/auth_api/models/product_subscriptions_status.py index f622e0f84f..29c9552ba7 100644 --- a/auth-api/src/auth_api/models/product_subscriptions_status.py +++ b/auth-api/src/auth_api/models/product_subscriptions_status.py @@ -25,7 +25,7 @@ class ProductSubscriptionsStatus(BaseCodeModel): # pylint: disable=too-few-public-methods """This is the model for an ProductSubscription Status record.""" - __tablename__ = 'product_subscriptions_statuses' + __tablename__ = "product_subscriptions_statuses" @declared_attr def code(cls): # pylint:disable=no-self-argument, # noqa: N805 diff --git a/auth-api/src/auth_api/models/product_type_code.py 
b/auth-api/src/auth_api/models/product_type_code.py index fc4a493d78..21adfcd9c3 100644 --- a/auth-api/src/auth_api/models/product_type_code.py +++ b/auth-api/src/auth_api/models/product_type_code.py @@ -22,7 +22,7 @@ class ProductTypeCode(BaseCodeModel): # pylint: disable=too-few-public-methods """Product type code table to store all the types of products supported by auth system.""" - __tablename__ = 'product_type_codes' + __tablename__ = "product_type_codes" @classmethod def find_by_code(cls, code): diff --git a/auth-api/src/auth_api/models/pubsub_message_processing.py b/auth-api/src/auth_api/models/pubsub_message_processing.py index 2e3dc2234f..5f6f58a4dc 100644 --- a/auth-api/src/auth_api/models/pubsub_message_processing.py +++ b/auth-api/src/auth_api/models/pubsub_message_processing.py @@ -3,17 +3,19 @@ NOTE: Only use this when it's not possible to use other indicators to track message processing. Currently used by the account-mailer / auth-queue. This prevents duplicates. """ + import datetime as dt -import pytz +import pytz from sqlalchemy import Column, DateTime, Integer, String + from .db import db class PubSubMessageProcessing(db.Model): """PubSub Message Processing for cloud event messages.""" - __tablename__ = 'pubsub_message_processing' + __tablename__ = "pubsub_message_processing" id = Column(Integer, index=True, primary_key=True) cloud_event_id = Column(String(250), nullable=False) @@ -22,9 +24,9 @@ class PubSubMessageProcessing(db.Model): processed = Column(DateTime, nullable=True) @classmethod - def find_by_id(cls, identifier): + def find_by_id(cls, identifier: int): """Find a pubsub message processing by id.""" - return cls.query.filter_by(id=identifier).one_or_none() + return cls.query.filter_by(id=int(identifier)).one_or_none() @classmethod def find_by_cloud_event_id_and_type(cls, cloud_event_id, message_type): diff --git a/auth-api/src/auth_api/models/staff_remark_code.py b/auth-api/src/auth_api/models/staff_remark_code.py index 
e33dac4bcd..41354524ba 100644 --- a/auth-api/src/auth_api/models/staff_remark_code.py +++ b/auth-api/src/auth_api/models/staff_remark_code.py @@ -22,4 +22,4 @@ class StaffRemarkCode(BaseCodeModel): # pylint: disable=too-few-public-methods """Staff Remarks against an account.""" - __tablename__ = 'staff_remark_codes' + __tablename__ = "staff_remark_codes" diff --git a/auth-api/src/auth_api/models/suspension_reason_code.py b/auth-api/src/auth_api/models/suspension_reason_code.py index b63513c754..c08009468e 100644 --- a/auth-api/src/auth_api/models/suspension_reason_code.py +++ b/auth-api/src/auth_api/models/suspension_reason_code.py @@ -25,4 +25,4 @@ class SuspensionReasonCode(BaseCodeModel): # pylint: disable=too-few-public-met That is supported by auth system. """ - __tablename__ = 'suspension_reason_codes' + __tablename__ = "suspension_reason_codes" diff --git a/auth-api/src/auth_api/models/task.py b/auth-api/src/auth_api/models/task.py index 93667a464e..0badd670ec 100644 --- a/auth-api/src/auth_api/models/task.py +++ b/auth-api/src/auth_api/models/task.py @@ -13,13 +13,14 @@ # limitations under the License. """This model manages a Task item in the Auth Service.""" import datetime as dt -import pytz +import pytz from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, text from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.orm import relationship from auth_api.models.dataclass import TaskSearch + from ..utils.enums import TaskRelationshipStatus, TaskRelationshipType, TaskStatus from .base_model import BaseModel from .db import db @@ -28,14 +29,15 @@ class Task(BaseModel): """Model for a Task record.""" - __tablename__ = 'tasks' + __tablename__ = "tasks" id = Column(Integer, index=True, primary_key=True) name = Column(String(250), nullable=False) # Stores name of the relationship item. 
For eg, an org name date_submitted = Column(DateTime) # Instance when task is created external_source_id = Column(String(75), nullable=True) # Optional external system source identifier - is_resubmitted = Column(Boolean(), default=False, - nullable=False) # Stores whether this task is resubmitted for review + is_resubmitted = Column( + Boolean(), default=False, nullable=False + ) # Stores whether this task is resubmitted for review relationship_type = Column(String(50), nullable=False) # That is to be acted up on. For eg, an org relationship_id = Column(Integer, index=True, nullable=False) relationship_status = Column(String(100), nullable=True) # Status of the related object. e.g, PENDING_STAFF_REVIEW @@ -45,13 +47,12 @@ class Task(BaseModel): status = Column(String(50), nullable=False) # task is acted or to be acted. can be open or completed account_id = Column(Integer, nullable=True) # account id related to task. Eg, # org id for pending product subscriptions - related_to = Column(ForeignKey('users.id', ondelete='SET NULL', - name='related_to_fkey'), nullable=False) + related_to = Column(ForeignKey("users.id", ondelete="SET NULL", name="related_to_fkey"), nullable=False) # python list of remarks <- -> postgres array of remarks remarks = Column(ARRAY(String, dimensions=1), nullable=True) # task that is assigned to the particular user - user = relationship('User', foreign_keys=[related_to], lazy='select') + user = relationship("User", foreign_keys=[related_to], lazy="select") @classmethod def fetch_tasks(cls, task_search: TaskSearch): @@ -59,7 +60,7 @@ def fetch_tasks(cls, task_search: TaskSearch): query = db.session.query(Task) if task_search.name: - query = query.filter(Task.name.ilike(f'%{task_search.name}%')) + query = query.filter(Task.name.ilike(f"%{task_search.name}%")) if task_search.type: query = query.filter(Task.type == task_search.type) if task_search.status: @@ -75,14 +76,16 @@ def fetch_tasks(cls, task_search: TaskSearch): if 
task_search.relationship_status: query = query.filter(Task.relationship_status == task_search.relationship_status) if task_search.modified_by: - query = query.join(Task.modified_by) \ - .filter(text("lower(users.first_name || ' ' || users.last_name) like lower(:search_text)"))\ - .params(search_text=f'%{task_search.modified_by}%') + query = ( + query.join(Task.modified_by) + .filter(text("lower(users.first_name || ' ' || users.last_name) like lower(:search_text)")) + .params(search_text=f"%{task_search.modified_by}%") + ) if task_search.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value: query = query.order_by(Task.date_submitted.asc()) - if task_search.submitted_sort_order == 'asc': + if task_search.submitted_sort_order == "asc": query = query.order_by(Task.date_submitted.asc()) - if task_search.submitted_sort_order == 'desc': + if task_search.submitted_sort_order == "desc": query = query.order_by(Task.date_submitted.desc()) # Add pagination @@ -90,36 +93,50 @@ def fetch_tasks(cls, task_search: TaskSearch): return pagination.items, pagination.total @classmethod - def find_by_task_id(cls, task_id): + def find_by_task_id(cls, task_id: int): """Find a task instance that matches the provided id.""" - return db.session.query(Task).filter_by(id=task_id).first() + return db.session.query(Task).filter_by(id=int(task_id or -1)).first() @classmethod - def find_by_task_relationship_id(cls, relationship_id: int, task_relationship_type: str, - task_status: str = TaskStatus.OPEN.value): + def find_by_task_relationship_id( + cls, relationship_id: int, task_relationship_type: str, task_status: str = TaskStatus.OPEN.value + ): """Find a task instance that related to the relationship id ( may be an ORG or a PRODUCT.""" - return db.session.query(Task).filter(Task.relationship_id == relationship_id, - Task.relationship_type == task_relationship_type, - Task.status == task_status).first() + return ( + db.session.query(Task) + .filter( + Task.relationship_id == 
int(relationship_id or -1), + Task.relationship_type == task_relationship_type, + Task.status == task_status, + ) + .first() + ) @classmethod - def find_by_task_for_account(cls, org_id, status): + def find_by_task_for_account(cls, org_id: int, status): """Find a task instance that matches the provided id.""" - return db.session.query(Task).filter_by(relationship_id=org_id, - relationship_type=TaskRelationshipType.ORG.value, status=status).first() + return ( + db.session.query(Task) + .filter_by( + relationship_id=int(org_id or -1), relationship_type=TaskRelationshipType.ORG.value, status=status + ) + .first() + ) @classmethod - def find_by_user_and_status(cls, org_id, status): + def find_by_user_and_status(cls, org_id: int, status): """Find a task instance that matches the provided id.""" - return db.session.query(Task).filter_by(account_id=org_id, - relationship_type=TaskRelationshipType.USER.value, status=status)\ + return ( + db.session.query(Task) + .filter_by(account_id=int(org_id or -1), relationship_type=TaskRelationshipType.USER.value, status=status) .first() + ) @classmethod def _str_to_utc_dt(cls, date: str, add_time: bool): """Convert ISO formatted dates into dateTime objects in UTC.""" - time_zone = pytz.timezone('Canada/Pacific') - naive_dt = dt.datetime.strptime(date, '%Y-%m-%d') + time_zone = pytz.timezone("Canada/Pacific") + naive_dt = dt.datetime.strptime(date, "%Y-%m-%d") local_dt = time_zone.localize(naive_dt, is_dst=None) if add_time: local_dt = dt.datetime(local_dt.year, local_dt.month, local_dt.day, 23, 59, 59) diff --git a/auth-api/src/auth_api/models/user.py b/auth-api/src/auth_api/models/user.py index aa74a62264..22c0d5cee1 100644 --- a/auth-api/src/auth_api/models/user.py +++ b/auth-api/src/auth_api/models/user.py @@ -18,10 +18,10 @@ import datetime -from flask import current_app from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, and_, or_ from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm 
import relationship +from structured_logging import StructuredLogging from auth_api.utils.enums import LoginSource, Status, UserStatus from auth_api.utils.roles import Role @@ -33,47 +33,54 @@ from .org import Org as OrgModel from .user_status_code import UserStatusCode +logger = StructuredLogging.get_logger() + class User(BaseModel): """This is the model for a User.""" - __tablename__ = 'users' + __tablename__ = "users" - __versioned__ = { - 'exclude': ['modified', 'modified_by_id', 'modified_by', 'created'] - } + __versioned__ = {"exclude": ["modified", "modified_by_id", "modified_by", "created"]} id = Column(Integer, primary_key=True) - username = Column('username', String(100), index=True) - firstname = Column('first_name', String(200), index=True) - lastname = Column('last_name', String(200), index=True) - email = Column('email', String(200), index=True) + username = Column("username", String(100), index=True) + firstname = Column("first_name", String(200), index=True) + lastname = Column("last_name", String(200), index=True) + email = Column("email", String(200), index=True) keycloak_guid = Column( - 'keycloak_guid', UUID(as_uuid=True), unique=True, nullable=True # bcros users comes with no guid + "keycloak_guid", UUID(as_uuid=True), unique=True, nullable=True # bcros users comes with no guid ) is_terms_of_use_accepted = Column(Boolean(), default=False, nullable=True) - terms_of_use_accepted_version = Column( - ForeignKey('documents.version_id'), nullable=True - ) + terms_of_use_accepted_version = Column(ForeignKey("documents.version_id"), nullable=True) # a type for the user to identify what kind of user it is..ie anonymous , bcsc etc ..similar to login source - type = Column('type', String(200), nullable=True) - status = Column(ForeignKey('user_status_codes.id')) - idp_userid = Column('idp_userid', String(256), index=True) - login_source = Column('login_source', String(200), nullable=True) + type = Column("type", String(200), nullable=True) + status = 
Column(ForeignKey("user_status_codes.id")) + idp_userid = Column("idp_userid", String(256), index=True) + login_source = Column("login_source", String(200), nullable=True) login_time = Column(DateTime, default=None, nullable=True) verified = Column(Boolean()) - contacts = relationship('ContactLink', primaryjoin='User.id == ContactLink.user_id', lazy='select') - orgs = relationship('Membership', - primaryjoin='and_(User.id == Membership.user_id, or_(Membership.status == ' + str( - Status.ACTIVE.value) + ', Membership.status == ' + str( - Status.PENDING_APPROVAL.value) + '))', lazy='select') # noqa:E127 - - terms_of_use_version = relationship('Documents', foreign_keys=[terms_of_use_accepted_version], uselist=False, - lazy='select') - user_status = relationship('UserStatusCode', foreign_keys=[status], lazy='subquery') + contacts = relationship( + "ContactLink", primaryjoin="User.id == ContactLink.user_id", lazy="select", back_populates="user" + ) + orgs = relationship( + "Membership", + primaryjoin="and_(User.id == Membership.user_id, or_(Membership.status == " + + str(Status.ACTIVE.value) + + ", Membership.status == " + + str(Status.PENDING_APPROVAL.value) + + "))", + lazy="select", + back_populates="user", + ) # noqa:E127 + + terms_of_use_version = relationship( + "Documents", foreign_keys=[terms_of_use_accepted_version], uselist=False, lazy="select" + ) + user_status = relationship("UserStatusCode", foreign_keys=[status], lazy="subquery") @classmethod def find_by_username(cls, username): @@ -84,46 +91,54 @@ def find_by_username(cls, username): @user_context def find_by_jwt_token(cls, **kwargs): """Find an existing user by the keycloak GUID and (idpUserId is null or from token) in the provided token.""" - user_from_context: UserContext = kwargs['user_context'] - return db.session.query(User).filter( - and_(User.keycloak_guid == user_from_context.sub, - or_(User.idp_userid == user_from_context.token_info.get('idp_userid', None), - 
User.idp_userid.is_(None)))).one_or_none() + user_from_context: UserContext = kwargs["user_context"] + return ( + db.session.query(User) + .filter( + and_( + User.keycloak_guid == user_from_context.sub, + or_( + User.idp_userid == user_from_context.token_info.get("idp_userid", None), + User.idp_userid.is_(None), + ), + ) + ) + .one_or_none() + ) @classmethod @user_context def find_by_jwt_idp_userid(cls, **kwargs): """Find an existing user by idp_userid.""" - user_from_context: UserContext = kwargs['user_context'] - if idp_userid := user_from_context.token_info.get('idp_userid', None): - return db.session.query(User).filter( - User.idp_userid == idp_userid).one_or_none() + user_from_context: UserContext = kwargs["user_context"] + if idp_userid := user_from_context.token_info.get("idp_userid", None): + return db.session.query(User).filter(User.idp_userid == idp_userid).one_or_none() if not idp_userid: - current_app.logger.error('No idp_userid provided from token_info.') + logger.error("No idp_userid provided from token_info.") return None @classmethod @user_context def create_from_jwt_token(cls, first_name: str, last_name: str, **kwargs): """Create a User from the provided JWT.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] token = user_from_context.token_info if token: user = User( username=user_from_context.user_name, firstname=first_name, lastname=last_name, - email=token.get('email', None), + email=token.get("email", None), keycloak_guid=user_from_context.sub, created=datetime.datetime.now(), login_source=user_from_context.login_source, status=UserStatusCode.get_default_type(), - idp_userid=token.get('idp_userid', None), + idp_userid=token.get("idp_userid", None), login_time=datetime.datetime.now(), type=cls._get_type(user_from_context=user_from_context), - verified=cls._is_verified(user_from_context.login_source) + verified=cls._is_verified(user_from_context.login_source), ) - 
current_app.logger.debug(f'Creating user from JWT:{token}; User:{user}') + logger.debug(f"Creating user from JWT:{token}; User:{user}") user.save() return user @@ -131,33 +146,41 @@ def create_from_jwt_token(cls, first_name: str, last_name: str, **kwargs): @classmethod @user_context - def update_from_jwt_token(cls, user, # pylint:disable=too-many-arguments - first_name: str, last_name: str, is_login: bool = False, **kwargs): + def update_from_jwt_token( + cls, + user, # pylint:disable=too-many-positional-arguments + first_name: str, + last_name: str, + is_login: bool = False, + **kwargs, + ): """Update a User from the provided JWT.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] token = user_from_context.token_info if not token or not user: return None # Do not save if nothing has been changed # pylint: disable=too-many-boolean-expressions - if not is_login \ - and (user.username == user_from_context.user_name or user.username) \ - and user.firstname == first_name \ - and user.lastname == last_name \ - and user.email == token.get('email', user.email) \ - and (str(user.keycloak_guid) == user_from_context.sub or user.keycloak_guid) \ - and user.status == UserStatus.ACTIVE.value \ - and (user.login_source == user_from_context.login_source or user.login_source) \ - and user.idp_userid == token.get('idp_userid', None): + if ( + not is_login + and (user.username == user_from_context.user_name or user.username) + and user.firstname == first_name + and user.lastname == last_name + and user.email == token.get("email", user.email) + and (str(user.keycloak_guid) == user_from_context.sub or user.keycloak_guid) + and user.status == UserStatus.ACTIVE.value + and (user.login_source == user_from_context.login_source or user.login_source) + and user.idp_userid == token.get("idp_userid", None) + ): return user - current_app.logger.debug(f'Updating user from JWT:{token}; User:{user}') + logger.debug(f"Updating 
user from JWT:{token}; User:{user}") user.username = user_from_context.user_name or user.username user.firstname = first_name user.lastname = last_name - user.email = token.get('email', user.email) + user.email = token.get("email", user.email) user.modified = datetime.datetime.now() @@ -172,7 +195,7 @@ def update_from_jwt_token(cls, user, # pylint:disable=too-many-arguments if is_login: user.login_time = datetime.datetime.now() - user.idp_userid = token.get('idp_userid') + user.idp_userid = token.get("idp_userid") cls.commit() @@ -182,7 +205,7 @@ def update_from_jwt_token(cls, user, # pylint:disable=too-many-arguments def find_users(cls, first_name, last_name, email): """Return a set of users with either the given username or the given email.""" # TODO: This needs to be improved for scalability. Paging large datasets etc. - if first_name == '' and last_name == '' and email == '': + if first_name == "" and last_name == "" and email == "": return cls.query.all() return cls.query.filter(or_(cls.firstname == first_name, cls.lastname == last_name, cls.email == email)).all() @@ -190,27 +213,36 @@ def find_users(cls, first_name, last_name, email): @user_context def update_terms_of_use(cls, is_terms_accepted, terms_of_use_version, **kwargs): """Update the terms of service for the user.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] if user_from_context.token_info: user = cls.find_by_jwt_token() user.is_terms_of_use_accepted = is_terms_accepted user.terms_of_use_accepted_version = terms_of_use_version - current_app.logger.debug(f'Updating users Terms of use is_terms_accepted:{is_terms_accepted}; ' - f'terms_of_use_version:{terms_of_use_version}') + logger.debug( + f"Updating users Terms of use is_terms_accepted:{is_terms_accepted}; " + f"terms_of_use_version:{terms_of_use_version}" + ) cls.save(user) return user return None @classmethod - def find_users_by_org_id_by_status_by_roles(cls, org_id, roles, 
status=Status.ACTIVE.value): + def find_users_by_org_id_by_status_by_roles(cls, org_id: int, roles, status=Status.ACTIVE.value): """Find all members of the org with a status.""" - return db.session.query(User). \ - join(MembershipModel, - (User.id == MembershipModel.user_id) & (MembershipModel.status == status) & - (MembershipModel.membership_type_code.in_(roles))). \ - join(OrgModel).filter(OrgModel.id == org_id).all() + return ( + db.session.query(User) + .join( + MembershipModel, + (User.id == MembershipModel.user_id) + & (MembershipModel.status == status) + & (MembershipModel.membership_type_code.in_(roles)), + ) + .join(OrgModel) + .filter(OrgModel.id == org_id) + .all() + ) def delete(self): """Users cannot be deleted so intercept the ORM by just returning.""" @@ -221,13 +253,17 @@ def _get_type(cls, user_from_context: UserContext) -> str: """Return type of the user from the token info.""" user_type: str = None if user_from_context.roles: - if Role.ANONYMOUS_USER.value in user_from_context.roles \ - or user_from_context.login_source == LoginSource.BCROS.value: + if ( + Role.ANONYMOUS_USER.value in user_from_context.roles + or user_from_context.login_source == LoginSource.BCROS.value + ): user_type = Role.ANONYMOUS_USER.name elif Role.GOV_ACCOUNT_USER.value in user_from_context.roles: user_type = Role.GOV_ACCOUNT_USER.name - elif Role.PUBLIC_USER.value in user_from_context.roles \ - or user_from_context.login_source in [LoginSource.BCEID.value, LoginSource.BCSC.value]: + elif Role.PUBLIC_USER.value in user_from_context.roles or user_from_context.login_source in [ + LoginSource.BCEID.value, + LoginSource.BCSC.value, + ]: user_type = Role.PUBLIC_USER.name elif user_from_context.is_staff(): user_type = Role.STAFF.name diff --git a/auth-api/src/auth_api/models/user_settings.py b/auth-api/src/auth_api/models/user_settings.py index ef8312d653..a19de0d26e 100644 --- a/auth-api/src/auth_api/models/user_settings.py +++ b/auth-api/src/auth_api/models/user_settings.py @@ 
-17,7 +17,7 @@ """ -class UserSettings(): # pylint: disable=too-few-public-methods, too-many-instance-attributes +class UserSettings: # pylint: disable=too-few-public-methods, too-many-instance-attributes """ This is the User Settings model. @@ -26,8 +26,18 @@ class UserSettings(): # pylint: disable=too-few-public-methods, too-many-instan Can extended to product which user has access to. """ - def __init__(self, id_, label, urlorigin, urlpath, type_, account_type=None, # pylint: disable=too-many-arguments - account_status=None, product_settings=None, additional_label=None): + def __init__( # pylint: disable=too-many-positional-arguments,too-many-arguments + self, + id_, + label, + urlorigin, + urlpath, + type_, + account_type=None, + account_status=None, + product_settings=None, + additional_label=None, + ): """Return a usersettings.""" self.id = id_ self.label = label diff --git a/auth-api/src/auth_api/models/user_status_code.py b/auth-api/src/auth_api/models/user_status_code.py index 53fc45e0c5..9338b17417 100644 --- a/auth-api/src/auth_api/models/user_status_code.py +++ b/auth-api/src/auth_api/models/user_status_code.py @@ -24,7 +24,7 @@ class UserStatusCode(BaseModel): # pylint: disable=too-few-public-methods """This is the User Status model for the Auth service.""" - __tablename__ = 'user_status_codes' + __tablename__ = "user_status_codes" id = Column(Integer, primary_key=True, autoincrement=False) name = Column(String(15)) diff --git a/auth-api/src/auth_api/models/views/authorization.py b/auth-api/src/auth_api/models/views/authorization.py index 1bc097a298..5575c39a6a 100644 --- a/auth-api/src/auth_api/models/views/authorization.py +++ b/auth-api/src/auth_api/models/views/authorization.py @@ -29,7 +29,7 @@ class Authorization(db.Model): """This is the model the authorizations_view.""" - __tablename__ = 'authorizations_view' + __tablename__ = "authorizations_view" business_identifier = Column(String) entity_name = Column(String) @@ -47,20 +47,19 @@ class 
Authorization(db.Model): status_code = Column(String) @classmethod - def find_user_authorization_by_business_number(cls, business_identifier: str, keycloak_guid: uuid = None, - org_id: int = None, is_staff=None): + def find_user_authorization_by_business_number( + cls, business_identifier: str, keycloak_guid: uuid = None, org_id: int = None, is_staff=None + ): """Return authorization view object.""" auth = None if keycloak_guid and business_identifier and org_id: - auth = cls.query.filter_by(keycloak_guid=keycloak_guid, - business_identifier=business_identifier, - org_id=org_id).one_or_none() + auth = cls.query.filter_by( + keycloak_guid=keycloak_guid, business_identifier=business_identifier, org_id=int(org_id or -1) + ).one_or_none() elif business_identifier and org_id: - auth = cls.query.filter_by(business_identifier=business_identifier, - org_id=org_id).first() + auth = cls.query.filter_by(business_identifier=business_identifier, org_id=int(org_id or -1)).first() elif keycloak_guid and business_identifier: - auth = cls.query.filter_by(keycloak_guid=keycloak_guid, - business_identifier=business_identifier).first() + auth = cls.query.filter_by(keycloak_guid=keycloak_guid, business_identifier=business_identifier).first() elif is_staff and business_identifier: auth = cls.query.filter_by(business_identifier=business_identifier).first() return auth @@ -72,32 +71,40 @@ def find_user_authorization_by_business_number_and_product(cls, business_identif Mainly used for service accounts.Sorted using the membership since service accounts gets all access """ - return cls.query.filter_by(product_code=product_code, business_identifier=business_identifier) \ - .order_by(expression.case(((Authorization.org_membership == ADMIN, 1), - (Authorization.org_membership == COORDINATOR, 2), - (Authorization.org_membership == USER, 3)))) \ + return ( + cls.query.filter_by(product_code=product_code, business_identifier=business_identifier) + .order_by( + expression.case( + 
(Authorization.org_membership == ADMIN, 1), + (Authorization.org_membership == COORDINATOR, 2), + (Authorization.org_membership == USER, 3), + ) + ) .first() + ) @classmethod def find_user_authorization_by_org_id(cls, keycloak_guid: uuid, org_id: int): """Return authorization view object.""" - return cls.query.filter_by(keycloak_guid=keycloak_guid, org_id=org_id).one_or_none() + return cls.query.filter_by(keycloak_guid=keycloak_guid, org_id=int(org_id or -1)).one_or_none() @classmethod def find_authorization_for_admin_by_org_id(cls, org_id: int): """Return authorization view object for staff.""" # staff gets ADMIN level access - return cls.query.filter_by(org_id=org_id, org_membership=ADMIN).first() + return cls.query.filter_by(org_id=int(org_id or -1), org_membership=ADMIN).first() @classmethod def find_account_authorization_by_org_id_and_product_for_user(cls, keycloak_guid: uuid, org_id: int, product: str): """Return authorization view object.""" - return cls.query.filter_by(keycloak_guid=keycloak_guid, org_id=org_id, product_code=product).one_or_none() + return cls.query.filter_by( + keycloak_guid=keycloak_guid, org_id=int(org_id or -1), product_code=product + ).one_or_none() @classmethod def find_account_authorization_by_org_id_and_product(cls, org_id: int, product: str): """Return authorization view object.""" - return cls.query.filter_by(org_id=org_id, product_code=product, org_membership=ADMIN).first() + return cls.query.filter_by(org_id=int(org_id or -1), product_code=product, org_membership=ADMIN).first() @classmethod def find_all_authorizations_for_user(cls, keycloak_guid): diff --git a/auth-api/src/auth_api/resources/endpoints.py b/auth-api/src/auth_api/resources/endpoints.py index ce82c91105..5ba0bb69c3 100644 --- a/auth-api/src/auth_api/resources/endpoints.py +++ b/auth-api/src/auth_api/resources/endpoints.py @@ -12,14 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""Mounting the end-points.""" +import os from typing import Optional -import os from flask import Blueprint, Flask # noqa: I001 + from .v1 import v1_endpoint -from .reset import bp as reset_bp -TEST_BLUEPRINT = Blueprint('TEST', __name__, url_prefix='/test') +TEST_BLUEPRINT = Blueprint("TEST", __name__, url_prefix="/test") class Endpoints: # pylint: disable=too-few-public-methods @@ -40,9 +40,8 @@ def _mount_endpoints(self): """Mount the endpoints of the system.""" v1_endpoint.init_app(self.app) - if os.getenv('FLASK_ENV', 'production') in ['development', 'testing']: + if os.getenv("FLASK_ENV", "production") in ["development", "testing"]: self.app.register_blueprint(TEST_BLUEPRINT) - self.app.register_blueprint(reset_bp) endpoints = Endpoints() diff --git a/auth-api/src/auth_api/auth.py b/auth-api/src/auth_api/resources/meta.py similarity index 60% rename from auth-api/src/auth_api/auth.py rename to auth-api/src/auth_api/resources/meta.py index b8eec74c34..367baac932 100644 --- a/auth-api/src/auth_api/auth.py +++ b/auth-api/src/auth_api/resources/meta.py @@ -11,9 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-"""Bring in the common JWT Manager.""" -from flask_jwt_oidc import JwtManager +"""Endpoints to check and manage the health of the service.""" +from flask import Blueprint, jsonify +from auth_api.metadata import APP_VERSION, FLASK_VERSION -# lower case name as used by convention in most Flask apps -jwt = JwtManager() # pylint: disable=invalid-name +bp = Blueprint("META", __name__, url_prefix="/meta") + + +@bp.route("/info") +def info(): + """Return a JSON object with meta information about the Service.""" + return jsonify(API=f"auth_api/{APP_VERSION}", FrameWork=f"{FLASK_VERSION}") diff --git a/auth-api/src/auth_api/resources/ops.py b/auth-api/src/auth_api/resources/ops.py index 8cfb1af6c2..08bc5679ca 100644 --- a/auth-api/src/auth_api/resources/ops.py +++ b/auth-api/src/auth_api/resources/ops.py @@ -17,26 +17,25 @@ from auth_api.models import db +bp = Blueprint("OPS", __name__, url_prefix="/ops") -bp = Blueprint('OPS', __name__, url_prefix='/ops') +SQL = text("select 1") -SQL = text('select 1') - -@bp.route('healthz', methods=['GET']) +@bp.route("healthz", methods=["GET"]) def get_ops_healthz(): """Return a JSON object stating the health of the Service and dependencies.""" try: - db.engine.execute(SQL) + db.session.execute(SQL) except exc.SQLAlchemyError: - return {'message': 'api is down'}, 500 + return {"message": "api is down"}, 500 # made it here, so all checks passed - return {'message': 'api is healthy'}, 200 + return {"message": "api is healthy"}, 200 -@bp.route('readyz', methods=['GET']) +@bp.route("readyz", methods=["GET"]) def get_ops_readyz(): """Return a JSON object that identifies if the service is setupAnd ready to work.""" # TODO: add a poll to the DB when called - return {'message': 'api is ready'}, 200 + return {"message": "api is ready"}, 200 diff --git a/auth-api/src/auth_api/resources/reset.py b/auth-api/src/auth_api/resources/reset.py deleted file mode 100644 index 6e6a18b9dd..0000000000 --- a/auth-api/src/auth_api/resources/reset.py +++ 
/dev/null @@ -1,43 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Endpoints to reset test data from database.""" - -from flask import Blueprint -from flask_cors import cross_origin - -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt -from auth_api.exceptions import BusinessException -from auth_api.services import ResetTestData as ResetService -from auth_api.tracer import Tracer -from auth_api.utils.endpoints_enums import EndpointEnum -from auth_api.utils.roles import Role - - -bp = Blueprint('RESET', __name__, url_prefix=f'{EndpointEnum.TEST_API.value}/reset') -TRACER = Tracer.get_instance() - - -@bp.route('', methods=['POST']) -@TRACER.trace() -@cross_origin(origin='*') -@_jwt.has_one_of_roles([Role.TESTER.value]) -def post_reset(): - """Cleanup test data by the provided token.""" - try: - ResetService.reset() - response, status = '', http_status.HTTP_204_NO_CONTENT - except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code - return response, status diff --git a/auth-api/src/auth_api/resources/v1/__init__.py b/auth-api/src/auth_api/resources/v1/__init__.py index 5d64fb3a02..d185d33a00 100644 --- a/auth-api/src/auth_api/resources/v1/__init__.py +++ b/auth-api/src/auth_api/resources/v1/__init__.py @@ -16,6 +16,8 @@ from flask import Flask +from ..meta import bp as meta_bp +from ..ops import bp as 
ops_bp from .account import bp as accounts_bp from .activity_log import bp as activity_log_bp from .affiliation_invitation import bp as affiliation_invitation_bp @@ -27,9 +29,7 @@ from .entity import bp as entity_bp from .invitation import bp as invitation_bp from .keycloak import bp as keycloak_bp -from .meta import bp as meta_bp from .notifications import bp as notifications_bp -from ..ops import bp as ops_bp from .org import bp as org_bp from .org_api_keys import bp as org_api_keys_bp from .org_authorizations import bp as org_authorizations_bp @@ -51,7 +51,7 @@ def __init__(self): def init_app(self, app): """Register and initialize the Endpoint setup.""" if not app: - raise Exception('Cannot initialize without a Flask App.') # pylint: disable=broad-exception-raised + raise Exception("Cannot initialize without a Flask App.") # pylint: disable=broad-exception-raised self.app = app self.app.register_blueprint(accounts_bp) diff --git a/auth-api/src/auth_api/resources/v1/account.py b/auth-api/src/auth_api/resources/v1/account.py index 7be92c8701..bce22dae61 100644 --- a/auth-api/src/auth_api/resources/v1/account.py +++ b/auth-api/src/auth_api/resources/v1/account.py @@ -13,24 +13,23 @@ # limitations under the License. 
"""API endpoints for managing an Org resource.""" +from http import HTTPStatus + from flask import Blueprint, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt + from auth_api.services.authorization import Authorization as AuthorizationService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum -bp = Blueprint('ACCOUNTS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/accounts') -TRACER = Tracer.get_instance() +bp = Blueprint("ACCOUNTS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/accounts") -@bp.route('//products//authorizations', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("//products//authorizations", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.requires_auth def get_account_product_authorizations(account_id, product_code): """Return authorizations for a product in an account.""" - expanded: bool = request.args.get('expanded', False) + expanded: bool = request.args.get("expanded", False) authorizations = AuthorizationService.get_account_authorizations_for_product(account_id, product_code, expanded) - return authorizations, http_status.HTTP_200_OK + return authorizations, HTTPStatus.OK diff --git a/auth-api/src/auth_api/resources/v1/activity_log.py b/auth-api/src/auth_api/resources/v1/activity_log.py index de58736c03..a4b82b0afa 100644 --- a/auth-api/src/auth_api/resources/v1/activity_log.py +++ b/auth-api/src/auth_api/resources/v1/activity_log.py @@ -13,41 +13,40 @@ # limitations under the License. 
"""API endpoints for managing a Activity resource.""" +from http import HTTPStatus + from flask import Blueprint, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.services import ActivityLog as ActivityLogService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import Role - -bp = Blueprint('ACTIVITY_LOGS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/orgs//activity-logs') -TRACER = Tracer.get_instance() +bp = Blueprint("ACTIVITY_LOGS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/orgs//activity-logs") -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods='GET') -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods="GET") @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF.value, Role.ACCOUNT_HOLDER.value]) def get_activities(org_id): """Fetch activities.""" try: # Search based on request arguments - item_name = request.args.get('itemName', None) - item_type = request.args.get('itemType', None) - action = request.args.get('action', None) - page = request.args.get('page', 1) - limit = request.args.get('limit', 10) - - response, status = ActivityLogService.fetch_activity_logs(org_id, - item_name=item_name, - item_type=item_type, action=action, - page=page, limit=limit), http_status.HTTP_200_OK + item_name = request.args.get("itemName", None) + item_type = request.args.get("itemType", None) + action = request.args.get("action", None) + page = request.args.get("page", 1) + limit = request.args.get("limit", 10) + + response, status = ( + ActivityLogService.fetch_activity_logs( + org_id, item_name=item_name, item_type=item_type, action=action, page=page, limit=limit + ), + HTTPStatus.OK, + ) except BusinessException as exception: - 
response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/affiliation_invitation.py b/auth-api/src/auth_api/resources/v1/affiliation_invitation.py index d915b129cb..917390c4bd 100644 --- a/auth-api/src/auth_api/resources/v1/affiliation_invitation.py +++ b/auth-api/src/auth_api/resources/v1/affiliation_invitation.py @@ -13,11 +13,11 @@ # limitations under the License. """API endpoints for managing an Affiliation Invitation resource.""" +from http import HTTPStatus + from flask import Blueprint, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException, Error from auth_api.models.dataclass import AffiliationInvitationSearch from auth_api.schemas import utils as schema_utils @@ -25,35 +25,33 @@ from auth_api.services import Entity as EntityService from auth_api.services import User as UserService from auth_api.services.authorization import check_auth -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import Role from auth_api.utils.util import get_request_environment - -bp = Blueprint('AFFILIATION_INVITATIONS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/affiliationInvitations') -TRACER = Tracer.get_instance() +bp = Blueprint("AFFILIATION_INVITATIONS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/affiliationInvitations") -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'POST']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, 
Role.PUBLIC_USER.value]) def get_affiliation_invitations(): """Get affiliation invitations.""" try: - get_business_details = request.args.get('businessDetails', 'false') - org_id = request.args.get('orgId', None) - business_identifier = request.args.get('businessIdentifier', None) + get_business_details = request.args.get("businessDetails", "false") + org_id = request.args.get("orgId", None) + business_identifier = request.args.get("businessIdentifier", None) search_filter = AffiliationInvitationSearch() - search_filter.from_org_id = request.args.get('fromOrgId', None) - search_filter.to_org_id = request.args.get('toOrgId', None) - search_filter.status_codes = request.args.getlist('statuses') - search_filter.invitation_types = request.args.getlist('types') + search_filter.from_org_id = request.args.get("fromOrgId", None) + search_filter.to_org_id = request.args.get("toOrgId", None) + search_filter.status_codes = request.args.getlist("statuses") + search_filter.invitation_types = request.args.getlist("types") if business_identifier: - business = EntityService\ - .find_by_business_identifier(business_identifier=business_identifier, skip_auth=True) + business = EntityService.find_by_business_identifier( + business_identifier=business_identifier, skip_auth=True + ) search_filter.entity_id = business.identifier if business else None auth_check_org_id = org_id or search_filter.from_org_id or search_filter.to_org_id @@ -61,158 +59,168 @@ def get_affiliation_invitations(): raise BusinessException(Error.NOT_AUTHORIZED_TO_PERFORM_THIS_ACTION, None) if org_id: - data = AffiliationInvitationService. \ - get_all_invitations_with_details_related_to_org(org_id=org_id, search_filter=search_filter) + data = AffiliationInvitationService.get_all_invitations_with_details_related_to_org( + org_id=org_id, search_filter=search_filter + ) else: - data = AffiliationInvitationService. 
\ - search_invitations(search_filter=search_filter) + data = AffiliationInvitationService.search_invitations(search_filter=search_filter) - if get_business_details.lower() == 'true': + if get_business_details.lower() == "true": data = AffiliationInvitationService.enrich_affiliation_invitations_dict_list_with_business_data(data) - response, status = {'affiliationInvitations': data}, http_status.HTTP_200_OK + response, status = {"affiliationInvitations": data}, HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('', methods=['POST']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("", methods=["POST"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) + [Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value] +) def post_affiliation_invitation(): """Send a new affiliation invitation using the details in request and saves the affiliation invitation.""" environment = get_request_environment() - origin = request.environ.get('HTTP_ORIGIN', 'localhost') + origin = request.environ.get("HTTP_ORIGIN", "localhost") request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'affiliation_invitation') + valid_format, errors = schema_utils.validate(request_json, "affiliation_invitation") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: user = UserService.find_by_jwt_token() - response, status = AffiliationInvitationService.create_affiliation_invitation(request_json, - user, origin, environment)\ - 
.as_dict(mask_email=True), \ - http_status.HTTP_201_CREATED + response, status = ( + AffiliationInvitationService.create_affiliation_invitation(request_json, user, origin, environment).as_dict( + mask_email=True + ), + HTTPStatus.CREATED, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'PATCH', 'DELETE']) -@TRACER.trace() +@bp.route("/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "PATCH", "DELETE"]) @_jwt.requires_auth def get_affiliation_invitation(affiliation_invitation_id): """Get the affiliation invitation specified by the provided id.""" affiliation_invitation = AffiliationInvitationService.find_affiliation_invitation_by_id(affiliation_invitation_id) - if not affiliation_invitation or affiliation_invitation.as_dict().get('is_deleted'): - response, status = {'message': 'The requested affiliation invitation could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + if not affiliation_invitation or affiliation_invitation.as_dict().get("is_deleted"): + response, status = {"message": "The requested affiliation invitation could not be found."}, HTTPStatus.NOT_FOUND else: dictionary = affiliation_invitation.as_dict(mask_email=True) - response, status = dictionary, http_status.HTTP_200_OK + response, status = dictionary, HTTPStatus.OK return response, status -@bp.route('/', methods=['PATCH']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["PATCH"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) def patch_affiliation_invitation(affiliation_invitation_id): """Update the affiliation invitation specified by the provided 
id.""" - origin = request.environ.get('HTTP_ORIGIN', 'localhost') + origin = request.environ.get("HTTP_ORIGIN", "localhost") request_json = request.get_json() try: - affiliation_invitation = AffiliationInvitationService\ - .find_affiliation_invitation_by_id(affiliation_invitation_id) + affiliation_invitation = AffiliationInvitationService.find_affiliation_invitation_by_id( + affiliation_invitation_id + ) if affiliation_invitation is None: - response, status = {'message': 'The requested affiliation invitation could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = { + "message": "The requested affiliation invitation could not be found." + }, HTTPStatus.NOT_FOUND else: user = UserService.find_by_jwt_token() - response, status = affiliation_invitation\ - .update_affiliation_invitation(user, origin, request_json).as_dict(mask_email=True),\ - http_status.HTTP_200_OK + response, status = ( + affiliation_invitation.update_affiliation_invitation(user, origin, request_json).as_dict( + mask_email=True + ), + HTTPStatus.OK, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['DELETE']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) + [Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value] +) def delete_affiliation_invitation(affiliation_invitation_id): """Delete the specified affiliation invitation.""" try: AffiliationInvitationService.delete_affiliation_invitation(affiliation_invitation_id) - response, status = {}, http_status.HTTP_200_OK + response, status = {}, 
HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//token/', methods=['PUT', 'OPTIONS']) -@cross_origin(origins='*', methods=['PUT']) -@TRACER.trace() +@bp.route("//token/", methods=["PUT", "OPTIONS"]) +@cross_origin(origins="*", methods=["PUT"]) @_jwt.requires_auth def accept_affiliation_invitation_token(affiliation_invitation_id, affiliation_invitation_token): """Check whether the passed token is valid and add affiliation from the affiliation invitation.""" - origin = request.environ.get('HTTP_ORIGIN', 'localhost') + origin = request.environ.get("HTTP_ORIGIN", "localhost") environment = get_request_environment() try: if not (user := UserService.find_by_jwt_token()): - response, status = {'message': 'Not authorized to perform this action'}, \ - http_status.HTTP_401_UNAUTHORIZED + response, status = {"message": "Not authorized to perform this action"}, HTTPStatus.UNAUTHORIZED else: - affiliation_invitation_id = AffiliationInvitationService\ - .validate_token(affiliation_invitation_token, int(affiliation_invitation_id)).as_dict().get('id') - response, status = AffiliationInvitationService\ - .accept_affiliation_invitation(affiliation_invitation_id, user, origin, - environment).as_dict(mask_email=True), \ - http_status.HTTP_200_OK + affiliation_invitation_id = ( + AffiliationInvitationService.validate_token( + affiliation_invitation_token, int(affiliation_invitation_id) + ) + .as_dict() + .get("id") + ) + response, status = ( + AffiliationInvitationService.accept_affiliation_invitation( + affiliation_invitation_id, user, origin, environment + ).as_dict(mask_email=True), + HTTPStatus.OK, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + 
response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//authorization/', methods=['PATCH', 'OPTIONS']) -@cross_origin(origins='*', methods=['PATCH']) -@TRACER.trace() +@bp.route("//authorization/", methods=["PATCH", "OPTIONS"]) +@cross_origin(origins="*", methods=["PATCH"]) @_jwt.requires_auth def patch_affiliation_invitation_authorization(affiliation_invitation_id, authorize_action): """Check if user is active part of the Org. Authorize/Refuse Authorization invite if he is.""" - origin = request.environ.get('HTTP_ORIGIN', 'localhost') + origin = request.environ.get("HTTP_ORIGIN", "localhost") env = get_request_environment() try: user = UserService.find_by_jwt_token() _verify_permissions(user=user, affiliation_invitation_id=affiliation_invitation_id) - if authorize_action == 'accept': - response, status = AffiliationInvitationService \ - .accept_affiliation_invitation(affiliation_invitation_id=affiliation_invitation_id, - user=user, - origin=origin, - environment=env).as_dict(mask_email=True), \ - http_status.HTTP_200_OK - elif authorize_action == 'refuse': - response, status = AffiliationInvitationService \ - .refuse_affiliation_invitation(invitation_id=affiliation_invitation_id, user=user)\ - .as_dict(mask_email=True), \ - http_status.HTTP_200_OK + if authorize_action == "accept": + response, status = ( + AffiliationInvitationService.accept_affiliation_invitation( + affiliation_invitation_id=affiliation_invitation_id, user=user, origin=origin, environment=env + ).as_dict(mask_email=True), + HTTPStatus.OK, + ) + elif authorize_action == "refuse": + response, status = ( + AffiliationInvitationService.refuse_affiliation_invitation( + invitation_id=affiliation_invitation_id, user=user + ).as_dict(mask_email=True), + HTTPStatus.OK, + ) else: - err = {'code': 400, 'message': f'{authorize_action} is not supported on this endpoint'} - raise BusinessException(err, 
http_status.HTTP_400_BAD_REQUEST) + err = {"code": 400, "message": f"{authorize_action} is not supported on this endpoint"} + raise BusinessException(err, HTTPStatus.BAD_REQUEST) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status @@ -221,11 +229,10 @@ def _verify_permissions(user, affiliation_invitation_id): if not user: raise BusinessException(Error.NOT_AUTHORIZED_TO_PERFORM_THIS_ACTION, None) - affiliation_invitation = AffiliationInvitationService. \ - find_affiliation_invitation_by_id(affiliation_invitation_id) + affiliation_invitation = AffiliationInvitationService.find_affiliation_invitation_by_id(affiliation_invitation_id) if not affiliation_invitation: raise BusinessException(Error.DATA_NOT_FOUND, None) - to_org_id = affiliation_invitation.as_dict()['to_org']['id'] + to_org_id = affiliation_invitation.as_dict()["to_org"]["id"] if not UserService.is_user_admin_or_coordinator(user=user, org_id=to_org_id): raise BusinessException(Error.NOT_AUTHORIZED_TO_PERFORM_THIS_ACTION, None) diff --git a/auth-api/src/auth_api/resources/v1/bcol_profiles.py b/auth-api/src/auth_api/resources/v1/bcol_profiles.py index 8912bb3700..e67ca00b69 100644 --- a/auth-api/src/auth_api/resources/v1/bcol_profiles.py +++ b/auth-api/src/auth_api/resources/v1/bcol_profiles.py @@ -16,22 +16,17 @@ from flask import Blueprint, request from flask_cors import cross_origin -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.services.org import Org -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import Role +bp = Blueprint("BCOL_PROFILES", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/bcol-profiles") -bp = 
Blueprint('BCOL_PROFILES', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/bcol-profiles') -TRACER = Tracer.get_instance() - - -@bp.route('', methods=['POST', 'OPTIONS']) -@cross_origin(origins='*', methods=['POST']) -@TRACER.trace() +@bp.route("", methods=["POST", "OPTIONS"]) +@cross_origin(origins="*", methods=["POST"]) @_jwt.has_one_of_roles([Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) def post_for_bcol_details(): """Return BC Online profile details.""" @@ -41,5 +36,5 @@ def post_for_bcol_details(): bcol_response = Org.get_bcol_details(bcol_credential=request_json) response, status = bcol_response.json(), bcol_response.status_code except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/bulk_user.py b/auth-api/src/auth_api/resources/v1/bulk_user.py index 0dc424f546..4cbb059e96 100644 --- a/auth-api/src/auth_api/resources/v1/bulk_user.py +++ b/auth-api/src/auth_api/resources/v1/bulk_user.py @@ -13,38 +13,35 @@ # limitations under the License. 
"""API endpoints for managing a User resource.""" +from http import HTTPStatus + from flask import Blueprint, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.schemas import utils as schema_utils from auth_api.services.user import User as UserService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum - -bp = Blueprint('BULK_USERS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/bulk/users') -TRACER = Tracer.get_instance() +bp = Blueprint("BULK_USERS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/bulk/users") -@bp.route('', methods=['POST', 'OPTIONS']) -@cross_origin(origins='*', methods=['POST']) -@TRACER.trace() +@bp.route("", methods=["POST", "OPTIONS"]) +@cross_origin(origins="*", methods=["POST"]) @_jwt.requires_auth def post_bulk_users(): """Admin users can post multiple users to his org.Use it for anonymous purpose only.""" try: request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'bulk_user') + valid_format, errors = schema_utils.validate(request_json, "bulk_user") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST - users = UserService.create_user_and_add_membership(request_json['users'], request_json['orgId']) - is_any_error = any(user['http_status'] != 201 for user in users['users']) + users = UserService.create_user_and_add_membership(request_json["users"], request_json["orgId"]) + is_any_error = any(user["http_status"] != 201 for user in users["users"]) - response, status = users, http_status.HTTP_207_MULTI_STATUS if is_any_error else http_status.HTTP_200_OK + response, status = users, HTTPStatus.MULTI_STATUS if is_any_error else HTTPStatus.OK 
except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/codes.py b/auth-api/src/auth_api/resources/v1/codes.py index 290dbe8fa2..5b26d777b1 100644 --- a/auth-api/src/auth_api/resources/v1/codes.py +++ b/auth-api/src/auth_api/resources/v1/codes.py @@ -12,28 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. """API endpoints for managing an Invitation resource.""" +from http import HTTPStatus + from flask import Blueprint, jsonify from flask_cors import cross_origin -from auth_api import status as http_status from auth_api.exceptions import BusinessException from auth_api.services import Codes as CodeService from auth_api.utils.endpoints_enums import EndpointEnum -bp = Blueprint('CODES', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/codes') +bp = Blueprint("CODES", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/codes") -@bp.route('/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) +@bp.route("/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) def get_codes(code_type): """Return the codes by giving name.""" try: codes = CodeService.fetch_codes(code_type=code_type) if codes is not None: - response, status = jsonify(codes), http_status.HTTP_200_OK + response, status = jsonify(codes), HTTPStatus.OK else: - response, status = jsonify({'message': f'The code type ({code_type}) could not be found.'}), \ - http_status.HTTP_404_NOT_FOUND + response, status = ( + jsonify({"message": f"The code type ({code_type}) could not be found."}), + HTTPStatus.NOT_FOUND, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + 
response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/documents.py b/auth-api/src/auth_api/resources/v1/documents.py index 14cd0ecc88..a220fa4f88 100644 --- a/auth-api/src/auth_api/resources/v1/documents.py +++ b/auth-api/src/auth_api/resources/v1/documents.py @@ -13,37 +13,34 @@ # limitations under the License. """API endpoints for managing an Invitation resource.""" from datetime import datetime +from http import HTTPStatus from flask import Blueprint, g, jsonify from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.services import Documents as DocumentService from auth_api.services.minio import MinioService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.enums import AccessType, DocumentType, LoginSource +bp = Blueprint("DOCUMENTS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/documents") -bp = Blueprint('DOCUMENTS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/documents') -TRACER = Tracer.get_instance() - -@bp.route('/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.requires_auth def get_document_by_type(document_type): """Return the latest terms of use.""" try: if document_type == DocumentType.TERMS_OF_USE.value: token = g.jwt_oidc_token_info - if token.get('accessType', None) == AccessType.ANONYMOUS.value: + if token.get("accessType", None) == AccessType.ANONYMOUS.value: document_type = DocumentType.TERMS_OF_USE_DIRECTOR_SEARCH.value - elif token.get('loginSource', - None) == LoginSource.STAFF.value: # ideally for govm user who logs in with IDIR + elif ( + 
token.get("loginSource", None) == LoginSource.STAFF.value + ): # ideally for govm user who logs in with IDIR document_type = DocumentType.TERMS_OF_USE_GOVM.value doc = DocumentService.fetch_latest_document(document_type) @@ -51,33 +48,30 @@ def get_document_by_type(document_type): doc_dict = doc.as_dict() if document_type == DocumentType.TERMS_OF_USE_PAD.value: replaced_content = _replace_current_date(doc) - doc_dict.update({'content': replaced_content}) + doc_dict.update({"content": replaced_content}) - response, status = doc_dict, http_status.HTTP_200_OK + response, status = doc_dict, HTTPStatus.OK else: - response, status = {'message': 'The requested invitation could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested invitation could not be found."}, HTTPStatus.NOT_FOUND except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status def _replace_current_date(doc): """Replace any dynamic contents.""" today = datetime.today() - replaced_content = doc.as_dict().get('content').replace('Month Day, Year', - today.strftime('%m/%d/%Y')) + replaced_content = doc.as_dict().get("content").replace("Month Day, Year", today.strftime("%m/%d/%Y")) return replaced_content -@bp.route('//signatures', methods=['GET', 'OPTIONS']) -@TRACER.trace() -@cross_origin(origins='*', methods=['GET']) +@bp.route("//signatures", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.requires_auth def get_document_signature_by_name(file_name: str): """Return the latest terms of use.""" try: - response, status = jsonify(MinioService.create_signed_put_url(file_name)), http_status.HTTP_200_OK + response, status = jsonify(MinioService.create_signed_put_url(file_name)), HTTPStatus.OK except BusinessException as exception: - response, 
status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/documents_affidavit.py b/auth-api/src/auth_api/resources/v1/documents_affidavit.py index 8547067392..6a8e0bafc3 100644 --- a/auth-api/src/auth_api/resources/v1/documents_affidavit.py +++ b/auth-api/src/auth_api/resources/v1/documents_affidavit.py @@ -14,35 +14,31 @@ """API endpoints for managing a document resource.""" +from http import HTTPStatus + from flask import Blueprint, send_from_directory from flask_cors import cross_origin -from auth_api import status as http_status from auth_api.exceptions import BusinessException from auth_api.services import Documents as DocumentService -from auth_api.tracer import Tracer from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.enums import ContentType, DocumentType - -bp = Blueprint('DOCUMENTS_AFFIDAVIT', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/documents/affidavit') -TRACER = Tracer.get_instance() +bp = Blueprint("DOCUMENTS_AFFIDAVIT", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/documents/affidavit") -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) def get_affidavit_document(): """Return the Affidavit.""" try: doc = DocumentService.fetch_latest_document(DocumentType.AFFIDAVIT.value) if doc is None: - response, status = {'message': 'The requested document could not be found.'},\ - http_status.HTTP_404_NOT_FOUND - elif doc.as_dict().get('content_type', None) == ContentType.PDF.value: # pdf has to be served as attachment - return send_from_directory('static', filename=doc.as_dict()['content'], as_attachment=True) + response, status = {"message": "The requested document could not be 
found."}, HTTPStatus.NOT_FOUND + elif doc.as_dict().get("content_type", None) == ContentType.PDF.value: # pdf has to be served as attachment + return send_from_directory(directory="static", path=doc.as_dict()["content"], as_attachment=True) else: - response, status = doc.as_dict(), http_status.HTTP_200_OK + response, status = doc.as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/entity.py b/auth-api/src/auth_api/resources/v1/entity.py index fc208d30f8..daf72f8b93 100644 --- a/auth-api/src/auth_api/resources/v1/entity.py +++ b/auth-api/src/auth_api/resources/v1/entity.py @@ -13,104 +13,104 @@ # limitations under the License. """API endpoints for managing an entity (business) resource.""" +from http import HTTPStatus + from flask import Blueprint, jsonify, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.schemas import utils as schema_utils from auth_api.services.authorization import Authorization as AuthorizationService from auth_api.services.contact import Contact as ContactService from auth_api.services.entity import Entity as EntityService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import ALL_ALLOWED_ROLES, CLIENT_AUTH_ROLES, Role from auth_api.utils.util import mask_email - -bp = Blueprint('ENTITIES', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/entities') -TRACER = Tracer.get_instance() +bp = Blueprint("ENTITIES", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/entities") -@bp.route('', methods=['POST']) 
-@cross_origin(origins='*', methods=['POST']) -@TRACER.trace() +@bp.route("", methods=["POST"]) +@cross_origin(origins="*", methods=["POST"]) @_jwt.has_one_of_roles([Role.SYSTEM.value]) def post_entity(): """Post a new Entity using the request body.""" request_json = request.get_json() # If the record exists, just return existing record. - entity = EntityService.find_by_business_identifier(request_json.get('businessIdentifier'), - allowed_roles=ALL_ALLOWED_ROLES) + entity = EntityService.find_by_business_identifier( + request_json.get("businessIdentifier"), allowed_roles=ALL_ALLOWED_ROLES + ) if entity: - return entity.as_dict(), http_status.HTTP_202_ACCEPTED + return entity.as_dict(), HTTPStatus.ACCEPTED - valid_format, errors = schema_utils.validate(request_json, 'entity') + valid_format, errors = schema_utils.validate(request_json, "entity") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: entity = EntityService.save_entity(request_json) - response, status = entity.as_dict(), http_status.HTTP_201_CREATED + response, status = entity.as_dict(), HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'PATCH', 'DELETE']) -@TRACER.trace() +@bp.route("/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "PATCH", "DELETE"]) @_jwt.requires_auth def get_entity(business_identifier): """Get an existing entity by it's business number.""" try: entity = EntityService.find_by_business_identifier(business_identifier, allowed_roles=ALL_ALLOWED_ROLES) if entity is not None: - response, status = entity.as_dict(), 
http_status.HTTP_200_OK + response, status = entity.as_dict(), HTTPStatus.OK else: - response, status = jsonify({'message': f'A business for {business_identifier} was not found.'}), \ - http_status.HTTP_404_NOT_FOUND + response, status = ( + jsonify({"message": f"A business for {business_identifier} was not found."}), + HTTPStatus.NOT_FOUND, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['PATCH']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["PATCH"]) +@cross_origin(origins="*") @_jwt.requires_auth def patch_entity(business_identifier): """Update an existing business by it's business number.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'entity') + valid_format, errors = schema_utils.validate(request_json, "entity") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST - passcode_reset = request_json.get('resetPasscode', False) + passcode_reset = request_json.get("resetPasscode", False) try: if passcode_reset: - entity = EntityService.reset_passcode(business_identifier, - email_addresses=request_json.get('passcodeResetEmail', None)) + entity = EntityService.reset_passcode( + business_identifier, email_addresses=request_json.get("passcodeResetEmail", None) + ) else: entity = EntityService.update_entity(business_identifier, request_json) if entity is not None: - response, status = entity.as_dict(), http_status.HTTP_200_OK + response, status = entity.as_dict(), HTTPStatus.OK else: - response, status = jsonify({'message': f'A business for {business_identifier} was not found.'}), \ - http_status.HTTP_404_NOT_FOUND + response, 
status = ( + jsonify({"message": f"A business for {business_identifier} was not found."}), + HTTPStatus.NOT_FOUND, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['DELETE']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value]) def delete_entity(business_identifier): """Delete an existing entity by it's business number.""" @@ -119,119 +119,118 @@ def delete_entity(business_identifier): if entity: entity.delete() - response, status = {}, http_status.HTTP_204_NO_CONTENT + response, status = {}, HTTPStatus.NO_CONTENT else: - response, status = jsonify({'message': f'A business for {business_identifier} was not found.'}), \ - http_status.HTTP_404_NOT_FOUND + response, status = ( + jsonify({"message": f"A business for {business_identifier} was not found."}), + HTTPStatus.NOT_FOUND, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//authentication', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("//authentication", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*") @_jwt.requires_auth def get_entity_authentication(business_identifier): """Get passcode or password for the Entity identified by the provided business identifier.""" # This route allows public users to see if businesses have a form of authentication. # It's used by the business dashboard for magic link. 
- if ((entity := EntityService.find_by_business_identifier(business_identifier, skip_auth=True)) and - (contact := entity.get_contact())): - has_valid_pass_code = (entity.pass_code_claimed is False and entity.pass_code is not None) or \ - entity.corp_type in ['SP', 'GP'] + if (entity := EntityService.find_by_business_identifier(business_identifier, skip_auth=True)) and ( + contact := entity.get_contact() + ): + has_valid_pass_code = ( + entity.pass_code_claimed is False and entity.pass_code is not None + ) or entity.corp_type in ["SP", "GP"] return { - 'contactEmail': mask_email(contact.email), - 'hasValidPassCode': has_valid_pass_code - }, http_status.HTTP_200_OK - return jsonify({'message': f'Authentication for {business_identifier} was not found.'}), \ - http_status.HTTP_404_NOT_FOUND + "contactEmail": mask_email(contact.email), + "hasValidPassCode": has_valid_pass_code, + }, HTTPStatus.OK + return ( + jsonify({"message": f"Authentication for {business_identifier} was not found."}), + HTTPStatus.NOT_FOUND, + ) -@bp.route('//contacts', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'POST', 'PUT', 'DELETE']) -@TRACER.trace() +@bp.route("//contacts", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST", "PUT", "DELETE"]) @_jwt.requires_auth def get_entity_contact(business_identifier): """Get contact email for the Entity identified by the provided business identifier.""" # This route allows public users to look at masked email addresses. # It's used by the business dashboard for magic link. 
- if ((entity := EntityService.find_by_business_identifier(business_identifier, skip_auth=True)) and - (contact := entity.get_contact())): - return ContactService(contact).as_dict(masked_email_only=True), http_status.HTTP_200_OK - return jsonify({'message': f'Contacts for {business_identifier} was not found.'}), \ - http_status.HTTP_404_NOT_FOUND + if (entity := EntityService.find_by_business_identifier(business_identifier, skip_auth=True)) and ( + contact := entity.get_contact() + ): + return ContactService(contact).as_dict(masked_email_only=True), HTTPStatus.OK + return jsonify({"message": f"Contacts for {business_identifier} was not found."}), HTTPStatus.NOT_FOUND -@bp.route('//contacts', methods=['POST']) -@cross_origin(origins='*') +@bp.route("//contacts", methods=["POST"]) +@cross_origin(origins="*") @_jwt.requires_auth def post_entity_contact(business_identifier): """Add a new contact for the Entity identified by the provided id.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'contact') + valid_format, errors = schema_utils.validate(request_json, "contact") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: entity = EntityService.find_by_business_identifier(business_identifier, allowed_roles=ALL_ALLOWED_ROLES) if entity: - response, status = entity.add_contact(request_json).as_dict(), \ - http_status.HTTP_201_CREATED + response, status = entity.add_contact(request_json).as_dict(), HTTPStatus.CREATED else: - response, status = {'message': 'The requested business could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested business could not be found."}, HTTPStatus.NOT_FOUND except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status 
= {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//contacts', methods=['PUT']) -@cross_origin(origins='*') +@bp.route("//contacts", methods=["PUT"]) +@cross_origin(origins="*") @_jwt.requires_auth def put_entity_contact(business_identifier): """Update the business contact for the Entity identified by the provided id.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'contact') + valid_format, errors = schema_utils.validate(request_json, "contact") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: entity = EntityService.find_by_business_identifier(business_identifier, allowed_roles=ALL_ALLOWED_ROLES) if entity: - response, status = entity.update_contact(request_json).as_dict(), \ - http_status.HTTP_200_OK + response, status = entity.update_contact(request_json).as_dict(), HTTPStatus.OK else: - response, status = {'message': 'The requested business could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested business could not be found."}, HTTPStatus.NOT_FOUND except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//contacts', methods=['DELETE']) -@cross_origin(origins='*') +@bp.route("//contacts", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.requires_auth def delete_entity_contact(business_identifier): """Delete the business contact for the Entity identified by the provided id.""" try: entity = EntityService.find_by_business_identifier(business_identifier, allowed_roles=CLIENT_AUTH_ROLES) if entity: - response, status = 
entity.delete_contact().as_dict(), http_status.HTTP_200_OK + response, status = entity.delete_contact().as_dict(), HTTPStatus.OK else: - response, status = {'message': 'The requested business could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested business could not be found."}, HTTPStatus.NOT_FOUND except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//authorizations', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) +@bp.route("//authorizations", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.requires_auth def get_entity_authorizations(business_identifier): """Return authorization for the user for the passed business identifier.""" - expanded: bool = request.args.get('expanded', False) + expanded: bool = request.args.get("expanded", False) authorisations = AuthorizationService.get_user_authorizations_for_entity(business_identifier, expanded) - return authorisations, http_status.HTTP_200_OK + return authorisations, HTTPStatus.OK diff --git a/auth-api/src/auth_api/resources/v1/invitation.py b/auth-api/src/auth_api/resources/v1/invitation.py index 25adb69dd7..3fdcc63306 100644 --- a/auth-api/src/auth_api/resources/v1/invitation.py +++ b/auth-api/src/auth_api/resources/v1/invitation.py @@ -13,126 +13,121 @@ # limitations under the License. 
"""API endpoints for managing an Invitation resource.""" +from http import HTTPStatus + from flask import Blueprint, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.schemas import utils as schema_utils from auth_api.services import Invitation as InvitationService from auth_api.services import User as UserService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import Role - -bp = Blueprint('INVITATIONS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/invitations') -TRACER = Tracer.get_instance() +bp = Blueprint("INVITATIONS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/invitations") -@bp.route('', methods=['POST', 'OPTIONS']) -@cross_origin(origins='*', methods=['POST']) -@TRACER.trace() +@bp.route("", methods=["POST", "OPTIONS"]) +@cross_origin(origins="*", methods=["POST"]) @_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) + [Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value] +) def post_invitation(): """Send a new invitation using the details in request and saves the invitation.""" - origin = request.environ.get('HTTP_ORIGIN', 'localhost') + origin = request.environ.get("HTTP_ORIGIN", "localhost") request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'invitation') + valid_format, errors = schema_utils.validate(request_json, "invitation") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: user = UserService.find_by_jwt_token() - response, status = 
InvitationService.create_invitation(request_json, user, origin).as_dict(), \ - http_status.HTTP_201_CREATED + response, status = ( + InvitationService.create_invitation(request_json, user, origin).as_dict(), + HTTPStatus.CREATED, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'PATCH', 'DELETE']) -@TRACER.trace() +@bp.route("/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "PATCH", "DELETE"]) @_jwt.requires_auth def get_invitation(invitation_id): """Get the invitation specified by the provided id.""" invitation = InvitationService.find_invitation_by_id(invitation_id) if invitation is None: - response, status = {'message': 'The requested invitation could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested invitation could not be found."}, HTTPStatus.NOT_FOUND else: - response, status = invitation.as_dict(), http_status.HTTP_200_OK + response, status = invitation.as_dict(), HTTPStatus.OK return response, status -@bp.route('/', methods=['PATCH']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["PATCH"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) def patch_invitation(invitation_id): """Update the invitation specified by the provided id as retried.""" - origin = request.environ.get('HTTP_ORIGIN', 'localhost') + origin = request.environ.get("HTTP_ORIGIN", "localhost") try: invitation = InvitationService.find_invitation_by_id(invitation_id) if invitation is None: - response, status = {'message': 'The requested invitation could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND 
+ response, status = {"message": "The requested invitation could not be found."}, HTTPStatus.NOT_FOUND else: user = UserService.find_by_jwt_token() - response, status = invitation.update_invitation(user, origin).as_dict(), http_status.HTTP_200_OK + response, status = invitation.update_invitation(user, origin).as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['DELETE']) -@TRACER.trace() -@cross_origin(origins='*') +@bp.route("/", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) + [Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value] +) def delete_invitation(invitation_id): """Delete the specified invitation.""" try: InvitationService.delete_invitation(invitation_id) - response, status = {}, http_status.HTTP_200_OK + response, status = {}, HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/tokens/', methods=['GET', 'OPTIONS']) -@TRACER.trace() -@cross_origin(origins='*', methods=['GET', 'PUT']) +@bp.route("/tokens/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "PUT"]) def validate_invitation_token(invitation_token): """Check whether the passed token is valid.""" try: InvitationService.validate_token(invitation_token) - response, status = {}, http_status.HTTP_200_OK + response, status = {}, HTTPStatus.OK except BusinessException as exception: - response, 
status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/tokens/', methods=['PUT']) -@TRACER.trace() -@cross_origin(origins='*') +@bp.route("/tokens/", methods=["PUT"]) +@cross_origin(origins="*") @_jwt.requires_auth def accept_invitation_token(invitation_token): """Check whether the passed token is valid and add user, role and org from invitation to membership.""" - origin = request.environ.get('HTTP_ORIGIN', 'localhost') + origin = request.environ.get("HTTP_ORIGIN", "localhost") try: user = UserService.find_by_jwt_token() if user is None: - response, status = {'message': 'Not authorized to perform this action'}, \ - http_status.HTTP_401_UNAUTHORIZED + response, status = {"message": "Not authorized to perform this action"}, HTTPStatus.UNAUTHORIZED else: - invitation_id = InvitationService.validate_token(invitation_token).as_dict().get('id') - response, status = InvitationService.accept_invitation(invitation_id, user, origin).as_dict(), \ - http_status.HTTP_200_OK # noqa:E127 + invitation_id = InvitationService.validate_token(invitation_token).as_dict().get("id") + response, status = ( + InvitationService.accept_invitation(invitation_id, user, origin).as_dict(), + HTTPStatus.OK, + ) # noqa:E127 except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/keycloak.py b/auth-api/src/auth_api/resources/v1/keycloak.py index a71dcea743..c86bbd7deb 100644 --- a/auth-api/src/auth_api/resources/v1/keycloak.py +++ b/auth-api/src/auth_api/resources/v1/keycloak.py @@ -1,4 +1,5 @@ """Keycloak resource, will ultimately get swapped out.""" + # Copyright © 2024 Province of 
British Columbia # # Licensed under the Apache License, Version 2.0 (the 'License'); @@ -18,26 +19,25 @@ from flask import Blueprint, jsonify, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.services.keycloak import KeycloakService +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import Role -bp = Blueprint('KEYCLOAK', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/keycloak') +bp = Blueprint("KEYCLOAK", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/keycloak") -@bp.route('/users', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) +@bp.route("/users", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.SYSTEM.value]) def get_keycloak_users_by_role(): """Return keycloak name + email by role.""" - role = request.args.get('role', None) + role = request.args.get("role", None) if role is None: - response, status = {'message': 'Role query parameter is required'}, http_status.HTTP_400_BAD_REQUEST + response, status = {"message": "Role query parameter is required"}, 400 try: - response, status = KeycloakService.get_user_emails_with_role(role), http_status.HTTP_200_OK + response, status = KeycloakService.get_user_emails_with_role(role), 200 except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return jsonify(response), status diff --git a/auth-api/src/auth_api/resources/v1/notifications.py b/auth-api/src/auth_api/resources/v1/notifications.py index acea2de343..0877e0c8cf 100644 --- a/auth-api/src/auth_api/resources/v1/notifications.py +++ b/auth-api/src/auth_api/resources/v1/notifications.py @@ 
-13,32 +13,32 @@ # limitations under the License. """API endpoints for managing an Notification resource.""" +from http import HTTPStatus + from flask import Blueprint from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.services import Membership as MembershipService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import Role - -bp = Blueprint('NOTIFICATIONS', __name__, - url_prefix=f'{EndpointEnum.API_V1.value}/users//org//notifications') -TRACER = Tracer.get_instance() +bp = Blueprint( + "NOTIFICATIONS", + __name__, + url_prefix=f"{EndpointEnum.API_V1.value}/users//org//notifications", +) -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF.value, Role.PUBLIC_USER.value]) def get_notifications(user_id, org_id): # pylint:disable=unused-argument """Find the count of notification remaining.If any details invalid, it returns zero.""" try: pending_count = MembershipService.get_pending_member_count_for_org(org_id) - response, status = {'count': pending_count}, http_status.HTTP_200_OK + response, status = {"count": pending_count}, HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/org.py b/auth-api/src/auth_api/resources/v1/org.py index effa0c21cb..46e24aafb7 100644 --- a/auth-api/src/auth_api/resources/v1/org.py +++ b/auth-api/src/auth_api/resources/v1/org.py @@ -13,19 
+13,18 @@ # limitations under the License. """API endpoints for managing an Org resource.""" import asyncio +from http import HTTPStatus import orjson from flask import Blueprint, current_app, g, jsonify, request from flask_cors import cross_origin +from structured_logging import StructuredLogging -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException, ServiceUnavailableException from auth_api.models import Affiliation as AffiliationModel from auth_api.models import Org as OrgModel from auth_api.models.dataclass import Affiliation as AffiliationData -from auth_api.models.dataclass import DeleteAffiliationRequest -from auth_api.models.dataclass import SimpleOrgSearch +from auth_api.models.dataclass import DeleteAffiliationRequest, SimpleOrgSearch from auth_api.models.org import OrgSearch # noqa: I005; Not sure why isort doesn't like this from auth_api.schemas import InvitationSchema, MembershipSchema from auth_api.schemas import utils as schema_utils @@ -37,103 +36,105 @@ from auth_api.services import SimpleOrg as SimpleOrgService from auth_api.services import User as UserService from auth_api.services.authorization import Authorization as AuthorizationService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.enums import AccessType, NotificationType, OrgStatus, OrgType, PatchActions, Status from auth_api.utils.role_validator import validate_roles from auth_api.utils.roles import ALL_ALLOWED_ROLES, CLIENT_ADMIN_ROLES, STAFF, USER, Role # noqa: I005 from auth_api.utils.util import extract_numbers, get_request_environment +bp = Blueprint("ORGS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/orgs") +logger = StructuredLogging.get_logger() -bp = Blueprint('ORGS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/orgs') -TRACER = Tracer.get_instance() - -@bp.route('', methods=['GET', 
'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'POST']) -@TRACER.trace() -@_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST"]) +@_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def search_organizations(): """Search orgs.""" env = get_request_environment() org_search = OrgSearch( - request.args.get('name', None), - request.args.get('branchName', None), - request.args.get('affiliation', None), - request.args.getlist('status', None), - request.args.getlist('accessType', None), - request.args.get('bcolAccountId', None), - extract_numbers(request.args.get('id', None)), - request.args.get('decisionMadeBy', None), - request.args.get('orgType', None), - int(request.args.get('page', 1)), - int(request.args.get('limit', 10)) + request.args.get("name", None), + request.args.get("branchName", None), + request.args.get("affiliation", None), + request.args.getlist("status", None), + request.args.getlist("accessType", None), + request.args.get("bcolAccountId", None), + extract_numbers(request.args.get("id", None)), + request.args.get("decisionMadeBy", None), + request.args.get("orgType", None), + int(request.args.get("page", 1)), + int(request.args.get("limit", 10)), ) - validate_name = request.args.get('validateName', 'False') + validate_name = request.args.get("validateName", "False") try: token = g.jwt_oidc_token_info - if validate_name.upper() == 'TRUE': - response, status = OrgService.find_by_org_name(org_name=org_search.name, - branch_name=org_search.branch_name), \ - http_status.HTTP_200_OK + if validate_name.upper() == "TRUE": + response, status = ( + OrgService.find_by_org_name(org_name=org_search.name, branch_name=org_search.branch_name), + HTTPStatus.OK, + ) else: - response, status = OrgService.search_orgs(org_search, env), http_status.HTTP_200_OK + response, status 
= OrgService.search_orgs(org_search, env), HTTPStatus.OK - roles = token.get('realm_access').get('roles') + roles = token.get("realm_access").get("roles") # public user can only get status of orgs in search, unless they have special roles. allowed_roles = [Role.STAFF.value, Role.SYSTEM.value, Role.ACCOUNT_IDENTITY] if Role.PUBLIC_USER.value in roles and not set(roles).intersection(set(allowed_roles)): - if response and response.get('orgs'): - status = http_status.HTTP_200_OK + if response and response.get("orgs"): + status = HTTPStatus.OK else: - status = http_status.HTTP_204_NO_CONTENT + status = HTTPStatus.NO_CONTENT response = {} # Do not return any results if searching by name except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/simple', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("/simple", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @validate_roles(allowed_roles=[Role.MANAGE_EFT.value, Role.SYSTEM.value]) -@_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.MANAGE_EFT.value]) +@_jwt.has_one_of_roles([Role.SYSTEM.value, Role.MANAGE_EFT.value]) def search_simple_orgs(): """Return simplified organization information.""" - current_app.logger.info('search_simple_orgs') + logger.info("search_simple_orgs") return jsonify(response), status -@bp.route('', methods=['POST']) -@cross_origin(origins='*') -@TRACER.trace() -@validate_roles(allowed_roles=[Role.PUBLIC_USER.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.SYSTEM.value], - not_allowed_roles=[Role.ANONYMOUS_USER.value]) +@bp.route("", methods=["POST"]) +@cross_origin(origins="*") +@validate_roles( + allowed_roles=[Role.PUBLIC_USER.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.SYSTEM.value], + 
not_allowed_roles=[Role.ANONYMOUS_USER.value], +) @_jwt.has_one_of_roles([Role.PUBLIC_USER.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.SYSTEM.value]) def post_organization(): """Post a new org using the request body. @@ -141,84 +142,77 @@ def post_organization(): If the org already exists, update the attributes. """ request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'org') + valid_format, errors = schema_utils.validate(request_json, "org") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: user = UserService.find_by_jwt_token() if user is None: - response, status = {'message': 'Not authorized to perform this action'}, \ - http_status.HTTP_401_UNAUTHORIZED + response, status = {"message": "Not authorized to perform this action"}, HTTPStatus.UNAUTHORIZED return response, status - response, status = OrgService.create_org(request_json, - user.identifier).as_dict(), http_status.HTTP_201_CREATED + response, status = OrgService.create_org(request_json, user.identifier).as_dict(), HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'PUT', 'PATCH', 'DELETE']) -@TRACER.trace() -@_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) +@bp.route("/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "PUT", "PATCH", "DELETE"]) +@_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get_organization(org_id): """Get the org specified by the provided id.""" org = 
OrgService.find_by_org_id(org_id, allowed_roles=ALL_ALLOWED_ROLES) if org is None: - response, status = {'message': 'The requested organization could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested organization could not be found."}, HTTPStatus.NOT_FOUND else: - response, status = org.as_dict(), http_status.HTTP_200_OK + response, status = org.as_dict(), HTTPStatus.OK return response, status -@bp.route('/', methods=['PUT']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["PUT"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.PUBLIC_USER.value, Role.GOV_ACCOUNT_USER.value, Role.STAFF_MANAGE_ACCOUNTS.value]) + [Role.SYSTEM.value, Role.PUBLIC_USER.value, Role.GOV_ACCOUNT_USER.value, Role.STAFF_MANAGE_ACCOUNTS.value] +) def put_organization(org_id): """Update the org specified by the provided id with the request body.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'org') + valid_format, errors = schema_utils.validate(request_json, "org") token_info = g.jwt_oidc_token_info if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: org = OrgService.find_by_org_id(org_id, allowed_roles=(*CLIENT_ADMIN_ROLES, STAFF)) - if org and org.as_dict().get('accessType', None) == AccessType.ANONYMOUS.value and \ - Role.STAFF_CREATE_ACCOUNTS.value not in token_info.get('realm_access').get('roles'): - return {'message': 'The organisation can only be updated by a staff admin.'}, \ - http_status.HTTP_401_UNAUTHORIZED + if ( + org + and org.as_dict().get("accessType", None) == AccessType.ANONYMOUS.value + and Role.STAFF_CREATE_ACCOUNTS.value not in token_info.get("realm_access").get("roles") + ): + return {"message": "The organisation can only be updated by a staff admin."}, HTTPStatus.UNAUTHORIZED if org: 
- response, status = org.update_org(org_info=request_json).as_dict(), \ - http_status.HTTP_200_OK + response, status = org.update_org(org_info=request_json).as_dict(), HTTPStatus.OK else: - response, status = {'message': 'The requested organization could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested organization could not be found."}, HTTPStatus.NOT_FOUND except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['DELETE']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_CREATE_ACCOUNTS.value, Role.PUBLIC_USER.value]) def delete_organization(org_id): """Inactivates the org if it has no active members or affiliations.""" try: OrgService.delete_org(org_id) - response, status = '', http_status.HTTP_204_NO_CONTENT + response, status = "", HTTPStatus.NO_CONTENT except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['PATCH']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["PATCH"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.STAFF_MANAGE_ACCOUNTS.value, Role.SYSTEM.value]) def patch_organization(org_id): """Patch an account.""" @@ -227,224 +221,221 @@ def patch_organization(org_id): org = OrgService(OrgModel.find_by_org_id(org_id)) if org: # set default patch action to updating status action - action = request_json.get('action', PatchActions.UPDATE_STATUS.value) - response, status = org.patch_org(action, - request_json), 
http_status.HTTP_200_OK + action = request_json.get("action", PatchActions.UPDATE_STATUS.value) + response, status = org.patch_org(action, request_json), HTTPStatus.OK else: - response, status = {'message': 'The requested organization could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested organization could not be found."}, HTTPStatus.NOT_FOUND except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//login-options', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'POST', 'PUT']) -@TRACER.trace() +@bp.route("//login-options", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST", "PUT"]) @_jwt.requires_auth def get_org_login_options(org_id): """Retrieve the set of payment settings associated with the specified org.""" try: login_options = OrgService.get_login_options_for_org(org_id, allowed_roles=ALL_ALLOWED_ROLES) - response, status = jsonify( - {'loginOption': login_options.login_source if login_options else None}), http_status.HTTP_200_OK + response, status = ( + jsonify({"loginOption": login_options.login_source if login_options else None}), + HTTPStatus.OK, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//login-options', methods=['POST']) -@TRACER.trace() -@cross_origin(origins='*') +@bp.route("//login-options", methods=["POST"]) +@cross_origin(origins="*") @_jwt.requires_auth def post_org_login_options(org_id): """Create a new login type for the specified org.""" request_json = request.get_json() - login_option_val = 
request_json.get('loginOption') + login_option_val = request_json.get("loginOption") try: login_option = OrgService.add_login_option(org_id, login_option_val) - response, status = jsonify({'login_option': login_option.login_source}), http_status.HTTP_201_CREATED + response, status = jsonify({"login_option": login_option.login_source}), HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//login-options', methods=['PUT']) -@TRACER.trace() -@cross_origin(origins='*') +@bp.route("//login-options", methods=["PUT"]) +@cross_origin(origins="*") @_jwt.requires_auth def put_org_login_optjons(org_id): """Update a new login type for the specified org.""" request_json = request.get_json() - login_option_val = request_json.get('loginOption') + login_option_val = request_json.get("loginOption") try: login_option = OrgService.update_login_option(org_id, login_option_val) - response, status = jsonify({'login_option': login_option.login_source}), http_status.HTTP_201_CREATED + response, status = jsonify({"login_option": login_option.login_source}), HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//contacts', methods=['GET', 'OPTIONS']) -@TRACER.trace() -@cross_origin(origins='*', methods=['GET', 'POST', 'PUT', 'DELETE']) +@bp.route("//contacts", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST", "PUT", "DELETE"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get(org_id): """Retrieve the set of contacts associated with 
the specified org.""" try: - response, status = OrgService.get_contacts(org_id), http_status.HTTP_200_OK + response, status = OrgService.get_contacts(org_id), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//contacts', methods=['POST']) -@TRACER.trace() -@cross_origin(origins='*') +@bp.route("//contacts", methods=["POST"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.PUBLIC_USER.value]) def post_organization_contact(org_id): """Create a new contact for the specified org.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'contact') + valid_format, errors = schema_utils.validate(request_json, "contact") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: - response, status = OrgService.add_contact(org_id, request_json).as_dict(), http_status.HTTP_201_CREATED + response, status = OrgService.add_contact(org_id, request_json).as_dict(), HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//contacts', methods=['PUT']) -@TRACER.trace() -@cross_origin(origins='*') +@bp.route("//contacts", methods=["PUT"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.PUBLIC_USER.value]) def put_organization_contact(org_id): """Update an existing contact for the specified org.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 
'contact') + valid_format, errors = schema_utils.validate(request_json, "contact") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: - response, status = OrgService.update_contact(org_id, request_json).as_dict(), http_status.HTTP_200_OK + response, status = OrgService.update_contact(org_id, request_json).as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//contacts', methods=['DELETE']) -@TRACER.trace() -@cross_origin(origins='*') +@bp.route("//contacts", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.PUBLIC_USER.value]) def delete_organzization_contact(org_id): """Delete the contact info for the specified org.""" try: - response, status = OrgService.delete_contact(org_id).as_dict(), http_status.HTTP_200_OK + response, status = OrgService.delete_contact(org_id).as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//affiliations', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['POST', 'GET']) -@TRACER.trace() +@bp.route("//affiliations", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["POST", "GET"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_BUSINESS.value, Role.PUBLIC_USER.value]) def get_organization_affiliations(org_id): """Get all affiliated entities for the given org.""" try: env = get_request_environment() # keep old response until UI is 
updated - if (request.args.get('new', 'false')).lower() != 'true': - return jsonify( - {'entities': AffiliationService.find_visible_affiliations_by_org_id(org_id, env)} - ), http_status.HTTP_200_OK + if (request.args.get("new", "false")).lower() != "true": + return ( + jsonify({"entities": AffiliationService.find_visible_affiliations_by_org_id(org_id, env)}), + HTTPStatus.OK, + ) # get affiliation identifiers and the urls for the source data affiliations = AffiliationModel.find_affiliations_by_org_id(org_id, env) affiliations_details_list = asyncio.run(AffiliationService.get_affiliation_details(affiliations)) # Use orjson serializer here, it's quite a bit faster. - response, status = current_app.response_class( - response=orjson.dumps({'entities': affiliations_details_list}), # pylint: disable=maybe-no-member - status=200, - mimetype='application/json' - ), http_status.HTTP_200_OK + response, status = ( + current_app.response_class( + response=orjson.dumps({"entities": affiliations_details_list}), # pylint: disable=maybe-no-member + status=200, + mimetype="application/json", + ), + HTTPStatus.OK, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code except ServiceUnavailableException as exception: - response, status = {'message': exception.error}, exception.status_code + response, status = {"message": exception.error}, exception.status_code return response, status -@bp.route('//affiliations', methods=['POST']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("//affiliations", methods=["POST"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_BUSINESS.value, Role.PUBLIC_USER.value]) def post_organization_affiliation(org_id): """Post a new Affiliation for an org using the request body.""" env = get_request_environment() request_json = 
request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'affiliation') - is_new_business = request.args.get('newBusiness', 'false').lower() == 'true' + valid_format, errors = schema_utils.validate(request_json, "affiliation") + is_new_business = request.args.get("newBusiness", "false").lower() == "true" if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST - business_identifier = request_json.get('businessIdentifier') + business_identifier = request_json.get("businessIdentifier") if not any(character.isdigit() for character in business_identifier): - return {'message': 'Business identifier requires at least 1 digit.'}, http_status.HTTP_400_BAD_REQUEST + return {"message": "Business identifier requires at least 1 digit."}, HTTPStatus.BAD_REQUEST try: if is_new_business: - affiliation_data = AffiliationData(org_id=org_id, business_identifier=business_identifier, - email=request_json.get('email'), phone=request_json.get('phone'), - certified_by_name=request_json.get('certifiedByName')) - - response, status = AffiliationService.create_new_business_affiliation( - affiliation_data, env).as_dict(), http_status.HTTP_201_CREATED + affiliation_data = AffiliationData( + org_id=org_id, + business_identifier=business_identifier, + email=request_json.get("email"), + phone=request_json.get("phone"), + certified_by_name=request_json.get("certifiedByName"), + ) + + response, status = ( + AffiliationService.create_new_business_affiliation(affiliation_data, env).as_dict(), + HTTPStatus.CREATED, + ) else: - response, status = AffiliationService.create_affiliation( - org_id, business_identifier, env, request_json.get('passCode'), - request_json.get('certifiedByName')).\ - as_dict(), http_status.HTTP_201_CREATED - - entity_details = request_json.get('entityDetails', None) + response, status = ( + AffiliationService.create_affiliation( 
+ org_id, business_identifier, env, request_json.get("passCode"), request_json.get("certifiedByName") + ).as_dict(), + HTTPStatus.CREATED, + ) + + entity_details = request_json.get("entityDetails", None) if entity_details: AffiliationService.fix_stale_affiliations(org_id, entity_details, env) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/affiliation/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() -@_jwt.has_one_of_roles( - [Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) +@bp.route("/affiliation/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) +@_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get_org_details_by_affiliation(business_identifier): """Search non staff orgs by BusinessIdentifier and return org Name, branch Name and UUID.""" environment = get_request_environment() excluded_org_types = [OrgType.STAFF.value, OrgType.SBC_STAFF.value] try: - data = OrgService.search_orgs_by_affiliation( - business_identifier, environment, excluded_org_types - ) + data = OrgService.search_orgs_by_affiliation(business_identifier, environment, excluded_org_types) - org_details = \ - [{'name': org.name, 'uuid': org.uuid, 'branchName': org.branch_name} for org in data['orgs']] - response, status = {'orgs_details': org_details}, http_status.HTTP_200_OK + org_details = [{"name": org.name, "uuid": org.uuid, "branchName": org.branch_name} for org in data["orgs"]] + response, status = {"orgs_details": org_details}, HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, 
"message": exception.message}, exception.status_code return response, status -@bp.route('//affiliations/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'DELETE']) -@TRACER.trace() +@bp.route("//affiliations/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "DELETE"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_BUSINESS.value, Role.PUBLIC_USER.value]) def get_org_affiliation_by_business_identifier(org_id, business_identifier): """Get the affiliation by org id and business identifier with authorized user.""" @@ -453,97 +444,97 @@ def get_org_affiliation_by_business_identifier(org_id, business_identifier): if AuthorizationService.get_user_authorizations_for_entity(business_identifier): environment = get_request_environment() # get affiliation - response, status = AffiliationService.find_affiliation( - org_id, business_identifier, environment), http_status.HTTP_200_OK + response, status = ( + AffiliationService.find_affiliation(org_id, business_identifier, environment), + HTTPStatus.OK, + ) else: - response, status = {'message': 'Not authorized to perform this action'}, \ - http_status.HTTP_401_UNAUTHORIZED + response, status = {"message": "Not authorized to perform this action"}, HTTPStatus.UNAUTHORIZED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code except ServiceUnavailableException as exception: - response, status = {'message': exception.error}, exception.status_code + response, status = {"message": exception.error}, exception.status_code return response, status -@bp.route('//affiliations/', methods=['DELETE']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("//affiliations/", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_BUSINESS.value, 
Role.PUBLIC_USER.value]) def delete_org_affiliation_by_business_identifier(org_id, business_identifier): """Delete an affiliation between an org and an entity.""" env = get_request_environment() request_json = request.get_json(silent=True) or {} try: - delete_affiliation_request = DeleteAffiliationRequest(org_id=org_id, business_identifier=business_identifier, - email_addresses=request_json.get('passcodeResetEmail'), - reset_passcode=request_json.get('resetPasscode', False), - log_delete_draft=request_json.get('logDeleteDraft', False) - ) + delete_affiliation_request = DeleteAffiliationRequest( + org_id=org_id, + business_identifier=business_identifier, + email_addresses=request_json.get("passcodeResetEmail"), + reset_passcode=request_json.get("resetPasscode", False), + log_delete_draft=request_json.get("logDeleteDraft", False), + ) AffiliationService.delete_affiliation(delete_affiliation_request, env) - response, status = {}, http_status.HTTP_200_OK + response, status = {}, HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//members', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("//members", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get_organization_members(org_id): """Retrieve the set of members for the given org.""" try: - status = request.args.get('status').upper() if request.args.get('status') else None - roles = request.args.get('roles').upper().split(',') if request.args.get('roles') else None + status = request.args.get("status").upper() if request.args.get("status") else None + roles = request.args.get("roles").upper().split(",") if 
request.args.get("roles") else None members = MembershipService.get_members_for_org(org_id, status=status, membership_roles=roles) if members: - response, status = {'members': MembershipSchema(exclude=['org']) - .dump(members, many=True)}, \ - http_status.HTTP_200_OK + response, status = {"members": MembershipSchema(exclude=["org"]).dump(members, many=True)}, HTTPStatus.OK else: - response, status = {}, \ - http_status.HTTP_200_OK + response, status = {}, HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//members/', methods=['PATCH', 'OPTIONS']) -@cross_origin(origins='*', methods=['PATCH', 'DELETE']) -@TRACER.trace() +@bp.route("//members/", methods=["PATCH", "OPTIONS"]) +@cross_origin(origins="*", methods=["PATCH", "DELETE"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) def patch_organization_member(org_id, membership_id): # pylint:disable=unused-argument """Update a membership record with new member role.""" - role = request.get_json().get('role') - membership_status = request.get_json().get('status') - notify_user = request.get_json().get('notifyUser') + role = request.get_json().get("role") + membership_status = request.get_json().get("status") + notify_user = request.get_json().get("notifyUser") updated_fields_dict = {} - origin = request.environ.get('HTTP_ORIGIN', 'localhost') + origin = request.environ.get("HTTP_ORIGIN", "localhost") try: if role is not None: updated_role = MembershipService.get_membership_type_by_code(role) - updated_fields_dict['membership_type'] = updated_role + updated_fields_dict["membership_type"] = updated_role if membership_status is not None: - updated_fields_dict['membership_status'] = \ - 
MembershipService.get_membership_status_by_code(membership_status) + updated_fields_dict["membership_status"] = MembershipService.get_membership_status_by_code( + membership_status + ) membership = MembershipService.find_membership_by_id(membership_id) - is_own_membership = \ - membership.as_dict()['user']['username'] == UserService.find_by_jwt_token().as_dict()['username'] + is_own_membership = ( + membership.as_dict()["user"]["username"] == UserService.find_by_jwt_token().as_dict()["username"] + ) if not membership: - response, status = {'message': 'The requested membership record could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested membership record could not be found."}, HTTPStatus.NOT_FOUND else: - response, status = membership.update_membership(updated_fields=updated_fields_dict).as_dict(), \ - http_status.HTTP_200_OK + response, status = ( + membership.update_membership(updated_fields=updated_fields_dict).as_dict(), + HTTPStatus.OK, + ) # if user status changed to active , mail the user if membership_status == Status.ACTIVE.name: @@ -553,13 +544,12 @@ def patch_organization_member(org_id, membership_id): # pylint:disable=unused-a return response, status except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//members/', methods=['DELETE']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("//members/", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) def delete_organization_member(org_id, membership_id): # pylint:disable=unused-argument """Mark a membership record as inactive. 
Membership must match current user token.""" @@ -567,62 +557,56 @@ def delete_organization_member(org_id, membership_id): # pylint:disable=unused- membership = MembershipService.find_membership_by_id(membership_id) if membership: - response, status = membership.deactivate_membership().as_dict(), \ - http_status.HTTP_200_OK + response, status = membership.deactivate_membership().as_dict(), HTTPStatus.OK else: - response, status = {'message': 'The requested membership could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested membership could not be found."}, HTTPStatus.NOT_FOUND except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//invitations', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("//invitations", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get_organization_invitations(org_id): """Retrieve the set of invitations for the given org.""" try: - invitation_status = request.args.get('status').upper() if request.args.get('status') else None - invitations = InvitationService.get_invitations_for_org(org_id=org_id, - status=invitation_status) + invitation_status = request.args.get("status").upper() if request.args.get("status") else None + invitations = InvitationService.get_invitations_for_org(org_id=org_id, status=invitation_status) - response, status = {'invitations': InvitationSchema(exclude=['membership.org']) - .dump(invitations, many=True)}, http_status.HTTP_200_OK + response, status = { + "invitations": InvitationSchema(exclude=["membership.org"]).dump(invitations, many=True) + }, HTTPStatus.OK except 
BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//admins/affidavits', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("//admins/affidavits", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_ACCOUNTS.value]) def get_org_admin_affidavit(org_id): """Get the affidavit for the admin who created the account.""" try: - response, status = AffidavitService.find_affidavit_by_org_id(org_id=org_id), \ - http_status.HTTP_200_OK + response, status = AffidavitService.find_affidavit_by_org_id(org_id=org_id), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//payment_info', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("//payment_info", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get_org_payment_info(org_id): """Retrieve the set of payment settings associated with the specified org.""" try: org = OrgService.find_by_org_id(org_id, allowed_roles=(*CLIENT_ADMIN_ROLES, STAFF)) - response, status = org.get_payment_info(), http_status.HTTP_200_OK + response, status = org.get_payment_info(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, 
exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/org_api_keys.py b/auth-api/src/auth_api/resources/v1/org_api_keys.py index 685ec20205..de9292b007 100644 --- a/auth-api/src/auth_api/resources/v1/org_api_keys.py +++ b/auth-api/src/auth_api/resources/v1/org_api_keys.py @@ -13,58 +13,54 @@ # limitations under the License. """API endpoints for managing an API gateway keys for org.""" +from http import HTTPStatus + from flask import Blueprint, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.schemas import utils as schema_utils from auth_api.services import ApiGateway as ApiGatewayService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import Role -bp = Blueprint('KEYS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/orgs//api-keys') -TRACER = Tracer.get_instance() +bp = Blueprint("KEYS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/orgs//api-keys") -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'POST']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.ACCOUNT_HOLDER.value]) def get_organization_api_keys(org_id): """Get all API keys for the account.""" - return ApiGatewayService.get_api_keys(org_id), http_status.HTTP_200_OK + return ApiGatewayService.get_api_keys(org_id), HTTPStatus.OK -@bp.route('', methods=['POST']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("", methods=["POST"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.SYSTEM.value]) def post_organization_api_key(org_id): """Create new api key for the org.""" request_json = request.get_json() - 
valid_format, errors = schema_utils.validate(request_json, 'api_key') + valid_format, errors = schema_utils.validate(request_json, "api_key") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: - response, status = ApiGatewayService.create_key(org_id, request_json), http_status.HTTP_201_CREATED + response, status = ApiGatewayService.create_key(org_id, request_json), HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['DELETE', 'OPTIONS']) -@cross_origin(origins='*', methods=['DELETE']) -@TRACER.trace() +@bp.route("/", methods=["DELETE", "OPTIONS"]) +@cross_origin(origins="*", methods=["DELETE"]) @_jwt.has_one_of_roles([Role.SYSTEM.value, Role.STAFF_MANAGE_ACCOUNTS.value, Role.ACCOUNT_HOLDER.value]) def delete_organization_api_key(org_id, key): """Revoke API Key.""" try: ApiGatewayService.revoke_key(org_id, key) - response, status = {}, http_status.HTTP_200_OK + response, status = {}, HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/org_authorizations.py b/auth-api/src/auth_api/resources/v1/org_authorizations.py index 2f2d8950bb..cb6cca1c21 100644 --- a/auth-api/src/auth_api/resources/v1/org_authorizations.py +++ b/auth-api/src/auth_api/resources/v1/org_authorizations.py @@ -13,25 +13,24 @@ # limitations under the License. 
"""API endpoints for managing a Product resource.""" +from http import HTTPStatus + from flask import Blueprint, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.services.authorization import Authorization as AuthorizationService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum -bp = Blueprint('PERMISSIONS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/orgs//authorizations') -TRACER = Tracer.get_instance() +bp = Blueprint("PERMISSIONS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/orgs//authorizations") -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.requires_auth def get_organization_permissions(org_id): """Return authorization for the user for the passed business identifier.""" - expanded: bool = request.args.get('expanded', False) - corp_type_code = request.headers.get('Product-Code', None) + expanded: bool = request.args.get("expanded", False) + corp_type_code = request.headers.get("Product-Code", None) authorisations = AuthorizationService.get_account_authorizations_for_org(org_id, corp_type_code, expanded) - return authorisations, http_status.HTTP_200_OK + return authorisations, HTTPStatus.OK diff --git a/auth-api/src/auth_api/resources/v1/org_products.py b/auth-api/src/auth_api/resources/v1/org_products.py index 9571700831..acfc9e87fe 100644 --- a/auth-api/src/auth_api/resources/v1/org_products.py +++ b/auth-api/src/auth_api/resources/v1/org_products.py @@ -13,77 +13,84 @@ # limitations under the License. 
"""API endpoints for managing an Org resource.""" import json +from http import HTTPStatus from flask import Blueprint, g, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.schemas import utils as schema_utils from auth_api.services import Product as ProductService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.roles import Role +bp = Blueprint("ORG_PRODUCTS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/orgs//products") -bp = Blueprint('ORG_PRODUCTS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/orgs//products') -TRACER = Tracer.get_instance() - -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'PATCH', 'POST']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "PATCH", "POST"]) @_jwt.has_one_of_roles([Role.PUBLIC_USER.value, Role.STAFF_VIEW_ACCOUNTS.value]) def get_org_product_subscriptions(org_id): """GET a new product subscription to the org using the request body.""" + + if not org_id or org_id == "None" or not org_id.isdigit() or int(org_id) < 0: + return {"message": "The organization ID is in an incorrect format."}, HTTPStatus.BAD_REQUEST + try: - include_hidden = request.args.get('include_hidden', None) == 'true' # used by NDS - response, status = json.dumps(ProductService.get_all_product_subscription(org_id=org_id, - include_hidden=include_hidden) - ), http_status.HTTP_200_OK + include_hidden = request.args.get("include_hidden", None) == "true" # used by NDS + response, status = ( + json.dumps(ProductService.get_all_product_subscription(org_id=int(org_id), include_hidden=include_hidden)), + HTTPStatus.OK, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': 
exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('', methods=['POST']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("", methods=["POST"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.STAFF_CREATE_ACCOUNTS.value, Role.PUBLIC_USER.value, Role.SYSTEM.value]) def post_org_product_subscription(org_id): """Post a new product subscription to the org using the request body.""" + + if not org_id or org_id == "None" or not org_id.isdigit() or int(org_id) < 0: + return {"message": "The organization ID is in an incorrect format."}, HTTPStatus.BAD_REQUEST + request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'org_product_subscription') + valid_format, errors = schema_utils.validate(request_json, "org_product_subscription") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: - roles = g.jwt_oidc_token_info.get('realm_access').get('roles') - subscriptions = ProductService.create_product_subscription(org_id, request_json, - skip_auth=Role.SYSTEM.value in roles, - auto_approve=Role.SYSTEM.value in roles) - ProductService.update_org_product_keycloak_groups(org_id) - response, status = {'subscriptions': subscriptions}, http_status.HTTP_201_CREATED + roles = g.jwt_oidc_token_info.get("realm_access").get("roles") + subscriptions = ProductService.create_product_subscription( + int(org_id), request_json, skip_auth=Role.SYSTEM.value in roles, auto_approve=Role.SYSTEM.value in roles + ) + ProductService.update_org_product_keycloak_groups(int(org_id)) + response, status = {"subscriptions": subscriptions}, HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + 
response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('', methods=['PATCH']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("", methods=["PATCH"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.PUBLIC_USER.value]) def patch_org_product_subscription(org_id): """Patch existing product subscription to resubmit it for review.""" + + if not org_id or org_id == "None" or not org_id.isdigit() or int(org_id) < 0: + return {"message": "The organization ID is in an incorrect format."}, HTTPStatus.BAD_REQUEST + request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'org_product_subscription') + valid_format, errors = schema_utils.validate(request_json, "org_product_subscription") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: - subscriptions = ProductService.resubmit_product_subscription(org_id, request_json) - response, status = {'subscriptions': subscriptions}, http_status.HTTP_200_OK + subscriptions = ProductService.resubmit_product_subscription(int(org_id), request_json) + response, status = {"subscriptions": subscriptions}, HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/permissions.py b/auth-api/src/auth_api/resources/v1/permissions.py index d57a6dbf22..ba608bfed5 100644 --- a/auth-api/src/auth_api/resources/v1/permissions.py +++ b/auth-api/src/auth_api/resources/v1/permissions.py @@ -14,37 +14,33 @@ """API endpoints for managing a Product resource.""" import json +from http import HTTPStatus from flask import Blueprint, 
request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.services import Permissions as PermissionsService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum -bp = Blueprint('MEMBER_PERMISSIONS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/permissions') -TRACER = Tracer.get_instance() +bp = Blueprint("MEMBER_PERMISSIONS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/permissions") -@bp.route('//', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("//", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.requires_auth def get_membership_permissions(org_status, membership_type): """Get a list of all permissions for the membership.""" try: - case = request.args.get('case') + case = request.args.get("case") permissions = PermissionsService.get_permissions_for_membership(org_status.upper(), membership_type.upper()) - if case == 'lower': + if case == "lower": permissions = [x.lower() for x in permissions] - elif case == 'upper': + elif case == "upper": permissions = [x.upper() for x in permissions] - response, status = json.dumps(permissions), \ - http_status.HTTP_200_OK + response, status = json.dumps(permissions), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/products.py b/auth-api/src/auth_api/resources/v1/products.py index d61c9c4afe..0298a89dd4 100644 --- a/auth-api/src/auth_api/resources/v1/products.py +++ b/auth-api/src/auth_api/resources/v1/products.py @@ -14,28 +14,24 @@ """API 
endpoints for managing a Product resource.""" import json +from http import HTTPStatus from flask import Blueprint from flask_cors import cross_origin -from auth_api import status as http_status from auth_api.exceptions import BusinessException from auth_api.services import Product as ProductService -from auth_api.tracer import Tracer from auth_api.utils.endpoints_enums import EndpointEnum +bp = Blueprint("PRODUCTS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/products") -bp = Blueprint('PRODUCTS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/products') -TRACER = Tracer.get_instance() - -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) def get_products(): """Get a list of all products.""" try: - response, status = json.dumps(ProductService.get_products()), http_status.HTTP_200_OK + response, status = json.dumps(ProductService.get_products()), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/task.py b/auth-api/src/auth_api/resources/v1/task.py index b21c094e4f..7304a3f30a 100644 --- a/auth-api/src/auth_api/resources/v1/task.py +++ b/auth-api/src/auth_api/resources/v1/task.py @@ -13,98 +13,91 @@ # limitations under the License. 
"""API endpoints for managing a Task resource.""" +from http import HTTPStatus + from flask import Blueprint, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.models import Task as TaskModel from auth_api.models.dataclass import TaskSearch from auth_api.schemas import utils as schema_utils from auth_api.services import Product as ProductService from auth_api.services import Task as TaskService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.enums import TaskRelationshipType from auth_api.utils.roles import Role - -bp = Blueprint('TASKS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/tasks') -TRACER = Tracer.get_instance() +bp = Blueprint("TASKS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/tasks") -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.STAFF.value]) def get_tasks(): """Fetch tasks.""" try: # Search based on request arguments task_search = TaskSearch( - name=request.args.get('name', None), - start_date=request.args.get('startDate', None), - end_date=request.args.get('endDate', None), - relationship_status=request.args.get('relationshipStatus', None), - type=request.args.get('type', None), - status=request.args.getlist('status', None), - modified_by=request.args.get('modifiedBy', None), - submitted_sort_order=request.args.get('submittedSortOrder', None), - page=int(request.args.get('page', 1)), - limit=int(request.args.get('limit', 10)) + name=request.args.get("name", None), + start_date=request.args.get("startDate", None), + end_date=request.args.get("endDate", None), + relationship_status=request.args.get("relationshipStatus", 
None), + type=request.args.get("type", None), + status=request.args.getlist("status", None), + modified_by=request.args.get("modifiedBy", None), + submitted_sort_order=request.args.get("submittedSortOrder", None), + page=int(request.args.get("page", 1)), + limit=int(request.args.get("limit", 10)), ) - response, status = TaskService.fetch_tasks(task_search), http_status.HTTP_200_OK + response, status = TaskService.fetch_tasks(task_search), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'PUT']) -@TRACER.trace() +@bp.route("/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "PUT"]) @_jwt.has_one_of_roles([Role.STAFF.value]) def get_task(task_id): """Fetch task by id.""" try: task = TaskService(TaskModel.find_by_task_id(task_id=task_id)) - response, status = task.as_dict(), http_status.HTTP_200_OK + response, status = task.as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['PUT']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["PUT"]) +@cross_origin(origins="*") @_jwt.has_one_of_roles([Role.STAFF.value]) def put_task(task_id): """Update a task.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'task_request') + valid_format, errors = schema_utils.validate(request_json, "task_request") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return 
{"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: task = TaskService(TaskModel.find_by_task_id(task_id)) if task: # Update task and its relationships - origin = request.environ.get('HTTP_ORIGIN', 'localhost') - task_dict = task.update_task(task_info=request_json, - origin_url=origin).as_dict() + origin = request.environ.get("HTTP_ORIGIN", "localhost") + task_dict = task.update_task(task_info=request_json, origin_url=origin).as_dict() # ProductService uses TaskService already. So, we need to avoid circular import. - if task_dict['relationship_type'] == TaskRelationshipType.PRODUCT.value: - ProductService.update_org_product_keycloak_groups(task_dict['account_id']) + if task_dict["relationship_type"] == TaskRelationshipType.PRODUCT.value: + ProductService.update_org_product_keycloak_groups(task_dict["account_id"]) response = task_dict - status = http_status.HTTP_200_OK + status = HTTPStatus.OK else: - response, status = {'message': 'The requested task could not be found.'}, \ - http_status.HTTP_404_NOT_FOUND + response, status = {"message": "The requested task could not be found."}, HTTPStatus.NOT_FOUND except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status diff --git a/auth-api/src/auth_api/resources/v1/user.py b/auth-api/src/auth_api/resources/v1/user.py index 7fa4093df9..8f4d4a2724 100644 --- a/auth-api/src/auth_api/resources/v1/user.py +++ b/auth-api/src/auth_api/resources/v1/user.py @@ -13,11 +13,11 @@ # limitations under the License. 
"""API endpoints for managing a User resource.""" +from http import HTTPStatus + from flask import Blueprint, abort, g, jsonify, request from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.schemas import MembershipSchema, OrgSchema from auth_api.schemas import utils as schema_utils @@ -28,73 +28,70 @@ from auth_api.services.membership import Membership as MembershipService from auth_api.services.org import Org as OrgService from auth_api.services.user import User as UserService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum from auth_api.utils.enums import LoginSource, Status from auth_api.utils.roles import Role -bp = Blueprint('USERS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/users') -TRACER = Tracer.get_instance() +bp = Blueprint("USERS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/users") -@bp.route('/bcros', methods=['POST', 'OPTIONS']) -@cross_origin(origins='*', methods=['POST']) -@TRACER.trace() +@bp.route("/bcros", methods=["POST", "OPTIONS"]) +@cross_origin(origins="*", methods=["POST"]) def post_anonymous_user(): """Post a new user using the request body who has a proper invitation.""" try: request_json = request.get_json() - invitation_token = request.headers.get('invitation_token', None) + invitation_token = request.headers.get("invitation_token", None) invitation = InvitationService.validate_token(invitation_token).as_dict() - valid_format, errors = schema_utils.validate(request_json, 'anonymous_user') + valid_format, errors = schema_utils.validate(request_json, "anonymous_user") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST membership_details = { - 'email': 
invitation['recipient_email'], - 'membershipType': invitation['membership'][0]['membership_type'], - 'update_password_on_login': False + "email": invitation["recipient_email"], + "membershipType": invitation["membership"][0]["membership_type"], + "update_password_on_login": False, } membership_details.update(request_json) - user = UserService.create_user_and_add_membership([membership_details], - invitation['membership'][0]['org']['id'], - single_mode=True) - user_dict = user['users'][0] - if user_dict['http_status'] != http_status.HTTP_201_CREATED: - response, status = {'code': user_dict['http_status'], 'message': user_dict['error']}, user_dict[ - 'http_status'] + user = UserService.create_user_and_add_membership( + [membership_details], invitation["membership"][0]["org"]["id"], single_mode=True + ) + user_dict = user["users"][0] + if user_dict["http_status"] != HTTPStatus.CREATED: + response, status = {"code": user_dict["http_status"], "message": user_dict["error"]}, user_dict[ + "http_status" + ] else: - InvitationService.accept_invitation(invitation['id'], None, None, False) - response, status = user, http_status.HTTP_201_CREATED + InvitationService.accept_invitation(invitation["id"], None, None, False) + response, status = user, HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'POST']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST"]) @_jwt.has_one_of_roles([Role.STAFF_VIEW_ACCOUNTS.value]) def get_user(): """Return a set of users based on search query parameters (staff only).""" - search_email = request.args.get('email', '') - search_first_name = request.args.get('firstname', '') - 
search_last_name = request.args.get('lastname', '') + search_email = request.args.get("email", "") + search_first_name = request.args.get("firstname", "") + search_last_name = request.args.get("lastname", "") users = UserService.find_users(first_name=search_first_name, last_name=search_last_name, email=search_email) collection = [] for user in users: collection.append(UserService(user).as_dict()) response = jsonify(collection) - status = http_status.HTTP_200_OK + status = HTTPStatus.OK return response, status -@bp.route('', methods=['POST']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("", methods=["POST"]) +@cross_origin(origins="*") @_jwt.requires_auth def post_user(): """Post a new user using the request body (which will contain a JWT). @@ -106,19 +103,19 @@ def post_user(): try: request_json = request.get_json(silent=True) # For BCeID users validate schema. - if token.get('loginSource', None) == LoginSource.BCEID.value and request_json is not None: - valid_format, errors = schema_utils.validate(request_json, 'user') + if token.get("loginSource", None) == LoginSource.BCEID.value and request_json is not None: + valid_format, errors = schema_utils.validate(request_json, "user") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST user = UserService.save_from_jwt_token(request_json) - response, status = user.as_dict(), http_status.HTTP_201_CREATED + response, status = user.as_dict(), HTTPStatus.CREATED # Add the user to public_users group if the user doesn't have public_user group - if token.get('loginSource', '') != LoginSource.STAFF.value: + if token.get("loginSource", "") != LoginSource.STAFF.value: KeycloakService.join_users_group() # For anonymous users, there are no invitation process for members, # so whenever they login perform this check and add them to corresponding groups - if token.get('loginSource', '') == 
LoginSource.BCROS.value: + if token.get("loginSource", "") == LoginSource.BCROS.value: if len(OrgService.get_orgs(user.identifier, [Status.ACTIVE.value])) > 0: KeycloakService.join_account_holders_group() if user.type == Role.STAFF.name: @@ -127,68 +124,64 @@ def post_user(): MembershipService.remove_staff_membership(user.identifier) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//otp', methods=['DELETE', 'OPTIONS']) -@cross_origin(origins='*', methods=['DELETE']) -@TRACER.trace() +@bp.route("//otp", methods=["DELETE", "OPTIONS"]) +@cross_origin(origins="*", methods=["DELETE"]) @_jwt.has_one_of_roles([Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value, Role.STAFF_VIEW_ACCOUNTS.value]) def delete_user_otp(username): """Delete/Reset the OTP of user profile associated with the provided username.""" try: user = UserService.find_by_username(username) if user is None: - response, status = jsonify({'message': f'User {username} does not exist.'}), http_status.HTTP_404_NOT_FOUND - elif user.as_dict().get('login_source', None) != LoginSource.BCEID.value: - response, status = {'Only BCEID users has OTP', http_status.HTTP_400_BAD_REQUEST} + response, status = jsonify({"message": f"User {username} does not exist."}), HTTPStatus.NOT_FOUND + elif user.as_dict().get("login_source", None) != LoginSource.BCEID.value: + response, status = {"Only BCEID users has OTP", HTTPStatus.BAD_REQUEST} else: - origin_url = request.environ.get('HTTP_ORIGIN', 'localhost') + origin_url = request.environ.get("HTTP_ORIGIN", "localhost") UserService.delete_otp_for_user(username, origin_url=origin_url) - response, status = '', http_status.HTTP_204_NO_CONTENT + response, status = "", HTTPStatus.NO_CONTENT except BusinessException as exception: - response, status = {'code': 
exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'DELETE', 'PATCH']) -@TRACER.trace() +@bp.route("/", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "DELETE", "PATCH"]) @_jwt.requires_auth def get_by_username(username): """Return the user profile associated with the provided username.""" user = UserService.find_by_username(username) if user is None: - response, status = jsonify({'message': f'User {username} does not exist.'}), http_status.HTTP_404_NOT_FOUND + response, status = jsonify({"message": f"User {username} does not exist."}), HTTPStatus.NOT_FOUND else: - response, status = user.as_dict(), http_status.HTTP_200_OK + response, status = user.as_dict(), HTTPStatus.OK return response, status -@bp.route('/', methods=['DELETE']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.requires_auth def delete_by_username(username): """Delete the user profile associated with the provided username.""" try: user = UserService.find_by_username(username) if user is None: - response, status = jsonify({'message': f'User {username} does not exist.'}), http_status.HTTP_404_NOT_FOUND - elif user.as_dict().get('type', None) != Role.ANONYMOUS_USER.name: - response, status = {'Normal users cant be deleted', http_status.HTTP_501_NOT_IMPLEMENTED} + response, status = jsonify({"message": f"User {username} does not exist."}), HTTPStatus.NOT_FOUND + elif user.as_dict().get("type", None) != Role.ANONYMOUS_USER.name: + response, status = {"Normal users cant be deleted", HTTPStatus.NOT_IMPLEMENTED} else: UserService.delete_anonymous_user(username) - response, status = '', http_status.HTTP_204_NO_CONTENT + response, status = "", HTTPStatus.NO_CONTENT except BusinessException as 
exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/', methods=['PATCH']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/", methods=["PATCH"]) +@cross_origin(origins="*") @_jwt.requires_auth def patch_by_username(username): """Patch the user profile associated with the provided username. @@ -198,222 +191,216 @@ def patch_by_username(username): try: request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'anonymous_user') + valid_format, errors = schema_utils.validate(request_json, "anonymous_user") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST user = UserService.find_by_username(username) if user is None: - response, status = jsonify({'message': f'User {username} does not exist.'}), http_status.HTTP_404_NOT_FOUND - elif user.as_dict().get('type', None) != Role.ANONYMOUS_USER.name: - response, status = {'Normal users cant be patched', http_status.HTTP_501_NOT_IMPLEMENTED} + response, status = jsonify({"message": f"User {username} does not exist."}), HTTPStatus.NOT_FOUND + elif user.as_dict().get("type", None) != Role.ANONYMOUS_USER.name: + response, status = {"Normal users cant be patched", HTTPStatus.NOT_IMPLEMENTED} else: UserService.reset_password_for_anon_user(request_json, username) - response, status = '', http_status.HTTP_204_NO_CONTENT + response, status = "", HTTPStatus.NO_CONTENT except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/@me', methods=['GET', 
'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'PATCH', 'DELETE']) -@TRACER.trace() +@bp.route("/@me", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "PATCH", "DELETE"]) @_jwt.requires_auth def get_current_user(): """Return the user profile associated with the JWT in the authorization header.""" try: - response, status = UserService.find_by_jwt_token().as_dict(), http_status.HTTP_200_OK + response, status = UserService.find_by_jwt_token().as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/@me', methods=['PATCH']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/@me", methods=["PATCH"]) +@cross_origin(origins="*") @_jwt.requires_auth def patch_current_user(): """Update terms of service for the user.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'termsofuse') + valid_format, errors = schema_utils.validate(request_json, "termsofuse") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST - version = request_json['termsversion'] - is_terms_accepted = request_json['istermsaccepted'] + version = request_json["termsversion"] + is_terms_accepted = request_json["istermsaccepted"] try: - response, status = UserService.update_terms_of_use(is_terms_accepted, version).as_dict(), \ - http_status.HTTP_200_OK + response, status = ( + UserService.update_terms_of_use(is_terms_accepted, version).as_dict(), + HTTPStatus.OK, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": 
exception.message}, exception.status_code return response, status -@bp.route('/@me', methods=['DELETE']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/@me", methods=["DELETE"]) +@cross_origin(origins="*") @_jwt.requires_auth def delete_current_user(): """Delete the user profile.""" try: UserService.delete_user() - response, status = '', http_status.HTTP_204_NO_CONTENT + response, status = "", HTTPStatus.NO_CONTENT except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/contacts', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET', 'POST', 'PUT', 'DELETE']) -@TRACER.trace() +@bp.route("/contacts", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST", "PUT", "DELETE"]) @_jwt.requires_auth def get_user_contacts(): """Retrieve the set of contacts asociated with the current user identifier by the JWT in the header.""" token = g.jwt_oidc_token_info if not token: - return {'message': 'Authorization required.'}, http_status.HTTP_401_UNAUTHORIZED + return {"message": "Authorization required."}, HTTPStatus.UNAUTHORIZED try: - response, status = UserService.get_contacts(), http_status.HTTP_200_OK + response, status = UserService.get_contacts(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/contacts', methods=['POST']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("/contacts", methods=["POST"]) +@cross_origin(origins="*") @_jwt.requires_auth def post_user_contact(): """Create a new contact for the user associated with the JWT in the authorization header.""" 
request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'contact') + valid_format, errors = schema_utils.validate(request_json, "contact") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: - response, status = UserService.add_contact(request_json).as_dict(), http_status.HTTP_201_CREATED + response, status = UserService.add_contact(request_json).as_dict(), HTTPStatus.CREATED except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/contacts', methods=['PUT']) -@TRACER.trace() -@cross_origin(origins='*') +@bp.route("/contacts", methods=["PUT"]) +@cross_origin(origins="*") @_jwt.requires_auth def put_user_contact(): """Update an existing contact for the user associated with the JWT in the authorization header.""" request_json = request.get_json() - valid_format, errors = schema_utils.validate(request_json, 'contact') + valid_format, errors = schema_utils.validate(request_json, "contact") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: - response, status = UserService.update_contact(request_json).as_dict(), http_status.HTTP_200_OK + response, status = UserService.update_contact(request_json).as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/contacts', methods=['DELETE']) 
-@cross_origin(originss='*') -@TRACER.trace() +@bp.route("/contacts", methods=["DELETE"]) +@cross_origin(originss="*") @_jwt.requires_auth def delete_user_contact(): """Delete the contact info for the user associated with the JWT in the authorization header.""" try: - response, status = UserService.delete_contact().as_dict(), http_status.HTTP_200_OK + response, status = UserService.delete_contact().as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/orgs', methods=['GET', 'OPTIONS']) -@TRACER.trace() -@cross_origin(origins='*', methods=['GET']) +@bp.route("/orgs", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get_user_organizations(): """Get a list of orgs that the current user is associated with.""" try: user = UserService.find_by_jwt_token() if not user: - response, status = {'message': 'User not found.'}, http_status.HTTP_404_NOT_FOUND + response, status = {"message": "User not found."}, HTTPStatus.NOT_FOUND else: all_orgs = OrgService.get_orgs(user.identifier) - orgs = OrgSchema().dump( - all_orgs, many=True) - response, status = jsonify({'orgs': orgs}), http_status.HTTP_200_OK + orgs = OrgSchema().dump(all_orgs, many=True) + response, status = jsonify({"orgs": orgs}), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/orgs//membership', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) +@bp.route("/orgs//membership", methods=["GET", "OPTIONS"]) 
+@cross_origin(origins="*", methods=["GET"]) @_jwt.has_one_of_roles([Role.STAFF_VIEW_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get_user_org_membership(org_id): """Get the membership for the given org and user.""" try: user = UserService.find_by_jwt_token() if not user: - response, status = {'message': 'User not found.'}, http_status.HTTP_404_NOT_FOUND + response, status = {"message": "User not found."}, HTTPStatus.NOT_FOUND else: - membership = MembershipService \ - .get_membership_for_org_and_user_all_status(org_id=org_id, user_id=user.identifier) - response, status = MembershipSchema(exclude=['org']).dump(membership), http_status.HTTP_200_OK + membership = MembershipService.get_membership_for_org_and_user_all_status( + org_id=org_id, user_id=user.identifier + ) + response, status = MembershipSchema(exclude=["org"]).dump(membership), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//affidavits', methods=['GET', 'OPTIONS']) -@TRACER.trace() -@cross_origin(origins='*', methods=['GET', 'POST']) +@bp.route("//affidavits", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET", "POST"]) @_jwt.has_one_of_roles([Role.STAFF_MANAGE_ACCOUNTS.value, Role.PUBLIC_USER.value]) def get_user_affidavit(user_guid): """Return pending/active affidavit for the user.""" token = g.jwt_oidc_token_info - affidavit_status = request.args.get('status', None) + affidavit_status = request.args.get("status", None) - if Role.STAFF.value not in token['realm_access']['roles'] and token.get('sub', None) != user_guid: + if Role.STAFF.value not in token["realm_access"]["roles"] and token.get("sub", None) != user_guid: abort(403) try: - response, status = AffidavitService.find_affidavit_by_user_guid(user_guid, status=affidavit_status), \ - 
http_status.HTTP_200_OK + response, status = ( + AffidavitService.find_affidavit_by_user_guid(user_guid, status=affidavit_status), + HTTPStatus.OK, + ) except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('//affidavits', methods=['POST']) -@cross_origin(origins='*') -@TRACER.trace() +@bp.route("//affidavits", methods=["POST"]) +@cross_origin(origins="*") @_jwt.requires_auth def post_user_affidavit(user_guid): """Create affidavit record for the user.""" token = g.jwt_oidc_token_info request_json = request.get_json() - if token.get('sub', None) != user_guid: + if token.get("sub", None) != user_guid: abort(403) - valid_format, errors = schema_utils.validate(request_json, 'affidavit') + valid_format, errors = schema_utils.validate(request_json, "affidavit") if not valid_format: - return {'message': schema_utils.serialize(errors)}, http_status.HTTP_400_BAD_REQUEST + return {"message": schema_utils.serialize(errors)}, HTTPStatus.BAD_REQUEST try: - response, status = AffidavitService.create_affidavit(request_json).as_dict(), http_status.HTTP_200_OK + response, status = AffidavitService.create_affidavit(request_json).as_dict(), HTTPStatus.OK except BusinessException as exception: - response, status = {'code': exception.code, 'message': exception.message}, exception.status_code + response, status = {"code": exception.code, "message": exception.message}, exception.status_code return response, status -@bp.route('/authorizations', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) +@bp.route("/authorizations", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.requires_auth def get_user_authorizations(): """Add a new contact for the Entity identified by the provided id.""" - sub = g.jwt_oidc_token_info.get('sub', 
None) - return AuthorizationService.get_user_authorizations(sub), http_status.HTTP_200_OK + sub = g.jwt_oidc_token_info.get("sub", None) + return AuthorizationService.get_user_authorizations(sub), HTTPStatus.OK diff --git a/auth-api/src/auth_api/resources/v1/user_settings.py b/auth-api/src/auth_api/resources/v1/user_settings.py index 806597fa42..f2ba33da18 100644 --- a/auth-api/src/auth_api/resources/v1/user_settings.py +++ b/auth-api/src/auth_api/resources/v1/user_settings.py @@ -13,27 +13,23 @@ # limitations under the License. """API endpoints for managing a User resource.""" import json +from http import HTTPStatus from flask import Blueprint, g, jsonify from flask_cors import cross_origin -from auth_api import status as http_status -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException from auth_api.schemas import UserSettingsSchema from auth_api.services.user import User as UserService from auth_api.services.user_settings import UserSettings as UserSettingsService -from auth_api.tracer import Tracer +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.endpoints_enums import EndpointEnum +bp = Blueprint("USER_SETTINGS", __name__, url_prefix=f"{EndpointEnum.API_V1.value}/users//settings") -bp = Blueprint('USER_SETTINGS', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/users//settings') -TRACER = Tracer.get_instance() - -@bp.route('', methods=['GET', 'OPTIONS']) -@cross_origin(origins='*', methods=['GET']) -@TRACER.trace() +@bp.route("", methods=["GET", "OPTIONS"]) +@cross_origin(origins="*", methods=["GET"]) @_jwt.requires_auth def get_user_settings(user_id): """Get info related to the user. 
@@ -42,15 +38,15 @@ def get_user_settings(user_id): """ token = g.jwt_oidc_token_info - if token.get('sub', None) != user_id: - return {'message': 'Unauthorized'}, http_status.HTTP_401_UNAUTHORIZED + if token.get("sub", None) != user_id: + return {"message": "Unauthorized"}, HTTPStatus.UNAUTHORIZED try: user = UserService.find_by_jwt_token(silent_mode=True) user_id = user.identifier if user else None all_settings = UserSettingsService.fetch_user_settings(user_id) - response, status = jsonify(UserSettingsSchema(many=True).dump(all_settings)), http_status.HTTP_200_OK + response, status = jsonify(UserSettingsSchema(many=True).dump(all_settings)), HTTPStatus.OK except BusinessException: - response, status = json.dumps([]), http_status.HTTP_200_OK + response, status = json.dumps([]), HTTPStatus.OK return response, status diff --git a/auth-api/src/auth_api/schemas/__init__.py b/auth-api/src/auth_api/schemas/__init__.py index d25803eee3..a5ce5783e0 100644 --- a/auth-api/src/auth_api/schemas/__init__.py +++ b/auth-api/src/auth_api/schemas/__init__.py @@ -15,19 +15,21 @@ from .activity_log import ActivityLogSchema from .affidavit import AffidavitSchema +from .affidavit_status import AffidavitStatusSchema from .affiliation import AffiliationSchema +from .affiliation_invitation import AffiliationInvitationSchema from .contact import ContactSchema, ContactSchemaPublic from .contact_link import ContactLinkSchema from .corp_type import CorpTypeSchema from .documents import DocumentSchema from .entity import EntitySchema -from .affiliation_invitation import AffiliationInvitationSchema from .invitation import InvitationSchema from .invitation_membership import InvitationMembershipSchema from .membership import MembershipSchema from .membership_status_code import MembershipStatusCodeSchema from .membership_types import MembershipTypeSchema from .org import OrgSchema +from .org_status import OrgStatusSchema from .org_type import OrgTypeSchema from .product_code import 
ProductCodeSchema from .product_subscription import ProductSubscriptionSchema @@ -35,3 +37,4 @@ from .task import TaskSchema from .user import UserSchema from .user_settings import UserSettingsSchema +from .user_status import UserStatusSchema diff --git a/auth-api/src/auth_api/schemas/account_payment_settings.py b/auth-api/src/auth_api/schemas/account_payment_settings.py deleted file mode 100644 index 4989dd395e..0000000000 --- a/auth-api/src/auth_api/schemas/account_payment_settings.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Manager for affiliation schema and export.""" - -from marshmallow import fields - -from auth_api.models import AccountPaymentSettings as AccountPaymentSettingsModel - -from .base_schema import BaseSchema - - -class AccountPaymentSettingsSchema(BaseSchema): # pylint: disable=too-many-ancestors, too-few-public-methods - """This is the schema for the AccountPaymentSettings model.""" - - class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods - """Maps all of the Account Payment Settings fields to a default schema.""" - - model = AccountPaymentSettingsModel - exclude = ['is_active', 'preferred_payment', 'created', 'created_by', 'id', 'modified', 'org'] - - preferred_payment_code = fields.String(data_key='preferredPayment') diff --git a/auth-api/src/auth_api/schemas/activity_log.py b/auth-api/src/auth_api/schemas/activity_log.py index ccf41b7797..0d9ea43605 100644 --- a/auth-api/src/auth_api/schemas/activity_log.py +++ b/auth-api/src/auth_api/schemas/activity_log.py @@ -25,4 +25,4 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the Activity Log fields to a default schema.""" model = ActivityLogModel - exclude = ('id', 'remote_addr') + exclude = ("id", "remote_addr") diff --git a/auth-api/src/auth_api/schemas/affidavit.py b/auth-api/src/auth_api/schemas/affidavit.py index 0fa2919ef1..0f74179afb 100644 --- a/auth-api/src/auth_api/schemas/affidavit.py +++ b/auth-api/src/auth_api/schemas/affidavit.py @@ -27,8 +27,9 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the affidavit fields to a default schema.""" model = AffidavitModel - exclude = ( - 'id', - ) + exclude = ("id", "version") - contacts = fields.Pluck('ContactLinkSchema', 'contact', many=True) + contacts = fields.Pluck("ContactLinkSchema", "contact", many=True) + status = fields.Pluck("AffidavitStatusSchema", "code", data_key="status") + status_code = fields.String(data_key="status_code") + user = 
fields.Pluck("UserSchema", "id", data_key="user") diff --git a/auth-api/src/auth_api/schemas/affidavit_status.py b/auth-api/src/auth_api/schemas/affidavit_status.py new file mode 100644 index 0000000000..b3bc268414 --- /dev/null +++ b/auth-api/src/auth_api/schemas/affidavit_status.py @@ -0,0 +1,30 @@ +# Copyright © 2019 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Manager for corp type schema and export.""" +from marshmallow import fields + +from auth_api.models import AffidavitStatus as AffidavitStatusModel +from auth_api.models import ma + + +class AffidavitStatusSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods + """This is the schema for the AffidavitStatus model.""" + + class Meta: # pylint: disable=too-few-public-methods + """Maps all of the AffidavitStatus fields to a default schema.""" + + model = AffidavitStatusModel + # front end expects desc still + + description = fields.String(data_key="desc") diff --git a/auth-api/src/auth_api/schemas/affiliation.py b/auth-api/src/auth_api/schemas/affiliation.py index 527945e56d..8395bc7041 100644 --- a/auth-api/src/auth_api/schemas/affiliation.py +++ b/auth-api/src/auth_api/schemas/affiliation.py @@ -28,5 +28,5 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods model = AffiliationModel - entity = fields.Nested('EntitySchema', many=False, data_key='business') - org = fields.Nested('OrgSchema', many=False, 
data_key='organization') + entity = fields.Nested("EntitySchema", many=False, data_key="business") + org = fields.Nested("OrgSchema", many=False, data_key="organization") diff --git a/auth-api/src/auth_api/schemas/affiliation_invitation.py b/auth-api/src/auth_api/schemas/affiliation_invitation.py index e62533febc..e5c68992df 100644 --- a/auth-api/src/auth_api/schemas/affiliation_invitation.py +++ b/auth-api/src/auth_api/schemas/affiliation_invitation.py @@ -17,8 +17,8 @@ from auth_api.models import AffiliationInvitation as AffiliationInvitationModel -from .base_schema import BaseSchema from ..utils.util import mask_email +from .base_schema import BaseSchema class AffiliationInvitationSchema(BaseSchema): # pylint: disable=too-many-ancestors, too-few-public-methods @@ -29,12 +29,25 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods model = AffiliationInvitationModel fields = ( - 'id', 'from_org', 'to_org', 'business_identifier', 'recipient_email', 'sent_date', 'expires_on', - 'accepted_date', 'status', 'token', 'type', 'affiliation_id', 'additional_message', 'is_deleted') - - from_org = fields.Nested('OrgSchema', only=('id', 'name', 'org_type')) - to_org = fields.Nested('OrgSchema', only=('id', 'name', 'org_type'), allow_none=True, required=False) - business_identifier = fields.String(attribute='entity.business_identifier', data_key='business_identifier') + "id", + "from_org", + "to_org", + "business_identifier", + "recipient_email", + "sent_date", + "expires_on", + "accepted_date", + "status", + "token", + "type", + "affiliation_id", + "additional_message", + "is_deleted", + ) + + from_org = fields.Nested("OrgSchema", only=("id", "name", "org_type")) + to_org = fields.Nested("OrgSchema", only=("id", "name", "org_type"), allow_none=True, required=False) + business_identifier = fields.String(attribute="entity.business_identifier", data_key="business_identifier") # pylint: disable=too-many-ancestors, too-few-public-methods @@ -44,5 +57,5 @@ class 
AffiliationInvitationSchemaPublic(AffiliationInvitationSchema): @post_dump(pass_many=False) def _mask_recipient_email_field(self, data, many): # pylint: disable=unused-argument """Mask recipient email field.""" - data['recipient_email'] = mask_email(data.get('recipient_email')) + data["recipient_email"] = mask_email(data.get("recipient_email")) return data diff --git a/auth-api/src/auth_api/schemas/authorization.py b/auth-api/src/auth_api/schemas/authorization.py index eaf653ff33..3753d12498 100644 --- a/auth-api/src/auth_api/schemas/authorization.py +++ b/auth-api/src/auth_api/schemas/authorization.py @@ -19,13 +19,13 @@ from auth_api.models.views.authorization import Authorization as AuthorizationModel -class AuthorizationSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class AuthorizationSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the Authorization model.""" class Meta: # pylint: disable=too-few-public-methods """Maps all of the Authorization fields to a default schema.""" model = AuthorizationModel - exclude = ['entity_name', 'keycloak_guid', 'org_id', 'org_type'] + exclude = ["entity_name", "keycloak_guid", "org_id", "org_type"] - org_membership = fields.String(data_key='orgMembership') + org_membership = fields.String(data_key="orgMembership") diff --git a/auth-api/src/auth_api/schemas/base_schema.py b/auth-api/src/auth_api/schemas/base_schema.py index 414f36a522..1771f14af0 100644 --- a/auth-api/src/auth_api/schemas/base_schema.py +++ b/auth-api/src/auth_api/schemas/base_schema.py @@ -18,43 +18,30 @@ from auth_api.models import ma -class BaseSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors +class BaseSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors """Base Schema.""" - def __init__(self, *args, **kwargs): - """Excludes versions. 
Otherwise database will query _versions table.""" - if hasattr(self.opts.model, 'versions') and (len(self.opts.fields) == 0): - self.opts.exclude += ('versions',) - super().__init__(*args, **kwargs) - class Meta: # pylint: disable=too-few-public-methods """Meta class to declare any class attributes.""" - datetimeformat = '%Y-%m-%dT%H:%M:%S+00:00' # Default output date format. + datetimeformat = "%Y-%m-%dT%H:%M:%S+00:00" # Default output date format. created_by = fields.Function( - lambda obj: f'{obj.created_by.firstname} {obj.created_by.lastname}' if obj.created_by else None + lambda obj: f"{obj.created_by.firstname} {obj.created_by.lastname}" if obj.created_by else None ) modified_by = fields.Function( - lambda obj: f'{obj.modified_by.firstname} {obj.modified_by.lastname}' if obj.modified_by else None + lambda obj: f"{obj.modified_by.firstname} {obj.modified_by.lastname}" if obj.modified_by else None ) @post_dump(pass_many=True) def _remove_empty(self, data, many): """Remove all empty values and versions from the dumped dict.""" if not many: - for key in list(data): - if key == 'versions': - data.pop(key) - - return { - key: value for key, value in data.items() - if value is not None - } + return {key: value for key, value in data.items() if value is not None} for item in data: for key in list(item): - if (key == 'versions') or (item[key] is None): + if item[key] is None: item.pop(key) return data diff --git a/auth-api/src/auth_api/schemas/basecode_type.py b/auth-api/src/auth_api/schemas/basecode_type.py index e72b7e6685..9154b8278d 100644 --- a/auth-api/src/auth_api/schemas/basecode_type.py +++ b/auth-api/src/auth_api/schemas/basecode_type.py @@ -18,13 +18,13 @@ from auth_api.models import ma -class BaseCodeSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class BaseCodeSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the BaseCode model.""" class Meta: # 
pylint: disable=too-few-public-methods """Maps fields to a default schema.""" - fields = ('code', 'description', 'default', 'is_government_agency', 'is_business') + fields = ("code", "description", "default", "is_government_agency", "is_business") # front end expects desc still - description = fields.String(data_key='desc') + description = fields.String(data_key="desc") diff --git a/auth-api/src/auth_api/schemas/contact.py b/auth-api/src/auth_api/schemas/contact.py index 964e7c7c2b..638b240eb9 100644 --- a/auth-api/src/auth_api/schemas/contact.py +++ b/auth-api/src/auth_api/schemas/contact.py @@ -17,8 +17,8 @@ from auth_api.models import Contact as ContactModel -from .base_schema import BaseSchema from ..utils.util import mask_email +from .base_schema import BaseSchema class ContactSchema(BaseSchema): # pylint: disable=too-many-ancestors, too-few-public-methods @@ -28,10 +28,10 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the User fields to a default schema.""" model = ContactModel - exclude = ('id', 'links', 'created', 'created_by', 'modified', 'modified_by') + exclude = ("id", "links", "created", "created_by", "modified", "modified_by", "version") - email = fields.String(data_key='email') - phone = fields.String(data_key='phone') + email = fields.String(data_key="email") + phone = fields.String(data_key="phone") class ContactSchemaPublic(BaseSchema): # pylint: disable=too-many-ancestors @@ -41,13 +41,29 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the User fields to a default schema.""" model = ContactModel - exclude = ('id', 'links', 'created', 'created_by', 'modified', 'modified_by', 'phone', 'phone_extension', - 'postal_code', 'street', 'city', 'region', 'street_additional', 'country', 'delivery_instructions') + exclude = ( + "id", + "links", + "created", + "created_by", + "modified", + "modified_by", + "phone", + "phone_extension", + "postal_code", + "street", + "city", + 
"region", + "street_additional", + "country", + "delivery_instructions", + "version", + ) - email = fields.String(data_key='email') + email = fields.String(data_key="email") @post_dump(pass_many=False) def _mask_email_field(self, data, many): # pylint: disable=unused-argument """Mask email field.""" - data['email'] = mask_email(data.get('email')) + data["email"] = mask_email(data.get("email")) return data diff --git a/auth-api/src/auth_api/schemas/contact_link.py b/auth-api/src/auth_api/schemas/contact_link.py index 3c1ec75636..3254ed5fcd 100644 --- a/auth-api/src/auth_api/schemas/contact_link.py +++ b/auth-api/src/auth_api/schemas/contact_link.py @@ -28,6 +28,6 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the User fields to a default schema.""" model = ContactLinkModel - exclude = ('id', 'user', 'entity', 'org') + exclude = ("id", "user", "entity", "org", "version") contact = fields.Nested(ContactSchema, many=False) diff --git a/auth-api/src/auth_api/schemas/corp_type.py b/auth-api/src/auth_api/schemas/corp_type.py index f082d95a55..52d7845d2e 100644 --- a/auth-api/src/auth_api/schemas/corp_type.py +++ b/auth-api/src/auth_api/schemas/corp_type.py @@ -18,7 +18,7 @@ from auth_api.models import ma -class CorpTypeSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class CorpTypeSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the CorpType model.""" class Meta: # pylint: disable=too-few-public-methods @@ -27,4 +27,4 @@ class Meta: # pylint: disable=too-few-public-methods model = CorpTypeModel # front end expects desc still - description = fields.String(data_key='desc') + description = fields.String(data_key="desc") diff --git a/auth-api/src/auth_api/schemas/entity.py b/auth-api/src/auth_api/schemas/entity.py index 1cdb6ce455..232d82cf02 100644 --- a/auth-api/src/auth_api/schemas/entity.py +++ 
b/auth-api/src/auth_api/schemas/entity.py @@ -28,9 +28,9 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the Entity fields to a default schema.""" model = EntityModel - exclude = ('id', 'pass_code') + exclude = ("id", "pass_code") - contacts = fields.Pluck('ContactLinkSchema', 'contact', many=True) + contacts = fields.Pluck("ContactLinkSchema", "contact", many=True) corp_type = fields.Nested(CorpTypeSchema, many=False) corp_sub_type = fields.Nested(CorpTypeSchema, many=False) - affiliations = fields.Nested('AffiliationSchema', many=True, only=('id', 'created', 'certified_by_name')) + affiliations = fields.Nested("AffiliationSchema", many=True, only=("id", "created", "certified_by_name")) diff --git a/auth-api/src/auth_api/schemas/invitation.py b/auth-api/src/auth_api/schemas/invitation.py index 9d39c0252a..3e2650c632 100644 --- a/auth-api/src/auth_api/schemas/invitation.py +++ b/auth-api/src/auth_api/schemas/invitation.py @@ -29,7 +29,15 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods model = InvitationModel fields = ( - 'id', 'recipient_email', 'sent_date', 'expires_on', 'accepted_date', 'status', 'membership', 'token', - 'type') + "id", + "recipient_email", + "sent_date", + "expires_on", + "accepted_date", + "status", + "membership", + "token", + "type", + ) membership = fields.Nested(InvitationMembershipSchema, many=True) diff --git a/auth-api/src/auth_api/schemas/invitation_membership.py b/auth-api/src/auth_api/schemas/invitation_membership.py index 42f1c98563..915f64ace5 100644 --- a/auth-api/src/auth_api/schemas/invitation_membership.py +++ b/auth-api/src/auth_api/schemas/invitation_membership.py @@ -28,8 +28,20 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods model = InvitationMembershipModel - org = fields.Nested('OrgSchema', exclude=['contacts', 'created', 'created_by', 'affiliated_entities', 'invitations', - 'members', 'modified', 'org_status', - 'org_type']) - - 
invitation = fields.Nested('InvitationSchema', only=('id', 'recipient_email', 'sent_date', 'expires_on', 'status')) + org = fields.Nested( + "OrgSchema", + exclude=[ + "contacts", + "created", + "created_by", + "affiliated_entities", + "invitations", + "members", + "modified", + "org_status", + "org_type", + ], + ) + + invitation = fields.Nested("InvitationSchema", only=("id", "recipient_email", "sent_date", "expires_on", "status")) + membership_type = fields.Pluck("MembershipTypeSchema", "code", data_key="membership_type") diff --git a/auth-api/src/auth_api/schemas/invite_status.py b/auth-api/src/auth_api/schemas/invite_status.py index 7020e4c3fd..f8b1f21d6f 100644 --- a/auth-api/src/auth_api/schemas/invite_status.py +++ b/auth-api/src/auth_api/schemas/invite_status.py @@ -14,11 +14,10 @@ """Manager for invitation status schema and export.""" from auth_api.models import InvitationStatus as InvitationStatusModel - from auth_api.models import ma -class InvitationStatusSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class InvitationStatusSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the Invitation Status model.""" class Meta: # pylint: disable=too-few-public-methods diff --git a/auth-api/src/auth_api/schemas/membership.py b/auth-api/src/auth_api/schemas/membership.py index 2b1f28f811..51c8284ab4 100644 --- a/auth-api/src/auth_api/schemas/membership.py +++ b/auth-api/src/auth_api/schemas/membership.py @@ -27,9 +27,11 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the Membership fields to a default schema.""" model = MembershipModel - fields = ('id', 'membership_type_code', 'user', 'org', 'membership_status') - - user = fields.Nested('UserSchema', only=( - 'firstname', 'lastname', 'username', 'modified', 'contacts', 'login_source', 'id')) - org = fields.Nested('OrgSchema', only=('id', 'name', 'affiliated_entities', 
'org_type', 'members', 'invitations')) - membership_status = fields.Pluck('MembershipStatusCodeSchema', 'name', data_key='membershipStatus') + fields = ("id", "membership_type_code", "user", "org", "membership_status", "membership_type") + + user = fields.Nested( + "UserSchema", only=("firstname", "lastname", "username", "modified", "contacts", "login_source", "id") + ) + org = fields.Nested("OrgSchema", only=("id", "name", "affiliated_entities", "org_type", "members", "invitations")) + membership_status = fields.Pluck("MembershipStatusCodeSchema", "name", data_key="membershipStatus") + membership_type = fields.Pluck("MembershipTypeSchema", "code", data_key="membershipType") diff --git a/auth-api/src/auth_api/schemas/membership_status_code.py b/auth-api/src/auth_api/schemas/membership_status_code.py index 58da8eea62..e8e18552ad 100644 --- a/auth-api/src/auth_api/schemas/membership_status_code.py +++ b/auth-api/src/auth_api/schemas/membership_status_code.py @@ -14,11 +14,10 @@ """Manager for membership type schema and export.""" from auth_api.models import MembershipStatusCode as MembershipStatusCodeModel - from auth_api.models import ma -class MembershipStatusCodeSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class MembershipStatusCodeSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the MembershipStatusCode model.""" class Meta: # pylint: disable=too-few-public-methods diff --git a/auth-api/src/auth_api/schemas/membership_types.py b/auth-api/src/auth_api/schemas/membership_types.py index 43c9dedec9..46946e3f8f 100644 --- a/auth-api/src/auth_api/schemas/membership_types.py +++ b/auth-api/src/auth_api/schemas/membership_types.py @@ -19,7 +19,7 @@ from auth_api.models import ma -class MembershipTypeSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class MembershipTypeSchema(ma.SQLAlchemyAutoSchema): # pylint: 
disable=too-many-ancestors, too-few-public-methods """This is the schema for the MembershipType model.""" class Meta: # pylint: disable=too-few-public-methods @@ -27,5 +27,5 @@ class Meta: # pylint: disable=too-few-public-methods model = MembershipTypeModel - code = fields.String(data_key='name') - description = fields.String(data_key='desc') + code = fields.String(data_key="name") + description = fields.String(data_key="desc") diff --git a/auth-api/src/auth_api/schemas/org.py b/auth-api/src/auth_api/schemas/org.py index 61a8187cd8..ad1184e450 100644 --- a/auth-api/src/auth_api/schemas/org.py +++ b/auth-api/src/auth_api/schemas/org.py @@ -27,26 +27,34 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the Org fields to a default schema.""" model = OrgModel - exclude = ('members', 'invitations', 'affiliated_entities', 'suspension_reason', - 'products', 'login_options', 'type_code') - - type_code = fields.String(data_key='org_type') - status_code = fields.String(data_key='status_code') - suspension_reason_code = fields.String(data_key='suspension_reason_code') - business_size = fields.String(data_key='business_size') - business_type = fields.String(data_key='business_type') - contacts = fields.Pluck('ContactLinkSchema', 'contact', many=True, data_key='mailing_address') + exclude = ( + "members", + "invitations", + "affiliated_entities", + "suspension_reason", + "products", + "login_options", + ) + + org_type = fields.Pluck("OrgTypeSchema", "code", data_key="org_type") + org_status = fields.Pluck("OrgStatusSchema", "code", data_key="org_status") + type_code = fields.String(data_key="type_code") + status_code = fields.String(data_key="status_code") + suspension_reason_code = fields.String(data_key="suspension_reason_code") + business_size = fields.String(data_key="business_size") + business_type = fields.String(data_key="business_type") + contacts = fields.Pluck("ContactLinkSchema", "contact", many=True, data_key="contacts") 
@post_dump(pass_many=False) def _include_dynamic_fields(self, data, many): """Remove all empty values and versions from the dumped dict.""" if not many: - if data.get('is_business_account', False): + if data.get("is_business_account", False): # Adding a dynamic field businessName for making other application integrations easy. - data['businessName'] = data.get('name') + data["businessName"] = data.get("name") # Map the mailing address to the first from contact as there can be only one mailing address. - - if (mailing_address := data.get('mailing_address', None)) is not None and mailing_address: - data['mailing_address'] = mailing_address[0] + if (mailing_address := data.get("contacts", None)) is not None and mailing_address: + if mailing_address[0] is not None and mailing_address[0].get("street", None) is not None: + data["mailing_address"] = mailing_address[0] return data diff --git a/auth-api/src/auth_api/schemas/org_status.py b/auth-api/src/auth_api/schemas/org_status.py index cf0e1ebc46..949d11d923 100644 --- a/auth-api/src/auth_api/schemas/org_status.py +++ b/auth-api/src/auth_api/schemas/org_status.py @@ -14,11 +14,10 @@ """Manager for org status schema and export.""" from auth_api.models import OrgStatus as OrgStatusModel - from auth_api.models import ma -class OrgStatusSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class OrgStatusSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the OrgStatus model.""" class Meta: # pylint: disable=too-few-public-methods diff --git a/auth-api/src/auth_api/schemas/org_type.py b/auth-api/src/auth_api/schemas/org_type.py index cc74f9d404..684c9e0c71 100644 --- a/auth-api/src/auth_api/schemas/org_type.py +++ b/auth-api/src/auth_api/schemas/org_type.py @@ -14,11 +14,10 @@ """Manager for org type schema and export.""" from auth_api.models import OrgType as OrgTypeModel - from auth_api.models import ma -class 
OrgTypeSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class OrgTypeSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the OrgType model.""" class Meta: # pylint: disable=too-few-public-methods diff --git a/auth-api/src/auth_api/schemas/payment_type.py b/auth-api/src/auth_api/schemas/payment_type.py index 994dd1adb2..0ac95a7bdf 100644 --- a/auth-api/src/auth_api/schemas/payment_type.py +++ b/auth-api/src/auth_api/schemas/payment_type.py @@ -14,11 +14,10 @@ """Manager for payment type schema and export.""" from auth_api.models import PaymentType as PaymentTypeModel - from auth_api.models import ma -class PaymentTypeSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class PaymentTypeSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the PaymentType model.""" class Meta: # pylint: disable=too-few-public-methods diff --git a/auth-api/src/auth_api/schemas/product_code.py b/auth-api/src/auth_api/schemas/product_code.py index dcdfa26e15..d69f69a86e 100644 --- a/auth-api/src/auth_api/schemas/product_code.py +++ b/auth-api/src/auth_api/schemas/product_code.py @@ -19,16 +19,16 @@ from auth_api.models import ma -class ProductCodeSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class ProductCodeSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the ProductCodeModel model.""" class Meta: # pylint: disable=too-few-public-methods """Maps all of the ProductSubscription fields to a default schema.""" model = ProductCodeModel - exclude = ['default', 'linked_product_code', 'can_resubmit'] + exclude = ["default", "linked_product_code", "can_resubmit"] - type_code = fields.String(data_key='type') - parent_code = fields.String(data_key='parentCode') - premium_only = 
fields.Boolean(data_key='premiumOnly') - need_review = fields.Boolean(data_key='needReview') + type_code = fields.String(data_key="type") + parent_code = fields.String(data_key="parentCode") + premium_only = fields.Boolean(data_key="premiumOnly") + need_review = fields.Boolean(data_key="needReview") diff --git a/auth-api/src/auth_api/schemas/simple_org.py b/auth-api/src/auth_api/schemas/simple_org.py index 6c5743cd87..4264b898a5 100644 --- a/auth-api/src/auth_api/schemas/simple_org.py +++ b/auth-api/src/auth_api/schemas/simple_org.py @@ -33,7 +33,4 @@ def from_row(cls, row: Org): https://www.attrs.org/en/stable/init.html """ - return cls(id=row.id, - name=row.name, - branch_name=row.branch_name, - status=row.status_code) + return cls(id=row.id, name=row.name, branch_name=row.branch_name, status=row.status_code) diff --git a/auth-api/src/auth_api/schemas/suspension_reason_code.py b/auth-api/src/auth_api/schemas/suspension_reason_code.py index 7f40c748c8..c2913c4b7c 100644 --- a/auth-api/src/auth_api/schemas/suspension_reason_code.py +++ b/auth-api/src/auth_api/schemas/suspension_reason_code.py @@ -14,11 +14,10 @@ """Manager for suspension reason schema and export.""" from auth_api.models import SuspensionReasonCode as SuspensionReasonCodeModel - from auth_api.models import ma -class SuspensionReasonCodeSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class SuspensionReasonCodeSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the SuspensionReasonCode model.""" class Meta: # pylint: disable=too-few-public-methods diff --git a/auth-api/src/auth_api/schemas/task.py b/auth-api/src/auth_api/schemas/task.py index 56c110c694..30e360b699 100644 --- a/auth-api/src/auth_api/schemas/task.py +++ b/auth-api/src/auth_api/schemas/task.py @@ -16,6 +16,7 @@ from marshmallow import fields from auth_api.models import Task as TaskModel + from .base_schema import BaseSchema from 
.user import UserSchema diff --git a/auth-api/src/auth_api/schemas/user.py b/auth-api/src/auth_api/schemas/user.py index c28bddc2e5..6db751ef90 100644 --- a/auth-api/src/auth_api/schemas/user.py +++ b/auth-api/src/auth_api/schemas/user.py @@ -27,19 +27,15 @@ class Meta(BaseSchema.Meta): # pylint: disable=too-few-public-methods """Maps all of the User fields to a default schema.""" model = UserModel - exclude = ( - 'orgs', - 'is_terms_of_use_accepted', - 'terms_of_use_accepted_version', - 'terms_of_use_version' - ) + exclude = ("orgs", "is_terms_of_use_accepted", "terms_of_use_accepted_version", "terms_of_use_version") - user_terms = fields.Method('get_user_terms_object') - contacts = fields.Pluck('ContactLinkSchema', 'contact', many=True) + user_terms = fields.Method("get_user_terms_object") + contacts = fields.Pluck("ContactLinkSchema", "contact", many=True) + user_status = fields.Pluck("UserStatusSchema", "id", data_key="user_status") def get_user_terms_object(self, obj): """Map terms properties into nested object.""" return { - 'isTermsOfUseAccepted': obj.is_terms_of_use_accepted, - 'termsOfUseAcceptedVersion': obj.terms_of_use_accepted_version + "isTermsOfUseAccepted": obj.is_terms_of_use_accepted, + "termsOfUseAcceptedVersion": obj.terms_of_use_accepted_version, } diff --git a/auth-api/src/auth_api/schemas/user_settings.py b/auth-api/src/auth_api/schemas/user_settings.py index e9db7dfd6d..5a1bbdf9d9 100644 --- a/auth-api/src/auth_api/schemas/user_settings.py +++ b/auth-api/src/auth_api/schemas/user_settings.py @@ -18,23 +18,29 @@ from auth_api.models import ma -class UserSettingsSchema(ma.ModelSchema): # pylint: disable=too-many-ancestors, too-few-public-methods +class UserSettingsSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods """This is the schema for the User Settings model.""" class Meta: # pylint: disable=too-few-public-methods """Maps all of the User Settings fields to a default schema.""" - fields = ('id', 
'label', 'additional_label', 'urlorigin', 'urlpath', 'type', 'account_type', 'account_status', - 'product_settings') + fields = ( + "id", + "label", + "additional_label", + "urlorigin", + "urlpath", + "type", + "account_type", + "account_status", + "product_settings", + ) @post_dump(pass_many=True) def _remove_empty(self, data, many): """Remove all empty values from the dumped dict.""" if not many: - return { - key: value for key, value in data.items() - if value or isinstance(value, float) - } + return {key: value for key, value in data.items() if value or isinstance(value, float)} for item in data: for key in list(item): value = item[key] diff --git a/auth-api/src/auth_api/resources/v1/meta.py b/auth-api/src/auth_api/schemas/user_status.py similarity index 50% rename from auth-api/src/auth_api/resources/v1/meta.py rename to auth-api/src/auth_api/schemas/user_status.py index 3c8d663872..0017cae35e 100644 --- a/auth-api/src/auth_api/resources/v1/meta.py +++ b/auth-api/src/auth_api/schemas/user_status.py @@ -11,22 +11,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Meta information about the service. 
+"""Manager for corp type schema and export.""" +from marshmallow import fields -Currently this only provides API versioning information -""" -from flask import Blueprint, jsonify +from auth_api.models import UserStatusCode as UserStatusModel +from auth_api.models import ma -from auth_api.utils.endpoints_enums import EndpointEnum -from auth_api.utils.run_version import get_run_version +class UserStatusSchema(ma.SQLAlchemyAutoSchema): # pylint: disable=too-many-ancestors, too-few-public-methods + """This is the schema for the UserStatus model.""" -bp = Blueprint('META', __name__, url_prefix=f'{EndpointEnum.API_V1.value}/meta') + class Meta: # pylint: disable=too-few-public-methods + """Maps all of the UserStatus fields to a default schema.""" + model = UserStatusModel + # front end expects desc still -@bp.route('/info') -def get_meta_info(): - """Return a JSON object with meta information about the Service.""" - version = get_run_version() - return jsonify( - API=f'auth_api/{version}') + description = fields.String(data_key="desc") diff --git a/auth-api/src/auth_api/schemas/utils.py b/auth-api/src/auth_api/schemas/utils.py index acc05cd853..0b2485de47 100644 --- a/auth-api/src/auth_api/schemas/utils.py +++ b/auth-api/src/auth_api/schemas/utils.py @@ -21,8 +21,7 @@ from jsonschema import Draft7Validator, RefResolver, SchemaError - -BASE_URI = 'https://bcrs.gov.bc.ca/.well_known/schemas' +BASE_URI = "https://bcrs.gov.bc.ca/.well_known/schemas" def get_schema(filename: str) -> dict: @@ -32,10 +31,10 @@ def get_schema(filename: str) -> dict: def _load_json_schema(filename: str): """Return the given schema file identified by filename.""" - relative_path = path.join('schemas', filename) + relative_path = path.join("schemas", filename) absolute_path = path.join(path.dirname(__file__), relative_path) - with open(absolute_path, 'r', encoding='utf-8') as schema_file: + with open(absolute_path, "r", encoding="utf-8") as schema_file: schema = json.loads(schema_file.read()) 
return schema @@ -48,16 +47,16 @@ def get_schema_store(validate_schema: bool = False, schema_search_path: str = No """ try: if not schema_search_path: - schema_search_path = path.join(path.dirname(__file__), 'schemas') + schema_search_path = path.join(path.dirname(__file__), "schemas") schemastore = {} fnames = listdir(schema_search_path) for fname in fnames: fpath = path.join(schema_search_path, fname) - if fpath[-5:] == '.json': - with open(fpath, 'r', encoding='utf-8') as schema_fd: + if fpath[-5:] == ".json": + with open(fpath, "r", encoding="utf-8") as schema_fd: schema = json.load(schema_fd) - if '$id' in schema: - schemastore[schema['$id']] = schema + if "$id" in schema: + schemastore[schema["$id"]] = schema if validate_schema: for _, schema in schemastore.items(): @@ -69,39 +68,36 @@ def get_schema_store(validate_schema: bool = False, schema_search_path: str = No raise error -def validate(json_data: json, - schema_id: str, - schema_store: dict = None, - validate_schema: bool = False, - schema_search_path: str = None - ) -> Tuple[bool, iter]: +def validate( + json_data: json, + schema_id: str, + schema_store: dict = None, + validate_schema: bool = False, + schema_search_path: str = None, +) -> Tuple[bool, iter]: """Load the json file and validate against loaded schema.""" try: if not schema_search_path: - schema_search_path = path.join(path.dirname(__file__), 'schemas') + schema_search_path = path.join(path.dirname(__file__), "schemas") if not schema_store: schema_store = get_schema_store(validate_schema, schema_search_path) - schema = schema_store.get(f'{BASE_URI}/{schema_id}') + schema = schema_store.get(f"{BASE_URI}/{schema_id}") if validate_schema: Draft7Validator.check_schema(schema) schema_file_path = path.join(schema_search_path, schema_id) - resolver = RefResolver(f'file://{schema_file_path}.json', schema, schema_store) + resolver = RefResolver(f"file://{schema_file_path}.json", schema, schema_store) - if Draft7Validator(schema, - 
format_checker=Draft7Validator.FORMAT_CHECKER, - resolver=resolver - ) \ - .is_valid(json_data): + if Draft7Validator(schema, format_checker=Draft7Validator.FORMAT_CHECKER, resolver=resolver).is_valid( + json_data + ): return True, None - errors = Draft7Validator(schema, - format_checker=Draft7Validator.FORMAT_CHECKER, - resolver=resolver - ) \ - .iter_errors(json_data) + errors = Draft7Validator(schema, format_checker=Draft7Validator.FORMAT_CHECKER, resolver=resolver).iter_errors( + json_data + ) return False, errors except SchemaError as error: diff --git a/auth-api/src/auth_api/services/__init__.py b/auth-api/src/auth_api/services/__init__.py index 179a4bafde..7b6dc89f83 100644 --- a/auth-api/src/auth_api/services/__init__.py +++ b/auth-api/src/auth_api/services/__init__.py @@ -22,15 +22,14 @@ from .contact import Contact from .documents import Documents from .entity import Entity +from .flags import Flags from .invitation import Invitation from .membership import Membership from .minio import MinioService from .org import Org from .permissions import Permissions from .products import Product -from .reset import ResetTestData from .simple_org import SimpleOrg from .task import Task from .user import User from .user_settings import UserSettings -from .flags import Flags diff --git a/auth-api/src/auth_api/services/activity_log.py b/auth-api/src/auth_api/services/activity_log.py index 42bba8a903..fd31630578 100644 --- a/auth-api/src/auth_api/services/activity_log.py +++ b/auth-api/src/auth_api/services/activity_log.py @@ -17,9 +17,9 @@ """ import json -from flask import current_app + from jinja2 import Environment, FileSystemLoader -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 +from structured_logging import StructuredLogging from auth_api.models import ActivityLog as ActivityLogModel from auth_api.schemas import ActivityLogSchema @@ -28,10 +28,10 @@ from auth_api.utils.roles import ADMIN, STAFF, Role from 
auth_api.utils.user_context import UserContext, user_context -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) +logger = StructuredLogging.get_logger() -@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class ActivityLog: # pylint: disable=too-many-instance-attributes """Manages all aspects of the Activity Log Entity.""" @@ -44,7 +44,6 @@ def identifier(self): """Return the identifier for this user.""" return self._model.id - @ServiceTracing.disable_tracing def as_dict(self): """Return the Activity Log as a python dict. @@ -58,38 +57,34 @@ def as_dict(self): @user_context def fetch_activity_logs(org_id: int, **kwargs): # pylint: disable=too-many-locals """Search all activity logs.""" - user_from_context: UserContext = kwargs['user_context'] - item_name = kwargs.get('item_name') - item_type = kwargs.get('item_type') - action = kwargs.get('action') + user_from_context: UserContext = kwargs["user_context"] + item_name = kwargs.get("item_name") + item_type = kwargs.get("item_type") + action = kwargs.get("action") check_auth(one_of_roles=(ADMIN, STAFF), org_id=org_id) - logs = {'activity_logs': []} - page: int = int(kwargs.get('page')) - limit: int = int(kwargs.get('limit')) - search_args = (item_name, - item_type, - action, - page, - limit) - - current_app.logger.debug('fetch_activity logs') + logger.debug(">fetch_activity logs") return logs @staticmethod @@ -106,19 +101,19 @@ def _build_string(activity: ActivityLogModel) -> str: ActivityAction.ACCOUNT_ADDRESS_CHANGE.value: ActivityLog._account_address_changes, ActivityAction.AUTHENTICATION_METHOD_CHANGE.value: ActivityLog._authentication_method_changes, ActivityAction.ACCOUNT_SUSPENSION.value: ActivityLog._account_suspension, - ActivityAction.ADD_PRODUCT_AND_SERVICE.value: ActivityLog._adding_products_and_services + ActivityAction.ADD_PRODUCT_AND_SERVICE.value: 
ActivityLog._adding_products_and_services, }.get(activity.action) return mapping(activity) if (mapping) else activity.action @staticmethod def _inviting_team_member(activity: ActivityLogModel) -> str: """Invited User Y as a [role name].""" - return f'Invited {activity.item_name} as a {activity.item_value}' + return f"Invited {activity.item_name} as a {activity.item_value}" @staticmethod def _get_names(name): - first_name = f'{name.get("first_name")}' if name.get('first_name') else '' - last_name = f'{name.get("last_name")}' if name.get('last_name') else '' + first_name = f'{name.get("first_name")}' if name.get("first_name") else "" + last_name = f'{name.get("last_name")}' if name.get("last_name") else "" return first_name, last_name @staticmethod @@ -129,8 +124,8 @@ def _approving_new_team_member(activity: ActivityLogModel) -> str: except ValueError: name = {} first_name, last_name = ActivityLog._get_names(name) - return f'Approved {first_name} {last_name} \ - joining the team as {activity.item_value}' + return f"Approved {first_name} {last_name} \ + joining the team as {activity.item_value}" @staticmethod def _removing_team_member(activity: ActivityLogModel) -> str: @@ -140,33 +135,33 @@ def _removing_team_member(activity: ActivityLogModel) -> str: except ValueError: name = {} first_name, last_name = ActivityLog._get_names(name) - return f'Removed {first_name} {last_name}' + return f"Removed {first_name} {last_name}" @staticmethod def _twofactor_reset(activity: ActivityLogModel) -> str: """User X Authenticator for User Y.""" - return f'Reset Authenticator for {activity.item_name}' + return f"Reset Authenticator for {activity.item_name}" @staticmethod def _payment_info_change(activity: ActivityLogModel) -> str: """User X updated the account payment information to [payment method].""" - payment_information = activity.item_value.replace('_', ' ') - return f'Updated the account payment information to {payment_information}' + payment_information = 
activity.item_value.replace("_", " ") + return f"Updated the account payment information to {payment_information}" @staticmethod def _adding_a_business_affilliation(activity: ActivityLogModel) -> str: """User X has affiliated [Business Name] to the account.""" - return f'Has affiliated {activity.item_name} to the account' + return f"Has affiliated {activity.item_name} to the account" @staticmethod def _removing_a_business_affilliation(activity: ActivityLogModel) -> str: """User X has unaffiliated [Business Name] from the account.""" - return f'Has unaffiliated {activity.item_name} from the account' + return f"Has unaffiliated {activity.item_name} from the account" @staticmethod def _account_name_changes(activity: ActivityLogModel) -> str: """User X changed the account name to [new account name].""" - return f'Changed the account name to {activity.item_value}' + return f"Changed the account name to {activity.item_value}" @staticmethod def _account_address_changes(activity: ActivityLogModel) -> str: @@ -175,46 +170,46 @@ def _account_address_changes(activity: ActivityLogModel) -> str: address = json.loads(activity.item_value) except ValueError: address = {} - account_address_formatted = '' - street = f'{address.get("street")}; ' if address.get('street') else '' - street_additional = f'{address.get("streetAdditional")}; ' if address.get('streetAdditional') else '' - city = f'{address.get("city")}; ' if address.get('city') else '' - region = f'{address.get("region")}; ' if address.get('region') else '' - postal_code = f'{address.get("postalCode")}; ' if address.get('postal_code') else '' - country = f'{address.get("country")}; ' if address.get('country') else '' - account_address_formatted = f'{street}{street_additional}{city}{region}{postal_code}{country}' - return f'Changed the mailing address to {account_address_formatted}' - - @ staticmethod + account_address_formatted = "" + street = f'{address.get("street")}; ' if address.get("street") else "" + street_additional 
= f'{address.get("streetAdditional")}; ' if address.get("streetAdditional") else "" + city = f'{address.get("city")}; ' if address.get("city") else "" + region = f'{address.get("region")}; ' if address.get("region") else "" + postal_code = f'{address.get("postalCode")}; ' if address.get("postal_code") else "" + country = f'{address.get("country")}; ' if address.get("country") else "" + account_address_formatted = f"{street}{street_additional}{city}{region}{postal_code}{country}" + return f"Changed the mailing address to {account_address_formatted}" + + @staticmethod def _authentication_method_changes(activity: ActivityLogModel) -> str: """User X changed the account authentication method to [auth type].""" - return f'Changed the account authentication method to {activity.item_value}' + return f"Changed the account authentication method to {activity.item_value}" - @ staticmethod + @staticmethod def _account_suspension(activity: ActivityLogModel) -> str: """Account was suspended due to [Suspension reason].""" - suspension_reason = activity.item_value.replace('_', ' ') - return f'The account was suspended due to {suspension_reason}' + suspension_reason = activity.item_value.replace("_", " ") + return f"The account was suspended due to {suspension_reason}" - @ staticmethod + @staticmethod def _adding_products_and_services(activity: ActivityLogModel) -> str: """User X added [product name] to the account Products and Services.""" - return f'Added {activity.item_name} from account Products and Services' + return f"Added {activity.item_name} from account Products and Services" - @ staticmethod + @staticmethod def _removing_products_and_services(activity: ActivityLogModel) -> str: """User X removed [product name] from the account Products and Services.""" - return f'Removed {activity.item_name} from account Products and Services' + return f"Removed {activity.item_name} from account Products and Services" - @ staticmethod + @staticmethod def _mask_user_name(is_staff_access, 
user): if user is None: - return 'Service Account' + return "Service Account" is_actor_a_staff = user.type == Role.STAFF.name if not is_staff_access and is_actor_a_staff: - actor = 'BC Registry Staff' + actor = "BC Registry Staff" else: - actor = f'{user.firstname} {user.lastname}' + actor = f"{user.firstname} {user.lastname}" if not user.firstname and not user.lastname: - actor = 'Service Account' + actor = "Service Account" return actor diff --git a/auth-api/src/auth_api/services/activity_log_publisher.py b/auth-api/src/auth_api/services/activity_log_publisher.py index d8bff23fb7..4f810e2366 100644 --- a/auth-api/src/auth_api/services/activity_log_publisher.py +++ b/auth-api/src/auth_api/services/activity_log_publisher.py @@ -15,19 +15,18 @@ import uuid from datetime import datetime, timezone -from flask import current_app, g -from sentry_sdk import capture_message -from simple_cloudevent import SimpleCloudEvent -from sqlalchemy_continuum.plugins.flask import fetch_remote_addr +from flask import g, request from sbc_common_components.utils.enums import QueueMessageTypes +from simple_cloudevent import SimpleCloudEvent +from structured_logging import StructuredLogging from auth_api.config import get_named_config -from auth_api.models.dataclass import Activity from auth_api.models import User as UserModel +from auth_api.models.dataclass import Activity from auth_api.services.gcp_queue import GcpQueue, queue - CONFIG = get_named_config() +logger = StructuredLogging.get_logger() class ActivityLogPublisher: # pylint: disable=too-many-instance-attributes, too-few-public-methods @@ -38,29 +37,29 @@ def publish_activity(activity: Activity): # pylint:disable=unused-argument """Publish the activity using the given details.""" try: # find user_id if haven't passed in - if not activity.actor_id and g and 'jwt_oidc_token_info' in g: + if not activity.actor_id and g and "jwt_oidc_token_info" in g: user: UserModel = UserModel.find_by_jwt_token() activity.actor_id = user.id if 
user else None data = { - 'actorId': activity.actor_id, - 'action': activity.action, - 'itemType': 'ACCOUNT', - 'itemName': activity.name, - 'itemId': activity.id, - 'itemValue': activity.value, - 'orgId': activity.org_id, - 'remoteAddr': fetch_remote_addr(), - 'createdAt': f'{datetime.now()}' + "actorId": activity.actor_id, + "action": activity.action, + "itemType": "ACCOUNT", + "itemName": activity.name, + "itemId": activity.id, + "itemValue": activity.value, + "orgId": activity.org_id, + "remoteAddr": request.remote_addr, + "createdAt": f"{datetime.now()}", } cloud_event = SimpleCloudEvent( id=str(uuid.uuid4()), - source='sbc-auth-auth-api', + source="sbc-auth-auth-api", subject=None, time=datetime.now(tz=timezone.utc).isoformat(), type=QueueMessageTypes.ACTIVITY_LOG.value, - data=data + data=data, ) queue.publish(CONFIG.AUTH_EVENT_TOPIC, GcpQueue.to_queue_message(cloud_event)) - except Exception as err: # noqa: B902 # pylint: disable=broad-except - capture_message('Activity Queue Publish Event Error:' + str(err), level='error') - current_app.logger.error('Activity Queue Publish Event Error:', exc_info=True) + except Exception as e: # noqa: B902 # pylint: disable=broad-except + error_msg = f"Activity Queue Publish Event Error: {e}" + logger.error(error_msg) diff --git a/auth-api/src/auth_api/services/affidavit.py b/auth-api/src/auth_api/services/affidavit.py index 5faf763c49..26331196cf 100644 --- a/auth-api/src/auth_api/services/affidavit.py +++ b/auth-api/src/auth_api/services/affidavit.py @@ -18,8 +18,7 @@ from datetime import datetime from typing import Dict -from flask import current_app -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 +from structured_logging import StructuredLogging from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error @@ -38,8 +37,9 @@ from .user import User as UserService +logger = StructuredLogging.get_logger() + 
-@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class Affidavit: # pylint: disable=too-many-instance-attributes """Manages all aspects of the Affidavit Entity.""" @@ -47,7 +47,6 @@ def __init__(self, model): """Return a affidavit object.""" self._model = model - @ServiceTracing.disable_tracing def as_dict(self): """Return the Affidavit as a python dict. @@ -60,7 +59,7 @@ def as_dict(self): @staticmethod def create_affidavit(affidavit_info: Dict): """Create a new affidavit record.""" - current_app.logger.debug('find_affidavit_by_org_id ') + affidavit_dict["documentUrl"] = MinioService.create_signed_get_url(affidavit.document_id) + logger.debug(">find_affidavit_by_org_id ") return affidavit_dict @staticmethod def find_affidavit_by_user_guid(user_guid: str, status: str = None): """Return affidavit for the user.""" - current_app.logger.debug('find_affidavit_by_user_guid ') + affidavit_dict["documentUrl"] = MinioService.create_signed_get_url(affidavit.document_id) + logger.debug(">find_affidavit_by_user_guid ") return affidavit_dict @staticmethod def approve_or_reject(org_id: int, is_approved: bool, user: UserModel): """Mark the affidavit as approved or rejected.""" - current_app.logger.debug('approve_or_reject') + logger.debug(">approve_or_reject") return Affidavit(affidavit) @staticmethod def approve_or_reject_bceid_admin(admin_user_id: int, is_approved: bool, user: UserModel): """Mark the BCeId Admin Affidavit as approved or rejected.""" - current_app.logger.debug('approve_or_reject_bceid_admin ') + logger.debug(">approve_or_reject_bceid_admin ") return Affidavit(affidavit) diff --git a/auth-api/src/auth_api/services/affiliation.py b/auth-api/src/auth_api/services/affiliation.py index cbcc2f3e91..8267e8dadc 100644 --- a/auth-api/src/auth_api/services/affiliation.py +++ b/auth-api/src/auth_api/services/affiliation.py @@ -18,8 +18,8 @@ from flask import current_app from requests.exceptions import HTTPError -from 
sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from sqlalchemy.orm import contains_eager, subqueryload +from structured_logging import StructuredLogging from auth_api.exceptions import BusinessException, ServiceUnavailableException from auth_api.exceptions.errors import Error @@ -40,11 +40,13 @@ from auth_api.utils.passcode import validate_passcode from auth_api.utils.roles import ALL_ALLOWED_ROLES, CLIENT_AUTH_ROLES, STAFF from auth_api.utils.user_context import UserContext, user_context + from .activity_log_publisher import ActivityLogPublisher from .rest_service import RestService +logger = StructuredLogging.get_logger() + -@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class Affiliation: """Manages all aspect of Affiliation data. @@ -65,7 +67,6 @@ def entity(self): """Return the entity for this affiliation as a service.""" return EntityService(self._model.entity) - @ServiceTracing.disable_tracing def as_dict(self): """Return the affiliation as a python dictionary. @@ -78,7 +79,7 @@ def as_dict(self): @staticmethod def find_visible_affiliations_by_org_id(org_id, environment=None): """Given an org_id, this will return the entities affiliated with it.""" - current_app.logger.debug(f' Its a Temp affiliation with incorporation complete. # In this case, a TMP affiliation will be there but the name will be BC... 
- if entity['name'] in nr_numbers or entity['name'] == entity['business_identifier']: + if entity["name"] in nr_numbers or entity["name"] == entity["business_identifier"]: # If temp affiliation is for an NR, change the name to NR's name - if entity['name'] in nr_numbers: - entity['nr_number'] = entity['name'] - entity['name'] = nr_number_name_dict[entity['name']] + if entity["name"] in nr_numbers: + entity["nr_number"] = entity["name"] + entity["name"] = nr_number_name_dict[entity["name"]] filtered_affiliations.append(entity) else: filtered_affiliations.append(entity) - current_app.logger.debug('>find_visible_affiliations_by_org_id') + logger.debug(">find_visible_affiliations_by_org_id") return filtered_affiliations @staticmethod def find_affiliations_by_org_id(org_id, environment=None): """Return business affiliations for the org.""" # Accomplished in service instead of model (easier to avoid circular reference issues). - entities = db.session.query(Entity) \ - .join(AffiliationModel) \ + entities = ( + db.session.query(Entity) + .join(AffiliationModel) .options( contains_eager(Entity.affiliations), subqueryload(Entity.contacts).subqueryload(ContactLink.contact), subqueryload(Entity.created_by), - subqueryload(Entity.modified_by)) \ - .filter(AffiliationModel.org_id == org_id, Entity.affiliations.any(AffiliationModel.org_id == org_id)) + subqueryload(Entity.modified_by), + ) + .filter( + AffiliationModel.org_id == int(org_id or -1), + Entity.affiliations.any(AffiliationModel.org_id == int(org_id or -1)), + ) + ) if environment: entities = entities.filter(AffiliationModel.environment == environment) else: @@ -147,9 +155,9 @@ def find_affiliations_by_org_id(org_id, environment=None): @staticmethod def find_affiliation(org_id, business_identifier, environment=None): """Return business affiliation by the org id and business identifier.""" - affiliation = AffiliationModel.find_affiliation_by_org_id_and_business_identifier(org_id, - business_identifier, - environment) + 
affiliation = AffiliationModel.find_affiliation_by_org_id_and_business_identifier( + org_id, business_identifier, environment + ) if affiliation is None: raise BusinessException(Error.DATA_NOT_FOUND, None) return Affiliation(affiliation).as_dict() @@ -158,7 +166,7 @@ def find_affiliation(org_id, business_identifier, environment=None): def create_affiliation(org_id, business_identifier, environment=None, pass_code=None, certified_by_name=None): """Create an Affiliation.""" # Validate if org_id is valid by calling Org Service. - current_app.logger.info(f' 0 else entity.business_identifier - ActivityLogPublisher.publish_activity(Activity(org_id, ActivityAction.CREATE_AFFILIATION.value, - name=name, id=entity.business_identifier)) + ActivityLogPublisher.publish_activity( + Activity(org_id, ActivityAction.CREATE_AFFILIATION.value, name=name, id=entity.business_identifier) + ) return Affiliation(affiliation) @staticmethod @@ -201,11 +208,12 @@ def is_authorized(entity: Entity, pass_code: str) -> bool: """Return True if user is authorized to create an affiliation.""" if Affiliation.is_staff_or_sbc_staff(): return True - if entity.corp_type in ['SP', 'GP']: + if entity.corp_type in ["SP", "GP"]: if not pass_code: return False - token = RestService.get_service_account_token(config_id='ENTITY_SVC_CLIENT_ID', - config_secret='ENTITY_SVC_CLIENT_SECRET') + token = RestService.get_service_account_token( + config_id="ENTITY_SVC_CLIENT_ID", config_secret="ENTITY_SVC_CLIENT_SECRET" + ) return Affiliation._validate_firms_party(token, entity.business_identifier, pass_code) if pass_code: return validate_passcode(pass_code, entity.pass_code) @@ -214,8 +222,9 @@ def is_authorized(entity: Entity, pass_code: str) -> bool: return True @staticmethod - def create_new_business_affiliation(affiliation_data: AffiliationData, # pylint: disable=too-many-locals - environment: str = None): + def create_new_business_affiliation( # pylint: disable=too-many-locals + affiliation_data: AffiliationData, 
environment: str = None + ): """Initiate a new incorporation.""" org_id = affiliation_data.org_id business_identifier = affiliation_data.business_identifier @@ -223,7 +232,7 @@ def create_new_business_affiliation(affiliation_data: AffiliationData, # pylint phone = affiliation_data.phone certified_by_name = affiliation_data.certified_by_name - current_app.logger.info(f' 0 else entity.business_identifier - ActivityLogPublisher.publish_activity(Activity(org_id, ActivityAction.REMOVE_AFFILIATION.value, - name=name, id=entity.business_identifier)) + ActivityLogPublisher.publish_activity( + Activity(org_id, ActivityAction.REMOVE_AFFILIATION.value, name=name, id=entity.business_identifier) + ) @staticmethod @user_context @@ -377,37 +406,41 @@ def fix_stale_affiliations(org_id: int, entity_details: Dict, environment: str = # 2. staff takes NR, creates a business # 3. filer updates the business for staff (which creates a new entity) # 4. fix_stale_affiliations is called, and fixes the client's affiliation to point at this new entity - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] if not user_from_context.is_system(): return - nr_number: str = entity_details.get('nrNumber') - bootstrap_identifier: str = entity_details.get('bootstrapIdentifier') - identifier: str = entity_details.get('identifier') - current_app.logger.debug(f'fix_stale_affiliations') + logger.debug(">fix_stale_affiliations") @staticmethod def _affiliation_details_url(affiliation: AffiliationModel) -> str: """Determine url to call for affiliation details.""" # only have LEAR and NAMEX affiliations if affiliation.entity.corp_type_code == CorpType.NR.value: - return current_app.config.get('NAMEX_AFFILIATION_DETAILS_URL') - return current_app.config.get('LEAR_AFFILIATION_DETAILS_URL') + return current_app.config.get("NAMEX_AFFILIATION_DETAILS_URL") + return current_app.config.get("LEAR_AFFILIATION_DETAILS_URL") @staticmethod async def 
get_affiliation_details(affiliations: List[AffiliationModel]) -> List: @@ -415,48 +448,52 @@ async def get_affiliation_details(affiliations: List[AffiliationModel]) -> List: url_identifiers = {} # i.e. turns into { url: [identifiers...] } for affiliation in affiliations: url = Affiliation._affiliation_details_url(affiliation) - url_identifiers.setdefault(url, [affiliation.entity.business_identifier])\ - .append(affiliation.entity.business_identifier) + url_identifiers.setdefault(url, [affiliation.entity.business_identifier]).append( + affiliation.entity.business_identifier + ) - call_info = [{'url': url, 'payload': {'identifiers': identifiers}} - for url, identifiers in url_identifiers.items()] + call_info = [ + {"url": url, "payload": {"identifiers": identifiers}} for url, identifiers in url_identifiers.items() + ] token = RestService.get_service_account_token( - config_id='ENTITY_SVC_CLIENT_ID', config_secret='ENTITY_SVC_CLIENT_SECRET') + config_id="ENTITY_SVC_CLIENT_ID", config_secret="ENTITY_SVC_CLIENT_SECRET" + ) try: responses = await RestService.call_posts_in_parallel(call_info, token) combined = Affiliation._combine_affiliation_details(responses) # Should provide us with ascending order affiliations_sorted = sorted(affiliations, key=lambda x: x.created, reverse=True) # Provide us with a dict with the max created date. 
- ordered = {affiliation.entity.business_identifier: - affiliation.created for affiliation in affiliations_sorted} + ordered = { + affiliation.entity.business_identifier: affiliation.created for affiliation in affiliations_sorted + } def sort_key(item): - identifier = item.get('identifier', item.get('nameRequest', {}).get('nrNum', '')) + identifier = item.get("identifier", item.get("nameRequest", {}).get("nrNum", "")) return ordered.get(identifier, datetime.datetime.min) combined.sort(key=sort_key, reverse=True) return combined except ServiceUnavailableException as err: - current_app.logger.debug(err) - current_app.logger.debug('Failed to get affiliations details: %s', affiliations) - raise ServiceUnavailableException('Failed to get affiliation details') from err + logger.debug(err) + logger.debug("Failed to get affiliations details: %s", affiliations) + raise ServiceUnavailableException("Failed to get affiliation details") from err @staticmethod def _group_details(details): name_requests = {} businesses = [] drafts = [] - businesses_key = 'businessEntities' - drafts_key = 'draftEntities' + businesses_key = "businessEntities" + drafts_key = "draftEntities" for data in details: if isinstance(data, list): # assume this is an NR list for name_request in data: # i.e. 
{'NR1234567': {...}} - name_requests[name_request['nrNum']] = {'legalType': CorpType.NR.value, 'nameRequest': name_request} + name_requests[name_request["nrNum"]] = {"legalType": CorpType.NR.value, "nameRequest": name_request} continue if businesses_key in data: businesses = list(data[businesses_key]) @@ -466,9 +503,11 @@ def _group_details(details): @staticmethod def _update_draft_type_for_amalgamation_nr(business): - if business.get('draftType', None) \ - and business['nameRequest']['request_action_cd'] == NRActionCodes.AMALGAMATE.value: - business['draftType'] = CorpType.ATMP.value + if ( + business.get("draftType", None) + and business["nameRequest"]["request_action_cd"] == NRActionCodes.AMALGAMATE.value + ): + business["draftType"] = CorpType.ATMP.value return business @staticmethod @@ -476,12 +515,12 @@ def _combine_nrs(name_requests, businesses, drafts): # combine NRs for business in drafts + businesses: # Only drafts have nrNumber coming back from legal-api. - if 'nrNumber' in business and (nr_num := business['nrNumber']): - if business['nrNumber'] in name_requests: - business['nameRequest'] = name_requests[nr_num]['nameRequest'] + if "nrNumber" in business and (nr_num := business["nrNumber"]): + if business["nrNumber"] in name_requests: + business["nameRequest"] = name_requests[nr_num]["nameRequest"] business = Affiliation._update_draft_type_for_amalgamation_nr(business) # Remove the business if the draft associated to the NR is consumed. 
- if business['nameRequest']['stateCd'] == NRStatus.CONSUMED.value: + if business["nameRequest"]["stateCd"] == NRStatus.CONSUMED.value: drafts.remove(business) del name_requests[nr_num] else: @@ -499,40 +538,41 @@ def _combine_affiliation_details(details): @staticmethod def _get_nr_details(nr_number: str): """Return NR details by calling legal-api.""" - nr_api_url = current_app.config.get('NAMEX_API_URL') - get_nr_url = f'{nr_api_url}/requests/{nr_number}' + nr_api_url = current_app.config.get("NAMEX_API_URL") + get_nr_url = f"{nr_api_url}/requests/{nr_number}" try: token = RestService.get_service_account_token( - config_id='ENTITY_SVC_CLIENT_ID', config_secret='ENTITY_SVC_CLIENT_SECRET') + config_id="ENTITY_SVC_CLIENT_ID", config_secret="ENTITY_SVC_CLIENT_SECRET" + ) get_nr_response = RestService.get(get_nr_url, token=token, skip_404_logging=True) except (HTTPError, ServiceUnavailableException) as e: - current_app.logger.info(e) + logger.info(e) raise BusinessException(Error.DATA_NOT_FOUND, None) from e return get_nr_response.json() @staticmethod def _validate_firms_party(token, business_identifier, party_name_str: str): - legal_api_url = current_app.config.get('LEGAL_API_URL') + current_app.config.get('LEGAL_API_VERSION_2') - parties_url = f'{ legal_api_url }/businesses/{business_identifier}/parties' + legal_api_url = current_app.config.get("LEGAL_API_URL") + current_app.config.get("LEGAL_API_VERSION_2") + parties_url = f"{legal_api_url}/businesses/{business_identifier}/parties" try: lear_response = RestService.get(parties_url, token=token, skip_404_logging=True) except (HTTPError, ServiceUnavailableException) as e: - current_app.logger.info(e) + logger.info(e) raise BusinessException(Error.DATA_NOT_FOUND, None) from e parties_json = lear_response.json() - for party in parties_json['parties']: - officer = party.get('officer') - if officer.get('partyType') == 'organization': - party_name = officer.get('organizationName') + for party in parties_json["parties"]: + 
officer = party.get("officer") + if officer.get("partyType") == "organization": + party_name = officer.get("organizationName") else: - party_name = officer.get('lastName') + ', ' + officer.get('firstName') - if officer.get('middleInitial'): - party_name = party_name + ' ' + officer.get('middleInitial') + party_name = officer.get("lastName") + ", " + officer.get("firstName") + if officer.get("middleInitial"): + party_name = party_name + " " + officer.get("middleInitial") # remove duplicate spaces - party_name_str = ' '.join(party_name_str.split()) - party_name = ' '.join(party_name.split()) + party_name_str = " ".join(party_name_str.split()) + party_name = " ".join(party_name.split()) if party_name_str.upper() == party_name.upper(): return True @@ -542,9 +582,10 @@ def _validate_firms_party(token, business_identifier, party_name_str: str): @user_context def is_staff_or_sbc_staff(**kwargs): """Return True if user is staff or sbc staff.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] current_user: UserService = UserService.find_by_jwt_token(silent_mode=True) - if user_from_context.is_staff() or \ - (current_user and MembershipModel.check_if_sbc_staff(current_user.identifier)): + if user_from_context.is_staff() or ( + current_user and MembershipModel.check_if_sbc_staff(current_user.identifier) + ): return True return False diff --git a/auth-api/src/auth_api/services/affiliation_invitation.py b/auth-api/src/auth_api/services/affiliation_invitation.py index a2608235f7..f4d1be8f46 100644 --- a/auth-api/src/auth_api/services/affiliation_invitation.py +++ b/auth-api/src/auth_api/services/affiliation_invitation.py @@ -21,9 +21,9 @@ from itsdangerous import URLSafeTimedSerializer from jinja2 import Environment, FileSystemLoader from requests.exceptions import HTTPError -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from sbc_common_components.utils.enums import 
QueueMessageTypes from sqlalchemy.exc import DataError +from structured_logging import StructuredLogging from auth_api.config import get_named_config from auth_api.exceptions import BusinessException, ServiceUnavailableException @@ -33,7 +33,7 @@ from auth_api.models import InvitationStatus as InvitationStatusModel from auth_api.models import Membership as MembershipModel from auth_api.models.affiliation import Affiliation as AffiliationModel -from auth_api.models.dataclass import AffiliationInvitationSearch, AffiliationInvitationData +from auth_api.models.dataclass import AffiliationInvitationData, AffiliationInvitationSearch from auth_api.models.entity import Entity as EntityModel # noqa: I005 from auth_api.models.org import Org as OrgModel from auth_api.schemas import AffiliationInvitationSchema @@ -50,9 +50,9 @@ from .authorization import check_auth from .rest_service import RestService - -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) CONFIG = get_named_config() +logger = StructuredLogging.get_logger() class AffiliationInvitation: @@ -66,7 +66,6 @@ def __init__(self, model): """Return an affiliation invitation service instance.""" self._model = model - @ServiceTracing.disable_tracing def as_dict(self, mask_email=False): """Return the Affiliation Invitation model as a dictionary.""" affiliation_invitation_schema = self.get_affiliation_invitation_schema(mask_email) @@ -79,27 +78,30 @@ def get_affiliation_invitation_schema(cls, mask_email: bool): return AffiliationInvitationSchemaPublic() if mask_email else AffiliationInvitationSchema() @classmethod - def affiliation_invitations_to_dict_list(cls, models: List[AffiliationInvitationModel], mask_email=True) \ - -> List[Dict]: + def affiliation_invitations_to_dict_list( + cls, models: List[AffiliationInvitationModel], mask_email=True + ) -> List[Dict]: """Return list of AffiliationInvitationModels converted to list dicts.""" schema = 
cls.get_affiliation_invitation_schema(mask_email) return [schema.dump(model) for model in models] @classmethod - def enrich_affiliation_invitations_dict_list_with_business_data(cls, affiliation_invitation_dicts: List[Dict]) -> \ - List[AffiliationInvitationData]: + def enrich_affiliation_invitations_dict_list_with_business_data( + cls, affiliation_invitation_dicts: List[Dict] + ) -> List[AffiliationInvitationData]: """Enrich affiliation invitation model data with business details.""" if not affiliation_invitation_dicts: return [] token = RestService.get_service_account_token( - config_id='ENTITY_SVC_CLIENT_ID', - config_secret='ENTITY_SVC_CLIENT_SECRET') + config_id="ENTITY_SVC_CLIENT_ID", config_secret="ENTITY_SVC_CLIENT_SECRET" + ) - business_identifiers = [afi['business_identifier'] for afi in affiliation_invitation_dicts] + business_identifiers = [afi["business_identifier"] for afi in affiliation_invitation_dicts] - business_entities = AffiliationInvitation. \ - _get_multiple_business_details(business_identifiers=business_identifiers, token=token) + business_entities = AffiliationInvitation._get_multiple_business_details( + business_identifiers=business_identifiers, token=token + ) result = [] def _init_dict_for_dataclass_from_dict(dataclass, initial_dict: Dict): @@ -107,31 +109,44 @@ def _init_dict_for_dataclass_from_dict(dataclass, initial_dict: Dict): for affiliation_invitation_dict in affiliation_invitation_dicts: from_org = AffiliationInvitationData.OrgDetails( - **_init_dict_for_dataclass_from_dict(AffiliationInvitationData.OrgDetails, - affiliation_invitation_dict['from_org'])) - if to_org := affiliation_invitation_dict.get('to_org'): + **_init_dict_for_dataclass_from_dict( + AffiliationInvitationData.OrgDetails, affiliation_invitation_dict["from_org"] + ) + ) + if to_org := affiliation_invitation_dict.get("to_org"): to_org = AffiliationInvitationData.OrgDetails( - **_init_dict_for_dataclass_from_dict(AffiliationInvitationData.OrgDetails, - 
affiliation_invitation_dict['to_org'])) + **_init_dict_for_dataclass_from_dict( + AffiliationInvitationData.OrgDetails, affiliation_invitation_dict["to_org"] + ) + ) business_entity = next( - (business_entity for business_entity in business_entities if - affiliation_invitation_dict['business_identifier'] == business_entity['identifier']), - None) + ( + business_entity + for business_entity in business_entities + if affiliation_invitation_dict["business_identifier"] == business_entity["identifier"] + ), + None, + ) - entity = AffiliationInvitationData.EntityDetails(business_identifier=business_entity['identifier'], - name=business_entity['legalName'], - state=business_entity['state'], - corp_type=business_entity['legalType'], - corp_sub_type=business_entity.get('legalSubType', None) - ) if business_entity else None + entity = ( + AffiliationInvitationData.EntityDetails( + business_identifier=business_entity["identifier"], + name=business_entity["legalName"], + state=business_entity["state"], + corp_type=business_entity["legalType"], + corp_sub_type=business_entity.get("legalSubType", None), + ) + if business_entity + else None + ) aid = AffiliationInvitationData( **{ **_init_dict_for_dataclass_from_dict(AffiliationInvitationData, affiliation_invitation_dict), - 'from_org': from_org, - 'to_org': to_org, - 'entity': entity + "from_org": from_org, + "to_org": to_org, + "entity": entity, } ) result.append(aid) @@ -139,8 +154,13 @@ def _init_dict_for_dataclass_from_dict(dataclass, initial_dict: Dict): return result @staticmethod - def _validate_prerequisites(business_identifier, from_org_id, to_org_id, environment, - affiliation_invitation_type=AffiliationInvitationType.EMAIL): + def _validate_prerequisites( + business_identifier, + from_org_id, + to_org_id, + environment, + affiliation_invitation_type=AffiliationInvitationType.EMAIL, + ): # Validate from organizations exists if not (from_org := OrgModel.find_by_org_id(from_org_id)): raise 
BusinessException(Error.DATA_NOT_FOUND, None) @@ -151,8 +171,9 @@ def _validate_prerequisites(business_identifier, from_org_id, to_org_id, environ # Validate business exists in LEAR # Fetch the up-to-date business details from legal API - Business exception raised if failure - token = RestService.get_service_account_token(config_id='ENTITY_SVC_CLIENT_ID', - config_secret='ENTITY_SVC_CLIENT_SECRET') + token = RestService.get_service_account_token( + config_id="ENTITY_SVC_CLIENT_ID", config_secret="ENTITY_SVC_CLIENT_SECRET" + ) business = AffiliationInvitation._get_business_details(business_identifier, token) # Validate that entity exists @@ -160,13 +181,11 @@ def _validate_prerequisites(business_identifier, from_org_id, to_org_id, environ raise BusinessException(Error.DATA_NOT_FOUND, None) # Validate that entity contact exists - if not (contact := entity.get_contact()) and \ - affiliation_invitation_type != AffiliationInvitationType.REQUEST: + if not (contact := entity.get_contact()) and affiliation_invitation_type != AffiliationInvitationType.REQUEST: raise BusinessException(Error.INVALID_BUSINESS_EMAIL, None) # Validate that entity contact email exists - if (contact and not contact.email) and \ - affiliation_invitation_type != AffiliationInvitationType.REQUEST: + if (contact and not contact.email) and affiliation_invitation_type != AffiliationInvitationType.REQUEST: raise BusinessException(Error.INVALID_BUSINESS_EMAIL, None) # Check if affiliation already exists @@ -174,24 +193,28 @@ def _validate_prerequisites(business_identifier, from_org_id, to_org_id, environ raise BusinessException(Error.DATA_ALREADY_EXISTS, None) # Check if an affiliation invitation already exists - if AffiliationInvitationModel.find_invitations_by_org_entity_ids(from_org_id=from_org_id, - entity_id=entity.identifier): + if AffiliationInvitationModel.find_invitations_by_org_entity_ids( + from_org_id=from_org_id, entity_id=entity.identifier + ): raise 
BusinessException(Error.DATA_ALREADY_EXISTS, None) return entity, from_org, business @staticmethod - def get_invitation_email(affiliation_invitation_type: AffiliationInvitationType, - entity: Optional[EntityService] = None, - org_id: Optional[int] = None) -> Optional[str]: + def get_invitation_email( + affiliation_invitation_type: AffiliationInvitationType, + entity: Optional[EntityService] = None, + org_id: Optional[int] = None, + ) -> Optional[str]: """Get affiliation invitation email based on provided params.""" if affiliation_invitation_type == AffiliationInvitationType.REQUEST: admin_emails = UserService.get_admin_emails_for_org(org_id) - if admin_emails != '': - current_app.logger.debug(f'Sending emails to: ${admin_emails}') + if admin_emails != "": + logger.debug(f"Sending emails to: ${admin_emails}") return admin_emails # continue but log error - current_app.logger.error('No admin email record for org id %s', org_id) + error_msg = f"No admin email record for org id {org_id}" + logger.error(error_msg) return None if affiliation_invitation_type == AffiliationInvitationType.EMAIL: @@ -200,14 +223,17 @@ def get_invitation_email(affiliation_invitation_type: AffiliationInvitationType, return None @staticmethod - def _get_invitation_email(affiliation_invitation_info: Dict, - entity: OrgService = None, org_id: Optional[int] = None) -> Optional[str]: - if invitation_type := AffiliationInvitationType.\ - from_value(affiliation_invitation_info.get('type', AffiliationInvitationType.EMAIL.value)): - return AffiliationInvitation.get_invitation_email(affiliation_invitation_type=invitation_type, - entity=entity, org_id=org_id) + def _get_invitation_email( + affiliation_invitation_info: Dict, entity: OrgService = None, org_id: Optional[int] = None + ) -> Optional[str]: + if invitation_type := AffiliationInvitationType.from_value( + affiliation_invitation_info.get("type", AffiliationInvitationType.EMAIL.value) + ): + return AffiliationInvitation.get_invitation_email( + 
affiliation_invitation_type=invitation_type, entity=entity, org_id=org_id + ) - return affiliation_invitation_info.get('recipientEmail', None) + return affiliation_invitation_info.get("recipientEmail", None) @staticmethod def _get_org_id_from_org_uuid(to_org_uuid): @@ -224,73 +250,82 @@ def _get_org_id_from_org_uuid(to_org_uuid): @staticmethod @user_context - def create_affiliation_invitation(affiliation_invitation_info: Dict, - # pylint:disable=unused-argument,too-many-locals - user, invitation_origin, environment=None, **kwargs): + def create_affiliation_invitation( + affiliation_invitation_info: Dict, + # pylint:disable=unused-argument,too-many-locals + user, + invitation_origin, + environment=None, + **kwargs, + ): """Create a new affiliation invitation.""" - from_org_id = affiliation_invitation_info['fromOrgId'] - if to_org_uuid := affiliation_invitation_info.get('toOrgUuid'): - affiliation_invitation_info['toOrgId'] = AffiliationInvitation._get_org_id_from_org_uuid(to_org_uuid) - to_org_id = affiliation_invitation_info.get('toOrgId') + from_org_id = affiliation_invitation_info["fromOrgId"] + if to_org_uuid := affiliation_invitation_info.get("toOrgUuid"): + affiliation_invitation_info["toOrgId"] = AffiliationInvitation._get_org_id_from_org_uuid(to_org_uuid) + to_org_id = affiliation_invitation_info.get("toOrgId") - business_identifier = affiliation_invitation_info['businessIdentifier'] - affiliation_invitation_type = AffiliationInvitationType.from_value(affiliation_invitation_info.get('type')) + business_identifier = affiliation_invitation_info["businessIdentifier"] + affiliation_invitation_type = AffiliationInvitationType.from_value(affiliation_invitation_info.get("type")) if from_org_id == to_org_id: raise BusinessException(Error.DATA_ALREADY_EXISTS, None) - check_auth(org_id=from_org_id, - one_of_roles=(ADMIN, COORDINATOR, STAFF)) + check_auth(org_id=from_org_id, one_of_roles=(ADMIN, COORDINATOR, STAFF)) - entity, from_org, business = AffiliationInvitation. 
\ - _validate_prerequisites(business_identifier=business_identifier, from_org_id=from_org_id, - to_org_id=to_org_id, environment=environment, - affiliation_invitation_type=affiliation_invitation_type - ) + entity, from_org, business = AffiliationInvitation._validate_prerequisites( + business_identifier=business_identifier, + from_org_id=from_org_id, + to_org_id=to_org_id, + environment=environment, + affiliation_invitation_type=affiliation_invitation_type, + ) - affiliation_invitation_info['entityId'] = entity.identifier + affiliation_invitation_info["entityId"] = entity.identifier if from_org.access_type == AccessType.ANONYMOUS.value: # anonymous account never get bceid or bcsc choices mandatory_login_source = LoginSource.BCROS.value elif from_org.access_type == AccessType.GOVM.value: mandatory_login_source = LoginSource.STAFF.value else: - default_login_option_based_on_accesstype = LoginSource.BCSC.value if \ - from_org.access_type == AccessType.REGULAR.value else LoginSource.BCEID.value + default_login_option_based_on_accesstype = ( + LoginSource.BCSC.value if from_org.access_type == AccessType.REGULAR.value else LoginSource.BCEID.value + ) account_login_options = AccountLoginOptionsModel.find_active_by_org_id(from_org.id) - mandatory_login_source = getattr(account_login_options, 'login_source', - default_login_option_based_on_accesstype) - - affiliation_invitation_info['recipientEmail'] = \ - AffiliationInvitation._get_invitation_email(affiliation_invitation_info=affiliation_invitation_info, - entity=entity, - org_id=to_org_id) - - affiliation_invitation = AffiliationInvitationModel.create_from_dict(affiliation_invitation_info, - user.identifier) - confirmation_token = AffiliationInvitation.generate_confirmation_token(affiliation_invitation.id, - from_org_id, to_org_id, - business_identifier) + mandatory_login_source = getattr( + account_login_options, "login_source", default_login_option_based_on_accesstype + ) + + affiliation_invitation_info["recipientEmail"] 
= AffiliationInvitation._get_invitation_email( + affiliation_invitation_info=affiliation_invitation_info, entity=entity, org_id=to_org_id + ) + + affiliation_invitation = AffiliationInvitationModel.create_from_dict( + affiliation_invitation_info, user.identifier + ) + confirmation_token = AffiliationInvitation.generate_confirmation_token( + affiliation_invitation.id, from_org_id, to_org_id, business_identifier + ) affiliation_invitation.token = confirmation_token affiliation_invitation.login_source = mandatory_login_source affiliation_invitation.save() - AffiliationInvitation\ - .send_affiliation_invitation(affiliation_invitation=affiliation_invitation, - business_name=business['business']['legalName'], - app_url=invitation_origin + '/', - email_addresses=affiliation_invitation.recipient_email) + AffiliationInvitation.send_affiliation_invitation( + affiliation_invitation=affiliation_invitation, + business_name=business["business"]["legalName"], + app_url=invitation_origin + "/", + email_addresses=affiliation_invitation.recipient_email, + ) return AffiliationInvitation(affiliation_invitation) @staticmethod def _get_business_details(business_identifier: str, token: str): """Return business details by calling legal-api.""" - legal_api_url = current_app.config.get('LEGAL_API_URL') + current_app.config.get('LEGAL_API_VERSION_2') - get_businesses_url = f'{ legal_api_url }/businesses/{business_identifier}' + legal_api_url = current_app.config.get("LEGAL_API_URL") + current_app.config.get("LEGAL_API_VERSION_2") + get_businesses_url = f"{legal_api_url}/businesses/{business_identifier}" try: get_business_response = RestService.get(get_businesses_url, token=token, skip_404_logging=True) except (HTTPError, ServiceUnavailableException) as e: - current_app.logger.info(e) + logger.info(e) raise BusinessException(Error.AFFILIATION_INVITATION_BUSINESS_NOT_FOUND, None) from e return get_business_response.json() @@ -298,22 +333,21 @@ def _get_business_details(business_identifier: 
str, token: str): @staticmethod def _get_multiple_business_details(business_identifiers: List[str], token: str) -> List: """Return json of multiple business details by calling legal-api.""" - legal_api_url = current_app.config.get('LEGAL_API_URL') + current_app.config.get('LEGAL_API_VERSION_2') - get_businesses_url = f'{legal_api_url}/businesses/search' + legal_api_url = current_app.config.get("LEGAL_API_URL") + current_app.config.get("LEGAL_API_VERSION_2") + get_businesses_url = f"{legal_api_url}/businesses/search" - data = {'identifiers': business_identifiers} + data = {"identifiers": business_identifiers} try: get_business_response = RestService.post(get_businesses_url, token=token, data=data) except (HTTPError, ServiceUnavailableException) as e: - current_app.logger.info(e) + logger.info(e) raise BusinessException(Error.AFFILIATION_INVITATION_BUSINESS_NOT_FOUND, None) from e - return get_business_response.json()['businessEntities'] + return get_business_response.json()["businessEntities"] def update_affiliation_invitation(self, user, invitation_origin, affiliation_invitation_info: Dict): """Update the specified affiliation invitation with new data.""" - check_auth(org_id=self._model.from_org_id, - one_of_roles=(ADMIN, COORDINATOR, STAFF)) + check_auth(org_id=self._model.from_org_id, one_of_roles=(ADMIN, COORDINATOR, STAFF)) invitation: AffiliationInvitationModel = self._model @@ -322,27 +356,27 @@ def update_affiliation_invitation(self, user, invitation_origin, affiliation_inv return AffiliationInvitation(invitation) # Check for status to patch - new_status = affiliation_invitation_info.get('status') + new_status = affiliation_invitation_info.get("status") if not new_status or new_status == InvitationStatus.PENDING.value: # Resend invitation - confirmation_token = AffiliationInvitation\ - .generate_confirmation_token(self._model.id, - self._model.from_org_id, - self._model.to_org_id, - self._model.entity.business_identifier) + confirmation_token = 
AffiliationInvitation.generate_confirmation_token( + self._model.id, self._model.from_org_id, self._model.to_org_id, self._model.entity.business_identifier + ) self._model.token = confirmation_token invitation = self._model.update_invitation_as_retried(user.identifier) entity: EntityModel = invitation.entity - token = RestService.get_service_account_token(config_id='ENTITY_SVC_CLIENT_ID', - config_secret='ENTITY_SVC_CLIENT_SECRET') + token = RestService.get_service_account_token( + config_id="ENTITY_SVC_CLIENT_ID", config_secret="ENTITY_SVC_CLIENT_SECRET" + ) business = AffiliationInvitation._get_business_details(entity.business_identifier, token) - AffiliationInvitation\ - .send_affiliation_invitation(affiliation_invitation=invitation, - business_name=business['business']['legalName'], - app_url=invitation_origin + '/', - email_addresses=invitation.recipient_email) + AffiliationInvitation.send_affiliation_invitation( + affiliation_invitation=invitation, + business_name=business["business"]["legalName"], + app_url=invitation_origin + "/", + email_addresses=invitation.recipient_email, + ) # Expire invitation elif new_status == InvitationStatus.EXPIRED.value: invitation = self._model.set_status(InvitationStatus.EXPIRED.value) @@ -364,8 +398,9 @@ def delete_affiliation_invitation(invitation_id): invitation.delete() @staticmethod - def _filter_request_invites_role_based(affiliation_invitation_models: List[AffiliationInvitationModel], - org_id: int) -> List[AffiliationInvitationModel]: + def _filter_request_invites_role_based( + affiliation_invitation_models: List[AffiliationInvitationModel], org_id: int + ) -> List[AffiliationInvitationModel]: """Filter out affiliation invitations of type REQUEST if current user is not staff or org admin/coordinator.""" if UserService.is_context_user_staff(): return affiliation_invitation_models @@ -378,9 +413,12 @@ def _filter_request_invites_role_based(affiliation_invitation_models: List[Affil return affiliation_invitation_models # 
filter out affiliation invitations of type request - return list(filter( - lambda affiliation_invitation: affiliation_invitation.type != AffiliationInvitationType.REQUEST.value, - affiliation_invitation_models)) + return list( + filter( + lambda affiliation_invitation: affiliation_invitation.type != AffiliationInvitationType.REQUEST.value, + affiliation_invitation_models, + ) + ) @staticmethod def search_invitations(search_filter: AffiliationInvitationSearch, mask_email=True): @@ -391,16 +429,16 @@ def search_invitations(search_filter: AffiliationInvitationSearch, mask_email=Tr org_id = None searched_invitations = AffiliationInvitationModel().filter_by(search_filter=search_filter) - invitation_models = (AffiliationInvitation. - _filter_request_invites_role_based(affiliation_invitation_models=searched_invitations, - org_id=org_id)) + invitation_models = AffiliationInvitation._filter_request_invites_role_based( + affiliation_invitation_models=searched_invitations, org_id=org_id + ) return [AffiliationInvitation(invitation).as_dict(mask_email=mask_email) for invitation in invitation_models] @staticmethod @user_context def get_invitations_for_to_org(org_id, status=None, **kwargs): """Get affiliation invitations for to org.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] if not OrgModel.find_by_org_id(org_id): return None @@ -410,16 +448,17 @@ def get_invitations_for_to_org(org_id, status=None, **kwargs): # If staff return full list if user_from_context.is_staff(): - return AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(to_org_id=org_id, status_codes=['PENDING'])) + return AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(to_org_id=int(org_id), status_codes=["PENDING"]) + ) current_user: UserService = UserService.find_by_jwt_token() - current_user_membership: MembershipModel = \ - MembershipModel.find_membership_by_user_and_org(user_id=current_user.identifier, 
org_id=org_id) + current_user_membership: MembershipModel = MembershipModel.find_membership_by_user_and_org( + user_id=current_user.identifier, org_id=org_id + ) # If no active membership return empty array - if current_user_membership is None or \ - current_user_membership.status != Status.ACTIVE.value: + if current_user_membership is None or current_user_membership.status != Status.ACTIVE.value: return [] return AffiliationInvitationModel.find_invitations_to_org(org_id=org_id, status=status) @@ -439,41 +478,41 @@ def find_affiliation_invitation_by_id(invitation_id): def _get_token_confirm_path(app_url, org_name, token, query_params=None): """Get the config for different email types.""" escape_url = escape_wam_friendly_url(org_name) - path = f'{escape_url}/affiliationInvitation/acceptToken' - token_confirm_url = f'{app_url}/{path}/{token}' + path = f"{escape_url}/affiliationInvitation/acceptToken" + token_confirm_url = f"{app_url}/{path}/{token}" if query_params: - token_confirm_url += f'?{urlencode(query_params)}' + token_confirm_url += f"?{urlencode(query_params)}" return token_confirm_url @staticmethod - def send_affiliation_invitation(affiliation_invitation: AffiliationInvitationModel, - business_name, - app_url=None, - is_authorized=None, - email_addresses=None): + def send_affiliation_invitation( + affiliation_invitation: AffiliationInvitationModel, + business_name, + app_url=None, + is_authorized=None, + email_addresses=None, + ): """Send the email notification.""" - current_app.logger.debug('send_affiliation_invitation failed') - current_app.logger.debug(exception) + logger.debug(">send_affiliation_invitation failed") + logger.debug(exception) raise BusinessException(Error.FAILED_AFFILIATION_INVITATION, None) from exception - current_app.logger.debug('>send_affiliation_invitation') + logger.debug(">send_affiliation_invitation") @staticmethod - def send_affiliation_invitation_authorization_email(affiliation_invitation: AffiliationInvitationModel, - 
is_authorized: bool): + def send_affiliation_invitation_authorization_email( + affiliation_invitation: AffiliationInvitationModel, is_authorized: bool + ): """Send authorization email, either for accepted or refused authorization.""" - token = RestService.get_service_account_token(config_id='ENTITY_SVC_CLIENT_ID', - config_secret='ENTITY_SVC_CLIENT_SECRET') - business = AffiliationInvitation. \ - _get_business_details(business_identifier=affiliation_invitation.entity.business_identifier, - token=token) - business_name = business['business']['legalName'] + token = RestService.get_service_account_token( + config_id="ENTITY_SVC_CLIENT_ID", config_secret="ENTITY_SVC_CLIENT_SECRET" + ) + business = AffiliationInvitation._get_business_details( + business_identifier=affiliation_invitation.entity.business_identifier, token=token + ) + business_name = business["business"]["legalName"] - email_address = AffiliationInvitation. \ - get_invitation_email(affiliation_invitation_type=AffiliationInvitationType.REQUEST, - org_id=affiliation_invitation.from_org_id) + email_address = AffiliationInvitation.get_invitation_email( + affiliation_invitation_type=AffiliationInvitationType.REQUEST, org_id=affiliation_invitation.from_org_id + ) AffiliationInvitation.send_affiliation_invitation( affiliation_invitation=affiliation_invitation, business_name=business_name, email_addresses=email_address, - is_authorized=is_authorized + is_authorized=is_authorized, ) @staticmethod def generate_confirmation_token(affiliation_invitation_id, from_org_id, to_org_id, business_identifier): """Generate the token to be sent in the email.""" serializer = URLSafeTimedSerializer(CONFIG.EMAIL_TOKEN_SECRET_KEY) - token = {'id': affiliation_invitation_id, - 'fromOrgId': from_org_id, - 'toOrgId': to_org_id, - 'businessIdentifier': business_identifier} + token = { + "id": affiliation_invitation_id, + "fromOrgId": from_org_id, + "toOrgId": to_org_id, + "businessIdentifier": business_identifier, + } return 
serializer.dumps(token, salt=CONFIG.EMAIL_SECURITY_PASSWORD_SALT) @staticmethod def validate_token(token, affiliation_invitation_id: int): """Check whether the passed token is valid.""" serializer = URLSafeTimedSerializer(CONFIG.EMAIL_TOKEN_SECRET_KEY) - token_valid_for = int(CONFIG.AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS) \ - * 60 if CONFIG.AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS else 60 * 15 + token_valid_for = ( + int(CONFIG.AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS) * 60 + if CONFIG.AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS + else 60 * 15 + ) try: - token_payload = serializer.loads(token, salt=CONFIG.EMAIL_SECURITY_PASSWORD_SALT, - max_age=token_valid_for) - token_invitation_id = token_payload.get('id') + token_payload = serializer.loads(token, salt=CONFIG.EMAIL_SECURITY_PASSWORD_SALT, max_age=token_valid_for) + token_invitation_id = token_payload.get("id") # The specified affiliation_invitation_id does not match the token if affiliation_invitation_id != token_invitation_id: - raise BusinessException(Error.INVALID_AFFILATION_INVITATION_TOKEN, None) + raise BusinessException(Error.INVALID_AFFILIATION_INVITATION_TOKEN, None) except Exception as e: # noqa: E722 raise BusinessException(Error.EXPIRED_AFFILIATION_INVITATION, None) from e - affiliation_invitation: AffiliationInvitationModel = AffiliationInvitationModel.\ - find_invitation_by_id(affiliation_invitation_id) + affiliation_invitation: AffiliationInvitationModel = AffiliationInvitationModel.find_invitation_by_id( + affiliation_invitation_id + ) if affiliation_invitation is None: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -570,13 +616,19 @@ def validate_token(token, affiliation_invitation_id: int): @staticmethod @user_context - def accept_affiliation_invitation(affiliation_invitation_id, - # pylint:disable=unused-argument - user: UserService, origin, environment=None, **kwargs): + def accept_affiliation_invitation( + affiliation_invitation_id, + # pylint:disable=unused-argument + user: UserService, + origin, + 
environment=None, + **kwargs, + ): """Add an affiliation from the affiliation invitation.""" - current_app.logger.debug('>accept_affiliation_invitation') - affiliation_invitation: AffiliationInvitationModel = AffiliationInvitationModel.\ - find_invitation_by_id(affiliation_invitation_id) + logger.debug(">accept_affiliation_invitation") + affiliation_invitation: AffiliationInvitationModel = AffiliationInvitationModel.find_invitation_by_id( + affiliation_invitation_id + ) if affiliation_invitation is None: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -590,38 +642,43 @@ def accept_affiliation_invitation(affiliation_invitation_id, org_id = affiliation_invitation.from_org_id entity_id = affiliation_invitation.entity_id - if not (affiliation_model := AffiliationModel.find_affiliation_by_org_and_entity_ids(org_id, entity_id, - environment)): + if not ( + affiliation_model := AffiliationModel.find_affiliation_by_org_and_entity_ids(org_id, entity_id, environment) + ): # Create an affiliation with to_org_id - affiliation_model = AffiliationModel(org_id=org_id, entity_id=entity_id, certified_by_name=None, - environment=environment) + affiliation_model = AffiliationModel( + org_id=org_id, entity_id=entity_id, certified_by_name=None, environment=environment + ) affiliation_model.save() affiliation_invitation.affiliation_id = affiliation_model.id affiliation_invitation.approver_id = user.identifier affiliation_invitation.accepted_date = datetime.now() - affiliation_invitation.invitation_status = InvitationStatusModel\ - .get_status_by_code(InvitationStatus.ACCEPTED.value) + affiliation_invitation.invitation_status = InvitationStatusModel.get_status_by_code( + InvitationStatus.ACCEPTED.value + ) affiliation_invitation.save() if affiliation_invitation.type == AffiliationInvitationType.REQUEST.value: - AffiliationInvitation. 
\ - send_affiliation_invitation_authorization_email(affiliation_invitation=affiliation_invitation, - is_authorized=True) + AffiliationInvitation.send_affiliation_invitation_authorization_email( + affiliation_invitation=affiliation_invitation, is_authorized=True + ) - current_app.logger.debug(' List[Dict]: + def get_all_invitations_with_details_related_to_org( + cls, org_id: int, search_filter: AffiliationInvitationSearch + ) -> List[Dict]: """Get affiliation invitations for from org and for to org.""" - affiliation_invitations = AffiliationInvitationModel.find_all_related_to_org(org_id=org_id, - search_filter=search_filter) + affiliation_invitations = AffiliationInvitationModel.find_all_related_to_org( + org_id=org_id, search_filter=search_filter + ) filtered_affiliation_invitations = AffiliationInvitation._filter_request_invites_role_based( - affiliation_invitation_models=affiliation_invitations, org_id=org_id) + affiliation_invitation_models=affiliation_invitations, org_id=org_id + ) return cls.affiliation_invitations_to_dict_list(filtered_affiliation_invitations) @@ -639,7 +696,8 @@ def refuse_affiliation_invitation(invitation_id: int, user: UserService): invitation.approver_id = user.identifier invitation.save() - AffiliationInvitation. 
\ - send_affiliation_invitation_authorization_email(affiliation_invitation=invitation, is_authorized=False) + AffiliationInvitation.send_affiliation_invitation_authorization_email( + affiliation_invitation=invitation, is_authorized=False + ) return AffiliationInvitation(invitation) diff --git a/auth-api/src/auth_api/services/api_gateway.py b/auth-api/src/auth_api/services/api_gateway.py index 150f4ad9ed..28c9fbd5dc 100644 --- a/auth-api/src/auth_api/services/api_gateway.py +++ b/auth-api/src/auth_api/services/api_gateway.py @@ -18,6 +18,7 @@ from flask import current_app from requests.exceptions import HTTPError +from structured_logging import StructuredLogging from auth_api.exceptions import BusinessException, Error from auth_api.models.org import Org as OrgModel @@ -29,6 +30,8 @@ from auth_api.utils.roles import ADMIN, STAFF from auth_api.utils.user_context import UserContext, user_context +logger = StructuredLogging.get_logger() + class ApiGateway: """Manages all aspects of the API gateway integration.""" @@ -45,9 +48,9 @@ def create_key(cls, org_id: int, request_json: Dict[str, str]): B - If consumer already exists, 1 - Create key for specific environment. 
""" - current_app.logger.debug(' List[Dict[str, any]]: """Get all api keys.""" - current_app.logger.debug(' List[Dict[str, any]]: @classmethod def _filter_and_add_keys(cls, api_keys_response, keys, email): def _add_key_to_response(_key): - if _key['keyStatus'] == 'approved': - _key['email'] = email - _key['environment'] = 'prod' if _key['environment'] == 'prod' else 'sandbox' - api_keys_response['consumer']['consumerKey'].append(_key) + if _key["keyStatus"] == "approved": + _key["email"] = email + _key["environment"] = "prod" if _key["environment"] == "prod" else "sandbox" + api_keys_response["consumer"]["consumerKey"].append(_key) if isinstance(keys, dict): _add_key_to_response(keys) @@ -183,12 +184,12 @@ def _add_key_to_response(_key): @classmethod def _get_email_id(cls, org_id, env) -> str: - if current_app.config.get('API_GW_CONSUMER_EMAIL', None) is not None: - return current_app.config.get('API_GW_CONSUMER_EMAIL') + if current_app.config.get("API_GW_CONSUMER_EMAIL", None) is not None: + return current_app.config.get("API_GW_CONSUMER_EMAIL") - api_gw_email_suffix: str = current_app.config.get('API_GW_EMAIL_SUFFIX') - id_suffix = '' if env == 'prod' else '-sandbox' - email_id = f'{org_id}{id_suffix}@{api_gw_email_suffix}' + api_gw_email_suffix: str = current_app.config.get("API_GW_EMAIL_SUFFIX") + id_suffix = "" if env == "prod" else "-sandbox" + email_id = f"{org_id}{id_suffix}@{api_gw_email_suffix}" return email_id @classmethod @@ -198,9 +199,9 @@ def _consumer_exists(cls, email, env): gw_api_key: str = cls._get_api_gw_key(env) try: RestService.get( - f'{consumer_endpoint}/mc/v1/consumers/{email}', - additional_headers={'x-apikey': gw_api_key}, - skip_404_logging=True + f"{consumer_endpoint}/mc/v1/consumers/{email}", + additional_headers={"x-apikey": gw_api_key}, + skip_404_logging=True, ) except HTTPError as exc: if exc.response.status_code == 404: # If consumer doesn't exist @@ -218,36 +219,35 @@ def _create_payment_account(cls, org: OrgModel, **kwargs): - 
Get the payment account details (non sandbox). - Mask the details and call pay sandbox endpoint to create a sandbox account. """ - if not current_app.config.get('PAY_API_SANDBOX_URL'): - current_app.logger.warning('Sandbox URL not provided, skipping sandbox pay account creation') + if not current_app.config.get("PAY_API_SANDBOX_URL"): + logger.warning("Sandbox URL not provided, skipping sandbox pay account creation") return - user: UserContext = kwargs['user_context'] + user: UserContext = kwargs["user_context"] pay_account = cls._get_pay_account(org, user) pay_request = { - 'accountId': org.id, - 'accountName': f"{org.name}-{org.branch_name or ''}", - 'padTosAcceptedBy': pay_account.get('padTosAcceptedBy', ''), - 'bcolAccountNumber': org.bcol_account_id or '', - 'bcolUserId': org.bcol_user_id or '', - 'paymentInfo': { - 'methodOfPayment': pay_account.get('futurePaymentMethod', None) or pay_account['paymentMethod'], - 'bankInstitutionNumber': '000', # Dummy values - 'bankTransitNumber': '00000', # Dummy values - 'bankAccountNumber': '0000000' # Dummy values + "accountId": org.id, + "accountName": f"{org.name}-{org.branch_name or ''}", + "padTosAcceptedBy": pay_account.get("padTosAcceptedBy", ""), + "bcolAccountNumber": org.bcol_account_id or "", + "bcolUserId": org.bcol_user_id or "", + "paymentInfo": { + "methodOfPayment": pay_account.get("futurePaymentMethod", None) or pay_account["paymentMethod"], + "bankInstitutionNumber": "000", # Dummy values + "bankTransitNumber": "00000", # Dummy values + "bankAccountNumber": "0000000", # Dummy values }, - 'contactInfo': {} + "contactInfo": {}, } cls._create_sandbox_pay_account(pay_request, user) @classmethod def _create_sandbox_pay_account(cls, pay_request, user): - current_app.logger.info('Creating Sandbox Payload %s', pay_request) + logger.info("Creating Sandbox Payload %s", pay_request) pay_sandbox_accounts_endpoint = f"{current_app.config.get('PAY_API_SANDBOX_URL')}/accounts?sandbox=true" - 
RestService.post(endpoint=pay_sandbox_accounts_endpoint, - token=user.bearer_token, - data=pay_request, - raise_for_status=True) + RestService.post( + endpoint=pay_sandbox_accounts_endpoint, token=user.bearer_token, data=pay_request, raise_for_status=True + ) @classmethod def _get_pay_account(cls, org, user): @@ -257,13 +257,16 @@ def _get_pay_account(cls, org, user): @classmethod def _get_api_consumer_endpoint(cls, env): - current_app.logger.info('_get_api_consumer_endpoint %s', env) - return current_app.config.get('API_GW_CONSUMERS_API_URL') if env == 'prod' else current_app.config.get( - 'API_GW_CONSUMERS_SANDBOX_API_URL') + logger.info("_get_api_consumer_endpoint %s", env) + return ( + current_app.config.get("API_GW_CONSUMERS_API_URL") + if env == "prod" + else current_app.config.get("API_GW_CONSUMERS_SANDBOX_API_URL") + ) @classmethod def _make_string_compatible(cls, target: str) -> str: """Make string compatible for API gateway.""" # Length 64 max - alphanumeric, space, and the following: . _ - - target = re.sub(r'[^a-zA-Z0-9_\- .]', '', target) + target = re.sub(r"[^a-zA-Z0-9_\- .]", "", target) return target[:64] diff --git a/auth-api/src/auth_api/services/authorization.py b/auth-api/src/auth_api/services/authorization.py index e7f39e51ea..c85b473051 100644 --- a/auth-api/src/auth_api/services/authorization.py +++ b/auth-api/src/auth_api/services/authorization.py @@ -17,7 +17,8 @@ """ from typing import Dict, Optional -from flask import abort, current_app +from flask import abort +from structured_logging import StructuredLogging from auth_api.models.views.authorization import Authorization as AuthorizationView from auth_api.services.permissions import Permissions as PermissionsService @@ -26,6 +27,8 @@ from auth_api.utils.roles import STAFF, Role from auth_api.utils.user_context import UserContext, user_context +logger = StructuredLogging.get_logger() + class Authorization: """This module is to handle authorization related queries. 
@@ -40,10 +43,11 @@ def __init__(self, model): @staticmethod @user_context - def get_account_authorizations_for_org(account_id: str, corp_type_code: Optional[str], - expanded: bool = False, **kwargs): + def get_account_authorizations_for_org( + account_id: str, corp_type_code: Optional[str], expanded: bool = False, **kwargs + ): """Get User authorizations for the org.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] auth_response = {} auth = None token_roles = user_from_context.roles @@ -54,7 +58,7 @@ def get_account_authorizations_for_org(account_id: str, corp_type_code: Optional # Query Authorization view by business identifier auth = AuthorizationView.find_authorization_for_admin_by_org_id(account_id) auth_response = Authorization(auth).as_dict(expanded) - auth_response['roles'] = token_roles + auth_response["roles"] = token_roles else: keycloak_guid = user_from_context.sub @@ -63,8 +67,9 @@ def get_account_authorizations_for_org(account_id: str, corp_type_code: Optional check_product_based_auth = Authorization._is_product_based_auth(corp_type_code) if check_product_based_auth: if account_id_claim: - auth = AuthorizationView.find_account_authorization_by_org_id_and_product(account_id_claim, - corp_type_code) + auth = AuthorizationView.find_account_authorization_by_org_id_and_product( + account_id_claim, corp_type_code + ) else: auth = AuthorizationView.find_account_authorization_by_org_id_and_product_for_user( keycloak_guid, account_id, corp_type_code @@ -74,12 +79,11 @@ def get_account_authorizations_for_org(account_id: str, corp_type_code: Optional auth = AuthorizationView.find_authorization_for_admin_by_org_id(account_id_claim) elif account_id and keycloak_guid: auth = AuthorizationView.find_user_authorization_by_org_id(keycloak_guid, account_id) - auth_response['roles'] = [] + auth_response["roles"] = [] if auth: - permissions = 
PermissionsService.get_permissions_for_membership(auth.status_code, - auth.org_membership) + permissions = PermissionsService.get_permissions_for_membership(auth.status_code, auth.org_membership) auth_response = Authorization(auth).as_dict(expanded) - auth_response['roles'] = permissions + auth_response["roles"] = permissions return auth_response @@ -87,36 +91,36 @@ def get_account_authorizations_for_org(account_id: str, corp_type_code: Optional @user_context def get_user_authorizations_for_entity(business_identifier: str, expanded: bool = False, **kwargs): """Get User authorizations for the entity.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] auth_response = {} auth = None token_roles = user_from_context.roles - current_app.logger.debug(f'check roles=:{token_roles}') + logger.debug(f"check roles=:{token_roles}") if Role.STAFF.value in token_roles: if expanded: # Query Authorization view by business identifier auth = AuthorizationView.find_user_authorization_by_business_number(business_identifier, is_staff=True) auth_response = Authorization(auth).as_dict(expanded) - auth_response['roles'] = token_roles + auth_response["roles"] = token_roles elif Role.SYSTEM.value in token_roles: # a service account in keycloak should have product_code claim setup. 
- keycloak_product_code = user_from_context.token_info.get('product_code', None) + keycloak_product_code = user_from_context.token_info.get("product_code", None) if keycloak_product_code: - auth = AuthorizationView.find_user_authorization_by_business_number_and_product(business_identifier, - keycloak_product_code) + auth = AuthorizationView.find_user_authorization_by_business_number_and_product( + business_identifier, keycloak_product_code + ) if auth: auth_response = Authorization(auth).as_dict(expanded) - permissions = PermissionsService.get_permissions_for_membership(auth.status_code, 'SYSTEM') - auth_response['roles'] = permissions + permissions = PermissionsService.get_permissions_for_membership(auth.status_code, "SYSTEM") + auth_response["roles"] = permissions else: if business_identifier: # if this is an API GW account, check if the account has access to the resource if user_from_context.login_source == LoginSource.API_GW.value: auth = AuthorizationView.find_user_authorization_by_business_number( - business_identifier=business_identifier, - org_id=user_from_context.account_id_claim + business_identifier=business_identifier, org_id=user_from_context.account_id_claim ) # Check if the user has access to the resource @@ -124,7 +128,7 @@ def get_user_authorizations_for_entity(business_identifier: str, expanded: bool auth = AuthorizationView.find_user_authorization_by_business_number( business_identifier=business_identifier, keycloak_guid=keycloak_guid, - org_id=user_from_context.account_id + org_id=user_from_context.account_id, ) if auth: @@ -132,40 +136,38 @@ def get_user_authorizations_for_entity(business_identifier: str, expanded: bool auth.status_code, auth.org_membership ) auth_response = Authorization(auth).as_dict(expanded) - auth_response['roles'] = permissions + auth_response["roles"] = permissions return auth_response @staticmethod def get_user_authorizations(keycloak_guid: str): """Get all user authorizations.""" - authorizations_response: Dict = 
{'authorizations': []} + authorizations_response: Dict = {"authorizations": []} authorizations = AuthorizationView.find_all_authorizations_for_user(keycloak_guid) if authorizations: for auth in authorizations: - authorizations_response['authorizations'].append(Authorization(auth).as_dict()) + authorizations_response["authorizations"].append(Authorization(auth).as_dict()) return authorizations_response @staticmethod @user_context def get_account_authorizations_for_product(account_id: str, product_code: str, expanded: bool = False, **kwargs): """Get account authorizations for the product.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] account_id_claim = user_from_context.account_id if account_id_claim: - auth = AuthorizationView.find_account_authorization_by_org_id_and_product( - account_id_claim, product_code - ) + auth = AuthorizationView.find_account_authorization_by_org_id_and_product(account_id_claim, product_code) else: auth = AuthorizationView.find_account_authorization_by_org_id_and_product_for_user( user_from_context.sub, account_id, product_code ) auth_response = Authorization(auth).as_dict(expanded) - auth_response['roles'] = [] + auth_response["roles"] = [] if auth: permissions = PermissionsService.get_permissions_for_membership(auth.status_code, auth.org_membership) - auth_response['roles'] = permissions + auth_response["roles"] = permissions return auth_response @@ -176,22 +178,19 @@ def as_dict(self, expanded: bool = False): if not self._model: return auth_dict - auth_dict['orgMembership'] = self._model.org_membership + auth_dict["orgMembership"] = self._model.org_membership # If the request is for expanded authz return more info if expanded: - auth_dict['business'] = { - 'folioNumber': self._model.folio_number, - 'name': self._model.entity_name - } - auth_dict['account'] = { - 'id': self._model.org_id, - 'name': self._model.org_name, - 'accountType': self._model.org_type, - 
'paymentPreference': { - 'bcOnlineUserId': self._model.bcol_user_id, - 'bcOnlineAccountId': self._model.bcol_account_id - } + auth_dict["business"] = {"folioNumber": self._model.folio_number, "name": self._model.entity_name} + auth_dict["account"] = { + "id": self._model.org_id, + "name": self._model.org_name, + "accountType": self._model.org_type, + "paymentPreference": { + "bcOnlineUserId": self._model.bcol_user_id, + "bcOnlineAccountId": self._model.bcol_account_id, + }, } return auth_dict @@ -200,8 +199,9 @@ def _is_product_based_auth(product_code): check_product_based_auth = False if product_code: - from auth_api.services.products import \ - Product as ProductService # pylint:disable=cyclic-import, import-outside-toplevel + # pylint:disable=cyclic-import, import-outside-toplevel + from auth_api.services.products import Product as ProductService + product_type: str = ProductService.find_product_type_by_code(product_code) # TODO should we reject if the product code is unknown?? if product_type == ProductTypeCodeEnum.PARTNER.value: # PARTNERS needs product based auth @@ -212,18 +212,18 @@ def _is_product_based_auth(product_code): @user_context def check_auth(**kwargs): """Check if user is authorized to perform action on the service.""" - user_from_context: UserContext = kwargs['user_context'] - if user_from_context.is_staff() and not kwargs.get('system_required', None): + user_from_context: UserContext = kwargs["user_context"] + if user_from_context.is_staff() and not kwargs.get("system_required", None): _check_for_roles(STAFF, kwargs) elif user_from_context.is_system(): - business_identifier = kwargs.get('business_identifier', None) - org_identifier = kwargs.get('org_id', None) + business_identifier = kwargs.get("business_identifier", None) + org_identifier = kwargs.get("org_id", None) - product_code_in_jwt = user_from_context.token_info.get('product_code', None) + product_code_in_jwt = user_from_context.token_info.get("product_code", None) if 
product_code_in_jwt is None: # product code must be present in jwt abort(403) - if product_code_in_jwt == 'ALL': # Product code for super admin service account (sbc-auth-admin) + if product_code_in_jwt == "ALL": # Product code for super admin service account (sbc-auth-admin) return auth = None @@ -235,32 +235,33 @@ def check_auth(**kwargs): abort(403) return else: - business_identifier = kwargs.get('business_identifier', None) - org_identifier = kwargs.get('org_id', None) or user_from_context.account_id + business_identifier = kwargs.get("business_identifier", None) + org_identifier = kwargs.get("org_id", None) or user_from_context.account_id if business_identifier: auth = Authorization.get_user_authorizations_for_entity(business_identifier) elif org_identifier: # If the account id is part of claim (api gw users), then no need to lookup using keycloak guid. - if user_from_context.account_id_claim and \ - int(user_from_context.account_id_claim) == kwargs.get('org_id', None): + if user_from_context.account_id_claim and int(user_from_context.account_id_claim) == kwargs.get( + "org_id", None + ): auth_record = AuthorizationView.find_authorization_for_admin_by_org_id(user_from_context.account_id) else: auth_record = AuthorizationView.find_user_authorization_by_org_id(user_from_context.sub, org_identifier) auth = Authorization(auth_record).as_dict() if auth_record else None - _check_for_roles(auth.get('orgMembership', None) if auth else None, kwargs) + _check_for_roles(auth.get("orgMembership", None) if auth else None, kwargs) def _check_for_roles(role: str, kwargs): is_authorized: bool = False # If role is found if role: - if kwargs.get('one_of_roles', None): - is_authorized = role in kwargs.get('one_of_roles') - if kwargs.get('disabled_roles', None): - is_authorized = role not in kwargs.get('disabled_roles') - if kwargs.get('equals_role', None): - is_authorized = role == kwargs.get('equals_role') + if kwargs.get("one_of_roles", None): + is_authorized = role in 
kwargs.get("one_of_roles") + if kwargs.get("disabled_roles", None): + is_authorized = role not in kwargs.get("disabled_roles") + if kwargs.get("equals_role", None): + is_authorized = role == kwargs.get("equals_role") if not is_authorized: abort(403) diff --git a/auth-api/src/auth_api/services/codes.py b/auth-api/src/auth_api/services/codes.py index 62caff4572..fc68def079 100644 --- a/auth-api/src/auth_api/services/codes.py +++ b/auth-api/src/auth_api/services/codes.py @@ -36,8 +36,8 @@ def fetch_data_model(code_type: str = None): :param table_fullname: String with fullname of table. :return: Class reference or None. """ - for model_class in db.Model._decl_class_registry.values(): # pylint:disable=protected-access - if hasattr(model_class, '__table__') and model_class.__table__.fullname == code_type: + for model_class in db.Model.registry._class_registry.values(): # pylint:disable=protected-access + if hasattr(model_class, "__table__") and model_class.__table__.fullname == code_type: if issubclass(model_class, BaseCodeModel): return model_class @@ -57,13 +57,14 @@ def fetch_codes(cls, code_type: str = None) -> []: data = [] # transform each of entry to a dictionary base on schema. 
for entry in codes: - module_name = f'auth_api.schemas.{entry.__tablename__}' - class_name = f'{entry.__class__.__name__}Schema' + module_name = f"auth_api.schemas.{entry.__tablename__}" + class_name = f"{entry.__class__.__name__}Schema" try: schema = getattr(importlib.import_module(module_name), class_name) except ModuleNotFoundError: - schema = getattr(importlib.import_module('auth_api.schemas.basecode_type'), - 'BaseCodeSchema') + schema = getattr( + importlib.import_module("auth_api.schemas.basecode_type"), "BaseCodeSchema" + ) code_schema = schema() data.append(code_schema.dump(entry, many=False)) return data diff --git a/auth-api/src/auth_api/services/contact.py b/auth-api/src/auth_api/services/contact.py index 84ff75a445..78b3a89413 100644 --- a/auth-api/src/auth_api/services/contact.py +++ b/auth-api/src/auth_api/services/contact.py @@ -16,12 +16,9 @@ This module manages the Contact information for a user or entity. """ -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001, I003, I004 - from auth_api.schemas import ContactSchema, ContactSchemaPublic # noqa: I001, I003, I004 -@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class Contact: """Manage all aspects of the Contact entity.""" @@ -34,7 +31,6 @@ def identifier(self): """Return the identifier for this contact.""" return self._model.id - @ServiceTracing.disable_tracing def as_dict(self, masked_email_only=False): """Return the Contact as a python dict. 
diff --git a/auth-api/src/auth_api/services/documents.py b/auth-api/src/auth_api/services/documents.py index ec072223bf..cbb08946ec 100644 --- a/auth-api/src/auth_api/services/documents.py +++ b/auth-api/src/auth_api/services/documents.py @@ -14,18 +14,15 @@ """Service for managing the documents.""" from jinja2 import Environment, FileSystemLoader -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from auth_api.config import get_named_config from auth_api.models import Documents as DocumentsModel from auth_api.schemas import DocumentSchema - -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) CONFIG = get_named_config() -@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class Documents: """Manages the documents in DB. @@ -36,7 +33,6 @@ def __init__(self, model): """Return an invitation service instance.""" self._model = model - @ServiceTracing.disable_tracing def as_dict(self): """Return the User as a python dict. 
diff --git a/auth-api/src/auth_api/services/entity.py b/auth-api/src/auth_api/services/entity.py index be78fd700f..d210283d0f 100644 --- a/auth-api/src/auth_api/services/entity.py +++ b/auth-api/src/auth_api/services/entity.py @@ -17,9 +17,8 @@ import string from typing import Tuple -from flask import current_app -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from sbc_common_components.utils.enums import QueueMessageTypes +from structured_logging import StructuredLogging from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error @@ -32,10 +31,12 @@ from auth_api.utils.roles import ALL_ALLOWED_ROLES from auth_api.utils.user_context import UserContext, user_context from auth_api.utils.util import camelback2snake + from .authorization import check_auth +logger = StructuredLogging.get_logger() + -@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class Entity: """Manages all aspect of Entity data. @@ -86,7 +87,6 @@ def set_pass_code_claimed(self, pass_code_claimed): self._model.pass_code_claimed = pass_code_claimed self._model.save() - @ServiceTracing.disable_tracing def as_dict(self): """Return the entity as a python dictionary. 
@@ -97,8 +97,9 @@ def as_dict(self): return obj @classmethod - def find_by_business_identifier(cls, business_identifier: str = None, - allowed_roles: Tuple = None, skip_auth: bool = False): + def find_by_business_identifier( + cls, business_identifier: str = None, allowed_roles: Tuple = None, skip_auth: bool = False + ): """Given a business identifier, this will return the corresponding entity or None.""" if not business_identifier: return None @@ -131,12 +132,12 @@ def save_entity(entity_info: dict): if not entity_info: return None - existing_entity = EntityModel.find_by_business_identifier(entity_info['businessIdentifier']) + existing_entity = EntityModel.find_by_business_identifier(entity_info["businessIdentifier"]) if existing_entity is None: entity_model = EntityModel.create_from_dict(entity_info) else: # TODO temporary allow update passcode, should replace with reset passcode endpoint. - entity_info['passCode'] = passcode_hash(entity_info['passCode']) + entity_info["passCode"] = passcode_hash(entity_info["passCode"]) existing_entity.update_from_dict(**camelback2snake(entity_info)) entity_model = existing_entity entity_model.commit() @@ -153,20 +154,20 @@ def update_entity(business_identifier: str, entity_info: dict, **kwargs): """ if not entity_info or not business_identifier: return None - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] if not user_from_context.is_system(): check_auth(one_of_roles=ALL_ALLOWED_ROLES, business_identifier=business_identifier) entity = EntityModel.find_by_business_identifier(business_identifier) if entity is None or entity.corp_type_code is None: raise BusinessException(Error.DATA_NOT_FOUND, None) if user_from_context.is_system(): - if entity_info.get('passCode') is not None: - entity_info['passCode'] = passcode_hash(entity_info['passCode']) + if entity_info.get("passCode") is not None: + entity_info["passCode"] = passcode_hash(entity_info["passCode"]) # Small 
mapping from state -> status. EX in LEAR: Business.State.HISTORICAL - if 'state' in entity_info: - entity_info['status'] = entity_info['state'] - del entity_info['state'] + if "state" in entity_info: + entity_info["status"] = entity_info["state"] + del entity_info["state"] entity.update_from_dict(**camelback2snake(entity_info)) entity.commit() @@ -178,28 +179,25 @@ def update_entity(business_identifier: str, entity_info: dict, **kwargs): @user_context def reset_passcode(business_identifier: str, email_addresses: str = None, **kwargs): """Reset the entity passcode and send email.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] check_auth(one_of_roles=ALL_ALLOWED_ROLES, business_identifier=business_identifier) - current_app.logger.debug('reset passcode') + logger.debug("reset passcode") entity: EntityModel = EntityModel.find_by_business_identifier(business_identifier) # generate passcode and set - new_pass_code = ''.join(secrets.choice(string.digits) for i in range(9)) + new_pass_code = "".join(secrets.choice(string.digits) for i in range(9)) entity.pass_code = passcode_hash(new_pass_code) entity.pass_code_claimed = False entity.save() if email_addresses: mailer_payload = { - 'emailAddresses': email_addresses, - 'passCode': new_pass_code, - 'businessIdentifier': business_identifier, - 'businessName': entity.name, - 'isStaffInitiated': user_from_context.is_staff() + "emailAddresses": email_addresses, + "passCode": new_pass_code, + "businessIdentifier": business_identifier, + "businessName": entity.name, + "isStaffInitiated": user_from_context.is_staff(), } - publish_to_mailer( - notification_type=QueueMessageTypes.RESET_PASSCODE.value, - data=mailer_payload - ) + publish_to_mailer(notification_type=QueueMessageTypes.RESET_PASSCODE.value, data=mailer_payload) entity = Entity(entity) return entity @@ -242,7 +240,7 @@ def delete_contact(self): raise BusinessException(Error.DATA_NOT_FOUND, None) del 
contact_link.entity - contact_link.commit() + contact_link.save() if not contact_link.has_links(): contact = contact_link.contact diff --git a/auth-api/src/auth_api/services/flags.py b/auth-api/src/auth_api/services/flags.py index ac72235a99..185f938988 100644 --- a/auth-api/src/auth_api/services/flags.py +++ b/auth-api/src/auth_api/services/flags.py @@ -12,17 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. """Manage the Feature Flags initialization, setup and service.""" -import logging + from flask import current_app -from ldclient import get as ldclient_get, set_config as ldclient_set_config # noqa: I001 -from ldclient.config import Config # noqa: I005 from ldclient import Context +from ldclient import get as ldclient_get # noqa: I001 +from ldclient import set_config as ldclient_set_config +from ldclient.config import Config # noqa: I005 from ldclient.integrations import Files +from structured_logging import StructuredLogging from auth_api.models import User +logger = StructuredLogging.get_logger() + -class Flags(): +class Flags: """Wrapper around the feature flag system. 
calls FAIL to FALSE @@ -45,45 +49,41 @@ def __init__(self, app=None): def init_app(self, app): """Initialize the Feature Flag environment.""" self.app = app - self.sdk_key = app.config.get('AUTH_LD_SDK_KEY') + self.sdk_key = app.config.get("AUTH_LD_SDK_KEY") - if self.sdk_key or app.env != 'production': + if self.sdk_key or app.config["ENV"] != "production": - if app.env == 'testing': - factory = Files.new_data_source(paths=['flags.json'], auto_update=True) - config = Config(sdk_key=self.sdk_key, - update_processor_class=factory, - send_events=False) + if app.config["ENV"] == "testing": + factory = Files.new_data_source(paths=["flags.json"], auto_update=True) + config = Config(sdk_key=self.sdk_key, update_processor_class=factory, send_events=False) else: config = Config(sdk_key=self.sdk_key) ldclient_set_config(config) client = ldclient_get() - app.extensions['featureflags'] = client + app.extensions["featureflags"] = client def _get_client(self): try: - client = current_app.extensions['featureflags'] + client = current_app.extensions["featureflags"] except KeyError: try: self.init_app(current_app) - client = current_app.extensions['featureflags'] + client = current_app.extensions["featureflags"] except KeyError: - logging.warning("Couldn\'t retrieve launch darkly client from extensions.") + logger.warning("Couldn't retrieve launch darkly client from extensions.") client = None return client @staticmethod def _get_anonymous_user(): - return Context.create('anonymous') + return Context.create("anonymous") @staticmethod def _user_as_key(user: User): - return Context.builder(user.idp_userid)\ - .set('firstName', user.firstname)\ - .set('lastName', user.lastname).build() + return Context.builder(user.idp_userid).set("firstName", user.firstname).set("lastName", user.lastname).build() def is_on(self, flag: str, default: bool = False, user: User = None) -> bool: """Assert that the flag is set for this user.""" diff --git 
a/auth-api/src/auth_api/services/gcp_queue/gcp_auth.py b/auth-api/src/auth_api/services/gcp_queue/gcp_auth.py index a4d2b0cf1e..30caab28c6 100644 --- a/auth-api/src/auth_api/services/gcp_queue/gcp_auth.py +++ b/auth-api/src/auth_api/services/gcp_queue/gcp_auth.py @@ -15,27 +15,27 @@ def verify_jwt(session): """Check token is valid with the correct audience and email claims for configured email address.""" try: - jwt_token = request.headers.get('Authorization', '').split()[1] + jwt_token = request.headers.get("Authorization", "").split()[1] claims = id_token.verify_oauth2_token( - jwt_token, - Request(session=session), - audience=current_app.config.get('AUTH_AUDIENCE_SUB') + jwt_token, Request(session=session), audience=current_app.config.get("AUTH_AUDIENCE_SUB") ) - required_emails = current_app.config.get('VERIFY_PUBSUB_EMAILS') - if claims.get('email_verified') and claims.get('email') in required_emails: + required_emails = current_app.config.get("VERIFY_PUBSUB_EMAILS") + if claims.get("email_verified") and claims.get("email") in required_emails: return None else: - return 'Email not verified or does not match', 401 + return "Email not verified or does not match", 401 except Exception as e: - return f'Invalid token: {e}', 400 + return f"Invalid token: {e}", 400 def ensure_authorized_queue_user(f): """Ensures the user is authorized to use the queue.""" + @functools.wraps(f) def decorated_function(*args, **kwargs): # Use CacheControl to avoid re-fetching certificates for every request. 
if verify_jwt(CacheControl(Session())): abort(HTTPStatus.UNAUTHORIZED) return f(*args, **kwargs) + return decorated_function diff --git a/auth-api/src/auth_api/services/invitation.py b/auth-api/src/auth_api/services/invitation.py index d91b832889..4262f995b0 100644 --- a/auth-api/src/auth_api/services/invitation.py +++ b/auth-api/src/auth_api/services/invitation.py @@ -17,14 +17,12 @@ from typing import Dict from urllib.parse import urlencode -from flask import current_app from itsdangerous import URLSafeTimedSerializer from jinja2 import Environment, FileSystemLoader -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from sbc_common_components.utils.enums import QueueMessageTypes +from structured_logging import StructuredLogging from auth_api.config import get_named_config - from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models import AccountLoginOptions as AccountLoginOptionsModel @@ -40,7 +38,13 @@ from auth_api.utils.enums import AccessType, ActivityAction, InvitationStatus, InvitationType, LoginSource from auth_api.utils.enums import OrgStatus as OrgStatusEnum from auth_api.utils.enums import ( - Status, TaskAction, TaskRelationshipStatus, TaskRelationshipType, TaskStatus, TaskTypePrefix) + Status, + TaskAction, + TaskRelationshipStatus, + TaskRelationshipType, + TaskStatus, + TaskTypePrefix, +) from auth_api.utils.roles import ADMIN, COORDINATOR, STAFF, USER from auth_api.utils.user_context import UserContext, user_context @@ -52,9 +56,9 @@ from .membership import Membership as MembershipService from .products import Product as ProductService - -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) CONFIG = get_named_config() +logger = StructuredLogging.get_logger() class Invitation: @@ -67,7 +71,6 @@ def __init__(self, model): """Return an invitation service instance.""" self._model = 
model - @ServiceTracing.disable_tracing def as_dict(self): """Return the internal Invitation model as a dictionary.""" invitation_schema = InvitationSchema() @@ -78,10 +81,10 @@ def as_dict(self): @user_context def create_invitation(invitation_info: Dict, user, invitation_origin, **kwargs): # pylint: disable=too-many-locals """Create a new invitation.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] # Ensure that the current user is ADMIN or COORDINATOR on each org being invited to - org_id = invitation_info['membership'][0]['orgId'] - membership_type = invitation_info['membership'][0]['membershipType'] + org_id = invitation_info["membership"][0]["orgId"] + membership_type = invitation_info["membership"][0]["membershipType"] token_email_query_params: Dict = {} # get the org and check the access_type org: OrgModel = OrgModel.find_by_org_id(org_id) @@ -98,38 +101,51 @@ def create_invitation(invitation_info: Dict, user, invitation_origin, **kwargs): elif org.access_type == AccessType.GOVM.value: mandatory_login_source = LoginSource.STAFF.value else: - default_login_option_based_on_accesstype = LoginSource.BCSC.value if \ - org.access_type == AccessType.REGULAR.value else LoginSource.BCEID.value + default_login_option_based_on_accesstype = ( + LoginSource.BCSC.value if org.access_type == AccessType.REGULAR.value else LoginSource.BCEID.value + ) account_login_options = AccountLoginOptionsModel.find_active_by_org_id(org.id) - mandatory_login_source = getattr(account_login_options, 'login_source', - default_login_option_based_on_accesstype) + mandatory_login_source = getattr( + account_login_options, "login_source", default_login_option_based_on_accesstype + ) - if membership_type == ADMIN \ - and mandatory_login_source == LoginSource.BCEID.value: - token_email_query_params['affidavit'] = 'true' + if membership_type == ADMIN and mandatory_login_source == LoginSource.BCEID.value: + 
token_email_query_params["affidavit"] = "true" invitation = InvitationModel.create_from_dict(invitation_info, user.identifier, invitation_type) confirmation_token = Invitation.generate_confirmation_token(invitation.id, invitation.type) invitation.token = confirmation_token invitation.login_source = mandatory_login_source invitation.save() - Invitation.send_invitation(invitation, org_name, org.id, user.as_dict(), - f'{invitation_origin}/', mandatory_login_source, - org_status=org.status_code, query_params=token_email_query_params) - ActivityLogPublisher.publish_activity(Activity(org_id, ActivityAction.INVITE_TEAM_MEMBER.value, - name=invitation_info['recipientEmail'], value=membership_type, - id=invitation.id)) + Invitation.send_invitation( + invitation, + org_name, + org.id, + user.as_dict(), + f"{invitation_origin}/", + mandatory_login_source, + org_status=org.status_code, + query_params=token_email_query_params, + ) + ActivityLogPublisher.publish_activity( + Activity( + org_id, + ActivityAction.INVITE_TEAM_MEMBER.value, + name=invitation_info["recipientEmail"], + value=membership_type, + id=invitation.id, + ) + ) # notify admin if staff adds team members if user_from_context.is_staff() and invitation_type == InvitationType.STANDARD.value: try: - current_app.logger.debug('') + logger.debug("send_team_member_invitation_notification>") except Exception as e: # noqa=B901 - current_app.logger.error('') + logger.debug("send_admin_notification>") except Exception as e: # noqa=B901 - current_app.logger.error('send_invitation failed') - current_app.logger.debug(exception) + logger.debug(">send_invitation failed") + logger.debug(exception) raise BusinessException(Error.FAILED_INVITATION, None) from exception - current_app.logger.debug('>send_invitation') + logger.debug(">send_invitation") @staticmethod def _get_invitation_configs(org_name, login_source, org_status=None): """Get the config for different email types.""" login_source = login_source or LoginSource.BCSC.value 
escape_url = escape_wam_friendly_url(org_name) - token_confirm_path = f'{escape_url}/validatetoken/{login_source}' + token_confirm_path = f"{escape_url}/validatetoken/{login_source}" if login_source == LoginSource.STAFF.value: # for GOVM accounts , there are two kinda of invitation. Its same login source # if its first invitation to org , its an account set up invitation else normal joining invite - login_source = 'IDIR/ACCOUNTSETUP' if Invitation._is_first_user_to_a_gov_accnt(org_status) else login_source + login_source = "IDIR/ACCOUNTSETUP" if Invitation._is_first_user_to_a_gov_accnt(org_status) else login_source govm_setup_configs = { - 'token_confirm_path': token_confirm_path, - 'notification_type': QueueMessageTypes.GOVM_BUSINESS_INVITATION.value, + "token_confirm_path": token_confirm_path, + "notification_type": QueueMessageTypes.GOVM_BUSINESS_INVITATION.value, } govm_member_configs = { - 'token_confirm_path': token_confirm_path, - 'notification_type': QueueMessageTypes.GOVM_MEMBER_INVITATION.value, + "token_confirm_path": token_confirm_path, + "notification_type": QueueMessageTypes.GOVM_MEMBER_INVITATION.value, } director_search_configs = { - 'token_confirm_path': token_confirm_path, - 'notification_type': QueueMessageTypes.DIRSEARCH_BUSINESS_INVITATION.value, + "token_confirm_path": token_confirm_path, + "notification_type": QueueMessageTypes.DIRSEARCH_BUSINESS_INVITATION.value, } bceid_configs = { - 'token_confirm_path': token_confirm_path, - 'notification_type': QueueMessageTypes.BUSINESS_INVITATION_FOR_BCEID.value, + "token_confirm_path": token_confirm_path, + "notification_type": QueueMessageTypes.BUSINESS_INVITATION_FOR_BCEID.value, } default_configs = { - 'token_confirm_path': token_confirm_path, - 'notification_type': QueueMessageTypes.BUSINESS_INVITATION.value, + "token_confirm_path": token_confirm_path, + "notification_type": QueueMessageTypes.BUSINESS_INVITATION.value, } mail_configs = { - 'BCROS': director_search_configs, - 'BCEID': 
bceid_configs, - 'IDIR': govm_member_configs, - 'IDIR/ACCOUNTSETUP': govm_setup_configs - + "BCROS": director_search_configs, + "BCEID": bceid_configs, + "IDIR": govm_member_configs, + "IDIR/ACCOUNTSETUP": govm_setup_configs, } return mail_configs.get(login_source, default_configs) @staticmethod - def generate_confirmation_token(invitation_id, invitation_type=''): + def generate_confirmation_token(invitation_id, invitation_type=""): """Generate the token to be sent in the email.""" serializer = URLSafeTimedSerializer(CONFIG.EMAIL_TOKEN_SECRET_KEY) - token = {'id': invitation_id, 'type': invitation_type} + token = {"id": invitation_id, "type": invitation_type} return serializer.dumps(token, salt=CONFIG.EMAIL_SECURITY_PASSWORD_SALT) @staticmethod @@ -332,8 +363,9 @@ def validate_token(token): serializer = URLSafeTimedSerializer(CONFIG.EMAIL_TOKEN_SECRET_KEY) token_valid_for = int(CONFIG.TOKEN_EXPIRY_PERIOD) * 3600 * 24 if CONFIG.TOKEN_EXPIRY_PERIOD else 3600 * 24 * 7 try: - invitation_id = serializer.loads(token, salt=CONFIG.EMAIL_SECURITY_PASSWORD_SALT, - max_age=token_valid_for).get('id') + invitation_id = serializer.loads( + token, salt=CONFIG.EMAIL_SECURITY_PASSWORD_SALT, max_age=token_valid_for + ).get("id") except Exception as e: # noqa: E722 raise BusinessException(Error.EXPIRED_INVITATION, None) from e @@ -341,9 +373,9 @@ def validate_token(token): if invitation is None: raise BusinessException(Error.DATA_NOT_FOUND, None) - if invitation.invitation_status_code == 'ACCEPTED': + if invitation.invitation_status_code == "ACCEPTED": raise BusinessException(Error.ACTIONED_INVITATION, None) - if invitation.invitation_status_code == 'EXPIRED': + if invitation.invitation_status_code == "EXPIRED": raise BusinessException(Error.EXPIRED_INVITATION, None) return Invitation(invitation) @@ -351,23 +383,26 @@ def validate_token(token): @staticmethod def notify_admin(user, invitation_id, membership_id, invitation_origin): """Admins should be notified if user has responded to 
invitation.""" - current_app.logger.debug('= 1: - admin_emails = ','.join([str(x.contacts[0].contact.email) for x in admin_list if x.contacts]) + admin_emails = ",".join([str(x.contacts[0].contact.email) for x in admin_list if x.contacts]) else: # No admin, find Sender email to notify sender (staff) admin_emails = invitation.sender.email - if admin_emails != '': - Invitation.send_admin_notification(user.as_dict(), - f'{invitation_origin}/', - admin_emails, invitation.membership[0].org.name, - invitation.membership[0].org.id) - current_app.logger.debug('>notify_admin') + if admin_emails != "": + Invitation.send_admin_notification( + user.as_dict(), + f"{invitation_origin}/", + admin_emails, + invitation.membership[0].org.name, + invitation.membership[0].org.id, + ) + logger.debug(">notify_admin") return Invitation(invitation) @@ -375,15 +410,15 @@ def notify_admin(user, invitation_id, membership_id, invitation_origin): @user_context def accept_invitation(invitation_id, user: UserService, origin, add_membership: bool = True, **kwargs): """Add user, role and org from the invitation to membership.""" - current_app.logger.debug('>accept_invitation') - user_from_context: UserContext = kwargs['user_context'] + logger.debug(">accept_invitation") + user_from_context: UserContext = kwargs["user_context"] invitation: InvitationModel = InvitationModel.find_invitation_by_id(invitation_id) if invitation is None: raise BusinessException(Error.DATA_NOT_FOUND, None) - if invitation.invitation_status_code == 'ACCEPTED': + if invitation.invitation_status_code == "ACCEPTED": raise BusinessException(Error.ACTIONED_INVITATION, None) - if invitation.invitation_status_code == 'EXPIRED': + if invitation.invitation_status_code == "EXPIRED": raise BusinessException(Error.EXPIRED_INVITATION, None) login_source = user_from_context.login_source @@ -400,8 +435,9 @@ def accept_invitation(invitation_id, user: UserService, origin, add_membership: membership_model.membership_type = 
membership.membership_type # check to ensure an invitation for this user/org has not already been processed - existing_membership = MembershipService \ - .get_membership_for_org_and_user(org_id=membership_model.org_id, user_id=membership_model.user_id) + existing_membership = MembershipService.get_membership_for_org_and_user( + org_id=membership_model.org_id, user_id=membership_model.user_id + ) if existing_membership: raise BusinessException(Error.DATA_ALREADY_EXISTS, None) @@ -423,11 +459,12 @@ def accept_invitation(invitation_id, user: UserService, origin, add_membership: if membership_model.status not in (Status.ACTIVE.value, Status.PENDING_STAFF_REVIEW.value): Invitation.notify_admin(user, invitation_id, membership_model.id, origin) except BusinessException as exception: - current_app.logger.error(' str: + def _get_status_based_on_org( + org_model: OrgModel, login_source: str, membership_model: MembershipModel, verified: bool + ) -> str: if org_model.access_type == AccessType.GOVM.value: return Status.ACTIVE.value if login_source == LoginSource.BCEID.value and membership_model.membership_type.code == ADMIN and not verified: @@ -457,27 +496,30 @@ def _get_status_based_on_org(org_model: OrgModel, login_source: str, membership_ def _publish_activity_if_active(membership: MembershipModel, user: UserContext): """Purpose: GOVM accounts - they instantly get accepted.""" if membership.status == Status.ACTIVE.value: - name = {'first_name': user.first_name, 'last_name': user.last_name} - ActivityLogPublisher.publish_activity(Activity(membership.org_id, - ActivityAction.APPROVE_TEAM_MEMBER.value, - name=json.dumps(name), - value=membership.membership_type_code - )) + name = {"first_name": user.first_name, "last_name": user.last_name} + ActivityLogPublisher.publish_activity( + Activity( + membership.org_id, + ActivityAction.APPROVE_TEAM_MEMBER.value, + name=json.dumps(name), + value=membership.membership_type_code, + ) + ) @staticmethod def 
_create_affidavit_review_task(org: OrgModel, membership: MembershipModel): """Create a task for staff to review the affidavit.""" if membership.status == Status.PENDING_STAFF_REVIEW.value: task_info = { - 'name': org.name, - 'relationshipType': TaskRelationshipType.USER.value, - 'relationshipId': membership.user_id, - 'relatedTo': membership.user_id, - 'dateSubmitted': datetime.today(), - 'type': TaskTypePrefix.BCEID_ADMIN.value, - 'action': TaskAction.AFFIDAVIT_REVIEW.value, - 'status': TaskStatus.OPEN.value, - 'relationship_status': TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, - 'account_id': org.id + "name": org.name, + "relationshipType": TaskRelationshipType.USER.value, + "relationshipId": membership.user_id, + "relatedTo": membership.user_id, + "dateSubmitted": datetime.today(), + "type": TaskTypePrefix.BCEID_ADMIN.value, + "action": TaskAction.AFFIDAVIT_REVIEW.value, + "status": TaskStatus.OPEN.value, + "relationship_status": TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + "account_id": org.id, } Task.create_task(task_info=task_info, do_commit=False) diff --git a/auth-api/src/auth_api/services/keycloak.py b/auth-api/src/auth_api/services/keycloak.py index efa3a70cfb..ff0f31513b 100644 --- a/auth-api/src/auth_api/services/keycloak.py +++ b/auth-api/src/auth_api/services/keycloak.py @@ -15,24 +15,31 @@ import asyncio import json -from typing import Dict, List from string import Template -import aiohttp +from typing import Dict, List +import aiohttp import requests from flask import current_app +from structured_logging import StructuredLogging from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models.dataclass import KeycloakGroupSubscription from auth_api.utils.constants import ( - GROUP_ACCOUNT_HOLDERS, GROUP_ANONYMOUS_USERS, GROUP_GOV_ACCOUNT_USERS, GROUP_PUBLIC_USERS) + GROUP_ACCOUNT_HOLDERS, + GROUP_ANONYMOUS_USERS, + GROUP_GOV_ACCOUNT_USERS, + GROUP_PUBLIC_USERS, +) from 
auth_api.utils.enums import ContentType, KeycloakGroupActions, LoginSource from auth_api.utils.roles import Role from auth_api.utils.user_context import UserContext, user_context from .keycloak_user import KeycloakUser +logger = StructuredLogging.get_logger() + class KeycloakService: """For Keycloak services.""" @@ -44,9 +51,9 @@ def add_user(user: KeycloakUser, return_if_exists: bool = False, throw_error_if_ # Add user and set password admin_token = KeycloakService._get_admin_token(upstream=True) - base_url = config.get('KEYCLOAK_BCROS_BASE_URL') - realm = config.get('KEYCLOAK_BCROS_REALMNAME') - timeout = config.get('CONNECT_TIMEOUT', 60) + base_url = config.get("KEYCLOAK_BCROS_BASE_URL") + realm = config.get("KEYCLOAK_BCROS_REALMNAME") + timeout = config.get("CONNECT_TIMEOUT", 60) # Check if the user exists if return_if_exists or throw_error_if_exists: @@ -56,14 +63,10 @@ def add_user(user: KeycloakUser, return_if_exists: bool = False, throw_error_if_ return existing_user raise BusinessException(Error.USER_ALREADY_EXISTS_IN_KEYCLOAK, None) # Add user to the keycloak group '$group_name' - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - - add_user_url = f'{base_url}/auth/admin/realms/{realm}/users' - response = requests.post(add_user_url, data=user.value(), headers=headers, - timeout=timeout) + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} + + add_user_url = f"{base_url}/auth/admin/realms/{realm}/users" + response = requests.post(add_user_url, data=user.value(), headers=headers, timeout=timeout) response.raise_for_status() return KeycloakService.get_user_by_username(user.user_name, admin_token) @@ -75,21 +78,17 @@ def update_user(user: KeycloakUser): # Add user and set password admin_token = KeycloakService._get_admin_token(upstream=True) - base_url = config.get('KEYCLOAK_BCROS_BASE_URL') - realm = config.get('KEYCLOAK_BCROS_REALMNAME') - timeout = 
current_app.config.get('CONNECT_TIMEOUT', 60) + base_url = config.get("KEYCLOAK_BCROS_BASE_URL") + realm = config.get("KEYCLOAK_BCROS_REALMNAME") + timeout = current_app.config.get("CONNECT_TIMEOUT", 60) existing_user = KeycloakService.get_user_by_username(user.user_name, admin_token=admin_token) if not existing_user: raise BusinessException(Error.DATA_NOT_FOUND, None) - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - - update_user_url = f'{base_url}/auth/admin/realms/{realm}/users/{existing_user.id}' - response = requests.put(update_user_url, data=user.value(), headers=headers, - timeout=timeout) + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} + + update_user_url = f"{base_url}/auth/admin/realms/{realm}/users/{existing_user.id}" + response = requests.put(update_user_url, data=user.value(), headers=headers, timeout=timeout) response.raise_for_status() return KeycloakService.get_user_by_username(user.user_name, admin_token) @@ -98,20 +97,18 @@ def update_user(user: KeycloakUser): def get_user_by_username(username: str, admin_token=None) -> KeycloakUser: """Get user from Keycloak by username.""" user = None - base_url = current_app.config.get('KEYCLOAK_BCROS_BASE_URL') - realm = current_app.config.get('KEYCLOAK_BCROS_REALMNAME') - timeout = current_app.config.get('CONNECT_TIMEOUT', 60) + base_url = current_app.config.get("KEYCLOAK_BCROS_BASE_URL") + realm = current_app.config.get("KEYCLOAK_BCROS_REALMNAME") + timeout = current_app.config.get("CONNECT_TIMEOUT", 60) if not admin_token: admin_token = KeycloakService._get_admin_token(upstream=True) - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} # Get the user and return - query_user_url = Template(f'{base_url}/auth/admin/realms/{realm}/users?username=$username') \ - 
.substitute(username=username) + query_user_url = Template(f"{base_url}/auth/admin/realms/{realm}/users?username=$username").substitute( + username=username + ) response = requests.get(query_user_url, headers=headers, timeout=timeout) response.raise_for_status() if len(response.json()) == 1: @@ -121,19 +118,22 @@ def get_user_by_username(username: str, admin_token=None) -> KeycloakUser: @staticmethod def get_user_groups(user_id, upstream: bool = False) -> KeycloakUser: """Get user from Keycloak by username.""" - base_url = current_app.config.get('KEYCLOAK_BCROS_BASE_URL') if upstream else current_app.config.get( - 'KEYCLOAK_BASE_URL') - realm = current_app.config.get('KEYCLOAK_BCROS_REALMNAME') if upstream else current_app.config.get( - 'KEYCLOAK_REALMNAME') - timeout = current_app.config.get('CONNECT_TIMEOUT', 60) + base_url = ( + current_app.config.get("KEYCLOAK_BCROS_BASE_URL") + if upstream + else current_app.config.get("KEYCLOAK_BASE_URL") + ) + realm = ( + current_app.config.get("KEYCLOAK_BCROS_REALMNAME") + if upstream + else current_app.config.get("KEYCLOAK_REALMNAME") + ) + timeout = current_app.config.get("CONNECT_TIMEOUT", 60) admin_token = KeycloakService._get_admin_token(upstream=upstream) - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} # Get the user and return - query_user_url = f'{base_url}/auth/admin/realms/{realm}/users/{user_id}/groups' + query_user_url = f"{base_url}/auth/admin/realms/{realm}/users/{user_id}/groups" response = requests.get(query_user_url, headers=headers, timeout=timeout) response.raise_for_status() return response.json() @@ -142,42 +142,37 @@ def get_user_groups(user_id, upstream: bool = False) -> KeycloakUser: def delete_user_by_username(username): """Delete user from Keycloak by username.""" admin_token = KeycloakService._get_admin_token(upstream=True) - headers = { - 
'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - - base_url = current_app.config.get('KEYCLOAK_BCROS_BASE_URL') - realm = current_app.config.get('KEYCLOAK_BCROS_REALMNAME') - timeout = current_app.config.get('CONNECT_TIMEOUT', 60) + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} + + base_url = current_app.config.get("KEYCLOAK_BCROS_BASE_URL") + realm = current_app.config.get("KEYCLOAK_BCROS_REALMNAME") + timeout = current_app.config.get("CONNECT_TIMEOUT", 60) user = KeycloakService.get_user_by_username(username) if not user: raise BusinessException(Error.DATA_NOT_FOUND, None) # Delete the user - delete_user_url = f'{base_url}/auth/admin/realms/{realm}/users/{user.id}' - response = requests.delete(delete_user_url, headers=headers, - timeout=timeout) + delete_user_url = f"{base_url}/auth/admin/realms/{realm}/users/{user.id}" + response = requests.delete(delete_user_url, headers=headers, timeout=timeout) response.raise_for_status() @staticmethod def get_token(username, password): """Get user access token by username and password.""" try: - base_url = current_app.config.get('KEYCLOAK_BASE_URL') - realm = current_app.config.get('KEYCLOAK_REALMNAME') - token_request = f"client_id={current_app.config.get('JWT_OIDC_AUDIENCE')}" \ - f"&client_secret={current_app.config.get('JWT_OIDC_CLIENT_SECRET')}" \ - f'&username={username}&password={password}&grant_type=password' - timeout = current_app.config.get('CONNECT_TIMEOUT', 60) - - headers = { - 'Content-Type': 'application/x-www-form-urlencoded' - } - token_url = f'{base_url}/auth/realms/{realm}/protocol/openid-connect/token' - response = requests.post(token_url, data=token_request, headers=headers, - timeout=timeout) + base_url = current_app.config.get("KEYCLOAK_BASE_URL") + realm = current_app.config.get("KEYCLOAK_REALMNAME") + token_request = ( + f"client_id={current_app.config.get('JWT_OIDC_AUDIENCE')}" + 
f"&client_secret={current_app.config.get('JWT_OIDC_CLIENT_SECRET')}" + f"&username={username}&password={password}&grant_type=password" + ) + timeout = current_app.config.get("CONNECT_TIMEOUT", 60) + + headers = {"Content-Type": "application/x-www-form-urlencoded"} + token_url = f"{base_url}/auth/realms/{realm}/protocol/openid-connect/token" + response = requests.post(token_url, data=token_request, headers=headers, timeout=timeout) response.raise_for_status() return response.json() @@ -188,7 +183,7 @@ def get_token(username, password): @user_context def join_users_group(**kwargs) -> str: """Add user to the group (public_users or anonymous_users) if the user is public.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] group_name: str = None login_source = user_from_context.login_source roles = user_from_context.roles @@ -196,8 +191,11 @@ def join_users_group(**kwargs) -> str: # Cannot check the group from token, so check if the role 'edit' is already present. 
if login_source in (LoginSource.BCEID.value, LoginSource.BCSC.value) and Role.PUBLIC_USER.value not in roles: group_name = GROUP_PUBLIC_USERS - elif login_source == LoginSource.STAFF.value \ - and Role.GOV_ACCOUNT_USER.value not in roles and Role.STAFF.value not in roles: + elif ( + login_source == LoginSource.STAFF.value + and Role.GOV_ACCOUNT_USER.value not in roles + and Role.STAFF.value not in roles + ): group_name = GROUP_GOV_ACCOUNT_USERS elif login_source == LoginSource.BCROS.value and Role.ANONYMOUS_USER.value not in roles: group_name = GROUP_ANONYMOUS_USERS @@ -213,7 +211,7 @@ def join_account_holders_group(keycloak_guid: str = None, **kwargs): """Add user to the account holders group (account_holders) if the user is public.""" # If keycloak_guid is provided add the user to the group directly, else find out from the token if not keycloak_guid: - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] # Cannot check the group from token, so check if the role 'account_holder' is already present. 
if Role.ACCOUNT_HOLDER.value in user_from_context.roles: return @@ -225,7 +223,7 @@ def join_account_holders_group(keycloak_guid: str = None, **kwargs): @user_context def remove_from_account_holders_group(keycloak_guid: str = None, **kwargs): """Remove user from the group.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] if not keycloak_guid: keycloak_guid: Dict = user_from_context.sub @@ -237,7 +235,7 @@ def remove_from_account_holders_group(keycloak_guid: str = None, **kwargs): def reset_otp(keycloak_guid: str = None, **kwargs): """Reset user one time password from Keycloak.""" if not keycloak_guid: - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] keycloak_guid: Dict = user_from_context.sub KeycloakService._reset_otp(keycloak_guid) @@ -248,10 +246,12 @@ def add_or_remove_product_keycloak_groups(kgs: List[KeycloakGroupSubscription]): add_groups = [kg for kg in kgs if kg.group_action == KeycloakGroupActions.ADD_TO_GROUP.value] remove_groups = [kg for kg in kgs if kg.group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value] for keycloak_group_subscription in add_groups + remove_groups: - current_app.logger.debug(f'Action: {keycloak_group_subscription.group_action} ' - f'Product: {keycloak_group_subscription.product_code} ' - f'Keycloak Group: {keycloak_group_subscription.group_name} ' - f'User guid: {keycloak_group_subscription.user_guid}') + logger.debug( + f"Action: {keycloak_group_subscription.group_action} " + f"Product: {keycloak_group_subscription.product_code} " + f"Keycloak Group: {keycloak_group_subscription.group_name} " + f"User guid: {keycloak_group_subscription.user_guid}" + ) asyncio.run(KeycloakService.add_or_remove_users_from_group(add_groups)) asyncio.run(KeycloakService.add_or_remove_users_from_group(remove_groups)) @@ -261,140 +261,132 @@ async def add_or_remove_users_from_group(kgs: 
List[KeycloakGroupSubscription]): if not kgs: return config = current_app.config - base_url, realm, timeout = config.get('KEYCLOAK_BASE_URL'), config.get( - 'KEYCLOAK_REALMNAME'), config.get('CONNECT_TIMEOUT', 60) + base_url, realm, timeout = ( + config.get("KEYCLOAK_BASE_URL"), + config.get("KEYCLOAK_REALMNAME"), + config.get("CONNECT_TIMEOUT", 60), + ) admin_token = KeycloakService._get_admin_token() group_names = {kg.group_name for kg in kgs} group_ids = {group_name: KeycloakService._get_group_id(admin_token, group_name) for group_name in group_names} - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} - method = 'PUT' if kgs[0].group_action == KeycloakGroupActions.ADD_TO_GROUP.value else 'DELETE' + method = "PUT" if kgs[0].group_action == KeycloakGroupActions.ADD_TO_GROUP.value else "DELETE" # Normal limit is 100, cap this to 40, so it doesn't hit keycloak too aggressively. 
connector = aiohttp.TCPConnector(limit=40) async with aiohttp.ClientSession(connector=connector) as session: - tasks = [asyncio.create_task( - session.request(method, f'{base_url}/auth/admin/realms/{realm}/users/' - f'{kg.user_guid}/groups/{group_ids[kg.group_name]}', - headers=headers, timeout=timeout)) - for kg in kgs] + tasks = [ + asyncio.create_task( + session.request( + method, + f"{base_url}/auth/admin/realms/{realm}/users/" + f"{kg.user_guid}/groups/{group_ids[kg.group_name]}", + headers=headers, + timeout=timeout, + ) + ) + for kg in kgs + ] tasks = await asyncio.gather(*tasks, return_exceptions=True) for task in tasks: if isinstance(task, aiohttp.ClientConnectionError): - current_app.logger.error('Connection error') + logger.error("Connection error") elif isinstance(task, asyncio.TimeoutError): - current_app.logger.error('Timeout error') + logger.error("Timeout error") elif isinstance(task, Exception): - current_app.logger.error(f'Exception: {task}') + logger.error(f"Exception: {task}") elif task.status != 204: - current_app.logger.error(f'Returned non 204: {task.method} - {task.url} - {task.status}') + logger.error(f"Returned non 204: {task.method} - {task.url} - {task.status}") @staticmethod def get_user_emails_with_role(role: str): """Get user emails with the role name.""" config = current_app.config - base_url = config.get('KEYCLOAK_BASE_URL') - realm = config.get('KEYCLOAK_REALMNAME') - timeout = config.get('CONNECT_TIMEOUT', 60) + base_url = config.get("KEYCLOAK_BASE_URL") + realm = config.get("KEYCLOAK_REALMNAME") + timeout = config.get("CONNECT_TIMEOUT", 60) admin_token = KeycloakService._get_admin_token() - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} users = [] - get_role_users = f'{base_url}/auth/admin/realms/{realm}/roles/{role}/users' + get_role_users = 
f"{base_url}/auth/admin/realms/{realm}/roles/{role}/users" response = requests.get(get_role_users, headers=headers, timeout=timeout) if response.status_code == 404: raise BusinessException(Error.DATA_NOT_FOUND, None) response.raise_for_status() for user in response.json(): - users.append({'firstName': user['firstName'], - 'lastName': user['lastName'], - 'email': user['email']}) + users.append({"firstName": user["firstName"], "lastName": user["lastName"], "email": user["email"]}) return users @staticmethod def add_user_to_group(user_id: str, group_name: str): """Add user to the keycloak group.""" config = current_app.config - base_url = config.get('KEYCLOAK_BASE_URL') - realm = config.get('KEYCLOAK_REALMNAME') - timeout = config.get('CONNECT_TIMEOUT', 60) + base_url = config.get("KEYCLOAK_BASE_URL") + realm = config.get("KEYCLOAK_REALMNAME") + timeout = config.get("CONNECT_TIMEOUT", 60) # Create an admin token admin_token = KeycloakService._get_admin_token() # Get the '$group_name' group group_id = KeycloakService._get_group_id(admin_token, group_name) # Add user to the keycloak group '$group_name' - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - add_to_group_url = f'{base_url}/auth/admin/realms/{realm}/users/{user_id}/groups/{group_id}' - response = requests.put(add_to_group_url, headers=headers, - timeout=timeout) + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} + add_to_group_url = f"{base_url}/auth/admin/realms/{realm}/users/{user_id}/groups/{group_id}" + response = requests.put(add_to_group_url, headers=headers, timeout=timeout) response.raise_for_status() @staticmethod def _remove_user_from_group(user_id: str, group_name: str): """Remove user from the keycloak group.""" config = current_app.config - base_url = config.get('KEYCLOAK_BASE_URL') - realm = config.get('KEYCLOAK_REALMNAME') - timeout = config.get('CONNECT_TIMEOUT', 60) + base_url = 
config.get("KEYCLOAK_BASE_URL") + realm = config.get("KEYCLOAK_REALMNAME") + timeout = config.get("CONNECT_TIMEOUT", 60) # Create an admin token admin_token = KeycloakService._get_admin_token() # Get the '$group_name' group group_id = KeycloakService._get_group_id(admin_token, group_name) # Add user to the keycloak group '$group_name' - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } - remove_group_url = f'{base_url}/auth/admin/realms/{realm}/users/{user_id}/groups/{group_id}' - response = requests.delete(remove_group_url, headers=headers, - timeout=timeout) + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} + remove_group_url = f"{base_url}/auth/admin/realms/{realm}/users/{user_id}/groups/{group_id}" + response = requests.delete(remove_group_url, headers=headers, timeout=timeout) response.raise_for_status() @staticmethod def _get_admin_token(upstream: bool = False): """Create an admin token.""" config = current_app.config - base_url = config.get('KEYCLOAK_BCROS_BASE_URL') if upstream else config.get('KEYCLOAK_BASE_URL') - realm = config.get('KEYCLOAK_BCROS_REALMNAME') if upstream else config.get('KEYCLOAK_REALMNAME') - admin_client_id = config.get('KEYCLOAK_BCROS_ADMIN_CLIENTID') if upstream else config.get( - 'KEYCLOAK_ADMIN_USERNAME') - admin_secret = config.get('KEYCLOAK_BCROS_ADMIN_SECRET') if upstream else config.get('KEYCLOAK_ADMIN_SECRET') - timeout = config.get('CONNECT_TIMEOUT', 60) - headers = { - 'Content-Type': 'application/x-www-form-urlencoded' - } - token_url = f'{base_url}/auth/realms/{realm}/protocol/openid-connect/token' - - response = requests.post(token_url, - data=f'client_id={admin_client_id}&grant_type=client_credentials' - f'&client_secret={admin_secret}', headers=headers, - timeout=timeout) - return response.json().get('access_token') + base_url = config.get("KEYCLOAK_BCROS_BASE_URL") if upstream else config.get("KEYCLOAK_BASE_URL") + realm = 
config.get("KEYCLOAK_BCROS_REALMNAME") if upstream else config.get("KEYCLOAK_REALMNAME") + admin_client_id = ( + config.get("KEYCLOAK_BCROS_ADMIN_CLIENTID") if upstream else config.get("KEYCLOAK_ADMIN_USERNAME") + ) + admin_secret = config.get("KEYCLOAK_BCROS_ADMIN_SECRET") if upstream else config.get("KEYCLOAK_ADMIN_SECRET") + timeout = config.get("CONNECT_TIMEOUT", 60) + headers = {"Content-Type": "application/x-www-form-urlencoded"} + token_url = f"{base_url}/auth/realms/{realm}/protocol/openid-connect/token" + + response = requests.post( + token_url, + data=f"client_id={admin_client_id}&grant_type=client_credentials" f"&client_secret={admin_secret}", + headers=headers, + timeout=timeout, + ) + return response.json().get("access_token") @staticmethod def _get_group_id(admin_token: str, group_name: str): """Get a group id for the group name.""" config = current_app.config - base_url = config.get('KEYCLOAK_BASE_URL') - realm = config.get('KEYCLOAK_REALMNAME') - timeout = config.get('CONNECT_TIMEOUT', 60) - get_group_url = f'{base_url}/auth/admin/realms/{realm}/groups?search={group_name}' - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } + base_url = config.get("KEYCLOAK_BASE_URL") + realm = config.get("KEYCLOAK_REALMNAME") + timeout = config.get("CONNECT_TIMEOUT", 60) + get_group_url = f"{base_url}/auth/admin/realms/{realm}/groups?search={group_name}" + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} response = requests.get(get_group_url, headers=headers, timeout=timeout) return KeycloakService._find_group_or_subgroup_id(response.json(), group_name) @@ -402,9 +394,9 @@ def _get_group_id(admin_token: str, group_name: str): def _find_group_or_subgroup_id(groups: list, group_name: str): """Return group id by searching main and sub groups.""" for group in groups: - if group['name'] == group_name: - return group['id'] - if group_id := 
KeycloakService._find_group_or_subgroup_id(group['subGroups'], group_name): + if group["name"] == group_name: + return group["id"] + if group_id := KeycloakService._find_group_or_subgroup_id(group["subGroups"], group_name): return group_id return None @@ -412,74 +404,59 @@ def _find_group_or_subgroup_id(groups: list, group_name: str): def _reset_otp(user_id: str): """Reset user one time password from Keycloak.""" config = current_app.config - base_url = config.get('KEYCLOAK_BASE_URL') - realm = config.get('KEYCLOAK_REALMNAME') - timeout = config.get('CONNECT_TIMEOUT', 60) + base_url = config.get("KEYCLOAK_BASE_URL") + realm = config.get("KEYCLOAK_REALMNAME") + timeout = config.get("CONNECT_TIMEOUT", 60) # Create an admin token admin_token = KeycloakService._get_admin_token() - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} # step 1: add required action as configure otp - configure_otp_url = f'{base_url}/auth/admin/realms/{realm}/users/{user_id}' - input_data = json.dumps( - { - 'id': user_id, - 'requiredActions': ['CONFIGURE_TOTP'] - } - ) + configure_otp_url = f"{base_url}/auth/admin/realms/{realm}/users/{user_id}" + input_data = json.dumps({"id": user_id, "requiredActions": ["CONFIGURE_TOTP"]}) - response = requests.put(configure_otp_url, headers=headers, data=input_data, - timeout=timeout) + response = requests.put(configure_otp_url, headers=headers, data=input_data, timeout=timeout) if response.status_code == 204: - get_credentials_url = f'{base_url}/auth/admin/realms/{realm}/users/{user_id}/credentials' - response = requests.get(get_credentials_url, headers=headers, - timeout=timeout) + get_credentials_url = f"{base_url}/auth/admin/realms/{realm}/users/{user_id}/credentials" + response = requests.get(get_credentials_url, headers=headers, timeout=timeout) for credential in response.json(): - if credential['type'] 
== 'otp': + if credential["type"] == "otp": delete_credential_url = f'{get_credentials_url}/{credential["id"]}' - response = requests.delete(delete_credential_url, headers=headers, - timeout=timeout) + response = requests.delete(delete_credential_url, headers=headers, timeout=timeout) response.raise_for_status() @staticmethod def create_client(client_representation: Dict[str, any]): """Create a client in keycloak.""" config = current_app.config - base_url = config.get('KEYCLOAK_BASE_URL') - realm = config.get('KEYCLOAK_REALMNAME') - timeout = config.get('CONNECT_TIMEOUT', 60) + base_url = config.get("KEYCLOAK_BASE_URL") + realm = config.get("KEYCLOAK_REALMNAME") + timeout = config.get("CONNECT_TIMEOUT", 60) admin_token = KeycloakService._get_admin_token() - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } + headers = {"Content-Type": ContentType.JSON.value, "Authorization": f"Bearer {admin_token}"} - create_client_url = f'{base_url}/auth/admin/realms/{realm}/clients' - response = requests.post(create_client_url, data=json.dumps(client_representation), - headers=headers, timeout=timeout) + create_client_url = f"{base_url}/auth/admin/realms/{realm}/clients" + response = requests.post( + create_client_url, data=json.dumps(client_representation), headers=headers, timeout=timeout + ) response.raise_for_status() @staticmethod def get_service_account_user(client_identifier: str): """Return service account user.""" config = current_app.config - base_url = config.get('KEYCLOAK_BASE_URL') - realm = config.get('KEYCLOAK_REALMNAME') - timeout = config.get('CONNECT_TIMEOUT', 60) + base_url = config.get("KEYCLOAK_BASE_URL") + realm = config.get("KEYCLOAK_REALMNAME") + timeout = config.get("CONNECT_TIMEOUT", 60) admin_token = KeycloakService._get_admin_token() - headers = { - 'Content-Type': ContentType.JSON.value, - 'Authorization': f'Bearer {admin_token}' - } + headers = {"Content-Type": ContentType.JSON.value, 
"Authorization": f"Bearer {admin_token}"} response = requests.get( - f'{base_url}/auth/admin/realms/{realm}/clients/{client_identifier}/service-account-user', - headers=headers, timeout=timeout + f"{base_url}/auth/admin/realms/{realm}/clients/{client_identifier}/service-account-user", + headers=headers, + timeout=timeout, ) response.raise_for_status() return response.json() diff --git a/auth-api/src/auth_api/services/keycloak_user.py b/auth-api/src/auth_api/services/keycloak_user.py index 29f704bbf4..ddf2540363 100644 --- a/auth-api/src/auth_api/services/keycloak_user.py +++ b/auth-api/src/auth_api/services/keycloak_user.py @@ -28,12 +28,12 @@ def __init__(self, user: Dict = None): @property def user_name(self) -> str: """Return the user_name.""" - return self._user.get('username') + return self._user.get("username") @user_name.setter def user_name(self, value: str): """Set the user_name.""" - self._user['username'] = value + self._user["username"] = value # Default first name to user name if not self.first_name: self.first_name = value @@ -41,89 +41,89 @@ def user_name(self, value: str): @property def email(self) -> str: """Return the email.""" - return self._user.get('email') + return self._user.get("email") @email.setter def email(self, value: str): """Set the email.""" - self._user['email'] = value + self._user["email"] = value @property def enabled(self): """Return the enabled.""" - return self._user['enabled'] + return self._user["enabled"] @enabled.setter def enabled(self, value: bool): """Set the enabled.""" - self._user['enabled'] = value + self._user["enabled"] = value @property def first_name(self) -> str: """Return the firstName.""" - return self._user.get('firstName') + return self._user.get("firstName") @first_name.setter def first_name(self, value: str): """Set the firstName.""" - self._user['firstName'] = value + self._user["firstName"] = value @property def last_name(self) -> str: """Return the last_name.""" - return self._user.get('lastName') + 
return self._user.get("lastName") @last_name.setter def last_name(self, value: str): """Set the last_name.""" - self._user['lastName'] = value + self._user["lastName"] = value @property def id(self) -> str: """Return the id.""" - return self._user.get('id') + return self._user.get("id") @id.setter def id(self, value: str): """Set the id.""" - self._user['id'] = value + self._user["id"] = value @property def password(self) -> str: """Return the password.""" - return self._user.get('credentials')[0].get('value', None) + return self._user.get("credentials")[0].get("value", None) @password.setter def password(self, value: str): """Set the password.""" - self._user['credentials'] = [{}] - self._user['credentials'][0]['value'] = value - self._user['credentials'][0]['type'] = 'password' + self._user["credentials"] = [{}] + self._user["credentials"][0]["value"] = value + self._user["credentials"][0]["type"] = "password" @property def attributes(self) -> str: """Return the attributes.""" - return self._user.get('attributes') + return self._user.get("attributes") @attributes.setter def attributes(self, value: dict = None): """Set the attributes.""" - self._user['attributes'] = {} + self._user["attributes"] = {} if value: for key in value.keys(): - self._user['attributes'][key] = value[key] + self._user["attributes"][key] = value[key] def update_password_on_login(self): """Set the required_actions.""" - if not self._user.get('requiredActions', None): - self._user['requiredActions'] = [] - self._user['requiredActions'].append(RequiredAction.UPDATE_PASSWORD.value) + if not self._user.get("requiredActions", None): + self._user["requiredActions"] = [] + self._user["requiredActions"].append(RequiredAction.UPDATE_PASSWORD.value) def configure_totp_on_login(self): """Set the required_actions.""" - if not self._user['requiredActions']: - self._user['requiredActions'] = [] - self._user['requiredActions'].append(RequiredAction.CONFIGURE_TOTP.value) + if not 
self._user["requiredActions"]: + self._user["requiredActions"] = [] + self._user["requiredActions"].append(RequiredAction.CONFIGURE_TOTP.value) def value(self) -> Dict: """Return dict value.""" diff --git a/auth-api/src/auth_api/services/membership.py b/auth-api/src/auth_api/services/membership.py index 695274f71f..f01d725d1c 100644 --- a/auth-api/src/auth_api/services/membership.py +++ b/auth-api/src/auth_api/services/membership.py @@ -17,13 +17,12 @@ """ import json -from flask import current_app + from jinja2 import Environment, FileSystemLoader -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from sbc_common_components.utils.enums import QueueMessageTypes +from structured_logging import StructuredLogging from auth_api.config import get_named_config -from auth_api.models.dataclass import Activity from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models import ContactLink as ContactLinkModel @@ -31,6 +30,7 @@ from auth_api.models import MembershipStatusCode as MembershipStatusCodeModel from auth_api.models import MembershipType as MembershipTypeModel from auth_api.models import Org as OrgModel +from auth_api.models.dataclass import Activity from auth_api.schemas import MembershipSchema from auth_api.utils.enums import ActivityAction, LoginSource, NotificationType, Status from auth_api.utils.roles import ADMIN, ALL_ALLOWED_ROLES, COORDINATOR, STAFF @@ -43,12 +43,11 @@ from .products import Product as ProductService from .user import User as UserService - -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) CONFIG = get_named_config() +logger = StructuredLogging.get_logger() -@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class Membership: # pylint: disable=too-many-instance-attributes,too-few-public-methods """Manages all aspects of the Membership 
Entity. @@ -87,8 +86,9 @@ def get_pending_member_count_for_org(org_id): current_user: UserService = UserService.find_by_jwt_token() except BusinessException: return default_count - is_active_admin_or_owner = MembershipModel.check_if_active_admin_or_owner_org_id(org_id, - current_user.identifier) + is_active_admin_or_owner = MembershipModel.check_if_active_admin_or_owner_org_id( + org_id, current_user.identifier + ) if is_active_admin_or_owner < 1: return default_count pending_member_count = MembershipModel.get_pending_members_count_by_org_id(org_id) @@ -96,14 +96,18 @@ def get_pending_member_count_for_org(org_id): @staticmethod @user_context - def get_members_for_org(org_id, status=Status.ACTIVE.name, # pylint:disable=too-many-return-statements - membership_roles=ALL_ALLOWED_ROLES, **kwargs): + def get_members_for_org( + org_id, + status=Status.ACTIVE.name, # pylint:disable=too-many-return-statements + membership_roles=ALL_ALLOWED_ROLES, + **kwargs, + ): """Get members of org.Fetches using status and roles.""" org_model = OrgModel.find_by_org_id(org_id) if not org_model: return None - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] status = Status.ACTIVE.value if status is None else Status[status].value membership_roles = ALL_ALLOWED_ROLES if membership_roles is None else membership_roles # If staff return full list @@ -111,13 +115,16 @@ def get_members_for_org(org_id, status=Status.ACTIVE.name, # pylint:disable=too return MembershipModel.find_members_by_org_id_by_status_by_roles(org_id, membership_roles, status) current_user: UserService = UserService.find_by_jwt_token() - current_user_membership: MembershipModel = \ - MembershipModel.find_membership_by_user_and_org(user_id=current_user.identifier, org_id=org_id) + current_user_membership: MembershipModel = MembershipModel.find_membership_by_user_and_org( + user_id=current_user.identifier, org_id=org_id + ) # If no active or pending membership 
return empty array - if current_user_membership is None or \ - current_user_membership.status == Status.INACTIVE.value or \ - current_user_membership.status == Status.REJECTED.value: + if ( + current_user_membership is None + or current_user_membership.status == Status.INACTIVE.value + or current_user_membership.status == Status.REJECTED.value + ): return [] # If pending approval, return empty for active, array of self only for pending @@ -129,8 +136,11 @@ def get_members_for_org(org_id, status=Status.ACTIVE.name, # pylint:disable=too if current_user_membership.membership_type_code in (ADMIN, COORDINATOR): return MembershipModel.find_members_by_org_id_by_status_by_roles(org_id, membership_roles, status) - return MembershipModel.find_members_by_org_id_by_status_by_roles(org_id, membership_roles, status) \ - if status == Status.ACTIVE.value else [] + return ( + MembershipModel.find_members_by_org_id_by_status_by_roles(org_id, membership_roles, status) + if status == Status.ACTIVE.value + else [] + ) return [] @@ -143,7 +153,7 @@ def get_membership_status_by_code(name): @user_context def find_membership_by_id(cls, membership_id, **kwargs): """Retrieve a membership record by id.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] membership = MembershipModel.find_membership_by_id(membership_id) if membership: @@ -156,84 +166,82 @@ def find_membership_by_id(cls, membership_id, **kwargs): def send_notification_to_member(self, origin_url, notification_type): """Send member notification.""" - current_app.logger.debug(f' 0 + is_bceid_user = self._model.user.username.find("@bceid") > 0 if is_bceid_user: notification_type_for_mailer = QueueMessageTypes.MEMBERSHIP_APPROVED_NOTIFICATION_FOR_BCEID.value else: notification_type_for_mailer = QueueMessageTypes.MEMBERSHIP_APPROVED_NOTIFICATION.value - data = { - 'accountId': org_id, - 'emailAddresses': recipient, - 'contextUrl': app_url, - 'orgName': org_name - } + 
data = {"accountId": org_id, "emailAddresses": recipient, "contextUrl": app_url, "orgName": org_name} else: - data = { - 'accountId': org_id - } + data = {"accountId": org_id} try: publish_to_mailer(notification_type_for_mailer, data=data) - current_app.logger.debug('update_membership') + logger.debug(">update_membership") return self @user_context def deactivate_membership(self, **kwargs): """Mark this membership as inactive.""" - current_app.logger.debug('deactivate_membership') + name = {"first_name": self._model.user.firstname, "last_name": self._model.user.lastname} + ActivityLogPublisher.publish_activity( + Activity( + self._model.org_id, + ActivityAction.REMOVE_TEAM_MEMBER.value, + name=json.dumps(name), + id=self._model.user.id, + ) + ) + logger.debug(">deactivate_membership") return self @staticmethod @@ -306,8 +316,10 @@ def _add_or_remove_group(model: MembershipModel): """Add or remove the user from/to account holders / product keycloak group.""" if model.membership_status.id == Status.ACTIVE.value: KeycloakService.join_account_holders_group(model.user.keycloak_guid) - elif model.membership_status.id == Status.INACTIVE.value and len( - MembershipModel.find_orgs_for_user(model.user.id)) == 0: + elif ( + model.membership_status.id == Status.INACTIVE.value + and len(MembershipModel.find_orgs_for_user(model.user.id)) == 0 + ): # Check if the user has any other active org membership, if none remove from the group KeycloakService.remove_from_account_holders_group(model.user.keycloak_guid) ProductService.update_users_products_keycloak_groups([model.user.id]) diff --git a/auth-api/src/auth_api/services/minio.py b/auth-api/src/auth_api/services/minio.py index 2ea4c15849..f4fec25774 100644 --- a/auth-api/src/auth_api/services/minio.py +++ b/auth-api/src/auth_api/services/minio.py @@ -17,9 +17,12 @@ from flask import current_app from minio import Minio +from structured_logging import StructuredLogging from auth_api.utils.constants import AFFIDAVIT_FOLDER_NAME 
+logger = StructuredLogging.get_logger() + class MinioService: """Document Storage class.""" @@ -27,14 +30,15 @@ class MinioService: @staticmethod def create_signed_put_url(file_name: str, prefix_key: str = AFFIDAVIT_FOLDER_NAME) -> dict: """Return a pre-signed URL for new doc upload.""" - current_app.logger.debug('Creating pre-signed URL.') + logger.debug("Creating pre-signed URL.") minio_client: Minio = MinioService._get_client() - file_extension: str = file_name.split('.')[-1] - key = f'{prefix_key}/{str(uuid.uuid4())}.{file_extension}' + file_extension: str = file_name.split(".")[-1] + key = f"{prefix_key}/{str(uuid.uuid4())}.{file_extension}" signed_url_details = { - 'preSignedUrl': minio_client.presigned_put_object(current_app.config['MINIO_BUCKET_ACCOUNTS'], key, - timedelta(minutes=5)), - 'key': key + "preSignedUrl": minio_client.presigned_put_object( + current_app.config["MINIO_BUCKET_ACCOUNTS"], key, timedelta(minutes=5) + ), + "key": key, } return signed_url_details @@ -43,15 +47,16 @@ def create_signed_put_url(file_name: str, prefix_key: str = AFFIDAVIT_FOLDER_NAM def create_signed_get_url(key: str) -> str: """Return a pre-signed URL for uploaded document.""" minio_client: Minio = MinioService._get_client() - current_app.logger.debug('Creating pre-signed GET URL.') + logger.debug("Creating pre-signed GET URL.") - return minio_client.presigned_get_object(current_app.config['MINIO_BUCKET_ACCOUNTS'], key, timedelta(hours=1)) + return minio_client.presigned_get_object(current_app.config["MINIO_BUCKET_ACCOUNTS"], key, timedelta(hours=1)) @staticmethod def _get_client() -> Minio: """Return a minio client.""" - minio_endpoint = current_app.config['MINIO_ENDPOINT'] - minio_key = current_app.config['MINIO_ACCESS_KEY'] - minio_secret = current_app.config['MINIO_ACCESS_SECRET'] - return Minio(minio_endpoint, access_key=minio_key, secret_key=minio_secret, - secure=current_app.config['MINIO_SECURE']) + minio_endpoint = current_app.config["MINIO_ENDPOINT"] + 
minio_key = current_app.config["MINIO_ACCESS_KEY"] + minio_secret = current_app.config["MINIO_ACCESS_SECRET"] + return Minio( + minio_endpoint, access_key=minio_key, secret_key=minio_secret, secure=current_app.config["MINIO_SECURE"] + ) diff --git a/auth-api/src/auth_api/services/notification.py b/auth-api/src/auth_api/services/notification.py index 1288e04283..44238b8c69 100644 --- a/auth-api/src/auth_api/services/notification.py +++ b/auth-api/src/auth_api/services/notification.py @@ -15,27 +15,24 @@ import json from flask import current_app +from structured_logging import StructuredLogging from .rest_service import RestService +logger = StructuredLogging.get_logger() + def send_email(subject: str, sender: str, recipients: str, html_body: str): # pylint:disable=unused-argument """Send the email asynchronously, using the given details.""" - current_app.logger.info(f'send_email {recipients}') - notify_url = current_app.config.get('NOTIFY_API_URL') + '/notify/' - notify_body = { - 'recipients': recipients, - 'content': { - 'subject': subject, - 'body': html_body - } - } + logger.info(f"send_email {recipients}") + notify_url = current_app.config.get("NOTIFY_API_URL") + "/notify/" + notify_body = {"recipients": recipients, "content": {"subject": subject, "body": html_body}} notify_response = RestService.post(notify_url, data=notify_body) - current_app.logger.info('send_email notify_response') + logger.info("send_email notify_response") if notify_response: response_json = json.loads(notify_response.text) - if response_json['notifyStatus']['code'] != 'FAILURE': + if response_json["notifyStatus"]["code"] != "FAILURE": return True return False diff --git a/auth-api/src/auth_api/services/org.py b/auth-api/src/auth_api/services/org.py index 6f7edfc539..fd2fd25468 100644 --- a/auth-api/src/auth_api/services/org.py +++ b/auth-api/src/auth_api/services/org.py @@ -12,18 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the 
License. """Service for managing Organization data.""" -from datetime import datetime +# pylint:disable=too-many-lines import json +from datetime import datetime +from http import HTTPStatus from typing import Dict, List, Tuple from flask import current_app, g from jinja2 import Environment, FileSystemLoader from requests.exceptions import HTTPError -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from sbc_common_components.utils.enums import QueueMessageTypes +from structured_logging import StructuredLogging -from auth_api import status as http_status -from auth_api.models.dataclass import Activity, DeleteAffiliationRequest from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models import AccountLoginOptions as AccountLoginOptionsModel @@ -31,9 +31,10 @@ from auth_api.models import ContactLink as ContactLinkModel from auth_api.models import Membership as MembershipModel from auth_api.models import Org as OrgModel -from auth_api.models import User as UserModel from auth_api.models import Task as TaskModel +from auth_api.models import User as UserModel from auth_api.models.affidavit import Affidavit as AffidavitModel +from auth_api.models.dataclass import Activity, DeleteAffiliationRequest from auth_api.models.org import OrgSearch from auth_api.schemas import ContactSchema, InvitationSchema, OrgSchema from auth_api.services.user import User as UserService @@ -43,9 +44,23 @@ from auth_api.services.validators.duplicate_org_name import validate as duplicate_org_name_validate from auth_api.services.validators.payment_type import validate as payment_type_validate from auth_api.utils.enums import ( - AccessType, ActivityAction, AffidavitStatus, LoginSource, OrgStatus, OrgType, PatchActions, PaymentAccountStatus, - PaymentMethod, Status, SuspensionReasonCode, TaskRelationshipStatus, TaskRelationshipType, - TaskStatus, TaskTypePrefix, TaskAction) + AccessType, + ActivityAction, + 
AffidavitStatus, + LoginSource, + OrgStatus, + OrgType, + PatchActions, + PaymentAccountStatus, + PaymentMethod, + Status, + SuspensionReasonCode, + TaskAction, + TaskRelationshipStatus, + TaskRelationshipType, + TaskStatus, + TaskTypePrefix, +) from auth_api.utils.roles import ADMIN, EXCLUDED_FIELDS, STAFF, VALID_STATUSES, Role # noqa: I005 from auth_api.utils.util import camelback2snake @@ -61,7 +76,8 @@ from .task import Task as TaskService from .validators.validator_response import ValidatorResponse -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) +logger = StructuredLogging.get_logger() class Org: # pylint: disable=too-many-public-methods @@ -74,7 +90,6 @@ def __init__(self, model): """Return an Org Service.""" self._model = model - @ServiceTracing.disable_tracing def as_dict(self): """Return the internal Org model as a dictionary. @@ -87,25 +102,25 @@ def as_dict(self): @staticmethod def create_org(org_info: dict, user_id): """Create a new organization.""" - current_app.logger.debug('update_org ') + logger.debug(">update_org ") return self @staticmethod @@ -404,27 +437,35 @@ def _is_govm_missing_account_data(is_govm_account_creation, mailing_address, rev @staticmethod def _publish_activity_on_mailing_address_change(org_id: int, org_name: str, mailing_address: str): if mailing_address: - ActivityLogPublisher.publish_activity(Activity(org_id, ActivityAction.ACCOUNT_ADDRESS_CHANGE.value, - name=org_name, value=json.dumps(mailing_address))) + ActivityLogPublisher.publish_activity( + Activity( + org_id, + ActivityAction.ACCOUNT_ADDRESS_CHANGE.value, + name=org_name, + value=json.dumps(mailing_address), + ) + ) @staticmethod def _publish_activity_on_name_change(org_id: int, org_name: str): if org_name: - ActivityLogPublisher.publish_activity(Activity(org_id, ActivityAction.ACCOUNT_NAME_CHANGE.value, - name=org_name, value=org_name)) + ActivityLogPublisher.publish_activity( + 
Activity(org_id, ActivityAction.ACCOUNT_NAME_CHANGE.value, name=org_name, value=org_name) + ) @staticmethod def _create_payment_for_org(mailing_address, org, payment_info, is_new_org: bool = True) -> PaymentAccountStatus: """Create Or update payment info for org.""" - selected_payment_method = payment_info.get('paymentMethod', None) + selected_payment_method = payment_info.get("paymentMethod", None) payment_method = None - arg_dict = {'selected_payment_method': selected_payment_method, - 'access_type': org.access_type, - 'org_type': OrgType[org.type_code] - } + arg_dict = { + "selected_payment_method": selected_payment_method, + "access_type": org.access_type, + "org_type": OrgType[org.type_code], + } if is_new_org or selected_payment_method: validator_obj = payment_type_validate(is_fatal=True, **arg_dict) - payment_method = validator_obj.info.get('payment_type') + payment_method = validator_obj.info.get("payment_type") Org._create_payment_settings(org, payment_info, payment_method, mailing_address, is_new_org) @staticmethod @@ -432,16 +473,17 @@ def _create_gov_account_task(org_model: OrgModel): # create a staff review task for this account task_type = TaskTypePrefix.GOVM_REVIEW.value user: UserModel = UserModel.find_by_jwt_token() - task_info = {'name': org_model.name, - 'relationshipId': org_model.id, - 'relatedTo': user.id, - 'dateSubmitted': datetime.today(), - 'relationshipType': TaskRelationshipType.ORG.value, - 'type': task_type, - 'action': TaskAction.ACCOUNT_REVIEW.value, - 'status': TaskStatus.OPEN.value, - 'relationship_status': TaskRelationshipStatus.PENDING_STAFF_REVIEW.value - } + task_info = { + "name": org_model.name, + "relationshipId": org_model.id, + "relatedTo": user.id, + "dateSubmitted": datetime.today(), + "relationshipType": TaskRelationshipType.ORG.value, + "type": task_type, + "action": TaskAction.ACCOUNT_REVIEW.value, + "status": TaskStatus.OPEN.value, + "relationship_status": TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + } 
TaskService.create_task(task_info=task_info, do_commit=False) @staticmethod @@ -457,7 +499,7 @@ def delete_org(org_id): 1 - If there is any active PAD transactions going on, then cannot be deleted. """ - current_app.logger.debug(f'') + logger.debug("org Inactivated>") @staticmethod def _delete_pay_account(org_id): - pay_url = current_app.config.get('PAY_API_URL') + pay_url = current_app.config.get("PAY_API_URL") try: token = RestService.get_service_account_token() - pay_response = RestService.delete(endpoint=f'{pay_url}/accounts/{org_id}', token=token, - raise_for_status=False) + pay_response = RestService.delete( + endpoint=f"{pay_url}/accounts/{org_id}", token=token, raise_for_status=False + ) pay_response.raise_for_status() except HTTPError as pay_err: - current_app.logger.info(pay_err) + logger.info(pay_err) response_json = pay_response.json() - error_type = response_json.get('type') + error_type = response_json.get("type") error: Error = Error[error_type] if error_type in Error.__members__ else Error.PAY_ACCOUNT_DEACTIVATE_ERROR raise BusinessException(error, pay_err) from pay_err def get_payment_info(self): """Return the Payment Details for an org by calling Pay API.""" - pay_url = current_app.config.get('PAY_API_URL') + pay_url = current_app.config.get("PAY_API_URL") # invoke pay-api token = RestService.get_service_account_token() - response = RestService.get(endpoint=f'{pay_url}/accounts/{self._model.id}', token=token, retry_on_failure=True) + response = RestService.get(endpoint=f"{pay_url}/accounts/{self._model.id}", token=token, retry_on_failure=True) return response.json() @staticmethod @@ -555,17 +598,17 @@ def find_by_org_name(org_name, branch_name=None): if not org_model: return None - orgs = {'orgs': []} + orgs = {"orgs": []} for org in org_model: - orgs['orgs'].append(Org(org).as_dict()) + orgs["orgs"].append(Org(org).as_dict()) return orgs @staticmethod def get_login_options_for_org(org_id, allowed_roles: Tuple = None): """Get the payment settings 
for the given org.""" - current_app.logger.debug('get_login_options(>') + logger.debug("get_login_options(>") org = OrgModel.find_by_org_id(org_id) if org is None: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -578,7 +621,7 @@ def get_login_options_for_org(org_id, allowed_roles: Tuple = None): def add_login_option(org_id, login_source): """Create a new contact for this org.""" # check for existing contact (only one contact per org for now) - current_app.logger.debug('>add_login_option') + logger.debug(">add_login_option") org = OrgModel.find_by_org_id(org_id) if org is None: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -593,7 +636,7 @@ def add_login_option(org_id, login_source): def update_login_option(org_id, login_source): """Create a new contact for this org.""" # check for existing contact (only one contact per org for now) - current_app.logger.debug('>update_login_option') + logger.debug(">update_login_option") org = OrgModel.find_by_org_id(org_id) if org is None: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -607,15 +650,21 @@ def update_login_option(org_id, login_source): login_option = AccountLoginOptionsModel(login_source=login_source, org_id=org_id) login_option.save() - ActivityLogPublisher.publish_activity(Activity(org_id, ActivityAction.AUTHENTICATION_METHOD_CHANGE.value, - name=org.name, value=login_source, - id=login_option.id)) + ActivityLogPublisher.publish_activity( + Activity( + org_id, + ActivityAction.AUTHENTICATION_METHOD_CHANGE.value, + name=org.name, + value=login_source, + id=login_option.id, + ) + ) return login_option @staticmethod def get_contacts(org_id): """Get the contacts for the given org.""" - current_app.logger.debug('get_contacts>') + logger.debug("get_contacts>") org = OrgModel.find_by_org_id(org_id) if org is None: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -623,13 +672,13 @@ def get_contacts(org_id): collection = [] for contact_link in org.contacts: 
collection.append(ContactService(contact_link.contact).as_dict()) - return {'contacts': collection} + return {"contacts": collection} @staticmethod def add_contact(org_id, contact_info): """Create a new contact for this org.""" # check for existing contact (only one contact per org for now) - current_app.logger.debug('>add_contact') + logger.debug(">add_contact") org = OrgModel.find_by_org_id(org_id) if org is None: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -645,14 +694,14 @@ def add_contact(org_id, contact_info): contact_link.contact = contact contact_link.org = org contact_link.save() - current_app.logger.debug('update_contact ') + logger.debug(">update_contact ") org = OrgModel.find_by_org_id(org_id) if org is None: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -665,7 +714,7 @@ def update_contact(org_id, contact_info): contact = contact_link.contact contact.update_from_dict(**camelback2snake(contact_info)) contact.save() - current_app.logger.debug('delete_contact ') + logger.debug(">delete_contact ") org = OrgModel.find_by_org_id(org_id) if not org or not org.contacts: raise BusinessException(Error.DATA_NOT_FOUND, None) deleted_contact = Org.__delete_contact(org) - current_app.logger.debug('') + suspension_reason_description = ( + SuspensionReasonCode[suspension_reason_code].value + if suspension_reason_code in [item.name for item in SuspensionReasonCode] + else "" + ) + ActivityLogPublisher.publish_activity( + Activity( + org_model.id, + ActivityAction.ACCOUNT_SUSPENSION.value, + name=org_model.name, + value=suspension_reason_description, + ) + ) + logger.debug("change_org_status>") return Org(org_model) @staticmethod - def approve_or_reject(org_id: int, is_approved: bool, origin_url: str = None, - task_action: str = None): + def approve_or_reject(org_id: int, is_approved: bool, origin_url: str = None, task_action: str = None): """Mark the affidavit as approved or rejected.""" - current_app.logger.debug('find_affidavit_by_org_id ') + 
logger.debug(">find_affidavit_by_org_id ") return Org(org) @staticmethod - def send_staff_review_account_reminder(relationship_id, - task_relationship_type=TaskRelationshipType.ORG.value): + def send_staff_review_account_reminder(relationship_id, task_relationship_type=TaskRelationshipType.ORG.value): """Send staff review account reminder notification.""" - current_app.logger.debug('') + logger.debug("send_approved_rejected_govm_govn_notification>") except Exception as e: # noqa=B901 - current_app.logger.error('') + logger.debug("change_org_access_type>") return Org(org_model) def change_org_api_access(self, has_api_access): """Update the org API access.""" - current_app.logger.debug('') + logger.debug("change_org_api_access>") return Org(org_model) def patch_org(self, action: str = None, request_json: Dict[str, any] = None): @@ -951,21 +991,25 @@ def patch_org(self, action: str = None, request_json: Dict[str, any] = None): raise BusinessException(Error.PATCH_INVALID_ACTION, None) if patch_action == PatchActions.UPDATE_STATUS: - status_code = request_json.get('statusCode', None) - suspension_reason_code = request_json.get('suspensionReasonCode', None) + status_code = request_json.get("statusCode", None) + suspension_reason_code = request_json.get("suspensionReasonCode", None) if status_code is None: raise BusinessException(Error.INVALID_INPUT, None) if status_code == OrgStatus.SUSPENDED.value and suspension_reason_code is None: raise BusinessException(Error.INVALID_INPUT, None) return self.change_org_status(status_code, suspension_reason_code).as_dict() if patch_action == PatchActions.UPDATE_ACCESS_TYPE: - access_type = request_json.get('accessType', None) + access_type = request_json.get("accessType", None) # Currently, only accounts with the following access types can be updated - if access_type is None or access_type not in [AccessType.REGULAR.value, AccessType.REGULAR_BCEID.value, - AccessType.EXTRA_PROVINCIAL.value, AccessType.GOVN.value]: + if access_type is 
None or access_type not in [ + AccessType.REGULAR.value, + AccessType.REGULAR_BCEID.value, + AccessType.EXTRA_PROVINCIAL.value, + AccessType.GOVN.value, + ]: raise BusinessException(Error.INVALID_INPUT, None) return self.change_org_access_type(access_type).as_dict() if patch_action == PatchActions.UPDATE_API_ACCESS: - has_api_access = request_json.get('hasApiAccess', False) + has_api_access = request_json.get("hasApiAccess", False) return self.change_org_api_access(has_api_access).as_dict() return None diff --git a/auth-api/src/auth_api/services/permissions.py b/auth-api/src/auth_api/services/permissions.py index 63cb2b4a7d..233295e0b8 100644 --- a/auth-api/src/auth_api/services/permissions.py +++ b/auth-api/src/auth_api/services/permissions.py @@ -14,14 +14,16 @@ """Service to invoke Rest services.""" from typing import Dict, List, Tuple -from flask import current_app from sqlalchemy.exc import SQLAlchemyError +from structured_logging import StructuredLogging from auth_api.models.permissions import Permissions as PermissionsModel from ..utils.cache import cache from ..utils.enums import OrgStatus +logger = StructuredLogging.get_logger() + class Permissions: # pylint: disable=too-few-public-methods """Service for user settings.""" @@ -49,7 +51,7 @@ def build_all_permission_cache(cls): cache.set(key, val) except SQLAlchemyError as e: - current_app.logger.info('Error on building cache %s', e) + logger.info("Error on building cache %s", e) @staticmethod def get_permissions_for_membership(org_status, membership_type): @@ -57,15 +59,17 @@ def get_permissions_for_membership(org_status, membership_type): # Just a tweak til we get all org status to DB # TODO fix this logic if org_status not in ( - OrgStatus.NSF_SUSPENDED.value, OrgStatus.PENDING_STAFF_REVIEW.value, OrgStatus.SUSPENDED.value): + OrgStatus.NSF_SUSPENDED.value, + OrgStatus.PENDING_STAFF_REVIEW.value, + OrgStatus.SUSPENDED.value, + ): org_status = None key_tuple = (org_status, membership_type) 
actions_from_cache = cache.get(key_tuple) if actions_from_cache: actions = actions_from_cache else: - permissions = PermissionsModel.get_permissions_by_membership(org_status, - membership_type) + permissions = PermissionsModel.get_permissions_by_membership(org_status, membership_type) actions = [] for permission in permissions: actions.append(permission.actions) diff --git a/auth-api/src/auth_api/services/products.py b/auth-api/src/auth_api/services/products.py index 9822e1b280..eca62ee1dd 100644 --- a/auth-api/src/auth_api/services/products.py +++ b/auth-api/src/auth_api/services/products.py @@ -15,9 +15,9 @@ from datetime import datetime from typing import Any, Dict, List -from flask import current_app from sqlalchemy import and_, case, func, literal, or_ from sqlalchemy.exc import SQLAlchemyError +from structured_logging import StructuredLogging from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error @@ -34,22 +34,35 @@ from auth_api.services.user import User as UserService from auth_api.utils.constants import BCOL_PROFILE_PRODUCT_MAP from auth_api.utils.enums import ( - AccessType, ActivityAction, KeycloakGroupActions, OrgType, ProductCode, ProductSubscriptionStatus, Status, - TaskAction, TaskRelationshipStatus, TaskRelationshipType, TaskStatus) + AccessType, + ActivityAction, + KeycloakGroupActions, + OrgType, + ProductCode, + ProductSubscriptionStatus, + Status, + TaskAction, + TaskRelationshipStatus, + TaskRelationshipType, + TaskStatus, +) from auth_api.utils.user_context import UserContext, user_context from ..utils.account_mailer import publish_to_mailer from ..utils.cache import cache from ..utils.notifications import ( - ProductNotificationInfo, ProductSubscriptionInfo, get_product_notification_data, get_product_notification_type) + ProductNotificationInfo, + ProductSubscriptionInfo, + get_product_notification_data, + get_product_notification_type, +) from ..utils.roles import CLIENT_ADMIN_ROLES, CLIENT_AUTH_ROLES, 
PREMIUM_ORG_TYPES, STAFF from .activity_log_publisher import ActivityLogPublisher from .authorization import check_auth from .task import Task as TaskService - -QUALIFIED_SUPPLIER_PRODUCT_CODES = [ProductCode.MHR_QSLN.value, ProductCode.MHR_QSHD.value, - ProductCode.MHR_QSHM.value] +QUALIFIED_SUPPLIER_PRODUCT_CODES = [ProductCode.MHR_QSLN.value, ProductCode.MHR_QSHD.value, ProductCode.MHR_QSHM.value] +logger = StructuredLogging.get_logger() class Product: @@ -66,7 +79,7 @@ def build_all_products_cache(cls): for product in product_list: cache.set(product.code, product.type_code) except SQLAlchemyError as e: - current_app.logger.info('Error on building cache %s', e) + logger.info("Error on building cache %s", e) @staticmethod def find_product_type_by_code(code: str) -> str: @@ -75,7 +88,7 @@ def find_product_type_by_code(code: str) -> str: if code_from_cache: return code_from_cache product_code_model: ProductCodeModel = ProductCodeModel.find_by_code(code) - return getattr(product_code_model, 'type_code', '') + return getattr(product_code_model, "type_code", "") @staticmethod def _validate_product_resubmission(task: TaskModel, product_model: ProductCodeModel): @@ -102,9 +115,9 @@ def resubmit_product_subscription(org_id, subscription_data: Dict[str, Any], ski check_auth(one_of_roles=(*CLIENT_ADMIN_ROLES, STAFF), org_id=org_id) user = UserModel.find_by_jwt_token() - subscriptions_list = subscription_data.get('subscriptions') + subscriptions_list = subscription_data.get("subscriptions") for subscription in subscriptions_list: - product_code = subscription.get('productCode') + product_code = subscription.get("productCode") existing_sub = ProductSubscriptionModel.find_by_org_id_product_code(org_id, product_code) product_model: ProductCodeModel = ProductCodeModel.find_by_code(product_code) @@ -112,27 +125,31 @@ def resubmit_product_subscription(org_id, subscription_data: Dict[str, Any], ski if not existing_sub: continue - task: TaskModel = 
TaskModel.find_by_task_relationship_id(existing_sub.id, - TaskRelationshipType.PRODUCT.value, - TaskStatus.COMPLETED.value) + task: TaskModel = TaskModel.find_by_task_relationship_id( + existing_sub.id, TaskRelationshipType.PRODUCT.value, TaskStatus.COMPLETED.value + ) Product._validate_product_resubmission(task, product_model) - Product._reset_subscription_and_review_task(review_task=task, - product_model=product_model, - subscription=existing_sub, - user_id=user.id) + Product._reset_subscription_and_review_task( + review_task=task, product_model=product_model, subscription=existing_sub, user_id=user.id + ) - Product._send_product_subscription_confirmation(ProductNotificationInfo( - product_model=product_model, - product_sub_model=existing_sub, - is_confirmation=True - ), org.id) + Product._send_product_subscription_confirmation( + ProductNotificationInfo( + product_model=product_model, product_sub_model=existing_sub, is_confirmation=True + ), + org.id, + ) return Product.get_all_product_subscription(org_id=org_id, skip_auth=True) @staticmethod - def create_product_subscription(org_id, subscription_data: Dict[str, Any], # pylint: disable=too-many-locals - is_new_transaction: bool = True, skip_auth=False, - auto_approve=False): + def create_product_subscription( + org_id, + subscription_data: Dict[str, Any], # pylint: disable=too-many-locals + is_new_transaction: bool = True, + skip_auth=False, + auto_approve=False, + ): """Create product subscription for the user. 
create product subscription first @@ -145,9 +162,9 @@ def create_product_subscription(org_id, subscription_data: Dict[str, Any], # py if not skip_auth: check_auth(one_of_roles=(*CLIENT_ADMIN_ROLES, STAFF), org_id=org_id) - subscriptions_list = subscription_data.get('subscriptions') + subscriptions_list = subscription_data.get("subscriptions") for subscription in subscriptions_list: - product_code = subscription.get('productCode') + product_code = subscription.get("productCode") existing_product_subscriptions = ProductSubscriptionModel.find_by_org_id_product_code(org_id, product_code) if existing_product_subscriptions: raise BusinessException(Error.PRODUCT_SUBSCRIPTION_EXISTS, None) @@ -161,18 +178,16 @@ def create_product_subscription(org_id, subscription_data: Dict[str, Any], # py continue subscription_status = Product.find_subscription_status(org, product_model, auto_approve) - product_subscription = Product._subscribe_and_publish_activity(org_id, - product_code, - subscription_status, - product_model.description) + product_subscription = Product._subscribe_and_publish_activity( + org_id, product_code, subscription_status, product_model.description + ) # If there is a linked product, add subscription to that too. # This is to handle cases where Names and Business Registry is combined together. 
if product_model.linked_product_code: - Product._subscribe_and_publish_activity(org_id, - product_model.linked_product_code, - subscription_status, - product_model.description) + Product._subscribe_and_publish_activity( + org_id, product_model.linked_product_code, subscription_status, product_model.description + ) # If there is a parent product, add subscription to that to # This is to satisfy any preceding subscriptions required @@ -182,20 +197,24 @@ def create_product_subscription(org_id, subscription_data: Dict[str, Any], # py # create a staff review task for this product subscription if pending status if subscription_status == ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value: user = UserModel.find_by_jwt_token() - external_source_id = subscription.get('externalSourceId') - Product._create_review_task(ProductReviewTask(org_id=org.id, - org_name=org.name, - product_code=product_subscription.product_code, - product_description=product_model.description, - product_subscription_id=product_subscription.id, - user_id=user.id, - external_source_id=external_source_id - )) - Product._send_product_subscription_confirmation(ProductNotificationInfo( - product_model=product_model, - product_sub_model=product_subscription, - is_confirmation=True - ), org.id) + external_source_id = subscription.get("externalSourceId") + Product._create_review_task( + ProductReviewTask( + org_id=org.id, + org_name=org.name, + product_code=product_subscription.product_code, + product_description=product_model.description, + product_subscription_id=product_subscription.id, + user_id=user.id, + external_source_id=external_source_id, + ) + ) + Product._send_product_subscription_confirmation( + ProductNotificationInfo( + product_model=product_model, product_sub_model=product_subscription, is_confirmation=True + ), + org.id, + ) else: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -216,15 +235,13 @@ def _send_product_subscription_confirmation(product_notification_info: ProductNo def 
_update_parent_subscription(org_id, sub_product_model, subscription_status): parent_code = sub_product_model.parent_code parent_product_model: ProductCodeModel = ProductCodeModel.find_by_code(parent_code) - existing_parent_sub = ProductSubscriptionModel \ - .find_by_org_id_product_code(org_id, parent_code) + existing_parent_sub = ProductSubscriptionModel.find_by_org_id_product_code(org_id, parent_code) # Parent sub does not exist create it and return if not existing_parent_sub: - Product._subscribe_and_publish_activity(org_id, - sub_product_model.parent_code, - subscription_status, - parent_product_model.description) + Product._subscribe_and_publish_activity( + org_id, sub_product_model.parent_code, subscription_status, parent_product_model.description + ) return # Parent sub exists and is not active - update the status @@ -233,21 +250,22 @@ def _update_parent_subscription(org_id, sub_product_model, subscription_status): existing_parent_sub.flush() @staticmethod - def _subscribe_and_publish_activity(org_id: int, product_code: str, status_code: str, - product_model_description: str): - subscription = ProductSubscriptionModel(org_id=org_id, product_code=product_code, status_code=status_code)\ - .flush() + def _subscribe_and_publish_activity( + org_id: int, product_code: str, status_code: str, product_model_description: str + ): + subscription = ProductSubscriptionModel( + org_id=org_id, product_code=product_code, status_code=status_code + ).flush() if status_code == ProductSubscriptionStatus.ACTIVE.value: - ActivityLogPublisher.publish_activity(Activity(org_id, - ActivityAction.ADD_PRODUCT_AND_SERVICE.value, - name=product_model_description)) + ActivityLogPublisher.publish_activity( + Activity(org_id, ActivityAction.ADD_PRODUCT_AND_SERVICE.value, name=product_model_description) + ) return subscription @staticmethod - def _reset_subscription_and_review_task(review_task: TaskModel, - product_model: ProductCodeModel, - subscription: ProductSubscriptionModel, - user_id: 
str): + def _reset_subscription_and_review_task( + review_task: TaskModel, product_model: ProductCodeModel, subscription: ProductSubscriptionModel, user_id: str + ): review_task.status = TaskStatus.OPEN.value review_task.related_to = user_id review_task.relationship_status = TaskRelationshipStatus.PENDING_STAFF_REVIEW.value @@ -257,9 +275,9 @@ def _reset_subscription_and_review_task(review_task: TaskModel, review_task.save() if product_model.parent_code: - Product._update_parent_subscription(subscription.org_id, - product_model, - ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value) + Product._update_parent_subscription( + subscription.org_id, product_model, ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value + ) subscription.status_code = ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value subscription.save() @@ -267,22 +285,25 @@ def _reset_subscription_and_review_task(review_task: TaskModel, @staticmethod def _create_review_task(review_task: ProductReviewTask): task_type = review_task.product_description - action_type = TaskAction.QUALIFIED_SUPPLIER_REVIEW.value \ - if review_task.product_code in QUALIFIED_SUPPLIER_PRODUCT_CODES \ + action_type = ( + TaskAction.QUALIFIED_SUPPLIER_REVIEW.value + if review_task.product_code in QUALIFIED_SUPPLIER_PRODUCT_CODES else TaskAction.PRODUCT_REVIEW.value + ) - task_info = {'name': review_task.org_name, - 'relationshipId': review_task.product_subscription_id, - 'relatedTo': review_task.user_id, - 'dateSubmitted': datetime.today(), - 'relationshipType': TaskRelationshipType.PRODUCT.value, - 'type': task_type, - 'action': action_type, - 'status': TaskStatus.OPEN.value, - 'accountId': review_task.org_id, - 'relationship_status': TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, - 'externalSourceId': review_task.external_source_id - } + task_info = { + "name": review_task.org_name, + "relationshipId": review_task.product_subscription_id, + "relatedTo": review_task.user_id, + "dateSubmitted": datetime.today(), + 
"relationshipType": TaskRelationshipType.PRODUCT.value, + "type": task_type, + "action": action_type, + "status": TaskStatus.OPEN.value, + "accountId": review_task.org_id, + "relationship_status": TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + "externalSourceId": review_task.external_source_id, + } TaskService.create_task(task_info, False) @staticmethod @@ -291,8 +312,11 @@ def find_subscription_status(org, product_model, auto_approve=False): # GOVM accounts has default active subscriptions skip_review_types = [AccessType.GOVM.value] if product_model.need_review and auto_approve is False: - return ProductSubscriptionStatus.ACTIVE.value if (org.access_type in skip_review_types) \ + return ( + ProductSubscriptionStatus.ACTIVE.value + if (org.access_type in skip_review_types) else ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value + ) return ProductSubscriptionStatus.ACTIVE.value @staticmethod @@ -309,10 +333,13 @@ def create_subscription_from_bcol_profile(org_id: int, bcol_profile_flags: List[ org_id, product_code ) if not subscription: - ProductSubscriptionModel(org_id=org_id, product_code=product_code, - status_code=ProductSubscriptionStatus.ACTIVE.value).flush() - elif subscription and \ - (existing_sub := subscription).status_code != ProductSubscriptionStatus.ACTIVE.value: + ProductSubscriptionModel( + org_id=org_id, product_code=product_code, status_code=ProductSubscriptionStatus.ACTIVE.value + ).flush() + elif ( + subscription + and (existing_sub := subscription).status_code != ProductSubscriptionStatus.ACTIVE.value + ): existing_sub.status_code = ProductSubscriptionStatus.ACTIVE.value existing_sub.flush() @@ -320,18 +347,17 @@ def create_subscription_from_bcol_profile(org_id: int, bcol_profile_flags: List[ @user_context def get_products(include_hidden: bool = True, staff_check: bool = True, **kwargs): """Get a list of all products.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] if 
staff_check: include_hidden = user_from_context.is_staff() and include_hidden - products = ProductCodeModel.get_all_products() if include_hidden \ - else ProductCodeModel.get_visible_products() + products = ProductCodeModel.get_all_products() if include_hidden else ProductCodeModel.get_visible_products() return ProductCodeSchema().dump(products, many=True) @staticmethod @user_context def get_all_product_subscription(org_id, skip_auth=False, **kwargs): """Get a list of all products with their subscription details.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] org = OrgModel.find_by_org_id(org_id) if not org: raise BusinessException(Error.DATA_NOT_FOUND, None) @@ -343,21 +369,24 @@ def get_all_product_subscription(org_id, skip_auth=False, **kwargs): subscriptions_dict = {x.product_code: x.status_code for x in product_subscriptions} # Include hidden products only for staff and SBC staff - include_hidden = user_from_context.is_staff() \ - or org.type_code == OrgType.SBC_STAFF.value \ - or kwargs.get('include_hidden', False) + include_hidden = ( + user_from_context.is_staff() + or org.type_code == OrgType.SBC_STAFF.value + or kwargs.get("include_hidden", False) + ) products = Product.get_products(include_hidden=include_hidden, staff_check=False) for product in products: - product['subscriptionStatus'] = subscriptions_dict.get(product.get('code'), - ProductSubscriptionStatus.NOT_SUBSCRIBED.value) + product["subscriptionStatus"] = subscriptions_dict.get( + product.get("code"), ProductSubscriptionStatus.NOT_SUBSCRIBED.value + ) return products @staticmethod def update_product_subscription(product_sub_info: ProductSubscriptionInfo, is_new_transaction: bool = True): """Update Product Subscription.""" - current_app.logger.debug('update_product_subscription ') + logger.debug(">update_product_subscription ") @staticmethod - def approve_reject_parent_subscription(parent_product_code: int, is_approved: bool, 
is_hold: bool, - org_id: int, is_new_transaction: bool = True): + def approve_reject_parent_subscription( + parent_product_code: int, is_approved: bool, is_hold: bool, org_id: int, is_new_transaction: bool = True + ): """Approve or reject Parent Product Subscription.""" - current_app.logger.debug('approve_reject_parent_subscription ') + ActivityLogPublisher.publish_activity( + Activity(org_id, ActivityAction.ADD_PRODUCT_AND_SERVICE.value, name=product_model.description) + ) + logger.debug(">approve_reject_parent_subscription ") @staticmethod def is_reapproved(product_sub_status: str, is_approved: bool, is_resubmitted: bool = False) -> bool: @@ -463,14 +504,16 @@ def is_reapproved(product_sub_status: str, is_approved: bool, is_resubmitted: bo 1) in REJECTED state and is_approved 2) in PENDING_STAFF_REVIEW, is_approved and is_resubmitted """ - return (product_sub_status == ProductSubscriptionStatus.REJECTED.value and is_approved) or \ - (product_sub_status == ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value and - is_approved and is_resubmitted) + return (product_sub_status == ProductSubscriptionStatus.REJECTED.value and is_approved) or ( + product_sub_status == ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value + and is_approved + and is_resubmitted + ) @staticmethod def send_product_subscription_notification(product_notification_info: ProductNotificationInfo): """Send Approved product subscription notification to the user.""" - current_app.logger.debug('') + logger.debug("") except Exception as e: # noqa=B901 - current_app.logger.error(' List[KeycloakGroupSubscription]: """Generate Keycloak Group Subscriptions.""" - ps_max_subquery = db.session.query( - func.max(ProductSubscriptionModel.id).label('id'), - ProductSubscriptionModel.product_code, - ProductSubscriptionModel.org_id - ) \ - .group_by(ProductSubscriptionModel.product_code, ProductSubscriptionModel.org_id) \ + ps_max_subquery = ( + db.session.query( + func.max(ProductSubscriptionModel.id).label("id"), 
+ ProductSubscriptionModel.product_code, + ProductSubscriptionModel.org_id, + ) + .group_by(ProductSubscriptionModel.product_code, ProductSubscriptionModel.org_id) .subquery() + ) - m_max_subquery = db.session.query( - func.max(MembershipModel.id).label('id'), - MembershipModel.org_id, - MembershipModel.user_id - ) \ - .group_by(MembershipModel.org_id, MembershipModel.user_id) \ + m_max_subquery = ( + db.session.query(func.max(MembershipModel.id).label("id"), MembershipModel.org_id, MembershipModel.user_id) + .group_by(MembershipModel.org_id, MembershipModel.user_id) .subquery() + ) active_subscription_case = case( - [ - (and_(MembershipModel.status == Status.ACTIVE.value, ProductSubscriptionModel.status_code == - ProductSubscriptionStatus.ACTIVE.value), 1), - ], - else_=0 + ( + and_( + MembershipModel.status == Status.ACTIVE.value, + ProductSubscriptionModel.status_code == ProductSubscriptionStatus.ACTIVE.value, + ), + 1, + ), + else_=0, ) - user_subscriptions = db.session.query(UserModel, ProductCodeModel) \ - .join(ProductCodeModel, literal(True)) \ - .outerjoin(m_max_subquery, m_max_subquery.c.user_id == UserModel.id) \ - .outerjoin(MembershipModel, MembershipModel.id == m_max_subquery.c.id) \ + user_subscriptions = ( + db.session.query(UserModel, ProductCodeModel) + .join(ProductCodeModel, literal(True)) + .outerjoin(m_max_subquery, m_max_subquery.c.user_id == UserModel.id) + .outerjoin(MembershipModel, MembershipModel.id == m_max_subquery.c.id) .outerjoin( - # pylint: disable=comparison-with-callable - ps_max_subquery, ps_max_subquery.c.product_code == ProductCodeModel.code) \ - .outerjoin(ProductSubscriptionModel, ProductSubscriptionModel.id == ps_max_subquery.c.id) \ - .filter(or_( + # pylint: disable=comparison-with-callable + ps_max_subquery, + ps_max_subquery.c.product_code == ProductCodeModel.code, + ) + .outerjoin(ProductSubscriptionModel, ProductSubscriptionModel.id == ps_max_subquery.c.id) + .filter( + or_( ProductSubscriptionModel.org_id == 
MembershipModel.org_id, ProductSubscriptionModel.org_id.is_(None), - MembershipModel.org_id.is_(None)))\ - .filter(UserModel.id.in_(user_ids)) \ - .filter(ProductCodeModel.keycloak_group.isnot(None))\ - .group_by(UserModel.id, UserModel.keycloak_guid, ProductCodeModel.code, ProductCodeModel.keycloak_group)\ - .order_by(UserModel.id, ProductCodeModel.code)\ + MembershipModel.org_id.is_(None), + ) + ) + .filter(UserModel.id.in_(user_ids)) + .filter(ProductCodeModel.keycloak_group.isnot(None)) + .group_by(UserModel.id, UserModel.keycloak_guid, ProductCodeModel.code, ProductCodeModel.keycloak_group) + .order_by(UserModel.id, ProductCodeModel.code) .with_entities( UserModel.id, UserModel.keycloak_guid, ProductCodeModel.code, ProductCodeModel.keycloak_group, - func.sum(active_subscription_case).label('active_subscription_count') - ).all() # pylint: disable=comparison-with-callable + func.sum(active_subscription_case).label("active_subscription_count"), + ) + .all() + ) # pylint: disable=comparison-with-callable keycloak_group_subscriptions = [] for ups in user_subscriptions: - action = KeycloakGroupActions.ADD_TO_GROUP.value \ - if ups.active_subscription_count > 0 else KeycloakGroupActions.REMOVE_FROM_GROUP.value + action = ( + KeycloakGroupActions.ADD_TO_GROUP.value + if ups.active_subscription_count > 0 + else KeycloakGroupActions.REMOVE_FROM_GROUP.value + ) kgs = KeycloakGroupSubscription(ups.keycloak_guid, ups.code, ups.keycloak_group, action) keycloak_group_subscriptions.append(kgs) @@ -550,10 +607,10 @@ def get_users_product_subscriptions_kc_groups(user_ids: List[int]) -> List[Keycl @staticmethod def update_users_products_keycloak_groups(user_ids: List[int]): """Update list of user's keycloak roles for product subscriptions.""" - current_app.logger.debug('update_users_products_keycloak_group ') + logger.debug(">update_users_products_keycloak_group ") @staticmethod def update_org_product_keycloak_groups(org_id: int): diff --git 
a/auth-api/src/auth_api/services/reset.py b/auth-api/src/auth_api/services/reset.py deleted file mode 100644 index a5c98156ae..0000000000 --- a/auth-api/src/auth_api/services/reset.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Service for reset test data.""" - -from auth_api.models import User as UserModel -from auth_api.models import db -from auth_api.services.keycloak import KeycloakService -from auth_api.utils.enums import LoginSource -from auth_api.utils.roles import Role -from auth_api.utils.user_context import UserContext, user_context - - -class ResetTestData: # pylint:disable=too-few-public-methods - """Cleanup all the data from model by created_by column.""" - - def __init__(self): - """Return a reset test data service instance.""" - - @staticmethod - @user_context - def reset(**kwargs): - """Cleanup all the data from all tables create by the provided user id.""" - user_from_context: UserContext = kwargs['user_context'] - if Role.TESTER.value in user_from_context.roles: # pylint: disable=too-many-nested-blocks - user = UserModel.find_by_jwt_token() - if user: - # TODO need to find a way to avoid using protected function - for model_class in db.Model._decl_class_registry.values(): # pylint:disable=protected-access - # skip version classes - if not (hasattr(model_class, 'transaction_id') and hasattr(model_class, 'end_transaction_id')): - if hasattr(model_class, 
'created_by_id'): - for model in model_class.query.filter_by(created_by_id=user.id).all(): - model.reset() - if hasattr(model_class, 'modified_by_id'): - for model in model_class.query.filter_by(modified_by_id=user.id).all(): - model.reset() - # check the user is still exists or not - user = UserModel.find_by_jwt_token() - if user: - user.modified_by = None - user.modified_by_id = None - user.reset() - - # Reset opt from keycloak if from BCEID - login_source = user_from_context.login_source - - if login_source == LoginSource.BCEID.value: - KeycloakService.reset_otp(user_from_context.sub) diff --git a/auth-api/src/auth_api/services/rest_service.py b/auth-api/src/auth_api/services/rest_service.py index 0867ca9aa2..d5cee58b6d 100644 --- a/auth-api/src/auth_api/services/rest_service.py +++ b/auth-api/src/auth_api/services/rest_service.py @@ -23,29 +23,39 @@ from aiohttp.client_exceptions import ClientConnectorError # pylint:disable=ungrouped-imports from flask import current_app, request from requests.adapters import HTTPAdapter # pylint:disable=ungrouped-imports + # pylint:disable=ungrouped-imports from requests.exceptions import ConnectionError as ReqConnectionError from requests.exceptions import ConnectTimeout, HTTPError +from structured_logging import StructuredLogging from urllib3.util.retry import Retry from auth_api.exceptions import ServiceUnavailableException -from auth_api.utils.enums import AuthHeaderType, ContentType from auth_api.utils.cache import cache +from auth_api.utils.enums import AuthHeaderType, ContentType RETRY_ADAPTER = HTTPAdapter(max_retries=Retry(total=5, backoff_factor=1, status_forcelist=[404])) +logger = StructuredLogging.get_logger() class RestService: """Service to invoke Rest services which uses OAuth 2.0 implementation.""" @staticmethod - def _invoke(rest_method, endpoint, token=None, # pylint: disable=too-many-arguments - auth_header_type: AuthHeaderType = AuthHeaderType.BEARER, - content_type: ContentType = ContentType.JSON, 
data=None, raise_for_status: bool = True, - additional_headers: dict = None, generate_token: bool = True): + def _invoke( # pylint: disable=too-many-positional-arguments,too-many-arguments + rest_method, + endpoint, + token=None, + auth_header_type: AuthHeaderType = AuthHeaderType.BEARER, + content_type: ContentType = ContentType.JSON, + data=None, + raise_for_status: bool = True, + additional_headers: dict = None, + generate_token: bool = True, + ): """Invoke different method depending on the input.""" # just to avoid the duplicate code for PUT and POSt - current_app.logger.debug(f'<_invoke-{rest_method}') + logger.debug(f"<_invoke-{rest_method}") if not token and generate_token: token = _get_token() @@ -54,165 +64,235 @@ def _invoke(rest_method, endpoint, token=None, # pylint: disable=too-many-argum if content_type == ContentType.JSON: data = json.dumps(data) - current_app.logger.debug(f'Endpoint : {endpoint}') - current_app.logger.debug(f'headers : {headers}') + logger.debug(f"Endpoint : {endpoint}") + logger.debug(f"headers : {headers}") response = None try: invoke_rest_method = getattr(requests, rest_method) - response = invoke_rest_method(endpoint, data=data, headers=headers, - timeout=current_app.config.get('CONNECT_TIMEOUT', 60)) + response = invoke_rest_method( + endpoint, data=data, headers=headers, timeout=current_app.config.get("CONNECT_TIMEOUT", 60) + ) if raise_for_status: response.raise_for_status() except (ReqConnectionError, ConnectTimeout) as exc: - current_app.logger.error('---Error on POST---') - current_app.logger.error(exc) + logger.error("---Error on POST---") + logger.error(exc) raise ServiceUnavailableException(exc) from exc except HTTPError as exc: - current_app.logger.error(f'HTTPError on POST {endpoint} with status code ' - f"{exc.response.status_code if exc.response else ''}") + logger.error( + f"HTTPError on POST {endpoint} with status code " f"{exc.response.status_code if exc.response else ''}" + ) if response and response.status_code 
>= 500: raise ServiceUnavailableException(exc) from exc raise exc finally: RestService.__log_response(response) - current_app.logger.debug('>post') + logger.debug(">post") return response @staticmethod def __log_response(response): if response is not None: - current_app.logger.info(f'Response Headers {response.headers}') - if response.headers and isinstance(response.headers, Iterable) and \ - 'Content-Type' in response.headers and \ - response.headers['Content-Type'] == ContentType.JSON.value: - current_app.logger.info(f"response : {response.text if response else ''}") + logger.info(f"Response Headers {response.headers}") + if ( + response.headers + and isinstance(response.headers, Iterable) + and "Content-Type" in response.headers + and response.headers["Content-Type"] == ContentType.JSON.value + ): + logger.info(f"response : {response.text if response else ''}") @staticmethod - def post(endpoint, token=None, # pylint: disable=too-many-arguments - auth_header_type: AuthHeaderType = AuthHeaderType.BEARER, - content_type: ContentType = ContentType.JSON, data=None, raise_for_status: bool = True, - additional_headers: dict = None, generate_token: bool = True): + def post( # pylint: disable=too-many-positional-arguments,too-many-arguments + endpoint, + token=None, + auth_header_type: AuthHeaderType = AuthHeaderType.BEARER, + content_type: ContentType = ContentType.JSON, + data=None, + raise_for_status: bool = True, + additional_headers: dict = None, + generate_token: bool = True, + ): """POST service.""" - current_app.logger.debug('= 500: raise ServiceUnavailableException(exc) from exc raise exc finally: - current_app.logger.debug(response.headers if response else 'Empty Response Headers') - current_app.logger.info(f"response : {response.text if response else ''}") + logger.debug(response.headers if response else "Empty Response Headers") + logger.info(f"response : {response.text if response else ''}") - current_app.logger.debug('>GET') + logger.debug(">GET") return 
response @staticmethod @cache.cached(query_string=True) - def get_service_account_token(config_id='KEYCLOAK_SERVICE_ACCOUNT_ID', - config_secret='KEYCLOAK_SERVICE_ACCOUNT_SECRET') -> str: + def get_service_account_token( + config_id="KEYCLOAK_SERVICE_ACCOUNT_ID", config_secret="KEYCLOAK_SERVICE_ACCOUNT_SECRET" + ) -> str: """Generate a service account token.""" kc_service_id = current_app.config.get(config_id) kc_secret = current_app.config.get(config_secret) - issuer_url = current_app.config.get('JWT_OIDC_ISSUER') - token_url = issuer_url + '/protocol/openid-connect/token' - auth_response = requests.post(token_url, auth=(kc_service_id, kc_secret), headers={ - 'Content-Type': ContentType.FORM_URL_ENCODED.value}, data='grant_type=client_credentials', - timeout=current_app.config.get('CONNECT_TIMEOUT', 60)) + issuer_url = current_app.config.get("JWT_OIDC_ISSUER") + token_url = issuer_url + "/protocol/openid-connect/token" + auth_response = requests.post( + token_url, + auth=(kc_service_id, kc_secret), + headers={"Content-Type": ContentType.FORM_URL_ENCODED.value}, + data="grant_type=client_credentials", + timeout=current_app.config.get("CONNECT_TIMEOUT", 60), + ) auth_response.raise_for_status() - return auth_response.json().get('access_token') + return auth_response.json().get("access_token") @staticmethod def _generate_headers(content_type, additional_headers, token, auth_header_type): """Generate headers.""" return { - 'Content-Type': content_type.value, + "Content-Type": content_type.value, **(additional_headers if additional_headers else {}), - **({'Authorization': auth_header_type.value.format(token)} if token else {}) + **({"Authorization": auth_header_type.value.format(token)} if token else {}), } @staticmethod async def call_posts_in_parallel(call_info: dict, token: str): """Call the services in parallel and return the responses.""" - headers = {'Content-Type': 'application/json', 'Authorization': f'Bearer {token}'} + headers = {"Content-Type": 
"application/json", "Authorization": f"Bearer {token}"} responses = [] # call all urls in parallel async with aiohttp.ClientSession() as session: - fetch_tasks = [asyncio.create_task(session.post( - data['url'], json=data['payload'], headers=headers)) for data in call_info] + fetch_tasks = [ + asyncio.create_task(session.post(data["url"], json=data["payload"], headers=headers)) + for data in call_info + ] tasks = await asyncio.gather(*fetch_tasks, return_exceptions=True) for task in tasks: if isinstance(task, ClientConnectorError): # if no response from task we will go in here (i.e. namex-api is down) - current_app.logger.error( - '---Error in _call_urls_in_parallel: no response from %s---', task.os_error) - raise ServiceUnavailableException(f'No response from {task.os_error}') + error_msg = f"---Error in _call_urls_in_parallel: no response from {task.os_error} ---" + logger.error(error_msg) + raise ServiceUnavailableException(f"No response from {task.os_error}") if task.status != HTTPStatus.OK: - current_app.logger.error('---Error in _call_urls_in_parallel: error response from %s---', task.url) - raise ServiceUnavailableException(f'Error response from {task.url}') + error_msg = f"---Error in _call_urls_in_parallel: error response from {task.url} ---" + logger.error(error_msg) + raise ServiceUnavailableException(f"Error response from {task.url}") task_json = await task.json() responses.append(task_json) return responses def _get_token() -> str: - token: str = request.headers['Authorization'] if request and 'Authorization' in request.headers else None - return token.replace('Bearer ', '') if token else None + token: str = request.headers["Authorization"] if request and "Authorization" in request.headers else None + return token.replace("Bearer ", "") if token else None diff --git a/auth-api/src/auth_api/services/simple_org.py b/auth-api/src/auth_api/services/simple_org.py index cb47f0c3bb..8a74f49b33 100644 --- a/auth-api/src/auth_api/services/simple_org.py +++ 
b/auth-api/src/auth_api/services/simple_org.py @@ -14,19 +14,19 @@ """Service for managing Simplified Organization data.""" from jinja2 import Environment, FileSystemLoader -from flask import current_app from sqlalchemy import String, and_, desc, func, or_ +from structured_logging import StructuredLogging from auth_api.config import get_named_config -from auth_api.models import db from auth_api.models import Org as OrgModel +from auth_api.models import db from auth_api.models.dataclass import SimpleOrgSearch from auth_api.schemas.simple_org import SimpleOrgInfoSchema from auth_api.utils.converter import Converter - -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) CONFIG = get_named_config() +logger = StructuredLogging.get_logger() class SimpleOrg: # pylint: disable=too-few-public-methods @@ -43,10 +43,11 @@ def __init__(self, model): @classmethod def search(cls, search_criteria: SimpleOrgSearch): """Search org records and returned a simplified result set.""" - current_app.logger.debug('search') + logger.debug(">search") return { - 'page': search_criteria.page, - 'limit': search_criteria.limit, - 'items': org_list, - 'total': pagination.total + "page": search_criteria.page, + "limit": search_criteria.limit, + "items": org_list, + "total": pagination.total, } @classmethod diff --git a/auth-api/src/auth_api/services/task.py b/auth-api/src/auth_api/services/task.py index 907fd9c3a1..efd2a3f333 100644 --- a/auth-api/src/auth_api/services/task.py +++ b/auth-api/src/auth_api/services/task.py @@ -21,8 +21,8 @@ from flask import current_app from jinja2 import Environment, FileSystemLoader -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from sbc_common_components.utils.enums import QueueMessageTypes +from structured_logging import StructuredLogging from auth_api.exceptions import BusinessException, Error from auth_api.models import Membership as 
MembershipModel @@ -38,11 +38,10 @@ from auth_api.utils.notifications import ProductSubscriptionInfo from auth_api.utils.util import camelback2snake # noqa: I005 +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) +logger = StructuredLogging.get_logger() -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) - -@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class Task: # pylint: disable=too-many-instance-attributes """Manages all aspects of the Task Entity. @@ -60,7 +59,6 @@ def identifier(self): """Return the identifier for this user.""" return self._model.id - @ServiceTracing.disable_tracing def as_dict(self, exclude: List = None): """Return the Task as a python dict. @@ -74,19 +72,19 @@ def as_dict(self, exclude: List = None): @staticmethod def create_task(task_info: dict, do_commit: bool = True): """Create a new task record.""" - current_app.logger.debug('create_task ') + logger.debug(">create_task ") return Task(task_model) @staticmethod def close_task(task_id, remarks: [] = None, do_commit: bool = True): """Close a task.""" - current_app.logger.debug('update_task ') + logger.debug(">update_task ") return Task(task_model) def _update_relationship(self, origin_url: str = None): """Retrieve the relationship record and update the status.""" task_model: TaskModel = self._model - current_app.logger.debug('update_task_relationship ') + logger.debug(">update_task_relationship ") @staticmethod def get_task_remark(task_model: TaskModel): @@ -173,62 +177,70 @@ def get_task_remark(task_model: TaskModel): return None @staticmethod - def _notify_admin_about_hold(task_model, org: OrgModel = None, is_new_bceid_admin_request: bool = False, - membership_id: int = None, user: UserModel = None): + def _notify_admin_about_hold( + task_model, + org: OrgModel = None, + is_new_bceid_admin_request: bool = False, + membership_id: int = None, + user: UserModel = None, + ): if is_new_bceid_admin_request: 
create_account_signin_route = urllib.parse.quote_plus( - f"{current_app.config.get('BCEID_ADMIN_SETUP_ROUTE')}/" - f'{task_model.account_id}/' - f'{membership_id}') - admin_emails = user.contacts[0].contact.email if user.contacts else '' + f"{current_app.config.get('BCEID_ADMIN_SETUP_ROUTE')}/" f"{task_model.account_id}/" f"{membership_id}" + ) + admin_emails = user.contacts[0].contact.email if user.contacts else "" account_id = task_model.account_id mailer_type = QueueMessageTypes.RESUBMIT_BCEID_ADMIN_NOTIFICATION.value else: - create_account_signin_route = urllib.parse. \ - quote_plus(f"{current_app.config.get('BCEID_ACCOUNT_SETUP_ROUTE')}/" - f'{org.id}') + create_account_signin_route = urllib.parse.quote_plus( + f"{current_app.config.get('BCEID_ACCOUNT_SETUP_ROUTE')}/" f"{org.id}" + ) admin_emails = UserService.get_admin_emails_for_org(org.id) account_id = org.id mailer_type = QueueMessageTypes.RESUBMIT_BCEID_ORG_NOTIFICATION.value - if admin_emails == '': - current_app.logger.error('No admin email record for org id %s', org.id) - current_app.logger.error('update_task_org ') + OrgService.approve_or_reject( + org_id=org_id, is_approved=is_approved, origin_url=origin_url, task_action=task_action + ) + + logger.debug(">update_task_org ") @staticmethod def _update_bceid_admin(is_approved: bool, user_id: int): """Approve/Reject BCeId Admin User and Affidavit.""" from auth_api.services import Affidavit # pylint:disable=cyclic-import, import-outside-toplevel - current_app.logger.debug('update_bceid_admin_to_org ') + logger.debug(">update_bceid_admin_to_org ") @staticmethod def _update_product_subscription(product_sub_info: ProductSubscriptionInfo): """Review Product Subscription.""" - current_app.logger.debug('<_update_product_subscription ') + logger.debug("<_update_product_subscription ") from auth_api.services import Product as ProductService # pylint:disable=cyclic-import, import-outside-toplevel # Approve/Reject Product subscription 
ProductService.update_product_subscription(product_sub_info=product_sub_info, is_new_transaction=False) - current_app.logger.debug('>_update_product_subscription ') + logger.debug(">_update_product_subscription ") @staticmethod def fetch_tasks(task_search: TaskSearch): """Search all tasks.""" - current_app.logger.debug('fetch_tasks ') + logger.debug(">fetch_tasks ") return tasks diff --git a/auth-api/src/auth_api/services/user.py b/auth-api/src/auth_api/services/user.py index 33a2c0b80f..f94341d598 100644 --- a/auth-api/src/auth_api/services/user.py +++ b/auth-api/src/auth_api/services/user.py @@ -17,16 +17,14 @@ """ import json +from http import HTTPStatus from typing import Dict, List -from flask import current_app from jinja2 import Environment, FileSystemLoader from requests import HTTPError -from sbc_common_components.tracing.service_tracing import ServiceTracing # noqa: I001 from sbc_common_components.utils.enums import QueueMessageTypes +from structured_logging import StructuredLogging -from auth_api import status as http_status -from auth_api.models.dataclass import Activity from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models import Contact as ContactModel @@ -35,13 +33,21 @@ from auth_api.models import Org as OrgModel from auth_api.models import User as UserModel from auth_api.models import db +from auth_api.models.dataclass import Activity from auth_api.schemas import UserSchema - from auth_api.services.authorization import check_auth from auth_api.services.keycloak_user import KeycloakUser from auth_api.utils import util from auth_api.utils.enums import ( - AccessType, ActivityAction, DocumentType, IdpHint, LoginSource, OrgStatus, Status, UserStatus) + AccessType, + ActivityAction, + DocumentType, + IdpHint, + LoginSource, + OrgStatus, + Status, + UserStatus, +) from auth_api.utils.roles import ADMIN, CLIENT_ADMIN_ROLES, COORDINATOR, STAFF, Role from auth_api.utils.user_context import 
UserContext, user_context from auth_api.utils.util import camelback2snake @@ -52,11 +58,10 @@ from .documents import Documents as DocumentService from .keycloak import KeycloakService - -ENV = Environment(loader=FileSystemLoader('.'), autoescape=True) +ENV = Environment(loader=FileSystemLoader("."), autoescape=True) +logger = StructuredLogging.get_logger() -@ServiceTracing.trace(ServiceTracing.enable_tracing, ServiceTracing.should_be_tracing) class User: # pylint: disable=too-many-instance-attributes disable=too-many-public-methods """Manages all aspects of the User Entity. @@ -89,7 +94,6 @@ def type(self) -> str: """Return the type for the user.""" return self._model.type - @ServiceTracing.disable_tracing def as_dict(self): """Return the User as a python dict. @@ -100,9 +104,12 @@ def as_dict(self): return obj @staticmethod - def create_user_and_add_membership(memberships: List[dict], org_id, - # pylint: disable=too-many-locals, too-many-statements, too-many-branches - single_mode: bool = False): + def create_user_and_add_membership( + memberships: List[dict], + org_id, + # pylint: disable=too-many-locals, too-many-statements, too-many-branches + single_mode: bool = False, + ): """ Create user(s) in the DB and upstream keycloak. 
@@ -113,26 +120,26 @@ def create_user_and_add_membership(memberships: List[dict], org_id, """ User._validate_and_throw_exception(memberships, org_id, single_mode) - current_app.logger.debug('create_user') + logger.debug("create_user") users = [] for membership in memberships: - username = membership['username'] - current_app.logger.debug(f'create user username: {username}') + username = membership["username"] + logger.debug(f"create user username: {username}") create_user_request = User._create_kc_user(membership) - db_username = IdpHint.BCROS.value + '/' + username + db_username = IdpHint.BCROS.value + "/" + username user_model = UserModel.find_by_username(db_username) re_enable_user = False existing_kc_user = KeycloakService.get_user_by_username(username) - enabled_in_kc = getattr(existing_kc_user, 'enabled', True) - if getattr(user_model, 'status', None) == Status.INACTIVE.value and not enabled_in_kc: + enabled_in_kc = getattr(existing_kc_user, "enabled", True) + if getattr(user_model, "status", None) == Status.INACTIVE.value and not enabled_in_kc: membership_model = MembershipModel.find_membership_by_userid(user_model.id) - re_enable_user = membership_model.org_id == org_id + re_enable_user = membership_model.org_id == int(org_id or -1) if user_model and not re_enable_user: - current_app.logger.debug('Existing users found in DB') + logger.debug("Existing users found in DB") users.append(User._get_error_dict(username, Error.USER_ALREADY_EXISTS)) continue - if membership.get('update_password_on_login', True): # by default , reset needed + if membership.get("update_password_on_login", True): # by default , reset needed create_user_request.update_password_on_login() try: if re_enable_user: @@ -140,11 +147,13 @@ def create_user_and_add_membership(memberships: List[dict], org_id, else: kc_user = KeycloakService.add_user(create_user_request, throw_error_if_exists=True) except BusinessException as err: - current_app.logger.error('create_user in keycloak failed 
:duplicate user %s', err) + error_msg = f"create_user in keycloak failed :duplicate user {err}" + logger.error(error_msg) users.append(User._get_error_dict(username, Error.USER_ALREADY_EXISTS)) continue except HTTPError as err: - current_app.logger.error('create_user in keycloak failed %s', err) + error_msg = f"create_user in keycloak failed {err}" + logger.error(error_msg) users.append(User._get_error_dict(username, Error.FAILED_ADDING_USER_ERROR)) continue try: @@ -154,17 +163,18 @@ def create_user_and_add_membership(memberships: List[dict], org_id, user_model.login_source = LoginSource.BCROS.value user_model.flush() membership_model.status = Status.ACTIVE.value - membership_model.membership_type_code = membership['membershipType'] + membership_model.membership_type_code = membership["membershipType"] membership_model.flush() else: user_model = User._create_new_user_and_membership(db_username, kc_user, membership, org_id) db.session.commit() # commit is for session ;need not to invoke for every object user_dict = User(user_model).as_dict() - user_dict.update({'http_status': http_status.HTTP_201_CREATED, 'error': ''}) + user_dict.update({"http_status": HTTPStatus.CREATED, "error": ""}) users.append(user_dict) except Exception as e: # NOQA # pylint: disable=broad-except - current_app.logger.error('Error on create_user_and_add_membership: %s', e) + error_msg = f"Error on create_user_and_add_membership {e}" + logger.error(error_msg) db.session.rollback() if re_enable_user: User._update_user_in_kc(create_user_request) @@ -173,7 +183,7 @@ def create_user_and_add_membership(memberships: List[dict], org_id, users.append(User._get_error_dict(username, Error.FAILED_ADDING_USER_ERROR)) continue - return {'users': users} + return {"users": users} @staticmethod def _update_user_in_kc(create_user_request): @@ -185,7 +195,7 @@ def _update_user_in_kc(create_user_request): @staticmethod def _validate_and_throw_exception(memberships, org_id, single_mode): if single_mode: # make 
sure no bulk operation and only owner is created using if no auth - if len(memberships) > 1 or memberships[0].get('membershipType') not in [ADMIN, COORDINATOR]: + if len(memberships) > 1 or memberships[0].get("membershipType") not in [ADMIN, COORDINATOR]: raise BusinessException(Error.INVALID_USER_CREDENTIALS, None) else: check_auth(org_id=org_id, one_of_roles=(COORDINATOR, ADMIN, STAFF)) @@ -196,23 +206,35 @@ def _validate_and_throw_exception(memberships, org_id, single_mode): @staticmethod def _create_new_user_and_membership(db_username, kc_user, membership, org_id): - user_model: UserModel = UserModel(username=db_username, - is_terms_of_use_accepted=False, status=Status.ACTIVE.value, - type=Role.ANONYMOUS_USER.name, - email=membership.get('email', None), - firstname=kc_user.first_name, lastname=kc_user.last_name, - login_source=LoginSource.BCROS.value) + user_model: UserModel = UserModel( + username=db_username, + is_terms_of_use_accepted=False, + status=Status.ACTIVE.value, + type=Role.ANONYMOUS_USER.name, + email=membership.get("email", None), + firstname=kc_user.first_name, + lastname=kc_user.last_name, + login_source=LoginSource.BCROS.value, + ) user_model.flush() - membership_model = MembershipModel(org_id=org_id, user_id=user_model.id, - membership_type_code=membership['membershipType'], - membership_type_status=Status.ACTIVE.value) + membership_model = MembershipModel( + org_id=org_id, + user_id=user_model.id, + membership_type_code=membership["membershipType"], + membership_type_status=Status.ACTIVE.value, + ) membership_model.flush() - name = {'first_name': user_model.firstname, 'last_name': user_model.lastname} - ActivityLogPublisher.publish_activity(Activity(org_id, ActivityAction.APPROVE_TEAM_MEMBER.value, - name=json.dumps(name), - value=membership['membershipType'], - id=user_model.id)) + name = {"first_name": user_model.firstname, "last_name": user_model.lastname} + ActivityLogPublisher.publish_activity( + Activity( + org_id, + 
ActivityAction.APPROVE_TEAM_MEMBER.value, + name=json.dumps(name), + value=membership["membershipType"], + id=user_model.id, + ) + ) return user_model @staticmethod @@ -227,28 +249,26 @@ def delete_otp_for_user(user_name, origin_url: str = None): KeycloakService.reset_otp(str(user.keycloak_guid)) User.send_otp_authenticator_reset_notification(user.email, origin_url, org_id) except HTTPError as err: - current_app.logger.error('update_user in keycloak failed %s', err) + error_msg = f"update_user in keycloak failed {err}" + logger.error(error_msg) raise BusinessException(Error.UNDEFINED_ERROR, err) from err @staticmethod def send_otp_authenticator_reset_notification(recipient_email, origin_url, org_id): """Send Authenticator reset notification to the user.""" - current_app.logger.debug(' User Owner : {is_user_an_owner},Has other owners :{org_has_other_owners}') + logger.info(f"Org :{org.name} --> User Owner : {is_user_an_owner},Has other owners :{org_has_other_owners}") if is_user_an_owner and not org_has_other_owners: - current_app.logger.info(f'Affiliated entities : {len(org.affiliated_entities)}') + logger.info(f"Affiliated entities : {len(org.affiliated_entities)}") if len(org.affiliated_entities) == 0: org.status_code = OrgStatus.INACTIVE.value org.flush() @@ -645,14 +674,15 @@ def __remove_org_membership(org, user_id): @user_context def is_context_user_staff(**kwargs): """Check if user in user context has is a staff.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] return user_from_context.is_staff() @staticmethod def is_user_admin_or_coordinator(user, org_id: int) -> bool: """Check if user(userservice wrapper) provided is admin or coordinator for the given org id.""" - current_user_membership: MembershipModel = \ - MembershipModel.find_membership_by_user_and_org(user_id=user.identifier, org_id=org_id) + current_user_membership: MembershipModel = 
MembershipModel.find_membership_by_user_and_org( + user_id=user.identifier, org_id=org_id + ) if current_user_membership is None: return False diff --git a/auth-api/src/auth_api/services/user_settings.py b/auth-api/src/auth_api/services/user_settings.py index 7d272217b6..bd550f1659 100644 --- a/auth-api/src/auth_api/services/user_settings.py +++ b/auth-api/src/auth_api/services/user_settings.py @@ -14,10 +14,13 @@ """Service to invoke Rest services.""" from flask import current_app +from structured_logging import StructuredLogging from auth_api.models.user_settings import UserSettings as UserSettingsModel from auth_api.services.org import Org as OrgService +logger = StructuredLogging.get_logger() + class UserSettings: # pylint: disable=too-few-public-methods """Service for user settings.""" @@ -29,23 +32,30 @@ def __init__(self, model): @staticmethod def fetch_user_settings(user_id): """Create a new organization.""" - current_app.logger.debug(' ValidatorResponse: """Validate and return correct access type.""" - access_type: str = kwargs.get('accessType') - user: UserContext = kwargs['user_context'] + access_type: str = kwargs.get("accessType") + user: UserContext = kwargs["user_context"] error = None validator_response = ValidatorResponse() if access_type: @@ -31,11 +31,16 @@ def validate(**kwargs) -> ValidatorResponse: error = Error.USER_CANT_CREATE_ANONYMOUS_ORG if not user.is_staff_admin() and access_type in AccessType.GOVM.value: error = Error.USER_CANT_CREATE_GOVM_ORG - if not user.is_bceid_user() and access_type in \ - (AccessType.EXTRA_PROVINCIAL.value, AccessType.REGULAR_BCEID.value): + if not user.is_bceid_user() and access_type in ( + AccessType.EXTRA_PROVINCIAL.value, + AccessType.REGULAR_BCEID.value, + ): error = Error.USER_CANT_CREATE_EXTRA_PROVINCIAL_ORG - if user.is_bceid_user() and access_type not in \ - (AccessType.EXTRA_PROVINCIAL.value, AccessType.REGULAR_BCEID.value, AccessType.GOVN.value): + if user.is_bceid_user() and access_type not in ( + 
AccessType.EXTRA_PROVINCIAL.value, + AccessType.REGULAR_BCEID.value, + AccessType.GOVN.value, + ): error = Error.USER_CANT_CREATE_REGULAR_ORG if error is not None: validator_response.add_error(error) @@ -46,5 +51,5 @@ def validate(**kwargs) -> ValidatorResponse: access_type = AccessType.EXTRA_PROVINCIAL.value elif not user.is_staff_admin(): access_type = AccessType.REGULAR.value - validator_response.add_info({'access_type': access_type}) + validator_response.add_info({"access_type": access_type}) return validator_response diff --git a/auth-api/src/auth_api/services/validators/account_limit.py b/auth-api/src/auth_api/services/validators/account_limit.py index fccc81415e..077644429b 100644 --- a/auth-api/src/auth_api/services/validators/account_limit.py +++ b/auth-api/src/auth_api/services/validators/account_limit.py @@ -24,12 +24,12 @@ @user_context def validate(is_fatal=False, **kwargs) -> ValidatorResponse: """Validate account limit for user.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] validator_response = ValidatorResponse() if not user_from_context.is_staff_admin(): user: UserModel = UserModel.find_by_jwt_token() count = OrgModel.get_count_of_org_created_by_user_id(user.id) - if count >= current_app.config.get('MAX_NUMBER_OF_ORGS'): + if count >= current_app.config.get("MAX_NUMBER_OF_ORGS"): validator_response.add_error(Error.MAX_NUMBER_OF_ORGS_LIMIT) if is_fatal: raise BusinessException(Error.MAX_NUMBER_OF_ORGS_LIMIT, None) diff --git a/auth-api/src/auth_api/services/validators/bcol_credentials.py b/auth-api/src/auth_api/services/validators/bcol_credentials.py index c7a3ed10d8..b5a5db4e50 100644 --- a/auth-api/src/auth_api/services/validators/bcol_credentials.py +++ b/auth-api/src/auth_api/services/validators/bcol_credentials.py @@ -13,11 +13,11 @@ # limitations under the License. 
"""Util for validating BCOL data.""" import json +from http import HTTPStatus from flask import current_app -from auth_api import status as http_status -from auth_api.exceptions import BusinessException, CustomException, Error +from auth_api.exceptions import BusinessException, Error from auth_api.services.rest_service import RestService from auth_api.services.validators.validator_response import ValidatorResponse from auth_api.utils.user_context import UserContext, user_context @@ -26,26 +26,29 @@ @user_context def validate(is_fatal=False, **kwargs) -> ValidatorResponse: """Validate bcol credentials.""" - bcol_credential = kwargs.get('bcol_credential') - org_id = kwargs.get('org_id', None) - user: UserContext = kwargs['user_context'] + bcol_credential = kwargs.get("bcol_credential") + org_id = kwargs.get("org_id", None) + user: UserContext = kwargs["user_context"] validator_response = ValidatorResponse() - bcol_response = RestService.post(endpoint=current_app.config.get('BCOL_API_URL') + '/profiles', - data=bcol_credential, token=user.bearer_token, raise_for_status=False) - if bcol_response.status_code != http_status.HTTP_200_OK: + bcol_response = RestService.post( + endpoint=current_app.config.get("BCOL_API_URL") + "/profiles", + data=bcol_credential, + token=user.bearer_token, + raise_for_status=False, + ) + if bcol_response.status_code != HTTPStatus.OK: error = json.loads(bcol_response.text) - validator_response.add_error( - CustomException(error['detail'], bcol_response.status_code)) + validator_response.add_error(BusinessException(error["detail"], bcol_response.status_code)) if is_fatal: - raise BusinessException(CustomException(error['detail'], bcol_response.status_code), None) + raise BusinessException(error["detail"], bcol_response.status_code) else: - bcol_account_number = bcol_response.json().get('accountNumber') + bcol_account_number = bcol_response.json().get("accountNumber") from auth_api.services.org import Org as OrgService # 
pylint:disable=cyclic-import, import-outside-toplevel + if OrgService.bcol_account_link_check(bcol_account_number, org_id): - validator_response.add_error( - Error.BCOL_ACCOUNT_ALREADY_LINKED) + validator_response.add_error(Error.BCOL_ACCOUNT_ALREADY_LINKED) if is_fatal: raise BusinessException(Error.BCOL_ACCOUNT_ALREADY_LINKED, None) else: - validator_response.add_info({'bcol_response': bcol_response}) + validator_response.add_info({"bcol_response": bcol_response}) return validator_response diff --git a/auth-api/src/auth_api/services/validators/duplicate_org_name.py b/auth-api/src/auth_api/services/validators/duplicate_org_name.py index f94aefa4ac..a4d7e93c09 100644 --- a/auth-api/src/auth_api/services/validators/duplicate_org_name.py +++ b/auth-api/src/auth_api/services/validators/duplicate_org_name.py @@ -22,9 +22,9 @@ @user_context def validate(is_fatal=False, **kwargs) -> ValidatorResponse: """Validate and return org name.""" - name = kwargs.get('name') - branch_name = kwargs.get('branch_name') - org_id = kwargs.get('org_id', None) + name = kwargs.get("name") + branch_name = kwargs.get("branch_name") + org_id = kwargs.get("org_id", None) validator_response = ValidatorResponse() existing_similar_orgs = OrgModel.find_similar_org_by_name(name, org_id=org_id, branch_name=branch_name) if existing_similar_orgs: diff --git a/auth-api/src/auth_api/services/validators/payment_type.py b/auth-api/src/auth_api/services/validators/payment_type.py index 49695e3c8a..b50fa3d1aa 100644 --- a/auth-api/src/auth_api/services/validators/payment_type.py +++ b/auth-api/src/auth_api/services/validators/payment_type.py @@ -23,18 +23,28 @@ @user_context def validate(is_fatal=False, **kwargs) -> ValidatorResponse: """Validate and return correct access type.""" - selected_payment_method: str = kwargs.get('selected_payment_method') - access_type: str = kwargs.get('access_type') - org_type: str = kwargs.get('org_type') - default_cc_method = PaymentMethod.DIRECT_PAY.value if 
current_app.config.get( - 'DIRECT_PAY_ENABLED') else PaymentMethod.CREDIT_CARD.value + selected_payment_method: str = kwargs.get("selected_payment_method") + access_type: str = kwargs.get("access_type") + org_type: str = kwargs.get("org_type") + default_cc_method = ( + PaymentMethod.DIRECT_PAY.value + if current_app.config.get("DIRECT_PAY_ENABLED") + else PaymentMethod.CREDIT_CARD.value + ) validator_response = ValidatorResponse() non_ejv_payment_methods = ( - PaymentMethod.CREDIT_CARD.value, PaymentMethod.DIRECT_PAY.value, - PaymentMethod.PAD.value, PaymentMethod.BCOL.value, PaymentMethod.EFT.value) + PaymentMethod.CREDIT_CARD.value, + PaymentMethod.DIRECT_PAY.value, + PaymentMethod.PAD.value, + PaymentMethod.BCOL.value, + PaymentMethod.EFT.value, + ) org_payment_method_mapping = { OrgType.BASIC: ( - PaymentMethod.CREDIT_CARD.value, PaymentMethod.DIRECT_PAY.value, PaymentMethod.ONLINE_BANKING.value), + PaymentMethod.CREDIT_CARD.value, + PaymentMethod.DIRECT_PAY.value, + PaymentMethod.ONLINE_BANKING.value, + ), OrgType.PREMIUM: non_ejv_payment_methods, OrgType.SBC_STAFF: non_ejv_payment_methods, OrgType.STAFF: non_ejv_payment_methods, @@ -47,13 +57,11 @@ def validate(is_fatal=False, **kwargs) -> ValidatorResponse: if selected_payment_method in valid_types: payment_type = selected_payment_method else: - validator_response.add_error( - Error.INVALID_INPUT) + validator_response.add_error(Error.INVALID_INPUT) if is_fatal: raise BusinessException(Error.INVALID_INPUT, None) else: premium_org_types = (OrgType.PREMIUM, OrgType.SBC_STAFF, OrgType.STAFF) - payment_type = PaymentMethod.BCOL.value if \ - org_type in premium_org_types else default_cc_method - validator_response.add_info({'payment_type': payment_type}) + payment_type = PaymentMethod.BCOL.value if org_type in premium_org_types else default_cc_method + validator_response.add_info({"payment_type": payment_type}) return validator_response diff --git a/auth-api/src/auth_api/status.py b/auth-api/src/auth_api/status.py 
deleted file mode 100644 index 6c3f62b0fa..0000000000 --- a/auth-api/src/auth_api/status.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Descriptive HTTP status codes for readability. - -A set of constants built using the HTTP Code Names -https://www.restapitutorial.com/httpstatuscodes.html - -Making it a little easier for those reading code, that don't have the table memorized. -""" - - -def is_informational(code): - """Return that the code is a provisional response.""" - return 100 <= code <= 199 - - -def is_success(code): - """Return that the client's request was successfully received, understood, and accepted.""" - return 200 <= code <= 299 - - -def is_redirect(code): - """Return that further action needs to be taken by the user agent in order to fulfill the request.""" - return 300 <= code <= 399 - - -def is_client_error(code): - """Return that the client seems to have erred.""" - return 400 <= code <= 499 - - -def is_server_error(code): - """Return that the server is aware that it has erred or is incapable of performing the request.""" - return 500 <= code <= 599 - - -HTTP_100_CONTINUE = 100 -HTTP_101_SWITCHING_PROTOCOLS = 101 -HTTP_200_OK = 200 -HTTP_201_CREATED = 201 -HTTP_202_ACCEPTED = 202 -HTTP_203_NON_AUTHORITATIVE_INFORMATION = 203 -HTTP_204_NO_CONTENT = 204 -HTTP_205_RESET_CONTENT = 205 -HTTP_206_PARTIAL_CONTENT = 206 -HTTP_207_MULTI_STATUS = 207 
-HTTP_300_MULTIPLE_CHOICES = 300 -HTTP_301_MOVED_PERMANENTLY = 301 -HTTP_302_FOUND = 302 -HTTP_303_SEE_OTHER = 303 -HTTP_304_NOT_MODIFIED = 304 -HTTP_305_USE_PROXY = 305 -HTTP_306_RESERVED = 306 -HTTP_307_TEMPORARY_REDIRECT = 307 -HTTP_308_PERMANENT_REDIRECT = 308 -HTTP_400_BAD_REQUEST = 400 -HTTP_401_UNAUTHORIZED = 401 -HTTP_402_PAYMENT_REQUIRED = 402 -HTTP_403_FORBIDDEN = 403 -HTTP_404_NOT_FOUND = 404 -HTTP_405_METHOD_NOT_ALLOWED = 405 -HTTP_406_NOT_ACCEPTABLE = 406 -HTTP_407_PROXY_AUTHENTICATION_REQUIRED = 407 -HTTP_408_REQUEST_TIMEOUT = 408 -HTTP_409_CONFLICT = 409 -HTTP_410_GONE = 410 -HTTP_411_LENGTH_REQUIRED = 411 -HTTP_412_PRECONDITION_FAILED = 412 -HTTP_413_REQUEST_ENTITY_TOO_LARGE = 413 -HTTP_414_REQUEST_URI_TOO_LONG = 414 -HTTP_415_UNSUPPORTED_MEDIA_TYPE = 415 -HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE = 416 -HTTP_417_EXPECTATION_FAILED = 417 -HTTP_428_PRECONDITION_REQUIRED = 428 -HTTP_429_TOO_MANY_REQUESTS = 429 -HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 431 -HTTP_444_CONNECTION_CLOSED_WITHOUT_RESPONSE = 444 -HTTP_500_INTERNAL_SERVER_ERROR = 500 -HTTP_501_NOT_IMPLEMENTED = 501 -HTTP_502_BAD_GATEWAY = 502 -HTTP_503_SERVICE_UNAVAILABLE = 503 -HTTP_504_GATEWAY_TIMEOUT = 504 -HTTP_505_HTTP_VERSION_NOT_SUPPORTED = 505 -HTTP_508_LOOP_DETECTED = 508 -HTTP_510_NOT_EXTENDED = 510 -HTTP_511_NETWORK_AUTHENTICATION_REQUIRED = 511 diff --git a/auth-api/src/auth_api/tracer.py b/auth-api/src/auth_api/tracer.py deleted file mode 100644 index ef84bf6899..0000000000 --- a/auth-api/src/auth_api/tracer.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tracing subsystem class. - -This module initializes and provides the tracing component from sbc_common_components -""" - -from sbc_common_components.tracing.api_tracer import ApiTracer -from sbc_common_components.tracing.api_tracing import ApiTracing - - -class Tracer(): # pylint: disable=too-few-public-methods - """Singleton class that wraps sbc_common_components tracing.""" - - __instance = None - - @staticmethod - def get_instance(): - """Retrieve singleton JWTWrapper.""" - if Tracer.__instance is None: - Tracer() - return Tracer.__instance - - def __init__(self): - """Virtually private constructor.""" - if Tracer.__instance is not None: - # pylint: disable=broad-exception-raised - raise Exception('Attempt made to create multiple tracing instances') - - api_tracer = ApiTracer() - Tracer.__instance = ApiTracing(api_tracer.tracer) # pylint: disable=unused-private-member diff --git a/auth-api/src/auth_api/utils/account_mailer.py b/auth-api/src/auth_api/utils/account_mailer.py index 5a5da373f9..4cd4d98184 100644 --- a/auth-api/src/auth_api/utils/account_mailer.py +++ b/auth-api/src/auth_api/utils/account_mailer.py @@ -14,12 +14,16 @@ """helper to publish to mailer.""" import uuid from datetime import datetime, timezone + from flask import current_app from simple_cloudevent import SimpleCloudEvent +from structured_logging import StructuredLogging from auth_api.services.gcp_queue import GcpQueue, queue from auth_api.utils.enums import QueueSources +logger = StructuredLogging.get_logger() + def publish_to_mailer(notification_type, data=None, 
source=QueueSources.AUTH_API.value): """Publish to Account Mailer.""" @@ -29,9 +33,10 @@ def publish_to_mailer(notification_type, data=None, source=QueueSources.AUTH_API subject=None, time=datetime.now(tz=timezone.utc).isoformat(), type=notification_type, - data=data + data=data, ) try: - queue.publish(current_app.config.get('ACCOUNT_MAILER_TOPIC'), GcpQueue.to_queue_message(cloud_event)) + queue.publish(current_app.config.get("ACCOUNT_MAILER_TOPIC"), GcpQueue.to_queue_message(cloud_event)) except Exception as e: # NOQA # pylint: disable=broad-except - current_app.logger.error(f'Failed to publish to mailer: {str(e)}') + error_msg = f"Failed to publish to mailer {e}" + logger.error(error_msg) diff --git a/auth-api/src/auth_api/utils/api_gateway.py b/auth-api/src/auth_api/utils/api_gateway.py index 62c5478eb7..f8359e3d05 100644 --- a/auth-api/src/auth_api/utils/api_gateway.py +++ b/auth-api/src/auth_api/utils/api_gateway.py @@ -20,312 +20,302 @@ def generate_client_representation(account_id: int, client_id_pattern: str, env: """Return dictionary for api gateway client user.""" _id = str(uuid.uuid4()) _secret = secrets.token_urlsafe(36) - if env != 'prod': - client_id_pattern += '-sandbox' + if env != "prod": + client_id_pattern += "-sandbox" _client_id = client_id_pattern.format(account_id=account_id) client_json: dict = { - 'id': _id, - 'clientId': _client_id, - 'rootUrl': '', - 'adminUrl': '', - 'baseUrl': '', - 'surrogateAuthRequired': False, - 'enabled': True, - 'alwaysDisplayInConsole': False, - 'clientAuthenticatorType': 'client-secret', - 'secret': _secret, - 'redirectUris': [ - ], - 'webOrigins': [ - ], - 'notBefore': 0, - 'bearerOnly': False, - 'consentRequired': False, - 'standardFlowEnabled': False, - 'implicitFlowEnabled': False, - 'directAccessGrantsEnabled': False, - 'serviceAccountsEnabled': True, - 'publicClient': False, - 'frontchannelLogout': False, - 'protocol': 'openid-connect', - 'attributes': { - 'saml.assertion.signature': 'false', - 
'saml.multivalued.roles': 'false', - 'saml.force.post.binding': 'false', - 'saml.encrypt': 'false', - 'saml.server.signature': 'false', - 'saml.server.signature.keyinfo.ext': 'false', - 'exclude.session.state.from.auth.response': 'false', - 'client_credentials.use_refresh_token': 'false', - 'saml_force_name_id_format': 'false', - 'saml.client.signature': 'false', - 'tls.client.certificate.bound.access.tokens': 'false', - 'saml.authnstatement': 'false', - 'display.on.consent.screen': 'false', - 'saml.onetimeuse.condition': 'false' + "id": _id, + "clientId": _client_id, + "rootUrl": "", + "adminUrl": "", + "baseUrl": "", + "surrogateAuthRequired": False, + "enabled": True, + "alwaysDisplayInConsole": False, + "clientAuthenticatorType": "client-secret", + "secret": _secret, + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": False, + "consentRequired": False, + "standardFlowEnabled": False, + "implicitFlowEnabled": False, + "directAccessGrantsEnabled": False, + "serviceAccountsEnabled": True, + "publicClient": False, + "frontchannelLogout": False, + "protocol": "openid-connect", + "attributes": { + "saml.assertion.signature": "false", + "saml.multivalued.roles": "false", + "saml.force.post.binding": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "saml.server.signature.keyinfo.ext": "false", + "exclude.session.state.from.auth.response": "false", + "client_credentials.use_refresh_token": "false", + "saml_force_name_id_format": "false", + "saml.client.signature": "false", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "saml.onetimeuse.condition": "false", }, - 'authenticationFlowBindingOverrides': {}, - 'fullScopeAllowed': True, - 'nodeReRegistrationTimeout': -1, - 'protocolMappers': [ + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": True, + "nodeReRegistrationTimeout": -1, + "protocolMappers": [ { - 'name': 'Client 
ID', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-usersessionmodel-note-mapper', - 'consentRequired': False, - 'config': { - 'user.session.note': 'clientId', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'clientId', - 'jsonType.label': 'String' - } + "name": "Client ID", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": False, + "config": { + "user.session.note": "clientId", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientId", + "jsonType.label": "String", + }, }, { - 'name': 'preferred_username', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-hardcoded-claim-mapper', - 'consentRequired': False, - 'config': { - 'claim.value': _client_id, - 'userinfo.token.claim': 'true', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'preferred_username', - 'jsonType.label': 'String' - } + "name": "preferred_username", + "protocol": "openid-connect", + "protocolMapper": "oidc-hardcoded-claim-mapper", + "consentRequired": False, + "config": { + "claim.value": _client_id, + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "preferred_username", + "jsonType.label": "String", + }, }, { - 'name': 'family name', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-usermodel-property-mapper', - 'consentRequired': False, - 'config': { - 'userinfo.token.claim': 'true', - 'user.attribute': 'lastName', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'lastname', - 'jsonType.label': 'String' - } + "name": "family name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": False, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "lastName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "lastname", + "jsonType.label": "String", + }, 
}, { - 'name': 'full name', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-hardcoded-claim-mapper', - 'consentRequired': False, - 'config': { - 'claim.value': _client_id, - 'userinfo.token.claim': 'true', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'name', - 'jsonType.label': 'String' - } + "name": "full name", + "protocol": "openid-connect", + "protocolMapper": "oidc-hardcoded-claim-mapper", + "consentRequired": False, + "config": { + "claim.value": _client_id, + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "name", + "jsonType.label": "String", + }, }, { - 'name': 'username', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-hardcoded-claim-mapper', - 'consentRequired': False, - 'config': { - 'claim.value': _client_id, - 'userinfo.token.claim': 'true', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'username', - 'jsonType.label': 'String' - } + "name": "username", + "protocol": "openid-connect", + "protocolMapper": "oidc-hardcoded-claim-mapper", + "consentRequired": False, + "config": { + "claim.value": _client_id, + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "username", + "jsonType.label": "String", + }, }, { - 'name': 'given name', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-hardcoded-claim-mapper', - 'consentRequired': False, - 'config': { - 'claim.value': _client_id, - 'userinfo.token.claim': 'true', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'lastname', - 'jsonType.label': 'String' - } + "name": "given name", + "protocol": "openid-connect", + "protocolMapper": "oidc-hardcoded-claim-mapper", + "consentRequired": False, + "config": { + "claim.value": _client_id, + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "lastname", + "jsonType.label": "String", 
+ }, }, { - 'name': 'name', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-hardcoded-claim-mapper', - 'consentRequired': False, - 'config': { - 'claim.value': _client_id, - 'userinfo.token.claim': 'true', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'name', - 'jsonType.label': 'String' - } + "name": "name", + "protocol": "openid-connect", + "protocolMapper": "oidc-hardcoded-claim-mapper", + "consentRequired": False, + "config": { + "claim.value": _client_id, + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "name", + "jsonType.label": "String", + }, }, { - 'name': 'role list', - 'protocol': 'saml', - 'protocolMapper': 'saml-role-list-mapper', - 'consentRequired': False, - 'config': { - 'single': 'false', - 'attribute.nameformat': 'Basic', - 'attribute.name': 'Role' - } + "name": "role list", + "protocol": "saml", + "protocolMapper": "saml-role-list-mapper", + "consentRequired": False, + "config": {"single": "false", "attribute.nameformat": "Basic", "attribute.name": "Role"}, }, { - 'name': 'realm roles', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-usermodel-realm-role-mapper', - 'consentRequired': False, - 'config': { - 'multivalued': 'true', - 'userinfo.token.claim': 'false', - 'id.token.claim': 'false', - 'access.token.claim': 'true', - 'claim.name': 'roles', - 'jsonType.label': 'String' - } + "name": "realm roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": False, + "config": { + "multivalued": "true", + "userinfo.token.claim": "false", + "id.token.claim": "false", + "access.token.claim": "true", + "claim.name": "roles", + "jsonType.label": "String", + }, }, { - 'name': 'aud-account-services-mapper', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-audience-mapper', - 'consentRequired': False, - 'config': { - 'id.token.claim': 'false', - 'access.token.claim': 'true', - 
'included.custom.audience': 'account-services', - 'userinfo.token.claim': 'false' - } + "name": "aud-account-services-mapper", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-mapper", + "consentRequired": False, + "config": { + "id.token.claim": "false", + "access.token.claim": "true", + "included.custom.audience": "account-services", + "userinfo.token.claim": "false", + }, }, { - 'name': 'aud-ppr-services-mapper', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-audience-mapper', - 'consentRequired': False, - 'config': { - 'id.token.claim': 'false', - 'access.token.claim': 'true', - 'included.custom.audience': 'ppr-services', - 'userinfo.token.claim': 'false' - } + "name": "aud-ppr-services-mapper", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-mapper", + "consentRequired": False, + "config": { + "id.token.claim": "false", + "access.token.claim": "true", + "included.custom.audience": "ppr-services", + "userinfo.token.claim": "false", + }, }, { - 'name': 'idp_userid', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-hardcoded-claim-mapper', - 'consentRequired': False, - 'config': { - 'claim.value': _client_id, - 'userinfo.token.claim': 'true', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'idp_userid', - 'jsonType.label': 'String' - } + "name": "idp_userid", + "protocol": "openid-connect", + "protocolMapper": "oidc-hardcoded-claim-mapper", + "consentRequired": False, + "config": { + "claim.value": _client_id, + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "idp_userid", + "jsonType.label": "String", + }, }, { - 'name': 'email', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-usermodel-property-mapper', - 'consentRequired': False, - 'config': { - 'userinfo.token.claim': 'true', - 'user.attribute': 'email', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'email', - 'jsonType.label': 
'String' - } + "name": "email", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": False, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "email", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email", + "jsonType.label": "String", + }, }, { - 'name': 'Source Mapper', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-hardcoded-claim-mapper', - 'consentRequired': False, - 'config': { - 'claim.value': 'API_GW', - 'userinfo.token.claim': 'false', - 'id.token.claim': 'false', - 'access.token.claim': 'true', - 'claim.name': 'loginSource', - 'jsonType.label': 'String' - } + "name": "Source Mapper", + "protocol": "openid-connect", + "protocolMapper": "oidc-hardcoded-claim-mapper", + "consentRequired": False, + "config": { + "claim.value": "API_GW", + "userinfo.token.claim": "false", + "id.token.claim": "false", + "access.token.claim": "true", + "claim.name": "loginSource", + "jsonType.label": "String", + }, }, { - 'name': 'aud-entity-services-mapper', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-audience-mapper', - 'consentRequired': False, - 'config': { - 'id.token.claim': 'false', - 'access.token.claim': 'true', - 'included.custom.audience': 'entity-services', - 'userinfo.token.claim': 'false' - } + "name": "aud-entity-services-mapper", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-mapper", + "consentRequired": False, + "config": { + "id.token.claim": "false", + "access.token.claim": "true", + "included.custom.audience": "entity-services", + "userinfo.token.claim": "false", + }, }, { - 'name': 'Client IP Address', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-usersessionmodel-note-mapper', - 'consentRequired': False, - 'config': { - 'user.session.note': 'clientAddress', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'clientAddress', - 'jsonType.label': 'String' - } + "name": "Client 
IP Address", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": False, + "config": { + "user.session.note": "clientAddress", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientAddress", + "jsonType.label": "String", + }, }, { - 'name': 'Client Host', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-usersessionmodel-note-mapper', - 'consentRequired': False, - 'config': { - 'user.session.note': 'clientHost', - 'id.token.claim': 'true', - 'access.token.claim': 'true', - 'claim.name': 'clientHost', - 'jsonType.label': 'String' - } + "name": "Client Host", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": False, + "config": { + "user.session.note": "clientHost", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientHost", + "jsonType.label": "String", + }, }, { - 'name': 'AccountId', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-hardcoded-claim-mapper', - 'consentRequired': False, - 'config': { - 'claim.value': str(account_id), - 'userinfo.token.claim': 'false', - 'id.token.claim': 'false', - 'access.token.claim': 'true', - 'claim.name': 'Account-Id', - 'jsonType.label': 'String' - } + "name": "AccountId", + "protocol": "openid-connect", + "protocolMapper": "oidc-hardcoded-claim-mapper", + "consentRequired": False, + "config": { + "claim.value": str(account_id), + "userinfo.token.claim": "false", + "id.token.claim": "false", + "access.token.claim": "true", + "claim.name": "Account-Id", + "jsonType.label": "String", + }, }, { - 'name': 'aud-business-search-services-mapper', - 'protocol': 'openid-connect', - 'protocolMapper': 'oidc-audience-mapper', - 'consentRequired': False, - 'config': { - 'included.client.audience': 'business-search-service', - 'id.token.claim': 'false', - 'access.token.claim': 'true' - } - } - ], - 'defaultClientScopes': [ - 'web-origins', - 'roles' + 
"name": "aud-business-search-services-mapper", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-mapper", + "consentRequired": False, + "config": { + "included.client.audience": "business-search-service", + "id.token.claim": "false", + "access.token.claim": "true", + }, + }, ], - 'optionalClientScopes': [ - ] + "defaultClientScopes": ["web-origins", "roles"], + "optionalClientScopes": [], } return client_json diff --git a/auth-api/src/auth_api/utils/auth.py b/auth-api/src/auth_api/utils/auth.py new file mode 100644 index 0000000000..be641ae9ac --- /dev/null +++ b/auth-api/src/auth_api/utils/auth.py @@ -0,0 +1,37 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +"""Bring in the common JWT Manager.""" +from flask_jwt_oidc import JwtManager + +jwt = JwtManager() # pylint: disable=invalid-name; lower case name as used by convention in most Flask apps diff --git a/auth-api/src/auth_api/utils/cache.py b/auth-api/src/auth_api/utils/cache.py index b5b4ca7c03..5ad5a5c7ec 100644 --- a/auth-api/src/auth_api/utils/cache.py +++ b/auth-api/src/auth_api/utils/cache.py @@ -13,22 +13,22 @@ # limitations under the License. """Bring in the common cache.""" import os + from flask_caching import Cache -cache_servers = os.environ.get('CACHE_MEMCACHED_SERVERS') +cache_servers = os.environ.get("CACHE_MEMCACHED_SERVERS") if cache_servers: - cache = Cache(config={'CACHE_TYPE': 'MemcachedCache', - 'CACHE_MEMCACHED_SERVERS': cache_servers.split(',')}) + cache = Cache(config={"CACHE_TYPE": "MemcachedCache", "CACHE_MEMCACHED_SERVERS": cache_servers.split(",")}) else: - redis_host = os.environ.get('CACHE_REDIS_HOST') - redis_port = os.environ.get('CACHE_REDIS_PORT') + redis_host = os.environ.get("CACHE_REDIS_HOST") + redis_port = os.environ.get("CACHE_REDIS_PORT") if redis_host and redis_port: - cache = Cache(config={'CACHE_TYPE': 'RedisCache', - 'CACHE_REDIS_HOST': redis_host, - 'CACHE_REDIS_PORT': redis_port}) + cache = Cache( + config={"CACHE_TYPE": "RedisCache", "CACHE_REDIS_HOST": redis_host, "CACHE_REDIS_PORT": redis_port} + ) else: - cache = Cache(config={'CACHE_TYPE': 'simple'}) # pylint: disable=invalid-name + cache 
= Cache(config={"CACHE_TYPE": "simple"}) # pylint: disable=invalid-name diff --git a/auth-api/src/auth_api/utils/constants.py b/auth-api/src/auth_api/utils/constants.py index be42da3a2e..f88de269a4 100644 --- a/auth-api/src/auth_api/utils/constants.py +++ b/auth-api/src/auth_api/utils/constants.py @@ -14,34 +14,34 @@ """Constants definitions.""" # Group names -GROUP_PUBLIC_USERS = 'public_users' -GROUP_ACCOUNT_HOLDERS = 'account_holders' -GROUP_ANONYMOUS_USERS = 'anonymous_users' -GROUP_GOV_ACCOUNT_USERS = 'gov_account_users' -GROUP_API_GW_USERS = 'api_gateway_users' -GROUP_API_GW_SANDBOX_USERS = 'api_gateway_sandbox_users' +GROUP_PUBLIC_USERS = "public_users" +GROUP_ACCOUNT_HOLDERS = "account_holders" +GROUP_ANONYMOUS_USERS = "anonymous_users" +GROUP_GOV_ACCOUNT_USERS = "gov_account_users" +GROUP_API_GW_USERS = "api_gateway_users" +GROUP_API_GW_SANDBOX_USERS = "api_gateway_sandbox_users" # Affidavit folder -AFFIDAVIT_FOLDER_NAME = 'Affidavits' +AFFIDAVIT_FOLDER_NAME = "Affidavits" # BCol profile to product mapping, this will grow as and when more products are onboarded. 
BCOL_PROFILE_PRODUCT_MAP = { - 'VS': 'VS', - 'PPR': 'RPPR', - 'RURLPROP': 'RPT', - # 'COURT_SERVICES': 'CSO', - # "OSBR":'', - # "ADS", - # "COLIN_TYPE", - # "COMP", - # "ICBC", - # "MH", - # "LTO", - # "SES", - # "PPR", - # "CCREF", - # "CCREL", - # "ATSOURCE", - # "EMERGIS", - # "LOCATION_CODE" + "VS": "VS", + "PPR": "RPPR", + "RURLPROP": "RPT", + # 'COURT_SERVICES': 'CSO', + # "OSBR":'', + # "ADS", + # "COLIN_TYPE", + # "COMP", + # "ICBC", + # "MH", + # "LTO", + # "SES", + # "PPR", + # "CCREF", + # "CCREL", + # "ATSOURCE", + # "EMERGIS", + # "LOCATION_CODE" } diff --git a/auth-api/src/auth_api/utils/converter.py b/auth-api/src/auth_api/utils/converter.py index 0d001c80b7..794ce9987e 100644 --- a/auth-api/src/auth_api/utils/converter.py +++ b/auth-api/src/auth_api/utils/converter.py @@ -1,7 +1,9 @@ """Converter module to support decimal and datetime serialization.""" -from decimal import Decimal + from datetime import datetime +from decimal import Decimal from typing import Any, Dict + import cattrs diff --git a/auth-api/src/auth_api/utils/endpoints_enums.py b/auth-api/src/auth_api/utils/endpoints_enums.py index 04abba138d..69efe39a1d 100644 --- a/auth-api/src/auth_api/utils/endpoints_enums.py +++ b/auth-api/src/auth_api/utils/endpoints_enums.py @@ -18,7 +18,7 @@ class EndpointEnum(str, Enum): """Endpoint route url paths.""" - API_V1 = '/api/v1' - API = '/api' - TEST_API = '/test' + API_V1 = "/api/v1" + API = "/api" + TEST_API = "/test" DEFAULT_API = API_V1 diff --git a/auth-api/src/auth_api/utils/enums.py b/auth-api/src/auth_api/utils/enums.py index 24fa91b551..6c1545d1c2 100644 --- a/auth-api/src/auth_api/utils/enums.py +++ b/auth-api/src/auth_api/utils/enums.py @@ -18,136 +18,136 @@ class AuthHeaderType(Enum): """Authorization header types.""" - BASIC = 'Basic {}' - BEARER = 'Bearer {}' + BASIC = "Basic {}" + BEARER = "Bearer {}" class ContentType(Enum): """Http Content Types.""" - JSON = 'application/json' - FORM_URL_ENCODED = 
'application/x-www-form-urlencoded' - PDF = 'application/pdf' + JSON = "application/json" + FORM_URL_ENCODED = "application/x-www-form-urlencoded" + PDF = "application/pdf" class NotificationType(Enum): """notification types.""" - ROLE_CHANGED = 'ROLE_CHANGED' - MEMBERSHIP_APPROVED = 'MEMBERSHIP_APPROVED' + ROLE_CHANGED = "ROLE_CHANGED" + MEMBERSHIP_APPROVED = "MEMBERSHIP_APPROVED" class CorpType(Enum): """Corp Types.""" - NR = 'NR' - CP = 'CP' # Coperative - TMP = 'TMP' # Incorporation Application - CTMP = 'CTMP' # Continuation In - RTMP = 'RTMP' # Registration - ATMP = 'ATMP' # Amalgamation - BC = 'BC' # Limited Company - BEN = 'BEN' # Benefit Company - ULC = 'ULC' # Unlimited Liability - CC = 'CC' # Community Contribution - C = 'C' # Continuation In BC - CBEN = 'CBEN' # Continuation In BEN - CCC = 'CCC' # Continuation In CC - CUL = 'CUL' # Continuation In ULC - GP = 'GP' # General Partnership - SP = 'SP' # Sole Proprietorship + NR = "NR" + CP = "CP" # Coperative + TMP = "TMP" # Incorporation Application + CTMP = "CTMP" # Continuation In + RTMP = "RTMP" # Registration + ATMP = "ATMP" # Amalgamation + BC = "BC" # Limited Company + BEN = "BEN" # Benefit Company + ULC = "ULC" # Unlimited Liability + CC = "CC" # Community Contribution + C = "C" # Continuation In BC + CBEN = "CBEN" # Continuation In BEN + CCC = "CCC" # Continuation In CC + CUL = "CUL" # Continuation In ULC + GP = "GP" # General Partnership + SP = "SP" # Sole Proprietorship class ProductTypeCode(Enum): """Product Type code.""" - INTERNAL = 'INTERNAL' - PARTNER = 'PARTNER' + INTERNAL = "INTERNAL" + PARTNER = "PARTNER" class RequiredAction(Enum): """Keycloak required actions.""" - VERIFY_EMAIL = 'VERIFY_EMAIL' - UPDATE_PROFILE = 'UPDATE_PROFILE' - CONFIGURE_TOTP = 'CONFIGURE_TOTP' - UPDATE_PASSWORD = 'UPDATE_PASSWORD' + VERIFY_EMAIL = "VERIFY_EMAIL" + UPDATE_PROFILE = "UPDATE_PROFILE" + CONFIGURE_TOTP = "CONFIGURE_TOTP" + UPDATE_PASSWORD = "UPDATE_PASSWORD" class PaymentMethod(Enum): """Payment types.""" 
- CREDIT_CARD = 'CC' - BCOL = 'DRAWDOWN' - DIRECT_PAY = 'DIRECT_PAY' - ONLINE_BANKING = 'ONLINE_BANKING' - PAD = 'PAD' - EJV = 'EJV' - EFT = 'EFT' + CREDIT_CARD = "CC" + BCOL = "DRAWDOWN" + DIRECT_PAY = "DIRECT_PAY" + ONLINE_BANKING = "ONLINE_BANKING" + PAD = "PAD" + EJV = "EJV" + EFT = "EFT" class PaymentAccountStatus(Enum): """Payment types.""" - CREATED = 'CREATED' - PENDING = 'PENDING' - FAILED = 'FAILED' + CREATED = "CREATED" + PENDING = "PENDING" + FAILED = "FAILED" class OrgType(Enum): """Org types.""" - PREMIUM = 'PREMIUM' - BASIC = 'BASIC' - STAFF = 'STAFF' - SBC_STAFF = 'SBC_STAFF' + PREMIUM = "PREMIUM" + BASIC = "BASIC" + STAFF = "STAFF" + SBC_STAFF = "SBC_STAFF" class DocumentType(Enum): """Document types.""" - TERMS_OF_USE = 'termsofuse' - TERMS_OF_USE_DIRECTOR_SEARCH = 'termsofuse_directorsearch' - TERMS_OF_USE_GOVM = 'termsofuse_govm' - AFFIDAVIT = 'affidavit' - TERMS_OF_USE_PAD = 'termsofuse_pad' + TERMS_OF_USE = "termsofuse" + TERMS_OF_USE_DIRECTOR_SEARCH = "termsofuse_directorsearch" + TERMS_OF_USE_GOVM = "termsofuse_govm" + AFFIDAVIT = "affidavit" + TERMS_OF_USE_PAD = "termsofuse_pad" class NRStatus(Enum): """NR statuses.""" - APPROVED = 'APPROVED' - CONDITIONAL = 'CONDITIONAL' - DRAFT = 'DRAFT' - CONSUMED = 'CONSUMED' - INPROGRESS = 'INPROGRESS' + APPROVED = "APPROVED" + CONDITIONAL = "CONDITIONAL" + DRAFT = "DRAFT" + CONSUMED = "CONSUMED" + INPROGRESS = "INPROGRESS" class NRNameStatus(Enum): """NR name statuses.""" - APPROVED = 'APPROVED' - CONDITION = 'CONDITION' + APPROVED = "APPROVED" + CONDITION = "CONDITION" class AffidavitStatus(Enum): """Affidavit statuses.""" - PENDING = 'PENDING' - APPROVED = 'APPROVED' - REJECTED = 'REJECTED' - INACTIVE = 'INACTIVE' + PENDING = "PENDING" + APPROVED = "APPROVED" + REJECTED = "REJECTED" + INACTIVE = "INACTIVE" class AccessType(Enum): """Access Types.""" - REGULAR = 'REGULAR' - REGULAR_BCEID = 'REGULAR_BCEID' - EXTRA_PROVINCIAL = 'EXTRA_PROVINCIAL' - ANONYMOUS = 'ANONYMOUS' - GOVM = 'GOVM' # for govt 
ministry - GOVN = 'GOVN' # for govt non-ministry + REGULAR = "REGULAR" + REGULAR_BCEID = "REGULAR_BCEID" + EXTRA_PROVINCIAL = "EXTRA_PROVINCIAL" + ANONYMOUS = "ANONYMOUS" + GOVM = "GOVM" # for govt ministry + GOVN = "GOVN" # for govt non-ministry class Status(Enum): @@ -170,175 +170,176 @@ class UserStatus(Enum): class OrgStatus(Enum): """User Membership status.""" - ACTIVE = 'ACTIVE' - INACTIVE = 'INACTIVE' - REJECTED = 'REJECTED' - PENDING_ACTIVATION = 'PENDING_ACTIVATION' - NSF_SUSPENDED = 'NSF_SUSPENDED' - SUSPENDED = 'SUSPENDED' # this is basically staff suspended for now - PENDING_INVITE_ACCEPT = 'PENDING_INVITE_ACCEPT' # staff invited user and waiting for account creation from user. - PENDING_STAFF_REVIEW = 'PENDING_STAFF_REVIEW' # user created , staff need to approve. + ACTIVE = "ACTIVE" + INACTIVE = "INACTIVE" + REJECTED = "REJECTED" + PENDING_ACTIVATION = "PENDING_ACTIVATION" + NSF_SUSPENDED = "NSF_SUSPENDED" + SUSPENDED = "SUSPENDED" # this is basically staff suspended for now + PENDING_INVITE_ACCEPT = "PENDING_INVITE_ACCEPT" # staff invited user and waiting for account creation from user. + PENDING_STAFF_REVIEW = "PENDING_STAFF_REVIEW" # user created , staff need to approve. 
class ProductSubscriptionStatus(Enum): """Product Subscription status.""" - ACTIVE = 'ACTIVE' - INACTIVE = 'INACTIVE' - REJECTED = 'REJECTED' - PENDING_STAFF_REVIEW = 'PENDING_STAFF_REVIEW' - NOT_SUBSCRIBED = 'NOT_SUBSCRIBED' - SUSPENDED = 'SUSPENDED' # this is basically staff suspended for now + ACTIVE = "ACTIVE" + INACTIVE = "INACTIVE" + REJECTED = "REJECTED" + PENDING_STAFF_REVIEW = "PENDING_STAFF_REVIEW" + NOT_SUBSCRIBED = "NOT_SUBSCRIBED" + SUSPENDED = "SUSPENDED" # this is basically staff suspended for now class SuspensionReasonCode(Enum): """Suspension Reason Code for suspending an account.""" - OWNER_CHANGE = 'Account Ownership Change' - DISPUTE = 'Account Ownership Dispute' - COURT_ORDER = 'Court Order' - FRAUDULENT = 'Fraudulent Activity' - OVERDUE_EFT = 'Overdue EFT Payments' + OWNER_CHANGE = "Account Ownership Change" + DISPUTE = "Account Ownership Dispute" + COURT_ORDER = "Court Order" + FRAUDULENT = "Fraudulent Activity" + OVERDUE_EFT = "Overdue EFT Payments" class InvitationType(Enum): """Invitation type.""" - GOVM = 'GOVM' # Used to indicate an anonymous account invitation - DIRECTOR_SEARCH = 'DIRECTOR_SEARCH' # Used to indicate an anonymous account invitation - STANDARD = 'STANDARD' # Used to indicate the standard email invite with admin approval + GOVM = "GOVM" # Used to indicate an anonymous account invitation + DIRECTOR_SEARCH = "DIRECTOR_SEARCH" # Used to indicate an anonymous account invitation + STANDARD = "STANDARD" # Used to indicate the standard email invite with admin approval class AffiliationInvitationType(Enum): """Affiliation Invitation type.""" - EMAIL = 'EMAIL' - REQUEST = 'REQUEST' # Used to indicate an affiliation invitation initiated through Access Request modal + EMAIL = "EMAIL" + REQUEST = "REQUEST" # Used to indicate an affiliation invitation initiated through Access Request modal @classmethod def from_value(cls, value): """Return instance from value of the enum.""" - return \ - AffiliationInvitationType(value) if value in 
cls._value2member_map_ else None # pylint: disable=no-member + return ( + AffiliationInvitationType(value) if value in cls._value2member_map_ else None + ) # pylint: disable=no-member class IdpHint(Enum): """IdpHint for user login.""" - BCROS = 'bcros' - BCEID = 'bceid' + BCROS = "bcros" + BCEID = "bceid" class InvitationStatus(Enum): """Invitation statuses.""" - ACCEPTED = 'ACCEPTED' - PENDING = 'PENDING' - EXPIRED = 'EXPIRED' - FAILED = 'FAILED' + ACCEPTED = "ACCEPTED" + PENDING = "PENDING" + EXPIRED = "EXPIRED" + FAILED = "FAILED" class LoginSource(Enum): """Login source values.""" - PASSCODE = 'PASSCODE' - BCSC = 'BCSC' - BCEID = 'BCEID' - STAFF = 'IDIR' - BCROS = 'BCROS' - API_GW = 'API_GW' - IDIR = 'IDIR' + PASSCODE = "PASSCODE" + BCSC = "BCSC" + BCEID = "BCEID" + STAFF = "IDIR" + BCROS = "BCROS" + API_GW = "API_GW" + IDIR = "IDIR" class ProductCode(Enum): """Product code.""" - BUSINESS = 'BUSINESS' - BUSINESS_SEARCH = 'BUSINESS_SEARCH' - VS = 'VS' - BCA = 'BCA' - PPR = 'PPR' - DIR_SEARCH = 'DIR_SEARCH' - NAMES_REQUEST = 'NRO' - MHR = 'MHR' - MHR_QSLN = 'MHR_QSLN' # Qualified Supplier - Lawyers and Notaries - MHR_QSHM = 'MHR_QSHM' # Qualified Supplier - Home Manufacturers - MHR_QSHD = 'MHR_QSHD' # Qualified Supplier - Home Dealers - NDS = 'NDS' + BUSINESS = "BUSINESS" + BUSINESS_SEARCH = "BUSINESS_SEARCH" + VS = "VS" + BCA = "BCA" + PPR = "PPR" + DIR_SEARCH = "DIR_SEARCH" + NAMES_REQUEST = "NRO" + MHR = "MHR" + MHR_QSLN = "MHR_QSLN" # Qualified Supplier - Lawyers and Notaries + MHR_QSHM = "MHR_QSHM" # Qualified Supplier - Home Manufacturers + MHR_QSHD = "MHR_QSHD" # Qualified Supplier - Home Dealers + NDS = "NDS" class TaskRelationshipType(Enum): """Task relationship type.""" - ORG = 'ORG' # Task related to Org staff review - AFFIDAVIT = 'AFFIDAVIT' - PRODUCT = 'PRODUCT' - USER = 'USER' + ORG = "ORG" # Task related to Org staff review + AFFIDAVIT = "AFFIDAVIT" + PRODUCT = "PRODUCT" + USER = "USER" class TaskStatus(Enum): """Task relationship type.""" - OPEN = 
'OPEN' # Open Task - needs to be taken action - COMPLETED = 'COMPLETED' # Task has been acted upon - HOLD = 'HOLD' - CLOSED = 'CLOSED' + OPEN = "OPEN" # Open Task - needs to be taken action + COMPLETED = "COMPLETED" # Task has been acted upon + HOLD = "HOLD" + CLOSED = "CLOSED" class TaskRelationshipStatus(Enum): """Task Relationship status.""" - ACTIVE = 'ACTIVE' - INACTIVE = 'INACTIVE' - REJECTED = 'REJECTED' - PENDING_ACTIVATION = 'PENDING_ACTIVATION' - NSF_SUSPENDED = 'NSF_SUSPENDED' - SUSPENDED = 'SUSPENDED' # this is basically staff suspended for now - PENDING_INVITE_ACCEPT = 'PENDING_INVITE_ACCEPT' # staff invited user and waiting for account creation from user. - PENDING_STAFF_REVIEW = 'PENDING_STAFF_REVIEW' # user created , staff need to approve. + ACTIVE = "ACTIVE" + INACTIVE = "INACTIVE" + REJECTED = "REJECTED" + PENDING_ACTIVATION = "PENDING_ACTIVATION" + NSF_SUSPENDED = "NSF_SUSPENDED" + SUSPENDED = "SUSPENDED" # this is basically staff suspended for now + PENDING_INVITE_ACCEPT = "PENDING_INVITE_ACCEPT" # staff invited user and waiting for account creation from user. + PENDING_STAFF_REVIEW = "PENDING_STAFF_REVIEW" # user created , staff need to approve. 
class TaskTypePrefix(Enum): """Task Type prefix to be appended to type column while saving a task.""" - NEW_ACCOUNT_STAFF_REVIEW = 'New Account' - GOVM_REVIEW = 'GovM' - BCEID_ADMIN = 'BCeID Admin' - GOVN_REVIEW = 'GovN' + NEW_ACCOUNT_STAFF_REVIEW = "New Account" + GOVM_REVIEW = "GovM" + BCEID_ADMIN = "BCeID Admin" + GOVN_REVIEW = "GovN" class TaskAction(Enum): """Task action.""" - AFFIDAVIT_REVIEW = 'AFFIDAVIT_REVIEW' - ACCOUNT_REVIEW = 'ACCOUNT_REVIEW' - PRODUCT_REVIEW = 'PRODUCT_REVIEW' - QUALIFIED_SUPPLIER_REVIEW = 'QUALIFIED_SUPPLIER_REVIEW' + AFFIDAVIT_REVIEW = "AFFIDAVIT_REVIEW" + ACCOUNT_REVIEW = "ACCOUNT_REVIEW" + PRODUCT_REVIEW = "PRODUCT_REVIEW" + QUALIFIED_SUPPLIER_REVIEW = "QUALIFIED_SUPPLIER_REVIEW" class ActivityAction(Enum): """Different actions in an activity.""" - INVITE_TEAM_MEMBER = 'INVITE_TEAM_MEMBER' - APPROVE_TEAM_MEMBER = 'APPROVE_TEAM_MEMBER' - REMOVE_TEAM_MEMBER = 'REMOVE_TEAM_MEMBER' - RESET_2FA = 'RESET_2FA' - PAYMENT_INFO_CHANGE = 'PAYMENT_INFO_CHANGE' - CREATE_AFFILIATION = 'CREATE_AFFILIATION' - REMOVE_AFFILIATION = 'REMOVE_AFFILIATION' - ACCOUNT_NAME_CHANGE = 'ACCOUNT_NAME_CHANGE' - ACCOUNT_ADDRESS_CHANGE = 'ACCOUNT_ADDRESS_CHANGE' - AUTHENTICATION_METHOD_CHANGE = 'AUTHENTICATION_METHOD_CHANGE' - ACCOUNT_SUSPENSION = 'ACCOUNT_SUSPENSION' - ADD_PRODUCT_AND_SERVICE = 'ADD_PRODUCT_AND_SERVICE' + INVITE_TEAM_MEMBER = "INVITE_TEAM_MEMBER" + APPROVE_TEAM_MEMBER = "APPROVE_TEAM_MEMBER" + REMOVE_TEAM_MEMBER = "REMOVE_TEAM_MEMBER" + RESET_2FA = "RESET_2FA" + PAYMENT_INFO_CHANGE = "PAYMENT_INFO_CHANGE" + CREATE_AFFILIATION = "CREATE_AFFILIATION" + REMOVE_AFFILIATION = "REMOVE_AFFILIATION" + ACCOUNT_NAME_CHANGE = "ACCOUNT_NAME_CHANGE" + ACCOUNT_ADDRESS_CHANGE = "ACCOUNT_ADDRESS_CHANGE" + AUTHENTICATION_METHOD_CHANGE = "AUTHENTICATION_METHOD_CHANGE" + ACCOUNT_SUSPENSION = "ACCOUNT_SUSPENSION" + ADD_PRODUCT_AND_SERVICE = "ADD_PRODUCT_AND_SERVICE" class PatchActions(Enum): """Patch Actions.""" - UPDATE_STATUS = 'updateStatus' - UPDATE_ACCESS_TYPE 
= 'updateAccessType' - UPDATE_API_ACCESS = 'updateApiAccess' + UPDATE_STATUS = "updateStatus" + UPDATE_ACCESS_TYPE = "updateAccessType" + UPDATE_API_ACCESS = "updateApiAccess" @classmethod def from_value(cls, value): @@ -349,28 +350,28 @@ def from_value(cls, value): class KeycloakGroupActions(Enum): """Keycloak group actions.""" - ADD_TO_GROUP = 'ADD_TO_GROUP' - REMOVE_FROM_GROUP = 'REMOVE_FROM_GROUP' + ADD_TO_GROUP = "ADD_TO_GROUP" + REMOVE_FROM_GROUP = "REMOVE_FROM_GROUP" class NRActionCodes(Enum): """Name Request Action Codes.""" - AMALGAMATE = 'AML' - ASSUMED = 'ASSUMED' # FUTURE: should be AS (as in LEAR)? - CHANGE_NAME = 'CHG' - CONVERSION = 'CNV' # aka Alteration - DBA = 'DBA' # doing business as - MOVE = 'MVE' # continuation in - NEW_BUSINESS = 'NEW' # incorporate or register - RESTORE = 'REH' # restore or reinstate - RENEW = 'REN' # restore with new name request - RESTORATION = 'REST' # FUTURE: unused? delete? - RESUBMIT = 'RESUBMIT' # FUTURE: unused? delete? + AMALGAMATE = "AML" + ASSUMED = "ASSUMED" # FUTURE: should be AS (as in LEAR)? + CHANGE_NAME = "CHG" + CONVERSION = "CNV" # aka Alteration + DBA = "DBA" # doing business as + MOVE = "MVE" # continuation in + NEW_BUSINESS = "NEW" # incorporate or register + RESTORE = "REH" # restore or reinstate + RENEW = "REN" # restore with new name request + RESTORATION = "REST" # FUTURE: unused? delete? + RESUBMIT = "RESUBMIT" # FUTURE: unused? delete? class QueueSources(Enum): """Queue sources for PAY.""" - AUTH_API = 'auth-api' - AUTH_QUEUE = 'auth-queue' + AUTH_API = "auth-api" + AUTH_QUEUE = "auth-queue" diff --git a/auth-api/src/auth_api/utils/error.py b/auth-api/src/auth_api/utils/error.py new file mode 100644 index 0000000000..07ea997578 --- /dev/null +++ b/auth-api/src/auth_api/utils/error.py @@ -0,0 +1,68 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. 
+# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+"""Core error handlers and custom exceptions.""" +from flask import jsonify +from structured_logging import StructuredLogging +from werkzeug.exceptions import HTTPException +from werkzeug.routing import RoutingException + +logger = StructuredLogging.get_logger() + + +def init_app(app): + """Initialize the error handlers for the Flask app instance.""" + app.register_error_handler(HTTPException, handle_http_error) + app.register_error_handler(Exception, handle_uncaught_error) + + +def handle_http_error(error): + """Handle HTTPExceptions.""" + # As werkzeug's routing exceptions also inherit from HTTPException, + # check for those and allow them to return with redirect responses. + if isinstance(error, RoutingException): + return error + + response = jsonify({"message": error.description}) + response.status_code = error.code + return response + + +def handle_uncaught_error(error: Exception): # pylint: disable=unused-argument + """Handle any uncaught exceptions.""" + + error_msg = f"Uncaught exception {error}" + logger.error(error_msg) + response = jsonify({"message": "Internal server error"}) + response.status_code = 500 + return response diff --git a/auth-api/src/auth_api/utils/notifications.py b/auth-api/src/auth_api/utils/notifications.py index 9780fd3ce9..75ef001ae3 100644 --- a/auth-api/src/auth_api/utils/notifications.py +++ b/auth-api/src/auth_api/utils/notifications.py @@ -18,14 +18,12 @@ from typing import Optional from sbc_common_components.utils.enums import QueueMessageTypes + from auth_api.models import ProductCode as ProductCodeModel from auth_api.models import ProductSubscription as ProductSubscriptionModel from auth_api.utils.enums import ProductCode, ProductSubscriptionStatus -DETAILED_MHR_NOTIFICATIONS = (ProductCode.MHR_QSLN.value, - ProductCode.MHR_QSHD.value, - ProductCode.MHR_QSHM.value - ) +DETAILED_MHR_NOTIFICATIONS = (ProductCode.MHR_QSLN.value, ProductCode.MHR_QSHD.value, ProductCode.MHR_QSHM.value) @dataclass @@ -56,27 +54,27 @@ class 
ProductNotificationInfo: class ProductSubjectDescriptor(Enum): """Notification product subject descriptor.""" - MHR_QUALIFIED_SUPPLIER = 'Manufactured Home Registry Qualified Supplier' + MHR_QUALIFIED_SUPPLIER = "Manufactured Home Registry Qualified Supplier" # e.g. You've been approved for {{MHR_QUALIFIED_SUPPLIER}} access to... class ProductAccessDescriptor(Enum): """Notification product access descriptor.""" - MHR_QUALIFIED_SUPPLIER = 'Qualified Supplier' + MHR_QUALIFIED_SUPPLIER = "Qualified Supplier" # e.g. You've been approved for Qualified Supplier access to {{MHR}}. class ProductCategoryDescriptor(Enum): """Notification product category descriptor.""" - MHR = 'the Manufactured Home Registry' + MHR = "the Manufactured Home Registry" class NotificationAttachmentType(Enum): """Notification attachment type.""" - MHR_QS = 'QUALIFIED_SUPPLIER' + MHR_QS = "QUALIFIED_SUPPLIER" def get_product_notification_type(product_notification_info: ProductNotificationInfo): @@ -133,22 +131,19 @@ def get_product_notification_data(product_notification_info: ProductNotification def get_default_product_notification_data(product_model: ProductCodeModel, recipient_emails: str): """Get the default product notification data.""" - data = { - 'productName': product_model.description, - 'emailAddresses': recipient_emails - } + data = {"productName": product_model.description, "emailAddresses": recipient_emails} return data def get_mhr_qs_approval_data(product_model: ProductCodeModel, recipient_emails: str, is_reapproved: bool = False): """Get the mhr qualified supplier product approval notification data.""" data = { - 'subjectDescriptor': ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'productAccessDescriptor': ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'categoryDescriptor': ProductCategoryDescriptor.MHR.value, - 'isReapproved': is_reapproved, - 'productName': product_model.description, - 'emailAddresses': recipient_emails + "subjectDescriptor": 
ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "productAccessDescriptor": ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "categoryDescriptor": ProductCategoryDescriptor.MHR.value, + "isReapproved": is_reapproved, + "productName": product_model.description, + "emailAddresses": recipient_emails, } return data @@ -156,14 +151,14 @@ def get_mhr_qs_approval_data(product_model: ProductCodeModel, recipient_emails: def get_mhr_qs_rejected_data(product_model: ProductCodeModel, recipient_emails: str, reject_reason: str = None): """Get the mhr qualified supplier product rejected notification data.""" data = { - 'subjectDescriptor': ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'productAccessDescriptor': ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'accessDisclaimer': True, - 'categoryDescriptor': ProductCategoryDescriptor.MHR.value, - 'productName': product_model.description, - 'emailAddresses': recipient_emails, - 'remarks': reject_reason, - 'contactType': get_notification_contact_type(product_model.code) + "subjectDescriptor": ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "productAccessDescriptor": ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "accessDisclaimer": True, + "categoryDescriptor": ProductCategoryDescriptor.MHR.value, + "productName": product_model.description, + "emailAddresses": recipient_emails, + "remarks": reject_reason, + "contactType": get_notification_contact_type(product_model.code), } return data @@ -171,18 +166,18 @@ def get_mhr_qs_rejected_data(product_model: ProductCodeModel, recipient_emails: def get_mhr_qs_confirmation_data(product_model: ProductCodeModel, recipient_emails: str): """Get the mhr qualified supplier product confirmation notification data.""" data = { - 'subjectDescriptor': ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'productAccessDescriptor': ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'categoryDescriptor': ProductCategoryDescriptor.MHR.value, - 
'productName': product_model.description, - 'emailAddresses': recipient_emails, - 'contactType': get_notification_contact_type(product_model.code), - 'hasAgreementAttachment': True, - 'attachmentType': NotificationAttachmentType.MHR_QS.value, + "subjectDescriptor": ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "productAccessDescriptor": ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "categoryDescriptor": ProductCategoryDescriptor.MHR.value, + "productName": product_model.description, + "emailAddresses": recipient_emails, + "contactType": get_notification_contact_type(product_model.code), + "hasAgreementAttachment": True, + "attachmentType": NotificationAttachmentType.MHR_QS.value, } return data def get_notification_contact_type(product_code: str) -> str: """Get the notification contact type for a product.""" - return 'BCOL' if product_code == ProductCode.MHR_QSLN.value else 'BCREG' + return "BCOL" if product_code == ProductCode.MHR_QSLN.value else "BCREG" diff --git a/auth-api/src/auth_api/utils/role_validator.py b/auth-api/src/auth_api/utils/role_validator.py index 7f4f29104b..9bc8ae8a13 100644 --- a/auth-api/src/auth_api/utils/role_validator.py +++ b/auth-api/src/auth_api/utils/role_validator.py @@ -17,12 +17,12 @@ A simple decorator to validate roles. 
""" from functools import wraps +from http import HTTPStatus from typing import Dict from flask import abort, g -from auth_api.auth import jwt as _jwt -from auth_api import status as http_status +from auth_api.utils.auth import jwt as _jwt def validate_roles(**role_args): @@ -38,16 +38,21 @@ def decorated(func): @_jwt.requires_auth def wrapper(*args, **kwargs): token_info: Dict = _get_token_info() or {} - user_roles: list = token_info.get('realm_access', None).get('roles', []) if 'realm_access' in token_info \ - else [] - allowed_roles = role_args.get('allowed_roles', []) - not_allowed_roles = role_args.get('not_allowed_roles', []) + user_roles: list = ( + token_info.get("realm_access", None).get("roles", []) if "realm_access" in token_info else [] + ) + allowed_roles = role_args.get("allowed_roles", []) + not_allowed_roles = role_args.get("not_allowed_roles", []) if len(set(allowed_roles).intersection(user_roles)) < 1: - abort(http_status.HTTP_401_UNAUTHORIZED, - description='Missing the role(s) required to access this endpoint') + abort( + HTTPStatus.UNAUTHORIZED, + description="Missing the role(s) required to access this endpoint", + ) if len(set(not_allowed_roles).intersection(user_roles)) > 0: - abort(http_status.HTTP_401_UNAUTHORIZED, - description='Not allowed role(s) present.Denied access to this endpoint') + abort( + HTTPStatus.UNAUTHORIZED, + description="Not allowed role(s) present.Denied access to this endpoint", + ) return func(*args, **kwargs) return wrapper @@ -56,4 +61,4 @@ def wrapper(*args, **kwargs): def _get_token_info() -> Dict: - return g.jwt_oidc_token_info if g and 'jwt_oidc_token_info' in g else {} + return g.jwt_oidc_token_info if g and "jwt_oidc_token_info" in g else {} diff --git a/auth-api/src/auth_api/utils/roles.py b/auth-api/src/auth_api/utils/roles.py index dd7f42196b..79f839b8ac 100644 --- a/auth-api/src/auth_api/utils/roles.py +++ b/auth-api/src/auth_api/utils/roles.py @@ -20,44 +20,50 @@ class Role(Enum): """User Role.""" - 
VIEWER = 'view' - EDITOR = 'edit' - PUBLIC_USER = 'public_user' - ACCOUNT_HOLDER = 'account_holder' - GOV_ACCOUNT_USER = 'gov_account_user' - ANONYMOUS_USER = 'anonymous_user' - ACCOUNT_IDENTITY = 'account_identity' - MANAGE_EFT = 'manage_eft' - - SYSTEM = 'system' - TESTER = 'tester' - - STAFF = 'staff' - STAFF_VIEW_ACCOUNTS = 'view_accounts' - STAFF_MANAGE_ACCOUNTS = 'manage_accounts' - STAFF_SEARCH = 'search' - STAFF_CREATE_ACCOUNTS = 'create_accounts' - STAFF_MANAGE_BUSINESS = 'manage_business' - STAFF_SUSPEND_ACCOUNTS = 'suspend_accounts' + VIEWER = "view" + EDITOR = "edit" + PUBLIC_USER = "public_user" + ACCOUNT_HOLDER = "account_holder" + GOV_ACCOUNT_USER = "gov_account_user" + ANONYMOUS_USER = "anonymous_user" + ACCOUNT_IDENTITY = "account_identity" + MANAGE_EFT = "manage_eft" + + SYSTEM = "system" + TESTER = "tester" + + STAFF = "staff" + STAFF_VIEW_ACCOUNTS = "view_accounts" + STAFF_MANAGE_ACCOUNTS = "manage_accounts" + STAFF_SEARCH = "search" + STAFF_CREATE_ACCOUNTS = "create_accounts" + STAFF_MANAGE_BUSINESS = "manage_business" + STAFF_SUSPEND_ACCOUNTS = "suspend_accounts" # Membership types -STAFF = 'STAFF' -COORDINATOR = 'COORDINATOR' -ADMIN = 'ADMIN' -USER = 'USER' +STAFF = "STAFF" +COORDINATOR = "COORDINATOR" +ADMIN = "ADMIN" +USER = "USER" VALID_STATUSES = (Status.ACTIVE.value, Status.PENDING_APPROVAL.value, Status.PENDING_STAFF_REVIEW.value) -VALID_ORG_STATUSES = (OrgStatus.ACTIVE.value, OrgStatus.NSF_SUSPENDED.value, - OrgStatus.SUSPENDED.value, OrgStatus.PENDING_INVITE_ACCEPT.value, - OrgStatus.PENDING_STAFF_REVIEW.value) -VALID_SUBSCRIPTION_STATUSES = (ProductSubscriptionStatus.ACTIVE.value, - ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value, - ProductSubscriptionStatus.REJECTED.value) +VALID_ORG_STATUSES = ( + OrgStatus.ACTIVE.value, + OrgStatus.NSF_SUSPENDED.value, + OrgStatus.SUSPENDED.value, + OrgStatus.PENDING_INVITE_ACCEPT.value, + OrgStatus.PENDING_STAFF_REVIEW.value, +) +VALID_SUBSCRIPTION_STATUSES = ( + 
ProductSubscriptionStatus.ACTIVE.value, + ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value, + ProductSubscriptionStatus.REJECTED.value, +) CLIENT_ADMIN_ROLES = (COORDINATOR, ADMIN) CLIENT_AUTH_ROLES = (*CLIENT_ADMIN_ROLES, USER) ALL_ALLOWED_ROLES = (*CLIENT_AUTH_ROLES, STAFF) -EXCLUDED_FIELDS = ('status_code', 'type_code') +EXCLUDED_FIELDS = ("status_code", "type_code") PREMIUM_ORG_TYPES = (OrgType.PREMIUM.value, OrgType.SBC_STAFF.value, OrgType.STAFF.value) diff --git a/auth-api/src/auth_api/utils/run_version.py b/auth-api/src/auth_api/utils/run_version.py index 821edbb755..e4964f90fb 100644 --- a/auth-api/src/auth_api/utils/run_version.py +++ b/auth-api/src/auth_api/utils/run_version.py @@ -13,17 +13,19 @@ # limitations under the License. """Supply version and commit hash info.""" import os +from importlib.metadata import version -from auth_api.version import __version__ - -def _get_build_openshift_commit_hash(): - return os.getenv('OPENSHIFT_BUILD_COMMIT', None) +def _get_commit_hash(): + """Return the containers ref if present.""" + if (commit_hash := os.getenv("VCS_REF", None)) and commit_hash != "missing": + return commit_hash + return None def get_run_version(): """Return a formatted version string for this service.""" - commit_hash = _get_build_openshift_commit_hash() - if commit_hash: - return f'{__version__}-{commit_hash}' - return __version__ + ver = version(__name__[: __name__.find(".")]) + if commit_hash := _get_commit_hash(): + return f"{ver}-{commit_hash}" + return ver diff --git a/auth-api/src/auth_api/utils/user_context.py b/auth-api/src/auth_api/utils/user_context.py index 4b76bfd19c..a71dbd23ed 100644 --- a/auth-api/src/auth_api/utils/user_context.py +++ b/auth-api/src/auth_api/utils/user_context.py @@ -35,14 +35,15 @@ def __init__(self): """Return a User Context object.""" token_info: Dict = _get_token_info() or {} self._token_info = token_info - self._user_name: str = token_info.get('username', token_info.get('preferred_username', None)) - 
self._first_name: str = token_info.get('firstname', None) - self._last_name: str = token_info.get('lastname', None) + self._user_name: str = token_info.get("username", token_info.get("preferred_username", None)) + self._first_name: str = token_info.get("firstname", None) + self._last_name: str = token_info.get("lastname", None) self._bearer_token: str = _get_token() - self._roles: list = token_info.get('realm_access', None).get('roles', []) if 'realm_access' in token_info \ - else [] - self._sub: str = token_info.get('sub', None) - self._login_source: str = token_info.get('loginSource', None) + self._roles: list = ( + token_info.get("realm_access", None).get("roles", []) if "realm_access" in token_info else [] + ) + self._sub: str = token_info.get("sub", None) + self._login_source: str = token_info.get("loginSource", None) self._name: str = f"{token_info.get('firstname', None)} {token_info.get('lastname', None)}" @property @@ -108,14 +109,14 @@ def token_info(self) -> Dict: @property def account_id_claim(self) -> Dict: """Return the account id.""" - return _get_token_info().get('Account-Id', None) + return _get_token_info().get("Account-Id", None) @property def account_id(self) -> Dict: """Return the account id.""" - account_id = _get_token_info().get('Account-Id', None) + account_id = _get_token_info().get("Account-Id", None) if not account_id: - account_id = request.headers['Account-Id'] if request and 'Account-Id' in request.headers else None + account_id = request.headers["Account-Id"] if request and "Account-Id" in request.headers else None return account_id @property @@ -130,16 +131,16 @@ def user_context(function): @functools.wraps(function) def wrapper(*func_args, **func_kwargs): context = _get_context() - func_kwargs['user_context'] = context + func_kwargs["user_context"] = context return function(*func_args, **func_kwargs) return wrapper def _get_token_info() -> Dict: - return g.jwt_oidc_token_info if g and 'jwt_oidc_token_info' in g else {} + return 
g.jwt_oidc_token_info if g and "jwt_oidc_token_info" in g else {} def _get_token() -> str: - token: str = request.headers['Authorization'] if request and 'Authorization' in request.headers else None - return token.replace('Bearer ', '') if token else None + token: str = request.headers["Authorization"] if request and "Authorization" in request.headers else None + return token.replace("Bearer ", "") if token else None diff --git a/auth-api/src/auth_api/utils/util.py b/auth-api/src/auth_api/utils/util.py index 657413dd05..55ab98f274 100644 --- a/auth-api/src/auth_api/utils/util.py +++ b/auth-api/src/auth_api/utils/util.py @@ -22,18 +22,18 @@ import re import urllib +import humps from flask import current_app, request -from humps.main import camelize, decamelize def camelback2snake(camel_dict: dict): """Convert the passed dictionary's keys from camelBack case to snake_case.""" - return decamelize(camel_dict) + return humps.decamelize(camel_dict) def snake2camelback(snake_dict: dict): """Convert the passed dictionary's keys from snake_case to camelBack case.""" - return camelize(snake_dict) + return humps.camelize(snake_dict) class Singleton(type): @@ -50,34 +50,34 @@ def __call__(cls, *args, **kwargs): def digitify(payload: str) -> int: """Return the digits from the string.""" - return int(re.sub(r'\D', '', payload)) + return int(re.sub(r"\D", "", payload)) def escape_wam_friendly_url(param): """Return encoded/escaped url.""" - base64_org_name = base64.b64encode(bytes(param, encoding='utf-8')).decode('utf-8') - encode_org_name = urllib.parse.quote(base64_org_name, safe='') + base64_org_name = base64.b64encode(bytes(param, encoding="utf-8")).decode("utf-8") + encode_org_name = urllib.parse.quote(base64_org_name, safe="") return encode_org_name def mask_email(email: str) -> str: """Return masked email.""" if email: - parts = email.split('@') + parts = email.split("@") if len(parts) == 2: username, domain = parts - masked_username = username[:2] + '*' * (len(username) - 
2) - masked_domain = domain[:2] + '*' * (len(domain) - 2) - email = masked_username + '@' + masked_domain + masked_username = username[:2] + "*" * (len(username) - 2) + masked_domain = domain[:2] + "*" * (len(domain) - 2) + email = masked_username + "@" + masked_domain return email def get_request_environment(): """Return the environment corresponding to the user request.""" env = None - sandbox_host = current_app.config['AUTH_WEB_SANDBOX_HOST'] - if os.getenv('FLASK_ENV') == 'production' and sandbox_host in request.host_url: - env = 'sandbox' + sandbox_host = current_app.config["AUTH_WEB_SANDBOX_HOST"] + if os.getenv("FLASK_ENV") == "production" and sandbox_host in request.host_url: + env = "sandbox" return env @@ -85,4 +85,4 @@ def extract_numbers(input_string: str): """Extract numbers from an input string.""" if input_string is None: return None - return ''.join([char for char in input_string if char.isdigit()]) + return "".join([char for char in input_string if char.isdigit()]) diff --git a/auth-api/tests/conftest.py b/auth-api/tests/conftest.py index a2424108de..f6ad08b443 100644 --- a/auth-api/tests/conftest.py +++ b/auth-api/tests/conftest.py @@ -12,188 +12,173 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""Common setup and fixtures for the pytest suite used by this service.""" -from concurrent.futures import CancelledError import time +from concurrent.futures import CancelledError import pytest from flask_migrate import Migrate, upgrade from sqlalchemy import event, text +from sqlalchemy_utils import create_database, database_exists, drop_database from auth_api import create_app, setup_jwt_manager -from auth_api.auth import jwt as _jwt from auth_api.exceptions import BusinessException, Error from auth_api.models import db as _db +from auth_api.utils.auth import jwt as _jwt -def mock_token(config_id='', config_secret=''): +def mock_token(config_id="", config_secret=""): """Mock token generator.""" - return 'TOKEN....' + return "TOKEN...." -@pytest.fixture(scope='session') +@pytest.fixture(scope="session", autouse=True) def app(): """Return a session-wide application configured in TEST mode.""" - _app = create_app('testing') + _app = create_app("testing") return _app -@pytest.fixture(scope='function') +@pytest.fixture(scope="function", autouse=True) def app_request(): """Return a session-wide application configured in TEST mode.""" - _app = create_app('testing') + _app = create_app("testing") return _app -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def client(app): # pylint: disable=redefined-outer-name """Return a session-wide Flask test client.""" return app.test_client() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def jwt(): """Return a session-wide jwt manager.""" return _jwt -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def client_ctx(app): # pylint: disable=redefined-outer-name """Return session-wide Flask test client.""" with app.test_client() as _client: yield _client -@pytest.fixture(scope='session') +@pytest.fixture(scope="session", autouse=True) def db(app): # pylint: disable=redefined-outer-name, invalid-name - """Return a session-wide initialised database. 
- - Drops schema, and recreate. - """ + """Return a session-wide initialised database.""" with app.app_context(): - drop_schema_sql = """DROP SCHEMA public CASCADE; - CREATE SCHEMA public; - GRANT ALL ON SCHEMA public TO postgres; - GRANT ALL ON SCHEMA public TO public; - """ - - sess = _db.session() - sess.execute(drop_schema_sql) - sess.commit() - - # ############################################ - # There are 2 approaches, an empty database, or the same one that the app will use - # create the tables - # _db.create_all() - # or - # Use Alembic to load all of the DB revisions including supporting lookup data - # This is the path we'll use in auth_api!! - - # even though this isn't referenced directly, it sets up the internal configs that upgrade needs + if database_exists(_db.engine.url): + drop_database(_db.engine.url) + create_database(_db.engine.url) + _db.session().execute(text('SET TIME ZONE "UTC";')) Migrate(app, _db) upgrade() - return _db -@pytest.fixture(scope='function') -def session(app, db): # pylint: disable=redefined-outer-name, invalid-name +@pytest.fixture(scope="function") +def session(db, app): # pylint: disable=redefined-outer-name, invalid-name """Return a function-scoped session.""" with app.app_context(): - conn = db.engine.connect() - txn = conn.begin() - - options = dict(bind=conn, binds={}) - sess = db.create_scoped_session(options=options) - - # establish a SAVEPOINT just before beginning the test - # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint) - sess.begin_nested() - - @event.listens_for(sess(), 'after_transaction_end') - def restart_savepoint(sess2, trans): # pylint: disable=unused-variable - # Detecting whether this is indeed the nested transaction of the test - if trans.nested and not trans._parent.nested: # pylint: disable=protected-access - # Handle where test DOESN'T session.commit(), - sess2.expire_all() - sess.begin_nested() - - db.session = sess - - sql = text('select 1') - 
sess.execute(sql) - - yield sess - - # Cleanup - sess.remove() - # This instruction rollsback any commit that were executed in the tests. - txn.rollback() - conn.close() - - -@pytest.fixture(scope='session', autouse=True) + with db.engine.connect() as conn: + transaction = conn.begin() + sess = db._make_scoped_session(dict(bind=conn)) # pylint: disable=protected-access + # Establish SAVEPOINT (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint) + nested = sess.begin_nested() + old_session = db.session + db.session = sess + db.session.commit = nested.commit + db.session.rollback = nested.rollback + + @event.listens_for(sess, "after_transaction_end") + def restart_savepoint(sess2, trans): # pylint: disable=unused-variable + nonlocal nested + if trans.nested: + # Handle where test DOESN'T session.commit() + sess2.expire_all() + nested = sess.begin_nested() + # When using a SAVEPOINT via the Session.begin_nested() or Connection.begin_nested() methods, + # the transaction object returned must be used to commit or rollback the SAVEPOINT. 
+ # Calling the Session.commit() or Connection.commit() methods will always commit the + # outermost transaction; this is a SQLAlchemy 2.0 specific behavior that is + # reversed from the 1.x series + db.session = sess + db.session.commit = nested.commit + db.session.rollback = nested.rollback + + try: + yield db.session + finally: + db.session.remove() + transaction.rollback() + event.remove(sess, "after_transaction_end", restart_savepoint) + db.session = old_session + + +@pytest.fixture(scope="session", autouse=True) def auto(docker_services, app): """Spin up a keycloak instance and initialize jwt.""" - if app.config['USE_TEST_KEYCLOAK_DOCKER']: - docker_services.start('keycloak') - docker_services.wait_for_service('keycloak', 8081) + if app.config["USE_TEST_KEYCLOAK_DOCKER"]: + docker_services.start("keycloak") + docker_services.wait_for_service("keycloak", 8081, timeout=60.0) setup_jwt_manager(app, _jwt) - if app.config['USE_DOCKER_MOCK']: - docker_services.start('minio') - docker_services.start('notify') - docker_services.start('bcol') - docker_services.start('pay') - docker_services.start('proxy') - docker_services.wait_for_service('minio', 9000) - time.sleep(10) + if app.config["USE_DOCKER_MOCK"]: + docker_services.start("minio") + docker_services.start("notify") + docker_services.start("bcol") + docker_services.start("pay") + docker_services.start("proxy") + docker_services.wait_for_service("minio", 9000, timeout=60.0) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_compose_files(pytestconfig): """Get the docker-compose.yml absolute path.""" import os - return [ - os.path.join(str(pytestconfig.rootdir), 'tests/docker', 'docker-compose.yml') - ] + + return [os.path.join(str(pytestconfig.rootdir), "tests/docker", "docker-compose.yml")] @pytest.fixture() def auth_mock(monkeypatch): """Mock check_auth.""" - monkeypatch.setattr('auth_api.services.entity.check_auth', lambda *args, **kwargs: None) - 
monkeypatch.setattr('auth_api.services.org.check_auth', lambda *args, **kwargs: None) - monkeypatch.setattr('auth_api.services.invitation.check_auth', lambda *args, **kwargs: None) - monkeypatch.setattr('auth_api.services.affiliation_invitation.check_auth', lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.entity.check_auth", lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.org.check_auth", lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.invitation.check_auth", lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.affiliation_invitation.check_auth", lambda *args, **kwargs: None) @pytest.fixture() def notify_mock(monkeypatch): """Mock send_email.""" - monkeypatch.setattr('auth_api.services.invitation.send_email', lambda *args, **kwargs: None) - monkeypatch.setattr('auth_api.services.affiliation_invitation.send_email', lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.invitation.send_email", lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.affiliation_invitation.send_email", lambda *args, **kwargs: None) @pytest.fixture() def notify_org_mock(monkeypatch): """Mock send_email.""" - monkeypatch.setattr('auth_api.services.org.send_email', lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.org.send_email", lambda *args, **kwargs: None) @pytest.fixture() def keycloak_mock(monkeypatch): """Mock keycloak services.""" - monkeypatch.setattr('auth_api.services.keycloak.KeycloakService.join_account_holders_group', - lambda *args, **kwargs: None) - monkeypatch.setattr('auth_api.services.keycloak.KeycloakService.join_users_group', - lambda *args, **kwargs: None) - monkeypatch.setattr('auth_api.services.keycloak.KeycloakService.remove_from_account_holders_group', - lambda *args, **kwargs: None) - monkeypatch.setattr('auth_api.services.keycloak.KeycloakService.add_or_remove_product_keycloak_groups', - lambda *args, **kwargs: None) + 
monkeypatch.setattr( + "auth_api.services.keycloak.KeycloakService.join_account_holders_group", lambda *args, **kwargs: None + ) + monkeypatch.setattr("auth_api.services.keycloak.KeycloakService.join_users_group", lambda *args, **kwargs: None) + monkeypatch.setattr( + "auth_api.services.keycloak.KeycloakService.remove_from_account_holders_group", lambda *args, **kwargs: None + ) + monkeypatch.setattr( + "auth_api.services.keycloak.KeycloakService.add_or_remove_product_keycloak_groups", lambda *args, **kwargs: None + ) @pytest.fixture() @@ -203,8 +188,9 @@ def business_exception_mock(monkeypatch): def get_business(business_identifier, token): raise BusinessException(Error.AFFILIATION_INVITATION_BUSINESS_NOT_FOUND, None) - monkeypatch.setattr('auth_api.services.affiliation_invitation.AffiliationInvitation._get_business_details', - get_business) + monkeypatch.setattr( + "auth_api.services.affiliation_invitation.AffiliationInvitation._get_business_details", get_business + ) @pytest.fixture() @@ -212,36 +198,31 @@ def business_mock(monkeypatch): """Mock get business call.""" def get_business(business_identifier, token): - return { - 'business': { - 'identifier': 'CP0002103', - 'legalName': 'BarFoo, Inc.', - 'legalType': 'CP' - } - } + return {"business": {"identifier": "CP0002103", "legalName": "BarFoo, Inc.", "legalType": "CP"}} def get_businesses(business_identifiers, token): return [ { - 'identifier': 'CP0002103', - 'legalName': 'BarFoo, Inc.', - 'legalType': 'CP', - 'state': 'ACTIVE', + "identifier": "CP0002103", + "legalName": "BarFoo, Inc.", + "legalType": "CP", + "state": "ACTIVE", }, { - - 'identifier': 'CP0002104', - 'legalName': 'BarFooMeToo, Inc.', - 'legalType': 'CP', - 'state': 'ACTIVE', - } + "identifier": "CP0002104", + "legalName": "BarFooMeToo, Inc.", + "legalType": "CP", + "state": "ACTIVE", + }, ] - monkeypatch.setattr('auth_api.services.affiliation_invitation.AffiliationInvitation._get_business_details', - get_business) + monkeypatch.setattr( + 
"auth_api.services.affiliation_invitation.AffiliationInvitation._get_business_details", get_business + ) - monkeypatch.setattr('auth_api.services.affiliation_invitation.AffiliationInvitation._get_multiple_business_details', - get_businesses) + monkeypatch.setattr( + "auth_api.services.affiliation_invitation.AffiliationInvitation._get_multiple_business_details", get_businesses + ) @pytest.fixture() @@ -250,21 +231,13 @@ def nr_mock(monkeypatch): def get_nr(business_identifier): return { - 'applicants': { - 'emailAddress': 'test@test.com', - 'phoneNumber': '1112223333' - }, - 'names': [ - { - 'name': 'TEST INC..', - 'state': 'APPROVED' - } - ], - 'state': 'APPROVED', - 'requestTypeCd': 'BC' + "applicants": {"emailAddress": "test@test.com", "phoneNumber": "1112223333"}, + "names": [{"name": "TEST INC..", "state": "APPROVED"}], + "state": "APPROVED", + "requestTypeCd": "BC", } - monkeypatch.setattr('auth_api.services.affiliation.Affiliation._get_nr_details', get_nr) + monkeypatch.setattr("auth_api.services.affiliation.Affiliation._get_nr_details", get_nr) @pytest.fixture() @@ -273,20 +246,12 @@ def minio_mock(monkeypatch): def get_nr(business_identifier): return { - 'applicants': { - 'emailAddress': 'test@test.com', - 'phoneNumber': '1112223333' - }, - 'names': [ - { - 'name': 'TEST INC..', - 'state': 'APPROVED' - } - ], - 'state': 'APPROVED' + "applicants": {"emailAddress": "test@test.com", "phoneNumber": "1112223333"}, + "names": [{"name": "TEST INC..", "state": "APPROVED"}], + "state": "APPROVED", } - monkeypatch.setattr('auth_api.services.minio.MinioService._get_client', get_nr) + monkeypatch.setattr("auth_api.services.minio.MinioService._get_client", get_nr) @pytest.fixture() @@ -294,22 +259,13 @@ def staff_user_mock(monkeypatch): """Mock user_context.""" def token_info(): # pylint: disable=unused-argument; mocks of library methods - return { - 'username': 'staff user', - 'realm_access': { - 'roles': [ - 'staff', - 'edit', - 'create_accounts' - ] - } - } + return 
{"username": "staff user", "realm_access": {"roles": ["staff", "edit", "create_accounts"]}} def mock_auth(): # pylint: disable=unused-argument; mocks of library methods - return 'test' + return "test" - monkeypatch.setattr('auth_api.utils.user_context._get_token', mock_auth) - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', token_info) + monkeypatch.setattr("auth_api.utils.user_context._get_token", mock_auth) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", token_info) @pytest.fixture() @@ -317,21 +273,13 @@ def bceid_user_mock(monkeypatch): """Mock user_context.""" def token_info(): # pylint: disable=unused-argument; mocks of library methods - return { - 'username': 'CP1234567 user', - 'realm_access': { - 'roles': [ - 'edit', - 'create_accounts' - ] - } - } + return {"username": "CP1234567 user", "realm_access": {"roles": ["edit", "create_accounts"]}} def mock_auth(): # pylint: disable=unused-argument; mocks of library methods - return 'test' + return "test" - monkeypatch.setattr('auth_api.utils.user_context._get_token', mock_auth) - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', token_info) + monkeypatch.setattr("auth_api.utils.user_context._get_token", mock_auth) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", token_info) @pytest.fixture() @@ -339,27 +287,19 @@ def system_user_mock(monkeypatch): """Mock user_context.""" def token_info(): # pylint: disable=unused-argument; mocks of library methods - return { - 'username': 'staff user', - 'realm_access': { - 'roles': [ - 'staff', - 'edit', - 'system' - ] - } - } + return {"username": "staff user", "realm_access": {"roles": ["staff", "edit", "system"]}} def mock_auth(): # pylint: disable=unused-argument; mocks of library methods - return 'test' + return "test" - monkeypatch.setattr('auth_api.utils.user_context._get_token', mock_auth) - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', token_info) + 
monkeypatch.setattr("auth_api.utils.user_context._get_token", mock_auth) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", token_info) @pytest.fixture(autouse=True) def mock_pub_sub_call(mocker): """Mock pub sub call.""" + class PublisherMock: """Publisher Mock.""" @@ -368,6 +308,6 @@ def __init__(self, *args, **kwargs): def publish(self, *args, **kwargs): """Publish mock.""" - raise CancelledError('This is a mock') + raise CancelledError("This is a mock") - mocker.patch('google.cloud.pubsub_v1.PublisherClient', PublisherMock) + mocker.patch("google.cloud.pubsub_v1.PublisherClient", PublisherMock) diff --git a/auth-api/tests/unit/api/test_account.py b/auth-api/tests/unit/api/test_account.py index 63610167a4..7e2a0f536d 100644 --- a/auth-api/tests/unit/api/test_account.py +++ b/auth-api/tests/unit/api/test_account.py @@ -18,57 +18,69 @@ """ import copy +from http import HTTPStatus -from auth_api import status as http_status from auth_api.schemas import utils as schema_utils from tests.utilities.factory_scenarios import TestJwtClaims from tests.utilities.factory_utils import ( - TestOrgInfo, TestOrgTypeInfo, factory_auth_header, factory_membership_model, factory_org_model, - factory_product_model, factory_user_model) + TestOrgInfo, + TestOrgTypeInfo, + factory_auth_header, + factory_membership_model, + factory_org_model, + factory_product_model, + factory_user_model, +) def test_authorizations_for_account_returns_200(app, client, jwt, session): # pylint:disable=unused-argument """Assert authorizations for product returns 200.""" - product_code = 'PPR' + product_code = "PPR" user = factory_user_model() org = factory_org_model() factory_membership_model(user.id, org.id) claims = copy.deepcopy(TestJwtClaims.public_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/accounts/{org.id}/products/{product_code}/authorizations', 
- headers=headers, content_type='application/json') + rv = client.get( + f"/api/v1/accounts/{org.id}/products/{product_code}/authorizations", + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - assert len(rv.json.get('roles')) == 0 + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + assert len(rv.json.get("roles")) == 0 def test_authorizations_for_account_with_search_returns_200(client, jwt, session): # pylint:disable=unused-argument """Assert authorizations for product returns 200.""" - product_code = 'PPR' + product_code = "PPR" user = factory_user_model() org = factory_org_model() factory_membership_model(user.id, org.id) factory_product_model(org.id) claims = copy.deepcopy(TestJwtClaims.public_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/accounts/{org.id}/products/{product_code}/authorizations', - headers=headers, content_type='application/json') + rv = client.get( + f"/api/v1/accounts/{org.id}/products/{product_code}/authorizations", + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - assert len(rv.json.get('roles')) > 0 + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + assert len(rv.json.get("roles")) > 0 def test_authorizations_with_multiple_accounts_returns_200(client, jwt, session): # pylint:disable=unused-argument """Assert authorizations for product returns 200.""" - product_code = 'PPR' + product_code = "PPR" user = factory_user_model() org = factory_org_model() factory_membership_model(user.id, org.id) @@ -78,41 +90,50 @@ def 
test_authorizations_with_multiple_accounts_returns_200(client, jwt, session) factory_membership_model(user.id, org.id) claims = copy.deepcopy(TestJwtClaims.public_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/accounts/{org2.id}/products/{product_code}/authorizations', - headers=headers, content_type='application/json') + rv = client.get( + f"/api/v1/accounts/{org2.id}/products/{product_code}/authorizations", + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - assert len(rv.json.get('roles')) == 0 + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + assert len(rv.json.get("roles")) == 0 headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/accounts/{org.id}/products/{product_code}/authorizations', - headers=headers, content_type='application/json') + rv = client.get( + f"/api/v1/accounts/{org.id}/products/{product_code}/authorizations", + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - len(rv.json.get('roles')) > 0 + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + len(rv.json.get("roles")) > 0 def test_authorizations_for_extended_returns_200(app, client, jwt, session): # pylint:disable=unused-argument """Assert authorizations for product returns 200.""" - product_code = 'PPR' + product_code = "PPR" user = factory_user_model() org = factory_org_model() factory_membership_model(user.id, org.id) factory_product_model(org.id) claims = copy.deepcopy(TestJwtClaims.public_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = 
str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/accounts/{org.id}/products/{product_code}/authorizations?expanded=true', - headers=headers, content_type='application/json') - - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - assert len(rv.json.get('roles')) > 0 - assert rv.json.get('account').get('name') == org.name + rv = client.get( + f"/api/v1/accounts/{org.id}/products/{product_code}/authorizations?expanded=true", + headers=headers, + content_type="application/json", + ) + + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + assert len(rv.json.get("roles")) > 0 + assert rv.json.get("account").get("name") == org.name diff --git a/auth-api/tests/unit/api/test_activity_log.py b/auth-api/tests/unit/api/test_activity_log.py index c8d261f6c1..ca7291f81a 100644 --- a/auth-api/tests/unit/api/test_activity_log.py +++ b/auth-api/tests/unit/api/test_activity_log.py @@ -16,20 +16,25 @@ Test-Suite to ensure that the /Activity Log endpoint is working as expected. 
""" import copy +from http import HTTPStatus -from auth_api import status as http_status from auth_api.schemas import utils as schema_utils from auth_api.utils.enums import ActivityAction from tests.utilities.factory_scenarios import TestJwtClaims, TestUserInfo from tests.utilities.factory_utils import ( - factory_activity_log_model, factory_auth_header, factory_membership_model, factory_org_model, factory_user_model) + factory_activity_log_model, + factory_auth_header, + factory_membership_model, + factory_org_model, + factory_user_model, +) def test_fetch_log_no_content_no_org(client, jwt, session): # pylint:disable=unused-argument """Assert that the none can be fetched.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get('/api/v1/orgs/1/activity-logs', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/1/activity-logs", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK def test_fetch_activity_log(client, jwt, session): # pylint:disable=unused-argument @@ -38,37 +43,36 @@ def test_fetch_activity_log(client, jwt, session): # pylint:disable=unused-argu org = factory_org_model() factory_activity_log_model( - actor=user.id, - action=ActivityAction.APPROVE_TEAM_MEMBER.value, - item_name='Superb', - item_value='' + actor=user.id, action=ActivityAction.APPROVE_TEAM_MEMBER.value, item_name="Superb", item_value="" ) factory_activity_log_model( actor=user.id, action=ActivityAction.CREATE_AFFILIATION.value, org_id=org.id, - item_name='Great Business', - item_value='' + item_name="Great Business", + item_value="", ) factory_activity_log_model( actor=user.id, action=ActivityAction.REMOVE_AFFILIATION.value, org_id=org.id, - item_name='Must sleep', - item_value='Getting Late' + item_name="Must sleep", + item_value="Getting Late", ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = 
client.get(f'/api/v1/orgs/{org.id}/activity-logs', - headers=headers, content_type='application/json') + rv = client.get(f"/api/v1/orgs/{org.id}/activity-logs", headers=headers, content_type="application/json") activity_logs = rv.json - assert len(activity_logs.get('activityLogs')) == 2 - assert schema_utils.validate(activity_logs, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - - rv = client.get(f'/api/v1/orgs/{org.id}/activity-logs?action={ActivityAction.REMOVE_AFFILIATION.value}', - headers=headers, content_type='application/json') - assert len(rv.json.get('activityLogs')) == 1 + assert len(activity_logs.get("activityLogs")) == 2 + assert schema_utils.validate(activity_logs, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + + rv = client.get( + f"/api/v1/orgs/{org.id}/activity-logs?action={ActivityAction.REMOVE_AFFILIATION.value}", + headers=headers, + content_type="application/json", + ) + assert len(rv.json.get("activityLogs")) == 1 def test_fetch_activity_log_masking(client, jwt, session): # pylint:disable=unused-argument @@ -77,44 +81,34 @@ def test_fetch_activity_log_masking(client, jwt, session): # pylint:disable=unu org = factory_org_model() factory_membership_model(user.id, org.id) - factory_activity_log_model( - actor=user.id, - action=ActivityAction.CREATE_AFFILIATION.value, - org_id=org.id - ) + factory_activity_log_model(actor=user.id, action=ActivityAction.CREATE_AFFILIATION.value, org_id=org.id) user_with_token = TestUserInfo.user_staff_admin - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] staff_user = factory_user_model(TestUserInfo.user_staff_admin) - factory_activity_log_model( - actor=staff_user.id, - action=ActivityAction.REMOVE_AFFILIATION.value, - org_id=org.id - ) + factory_activity_log_model(actor=staff_user.id, action=ActivityAction.REMOVE_AFFILIATION.value, org_id=org.id) headers = 
factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get(f'/api/v1/orgs/{org.id}/activity-logs', - headers=headers, content_type='application/json') + rv = client.get(f"/api/v1/orgs/{org.id}/activity-logs", headers=headers, content_type="application/json") activity_logs = rv.json - assert len(activity_logs.get('activityLogs')) == 2 - assert schema_utils.validate(activity_logs, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - staff_actor = activity_logs.get('activityLogs')[0] - assert staff_actor.get('actor') == f'{staff_user.firstname} {staff_user.lastname}' + assert len(activity_logs.get("activityLogs")) == 2 + assert schema_utils.validate(activity_logs, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + staff_actor = activity_logs.get("activityLogs")[0] + assert staff_actor.get("actor") == f"{staff_user.firstname} {staff_user.lastname}" - user_actor = activity_logs.get('activityLogs')[1] - assert user_actor.get('actor') == f'{user.firstname} {user.lastname}' + user_actor = activity_logs.get("activityLogs")[1] + assert user_actor.get("actor") == f"{user.firstname} {user.lastname}" claims = copy.deepcopy(TestJwtClaims.public_account_holder_user.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/orgs/{org.id}/activity-logs', - headers=headers, content_type='application/json') + rv = client.get(f"/api/v1/orgs/{org.id}/activity-logs", headers=headers, content_type="application/json") activity_logs = rv.json - staff_actor = activity_logs.get('activityLogs')[0] - assert staff_actor.get('actor') == 'BC Registry Staff' + staff_actor = activity_logs.get("activityLogs")[0] + assert staff_actor.get("actor") == "BC Registry Staff" - user_actor = activity_logs.get('activityLogs')[1] - assert user_actor.get('actor') == f'{user.firstname} {user.lastname}' + user_actor = 
activity_logs.get("activityLogs")[1] + assert user_actor.get("actor") == f"{user.firstname} {user.lastname}" diff --git a/auth-api/tests/unit/api/test_affiliation_invitation.py b/auth-api/tests/unit/api/test_affiliation_invitation.py index 7ddd541649..9e88870e41 100644 --- a/auth-api/tests/unit/api/test_affiliation_invitation.py +++ b/auth-api/tests/unit/api/test_affiliation_invitation.py @@ -17,10 +17,10 @@ Test-Suite to ensure that the /affiliationsInvitations endpoint is working as expected. """ import json +from http import HTTPStatus import pytest -from auth_api import status as http_status from auth_api.schemas import utils as schema_utils from auth_api.services import AffiliationInvitation as AffiliationInvitationService from auth_api.services.keycloak import KeycloakService @@ -29,652 +29,807 @@ from tests.utilities.factory_scenarios import TestContactInfo, TestEntityInfo, TestJwtClaims, TestOrgInfo from tests.utilities.factory_utils import factory_affiliation_invitation, factory_auth_header - KEYCLOAK_SERVICE = KeycloakService() -@pytest.mark.parametrize('from_org_info, to_org_info, entity_info, role, claims', [ - (TestOrgInfo.affiliation_from_org, TestOrgInfo.affiliation_to_org, TestEntityInfo.entity_lear_mock, 'ADMIN', - TestJwtClaims.public_user_role), - (TestOrgInfo.affiliation_from_org, TestOrgInfo.affiliation_to_org, TestEntityInfo.entity_lear_mock, 'USER', - TestJwtClaims.public_user_role), - (TestOrgInfo.affiliation_from_org, TestOrgInfo.affiliation_to_org, TestEntityInfo.entity_lear_mock, 'COORDINATOR', - TestJwtClaims.public_user_role), - (TestOrgInfo.affiliation_from_org, TestOrgInfo.affiliation_to_org, TestEntityInfo.entity_lear_mock, 'ADMIN', - TestJwtClaims.public_bceid_user), - (TestOrgInfo.affiliation_from_org, TestOrgInfo.affiliation_to_org, TestEntityInfo.entity_lear_mock, 'USER', - TestJwtClaims.public_bceid_user), - (TestOrgInfo.affiliation_from_org, TestOrgInfo.affiliation_to_org, TestEntityInfo.entity_lear_mock, 'COORDINATOR', - 
TestJwtClaims.public_bceid_user) -]) -def test_add_affiliation_invitation(client, jwt, session, keycloak_mock, business_mock, - from_org_info, to_org_info, entity_info, role, - claims): # pylint:disable=unused-argument +@pytest.mark.parametrize( + "from_org_info, to_org_info, entity_info, role, claims", + [ + ( + TestOrgInfo.affiliation_from_org, + TestOrgInfo.affiliation_to_org, + TestEntityInfo.entity_lear_mock, + "ADMIN", + TestJwtClaims.public_user_role, + ), + ( + TestOrgInfo.affiliation_from_org, + TestOrgInfo.affiliation_to_org, + TestEntityInfo.entity_lear_mock, + "USER", + TestJwtClaims.public_user_role, + ), + ( + TestOrgInfo.affiliation_from_org, + TestOrgInfo.affiliation_to_org, + TestEntityInfo.entity_lear_mock, + "COORDINATOR", + TestJwtClaims.public_user_role, + ), + ( + TestOrgInfo.affiliation_from_org, + TestOrgInfo.affiliation_to_org, + TestEntityInfo.entity_lear_mock, + "ADMIN", + TestJwtClaims.public_bceid_user, + ), + ( + TestOrgInfo.affiliation_from_org, + TestOrgInfo.affiliation_to_org, + TestEntityInfo.entity_lear_mock, + "USER", + TestJwtClaims.public_bceid_user, + ), + ( + TestOrgInfo.affiliation_from_org, + TestOrgInfo.affiliation_to_org, + TestEntityInfo.entity_lear_mock, + "COORDINATOR", + TestJwtClaims.public_bceid_user, + ), + ], +) +def test_add_affiliation_invitation( + client, jwt, session, keycloak_mock, business_mock, from_org_info, to_org_info, entity_info, role, claims +): # pylint:disable=unused-argument """Assert that an affiliation invitation can be POSTed.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock, - from_org_info, - to_org_info, - entity_info, - claims) - - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + headers, 
from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock, from_org_info, to_org_info, entity_info, claims + ) + + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv_invitation.data) - assert rv_invitation.status_code == http_status.HTTP_201_CREATED - assert dictionary.get('token') is not None + assert rv_invitation.status_code == HTTPStatus.CREATED + assert dictionary.get("token") is not None result_json = rv_invitation.json - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] - assert result_json['fromOrg'] - assert result_json['fromOrg']['id'] == from_org_id - assert result_json['toOrg'] - assert result_json['toOrg']['id'] == to_org_id - assert result_json['businessIdentifier'] == business_identifier - assert result_json['status'] == 'PENDING' + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] + assert result_json["fromOrg"] + assert result_json["fromOrg"]["id"] == from_org_id + assert result_json["toOrg"] + assert result_json["toOrg"]["id"] == to_org_id + assert result_json["businessIdentifier"] == business_identifier + assert result_json["status"] == "PENDING" # Defaults to EMAIL affiliation invitation type - assert result_json['type'] == 'EMAIL' + assert result_json["type"] == "EMAIL" # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], result_json['recipientEmail']) - - -@pytest.mark.parametrize('from_org_info, entity_info, role, claims', [ - (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, 'ADMIN', TestJwtClaims.public_user_role), - (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, 'USER', TestJwtClaims.public_user_role), - 
(TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, 'COORDINATOR', TestJwtClaims.public_user_role), - (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, 'ADMIN', TestJwtClaims.public_bceid_user), - (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, 'USER', TestJwtClaims.public_bceid_user), - (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, 'COORDINATOR', TestJwtClaims.public_bceid_user) -]) -def test_add_affiliation_invitation_exclude_to_org(client, jwt, session, keycloak_mock, business_mock, - from_org_info, entity_info, role, - claims): # pylint:disable=unused-argument + assert_masked_email(TestContactInfo.contact1["email"], result_json["recipientEmail"]) + + +@pytest.mark.parametrize( + "from_org_info, entity_info, role, claims", + [ + (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, "ADMIN", TestJwtClaims.public_user_role), + (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, "USER", TestJwtClaims.public_user_role), + ( + TestOrgInfo.affiliation_from_org, + TestEntityInfo.entity_lear_mock, + "COORDINATOR", + TestJwtClaims.public_user_role, + ), + (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, "ADMIN", TestJwtClaims.public_bceid_user), + (TestOrgInfo.affiliation_from_org, TestEntityInfo.entity_lear_mock, "USER", TestJwtClaims.public_bceid_user), + ( + TestOrgInfo.affiliation_from_org, + TestEntityInfo.entity_lear_mock, + "COORDINATOR", + TestJwtClaims.public_bceid_user, + ), + ], +) +def test_add_affiliation_invitation_exclude_to_org( + client, jwt, session, keycloak_mock, business_mock, from_org_info, entity_info, role, claims +): # pylint:disable=unused-argument """Assert that an affiliation invitation can be POSTed without a to_org.""" - headers, from_org_id, to_org_id, \ - business_identifier = setup_affiliation_invitation_data(client=client, - jwt=jwt, - session=session, - keycloak_mock=keycloak_mock, - from_org_info=from_org_info, - 
entity_info=entity_info, - claims=claims) - - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=None, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client=client, + jwt=jwt, + session=session, + keycloak_mock=keycloak_mock, + from_org_info=from_org_info, + entity_info=entity_info, + claims=claims, + ) + + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=None, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv_invitation.data) - assert rv_invitation.status_code == http_status.HTTP_201_CREATED - assert dictionary.get('token') is not None + assert rv_invitation.status_code == HTTPStatus.CREATED + assert dictionary.get("token") is not None result_json = rv_invitation.json - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] - assert result_json['fromOrg'] - assert result_json['fromOrg']['id'] == from_org_id - assert not result_json.get('toOrg') - assert result_json['businessIdentifier'] == business_identifier - assert result_json['status'] == 'PENDING' + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] + assert result_json["fromOrg"] + assert result_json["fromOrg"]["id"] == from_org_id + assert not result_json.get("toOrg") + assert result_json["businessIdentifier"] == business_identifier + assert result_json["status"] == "PENDING" # Defaults to EMAIL affiliation invitation type - assert result_json['type'] == 'EMAIL' + assert result_json["type"] == "EMAIL" # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], result_json['recipientEmail']) + 
assert_masked_email(TestContactInfo.contact1["email"], result_json["recipientEmail"]) def test_affiliation_invitation_already_exists(client, jwt, session, keycloak_mock, business_mock): """Assert that POSTing an already existing affiliation invitation returns a 400.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') - - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation(from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') - - assert rv_invitation.status_code == http_status.HTTP_400_BAD_REQUEST + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) + + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) + + assert rv_invitation.status_code == HTTPStatus.BAD_REQUEST dictionary = json.loads(rv_invitation.data) - assert dictionary['message'] == 'The data you want to insert already exists.' + assert dictionary["message"] == "The data you want to insert already exists." 
def test_affiliation_invitation_business_not_found(client, jwt, session, keycloak_mock, business_exception_mock): """Assert that POSTing with a business identifier not found in LEAR raises the appropriate exception.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') - - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation(from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') - - assert rv_invitation.status_code == http_status.HTTP_400_BAD_REQUEST + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) + + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) + + assert rv_invitation.status_code == HTTPStatus.BAD_REQUEST dictionary = json.loads(rv_invitation.data) - assert dictionary['message'] == 'The business specified for the affiliation invitation could not be found.' 
- assert dictionary['code'] == 'AFFILIATION_INVITATION_BUSINESS_NOT_FOUND' + assert dictionary["message"] == "The business specified for the affiliation invitation could not be found." + assert dictionary["code"] == "AFFILIATION_INVITATION_BUSINESS_NOT_FOUND" def test_affiliation_invitation_already_exists_exclude_to_org(client, jwt, session, keycloak_mock, business_mock): """Assert that POSTing an already existing affiliation invitation returns a 400.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=None, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') - - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation(from_org_id=from_org_id, - to_org_id=None, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') - - assert rv_invitation.status_code == http_status.HTTP_400_BAD_REQUEST + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=None, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) + + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=None, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) + + assert rv_invitation.status_code == HTTPStatus.BAD_REQUEST dictionary = json.loads(rv_invitation.data) - assert dictionary['message'] == 
'The data you want to insert already exists.' + assert dictionary["message"] == "The data you want to insert already exists." def test_affiliation_invitation_missing_contact(client, jwt, session, keycloak_mock, business_mock): """Assert that creating an invitation with a missing contact email returns a 400.""" - headers, from_org_id, to_org_id, \ - business_identifier = setup_affiliation_invitation_data(client=client, - jwt=jwt, - session=session, - keycloak_mock=keycloak_mock, - create_contact=False) - - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=None, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client=client, jwt=jwt, session=session, keycloak_mock=keycloak_mock, create_contact=False + ) + + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=None, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) - assert rv_invitation.status_code == http_status.HTTP_400_BAD_REQUEST + assert rv_invitation.status_code == HTTPStatus.BAD_REQUEST dictionary = json.loads(rv_invitation.data) - assert dictionary['message'] == 'Business contact email not valid.' + assert dictionary["message"] == "Business contact email not valid." 
def test_delete_affiliation_invitation(client, jwt, session, keycloak_mock, business_mock): """Assert that an affiliation invitation can be deleted.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - rv_invitation = client.delete('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), - headers=headers, content_type='application/json') - assert rv_invitation.status_code == http_status.HTTP_200_OK + rv_invitation = client.delete( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + headers=headers, + content_type="application/json", + ) + assert rv_invitation.status_code == HTTPStatus.OK - rv_invitation = client.get('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), - headers=headers, content_type='application/json') - assert rv_invitation.status_code == http_status.HTTP_404_NOT_FOUND + rv_invitation = client.get( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + headers=headers, + content_type="application/json", + ) + assert rv_invitation.status_code == 
HTTPStatus.NOT_FOUND dictionary = json.loads(rv_invitation.data) - assert dictionary['message'] == 'The requested affiliation invitation could not be found.' + assert dictionary["message"] == "The requested affiliation invitation could not be found." def test_delete_accepted_affiliation_invitation(client, jwt, session, keycloak_mock, business_mock): """Assert that an accepted affiliation invitation can be deleted.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - affiliation_invitation_token = AffiliationInvitationService.generate_confirmation_token(affiliation_invitation_id, - from_org_id, - to_org_id, - business_identifier) + affiliation_invitation_token = AffiliationInvitationService.generate_confirmation_token( + affiliation_invitation_id, from_org_id, to_org_id, business_identifier + ) assert affiliation_invitation_token is not None # Accept invitation - rv_invitation = client.put('/api/v1/affiliationInvitations/{}/token/{}'.format(affiliation_invitation_id, - affiliation_invitation_token), - headers=headers, content_type='application/json') + 
rv_invitation = client.put( + "/api/v1/affiliationInvitations/{}/token/{}".format(affiliation_invitation_id, affiliation_invitation_token), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv_invitation.data) - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert dictionary['status'] == InvitationStatus.ACCEPTED.value + assert rv_invitation.status_code == HTTPStatus.OK + assert dictionary["status"] == InvitationStatus.ACCEPTED.value # Delete the accepted invitation - rv_invitation = client.delete('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), - headers=headers, content_type='application/json') - assert rv_invitation.status_code == http_status.HTTP_200_OK + rv_invitation = client.delete( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + headers=headers, + content_type="application/json", + ) + assert rv_invitation.status_code == HTTPStatus.OK - rv_invitation = client.get('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), - headers=headers, content_type='application/json') - assert rv_invitation.status_code == http_status.HTTP_404_NOT_FOUND + rv_invitation = client.get( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + headers=headers, + content_type="application/json", + ) + assert rv_invitation.status_code == HTTPStatus.NOT_FOUND dictionary = json.loads(rv_invitation.data) - assert dictionary['message'] == 'The requested affiliation invitation could not be found.' + assert dictionary["message"] == "The requested affiliation invitation could not be found." 
def test_add_affiliation_invitation_invalid(client, jwt, session, business_mock): """Assert that POSTing an invalid affiliation invitation returns a 400.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=None, - to_org_id=None, - business_identifier=None)), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps(factory_affiliation_invitation(from_org_id=None, to_org_id=None, business_identifier=None)), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_get_affiliation_invitation_by_id(client, jwt, session, keycloak_mock, business_mock): """Assert that an invitation can be retrieved.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - rv = 
client.get('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + headers=headers, + content_type="application/json", + ) result_json = rv.json - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - assert result_json['id'] == affiliation_invitation_id + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] + assert rv.status_code == HTTPStatus.OK + assert result_json["id"] == affiliation_invitation_id # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], result_json['recipientEmail']) + assert_masked_email(TestContactInfo.contact1["email"], result_json["recipientEmail"]) def test_update_affiliation_invitation(client, jwt, session, keycloak_mock, business_mock): """Assert that an affiliation invitation can be updated.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = 
invitation_dictionary["id"] updated_affiliation_invitation = {} - rv_invitation = client.patch('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), data=json.dumps( - updated_affiliation_invitation), - headers=headers, content_type='application/json') + rv_invitation = client.patch( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + data=json.dumps(updated_affiliation_invitation), + headers=headers, + content_type="application/json", + ) result_json = rv_invitation.json - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] + assert rv_invitation.status_code == HTTPStatus.OK + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] dictionary = json.loads(rv_invitation.data) - assert dictionary['status'] == 'PENDING' + assert dictionary["status"] == "PENDING" # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], result_json['recipientEmail']) + assert_masked_email(TestContactInfo.contact1["email"], result_json["recipientEmail"]) def test_update_affiliation_invitation_exclude_to_org(client, jwt, session, keycloak_mock, business_mock): """Assert that an affiliation invitation can be updated.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=None, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=None, 
business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] updated_affiliation_invitation = {} - rv_invitation = client.patch('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), data=json.dumps( - updated_affiliation_invitation), - headers=headers, content_type='application/json') + rv_invitation = client.patch( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + data=json.dumps(updated_affiliation_invitation), + headers=headers, + content_type="application/json", + ) result_json = rv_invitation.json - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] + assert rv_invitation.status_code == HTTPStatus.OK + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] dictionary = json.loads(rv_invitation.data) - assert dictionary['status'] == 'PENDING' + assert dictionary["status"] == "PENDING" # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], result_json['recipientEmail']) + assert_masked_email(TestContactInfo.contact1["email"], result_json["recipientEmail"]) def test_expire_affiliation_invitation(client, jwt, session, keycloak_mock, business_mock): """Assert that an affiliation invitation can be expired.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - 
business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - updated_affiliation_invitation = {'status': 'EXPIRED'} + updated_affiliation_invitation = {"status": "EXPIRED"} - rv_invitation = client.patch('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), data=json.dumps( - updated_affiliation_invitation), - headers=headers, content_type='application/json') + rv_invitation = client.patch( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + data=json.dumps(updated_affiliation_invitation), + headers=headers, + content_type="application/json", + ) result_json = rv_invitation.json - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] + assert rv_invitation.status_code == HTTPStatus.OK + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] dictionary = json.loads(rv_invitation.data) - assert dictionary['status'] == 'EXPIRED' + assert dictionary["status"] == "EXPIRED" # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], result_json['recipientEmail']) + assert_masked_email(TestContactInfo.contact1["email"], result_json["recipientEmail"]) def test_expire_affiliation_invitation_exclude_to_org(client, jwt, session, keycloak_mock, business_mock): """Assert that an affiliation invitation can be expired.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - 
session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=None, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=None, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - updated_affiliation_invitation = {'status': 'EXPIRED'} + updated_affiliation_invitation = {"status": "EXPIRED"} - rv_invitation = client.patch('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), data=json.dumps( - updated_affiliation_invitation), - headers=headers, content_type='application/json') + rv_invitation = client.patch( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + data=json.dumps(updated_affiliation_invitation), + headers=headers, + content_type="application/json", + ) result_json = rv_invitation.json - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] + assert rv_invitation.status_code == HTTPStatus.OK + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] dictionary = json.loads(rv_invitation.data) - assert dictionary['status'] == 'EXPIRED' + assert dictionary["status"] == "EXPIRED" # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], result_json['recipientEmail']) + 
assert_masked_email(TestContactInfo.contact1["email"], result_json["recipientEmail"]) def test_accept_affiliation_invitation(client, jwt, session, keycloak_mock, business_mock): """Assert that an affiliation invitation can be accepted.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - affiliation_invitation_token = AffiliationInvitationService.generate_confirmation_token(affiliation_invitation_id, - from_org_id, - to_org_id, - business_identifier) + affiliation_invitation_token = AffiliationInvitationService.generate_confirmation_token( + affiliation_invitation_id, from_org_id, to_org_id, business_identifier + ) assert affiliation_invitation_token is not None - rv_invitation = client.put('/api/v1/affiliationInvitations/{}/token/{}'.format(affiliation_invitation_id, - affiliation_invitation_token), - headers=headers, content_type='application/json') + rv_invitation = client.put( + "/api/v1/affiliationInvitations/{}/token/{}".format(affiliation_invitation_id, affiliation_invitation_token), + headers=headers, + content_type="application/json", + ) 
dictionary = json.loads(rv_invitation.data) - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert dictionary['status'] == 'ACCEPTED' + assert rv_invitation.status_code == HTTPStatus.OK + assert dictionary["status"] == "ACCEPTED" # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], dictionary['recipientEmail']) + assert_masked_email(TestContactInfo.contact1["email"], dictionary["recipientEmail"]) # Assert from org affiliation is created - rv_affiliations = client.get('/api/v1/orgs/{}/affiliations'.format(from_org_id), headers=headers) - assert rv_affiliations.status_code == http_status.HTTP_200_OK + rv_affiliations = client.get("/api/v1/orgs/{}/affiliations".format(from_org_id), headers=headers) + assert rv_affiliations.status_code == HTTPStatus.OK - assert schema_utils.validate(rv_affiliations.json, 'affiliations_response')[0] + assert schema_utils.validate(rv_affiliations.json, "affiliations_response")[0] affiliations = json.loads(rv_affiliations.data) assert affiliations is not None - assert len(affiliations['entities']) == 1 - assert affiliations['entities'][0]['businessIdentifier'] == business_identifier + assert len(affiliations["entities"]) == 1 + assert affiliations["entities"][0]["businessIdentifier"] == business_identifier # Assert to org affiliation is empty - rv_affiliations = client.get('/api/v1/orgs/{}/affiliations'.format(to_org_id), headers=headers) - assert rv_affiliations.status_code == http_status.HTTP_200_OK + rv_affiliations = client.get("/api/v1/orgs/{}/affiliations".format(to_org_id), headers=headers) + assert rv_affiliations.status_code == HTTPStatus.OK - assert schema_utils.validate(rv_affiliations.json, 'affiliations_response')[0] + assert schema_utils.validate(rv_affiliations.json, "affiliations_response")[0] affiliations = json.loads(rv_affiliations.data) assert affiliations is not None - assert len(affiliations['entities']) == 0 + assert len(affiliations["entities"]) == 0 def 
test_get_affiliation_invitations(client, jwt, session, keycloak_mock, business_mock): """Assert that affiliation invitations can be retrieved.""" - headers, from_org_id, to_org_id, business_identifier = \ - setup_affiliation_invitation_data(client, jwt, session, keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv_invitations = client.get('/api/v1/affiliationInvitations?fromOrgId={}'.format(from_org_id), - headers=headers, - content_type='application/json') + rv_invitations = client.get( + "/api/v1/affiliationInvitations?fromOrgId={}".format(from_org_id), + headers=headers, + content_type="application/json", + ) result_json = rv_invitations.json - assert rv_invitations.status_code == http_status.HTTP_200_OK - assert result_json['affiliationInvitations'] - assert len(result_json['affiliationInvitations']) == 1 + assert rv_invitations.status_code == HTTPStatus.OK + assert result_json["affiliationInvitations"] + assert len(result_json["affiliationInvitations"]) == 1 # Assert email is masked - assert_masked_email(TestContactInfo.contact1['email'], result_json['affiliationInvitations'][0]['recipientEmail']) + assert_masked_email(TestContactInfo.contact1["email"], result_json["affiliationInvitations"][0]["recipientEmail"]) def test_get_affiliation_invitations_deleted(client, jwt, session, 
keycloak_mock, business_mock): """Assert that affiliation invitations that are soft deleted are not returned.""" - headers, from_org_id, to_org_id, business_identifier = \ - setup_affiliation_invitation_data(client, jwt, session, keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - affiliation_invitation_token = AffiliationInvitationService.generate_confirmation_token(affiliation_invitation_id, - from_org_id, - to_org_id, - business_identifier) + affiliation_invitation_token = AffiliationInvitationService.generate_confirmation_token( + affiliation_invitation_id, from_org_id, to_org_id, business_identifier + ) assert affiliation_invitation_token is not None # Accept invitation - rv_invitation = client.put('/api/v1/affiliationInvitations/{}/token/{}'.format(affiliation_invitation_id, - affiliation_invitation_token), - headers=headers, content_type='application/json') + rv_invitation = client.put( + "/api/v1/affiliationInvitations/{}/token/{}".format(affiliation_invitation_id, affiliation_invitation_token), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv_invitation.data) - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert 
dictionary['status'] == InvitationStatus.ACCEPTED.value + assert rv_invitation.status_code == HTTPStatus.OK + assert dictionary["status"] == InvitationStatus.ACCEPTED.value # Delete invitation - soft delete - client.delete('/api/v1/affiliationInvitations/{}'.format(affiliation_invitation_id), - headers=headers, content_type='application/json') - assert rv_invitation.status_code == http_status.HTTP_200_OK + client.delete( + "/api/v1/affiliationInvitations/{}".format(affiliation_invitation_id), + headers=headers, + content_type="application/json", + ) + assert rv_invitation.status_code == HTTPStatus.OK # Confirm soft deleted invitation does not return on search with fromOrgId - rv_invitations = client.get('/api/v1/affiliationInvitations?fromOrgId={}'.format(from_org_id), - headers=headers, - content_type='application/json') + rv_invitations = client.get( + "/api/v1/affiliationInvitations?fromOrgId={}".format(from_org_id), + headers=headers, + content_type="application/json", + ) result_json = rv_invitations.json - assert rv_invitations.status_code == http_status.HTTP_200_OK - assert len(result_json['affiliationInvitations']) == 0 + assert rv_invitations.status_code == HTTPStatus.OK + assert len(result_json["affiliationInvitations"]) == 0 # Confirm soft deleted invitation does not return on search with orgId - rv_invitations = client.get('/api/v1/affiliationInvitations?orgId={}'.format(from_org_id), - headers=headers, - content_type='application/json') + rv_invitations = client.get( + "/api/v1/affiliationInvitations?orgId={}".format(from_org_id), headers=headers, content_type="application/json" + ) result_json = rv_invitations.json - assert rv_invitations.status_code == http_status.HTTP_200_OK - assert len(result_json['affiliationInvitations']) == 0 - - -def setup_affiliation_invitation_data(client, jwt, session, keycloak_mock, - from_org_info=TestOrgInfo.affiliation_from_org, - to_org_info=TestOrgInfo.affiliation_to_org, - entity_info=TestEntityInfo.entity_lear_mock, 
- claims=TestJwtClaims.public_user_role, - create_contact=True): # pylint:disable=unused-argument + assert rv_invitations.status_code == HTTPStatus.OK + assert len(result_json["affiliationInvitations"]) == 0 + + +def setup_affiliation_invitation_data( + client, + jwt, + session, + keycloak_mock, + from_org_info=TestOrgInfo.affiliation_from_org, + to_org_info=TestOrgInfo.affiliation_to_org, + entity_info=TestEntityInfo.entity_lear_mock, + claims=TestJwtClaims.public_user_role, + create_contact=True, +): # pylint:disable=unused-argument """Set up seed data for an affiliation invitation.""" headers = factory_auth_header(jwt=jwt, claims=claims) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv_from_org = client.post('/api/v1/orgs', data=json.dumps(from_org_info), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv_from_org = client.post( + "/api/v1/orgs", data=json.dumps(from_org_info), headers=headers, content_type="application/json" + ) # if you run into this, it's likely it's to a pay-api call. Make sure your secrets are correct. 
- assert rv_from_org.status_code != http_status.HTTP_500_INTERNAL_SERVER_ERROR + assert rv_from_org.status_code != HTTPStatus.INTERNAL_SERVER_ERROR - rv_to_org = client.post('/api/v1/orgs', data=json.dumps(to_org_info), - headers=headers, content_type='application/json') + rv_to_org = client.post( + "/api/v1/orgs", data=json.dumps(to_org_info), headers=headers, content_type="application/json" + ) headers_entity = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv_entity = client.post('/api/v1/entities', data=json.dumps(entity_info), - headers=headers_entity, content_type='application/json') + rv_entity = client.post( + "/api/v1/entities", data=json.dumps(entity_info), headers=headers_entity, content_type="application/json" + ) if create_contact: - client.post('/api/v1/entities/{}/contacts'.format(entity_info['businessIdentifier']), - headers=headers_entity, - data=json.dumps(TestContactInfo.contact1), - content_type='application/json') - + client.post( + "/api/v1/entities/{}/contacts".format(entity_info["businessIdentifier"]), + headers=headers_entity, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) dictionary_from_org = json.loads(rv_from_org.data) dictionary_to_org = json.loads(rv_to_org.data) dictionary_entity = json.loads(rv_entity.data) - from_org_id = dictionary_from_org['id'] - to_org_id = dictionary_to_org['id'] - business_identifier = dictionary_entity['businessIdentifier'] + from_org_id = dictionary_from_org["id"] + to_org_id = dictionary_to_org["id"] + business_identifier = dictionary_entity["businessIdentifier"] return headers, from_org_id, to_org_id, business_identifier def _get_entity_json_for_post(counter): - return {'businessIdentifier': f'CP000{counter}', - 'businessNumber': f'791861078BC{counter}', - 'name': f'Testing entity {counter}', - 'passCode': '222222222', 'corpTypeCode': 'CP'} - - -def setup_additional_affiliation_invitation_data(client, jwt, session, keycloak_mock, - 
claims=TestJwtClaims.public_user_role, - new_org_count=5, - new_entity_count=1 - ): # pylint:disable=unused-argument + return { + "businessIdentifier": f"CP000{counter}", + "businessNumber": f"791861078BC{counter}", + "name": f"Testing entity {counter}", + "passCode": "222222222", + "corpTypeCode": "CP", + } + + +def setup_additional_affiliation_invitation_data( + client, jwt, session, keycloak_mock, claims=TestJwtClaims.public_user_role, new_org_count=5, new_entity_count=1 +): # pylint:disable=unused-argument """Set up additional data for testing affiliation invitations.""" headers = factory_auth_header(jwt=jwt, claims=claims) headers_entity = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) @@ -683,193 +838,203 @@ def setup_additional_affiliation_invitation_data(client, jwt, session, keycloak_ new_entity_business_identifiers = [] for i in range(new_org_count): - new_org_json = client.post('/api/v1/orgs', data=json.dumps({'name': f'Test Affiliation Invitation Org {i}'}), - headers=headers, content_type='application/json') + new_org_json = client.post( + "/api/v1/orgs", + data=json.dumps({"name": f"Test Affiliation Invitation Org {i}"}), + headers=headers, + content_type="application/json", + ) new_org: dict = json.loads(new_org_json.data) - new_org_ids.append(new_org.get('id')) + new_org_ids.append(new_org.get("id")) for i in range(new_entity_count): new_entity_info = _get_entity_json_for_post(i) - client.post('/api/v1/entities', data=json.dumps(new_entity_info), - headers=headers_entity, content_type='application/json') + client.post( + "/api/v1/entities", + data=json.dumps(new_entity_info), + headers=headers_entity, + content_type="application/json", + ) - new_entity_business_identifiers.append(new_entity_info['businessIdentifier']) + new_entity_business_identifiers.append(new_entity_info["businessIdentifier"]) return new_org_ids, new_entity_business_identifiers def test_authorize_affiliation_invitation(client, jwt, session, keycloak_mock, 
business_mock): """Assert that an affiliation invitation (type REQUEST) can be authorized.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) - rv_invitation = client.post('/api/v1/affiliationInvitations', data=json.dumps( - factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier)), - headers=headers, content_type='application/json') + rv_invitation = client.post( + "/api/v1/affiliationInvitations", + data=json.dumps( + factory_affiliation_invitation( + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) + ), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - rv_invitation = client.patch(f'/api/v1/affiliationInvitations/{affiliation_invitation_id}/authorization/accept', - headers=headers, - content_type='application/json') + rv_invitation = client.patch( + f"/api/v1/affiliationInvitations/{affiliation_invitation_id}/authorization/accept", + headers=headers, + content_type="application/json", + ) result_json = rv_invitation.json - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] + assert rv_invitation.status_code == HTTPStatus.OK + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] dictionary = json.loads(rv_invitation.data) - assert dictionary['status'] == 'ACCEPTED' + assert dictionary["status"] == "ACCEPTED" def test_reject_authorize_affiliation_invitation(client, jwt, session, keycloak_mock, business_mock): """Assert that an affiliation 
invitation (type REQUEST) can be refused to be authorized.""" - headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) + headers, from_org_id, to_org_id, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) # create affiliation invitation in test sample_invite = factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=business_identifier) + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=business_identifier + ) rv_invitation = client.post( - '/api/v1/affiliationInvitations', + "/api/v1/affiliationInvitations", data=json.dumps(sample_invite), - headers=headers, content_type='application/json' + headers=headers, + content_type="application/json", ) invitation_dictionary = json.loads(rv_invitation.data) - affiliation_invitation_id = invitation_dictionary['id'] + affiliation_invitation_id = invitation_dictionary["id"] - rv_invitation = client.patch(f'/api/v1/affiliationInvitations/{affiliation_invitation_id}/authorization/refuse', - headers=headers, - content_type='application/json') + rv_invitation = client.patch( + f"/api/v1/affiliationInvitations/{affiliation_invitation_id}/authorization/refuse", + headers=headers, + content_type="application/json", + ) result_json = rv_invitation.json - assert rv_invitation.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(result_json, 'affiliation_invitation_response')[0] + assert rv_invitation.status_code == HTTPStatus.OK + assert schema_utils.validate(result_json, "affiliation_invitation_response")[0] dictionary = json.loads(rv_invitation.data) - assert dictionary['status'] == 'FAILED' + assert dictionary["status"] == "FAILED" -def _create_affiliations_for_test(client, headers, - org_id1, org_id2, org_id3, org_id4, - business_identifier1, business_identifier2): +def _create_affiliations_for_test( + client, headers, org_id1, 
org_id2, org_id3, org_id4, business_identifier1, business_identifier2 +): sample_invites = [ factory_affiliation_invitation( - from_org_id=org_id1, - to_org_id=org_id2, - business_identifier=business_identifier1, - invitation_type='REQUEST' + from_org_id=org_id1, to_org_id=org_id2, business_identifier=business_identifier1, invitation_type="REQUEST" ), factory_affiliation_invitation( - from_org_id=org_id2, - to_org_id=org_id3, - business_identifier=business_identifier1, - invitation_type='REQUEST'), + from_org_id=org_id2, to_org_id=org_id3, business_identifier=business_identifier1, invitation_type="REQUEST" + ), factory_affiliation_invitation( - from_org_id=org_id3, - to_org_id=org_id4, - business_identifier=business_identifier1, - invitation_type='REQUEST'), + from_org_id=org_id3, to_org_id=org_id4, business_identifier=business_identifier1, invitation_type="REQUEST" + ), factory_affiliation_invitation( - from_org_id=org_id4, - to_org_id=org_id1, - business_identifier=business_identifier1, - invitation_type='REQUEST'), + from_org_id=org_id4, to_org_id=org_id1, business_identifier=business_identifier1, invitation_type="REQUEST" + ), factory_affiliation_invitation( - from_org_id=org_id4, - to_org_id=org_id1, - business_identifier=business_identifier2, - invitation_type='REQUEST')] + from_org_id=org_id4, to_org_id=org_id1, business_identifier=business_identifier2, invitation_type="REQUEST" + ), + ] # create affiliation invitation in test for i in range(len(sample_invites)): data = json.dumps(sample_invites[i]) - client.post( - '/api/v1/affiliationInvitations', - data=data, - headers=headers, content_type='application/json') + client.post("/api/v1/affiliationInvitations", data=data, headers=headers, content_type="application/json") def test_getting_affiliation_invitations_for_the_org(app, client, jwt, session, keycloak_mock, business_mock): """Assert that correct count of affiliation invitations is returned for provided org id.""" - orig_val_max_number_of_orgs = 
app.config.get('MAX_NUMBER_OF_ORGS') + orig_val_max_number_of_orgs = app.config.get("MAX_NUMBER_OF_ORGS") app.config.update(MAX_NUMBER_OF_ORGS=10) # setup all the required data - headers, org_id_0a, org_id_0b, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) - new_org_ids, new_business_identifiers = setup_additional_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) - - _create_affiliations_for_test(client, headers, - org_id1=new_org_ids[0], - org_id2=new_org_ids[1], - org_id3=new_org_ids[2], - org_id4=new_org_ids[3], - business_identifier1=business_identifier, - business_identifier2=new_business_identifiers[0]) + headers, org_id_0a, org_id_0b, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) + new_org_ids, new_business_identifiers = setup_additional_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) + + _create_affiliations_for_test( + client, + headers, + org_id1=new_org_ids[0], + org_id2=new_org_ids[1], + org_id3=new_org_ids[2], + org_id4=new_org_ids[3], + business_identifier1=business_identifier, + business_identifier2=new_business_identifiers[0], + ) expected_org_id = new_org_ids[1] - url = f'/api/v1/affiliationInvitations?orgId={expected_org_id}&businessDetails=True' + url = f"/api/v1/affiliationInvitations?orgId={expected_org_id}&businessDetails=True" affiliation_invitations_response = client.get(url, headers=headers) affiliation_invitations_dict: dict = json.loads(affiliation_invitations_response.data) - affiliation_invitations = affiliation_invitations_dict['affiliationInvitations'] + affiliation_invitations = affiliation_invitations_dict["affiliationInvitations"] - assert len(affiliation_invitations) == 2 # should be two, one for 'toOrg' other for 'fromOrg' - assert affiliation_invitations[0]['toOrg']['id'] == expected_org_id \ - or affiliation_invitations[0]['fromOrg']['id'] == expected_org_id - assert 
affiliation_invitations[1]['toOrg']['id'] == expected_org_id \ - or affiliation_invitations[1]['fromOrg']['id'] == expected_org_id + assert len(affiliation_invitations) == 2 # should be two, one for 'toOrg' other for 'fromOrg' + assert ( + affiliation_invitations[0]["toOrg"]["id"] == expected_org_id + or affiliation_invitations[0]["fromOrg"]["id"] == expected_org_id + ) + assert ( + affiliation_invitations[1]["toOrg"]["id"] == expected_org_id + or affiliation_invitations[1]["fromOrg"]["id"] == expected_org_id + ) app.config.update(MAX_NUMBER_OF_ORGS=orig_val_max_number_of_orgs) -def test_getting_affiliation_invitations_sent_to_org_for_entity(app, client, jwt, session, keycloak_mock, - business_mock): +def test_getting_affiliation_invitations_sent_to_org_for_entity( + app, client, jwt, session, keycloak_mock, business_mock +): """Assert that correct count of affiliation invitations is returned for provided org id and business identifier.""" # setup all the required data - orig_val_max_number_of_orgs = app.config.get('MAX_NUMBER_OF_ORGS') + orig_val_max_number_of_orgs = app.config.get("MAX_NUMBER_OF_ORGS") app.config.update(MAX_NUMBER_OF_ORGS=10) - headers, org_id_0a, org_id_0b, business_identifier = setup_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) - new_org_ids, new_business_identifiers = setup_additional_affiliation_invitation_data(client, - jwt, - session, - keycloak_mock) - - _create_affiliations_for_test(client, headers, - org_id1=new_org_ids[0], - org_id2=new_org_ids[1], - org_id3=new_org_ids[2], - org_id4=new_org_ids[3], - business_identifier1=new_business_identifiers[0], - business_identifier2=business_identifier) + headers, org_id_0a, org_id_0b, business_identifier = setup_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) + new_org_ids, new_business_identifiers = setup_additional_affiliation_invitation_data( + client, jwt, session, keycloak_mock + ) + + _create_affiliations_for_test( + client, + headers, + 
org_id1=new_org_ids[0], + org_id2=new_org_ids[1], + org_id3=new_org_ids[2], + org_id4=new_org_ids[3], + business_identifier1=new_business_identifiers[0], + business_identifier2=business_identifier, + ) expected_org_id = new_org_ids[0] - url = f'/api/v1/affiliationInvitations?toOrgId=' \ - f'{expected_org_id}&businessIdentifier={business_identifier}&businessDetails=True' + url = ( + f"/api/v1/affiliationInvitations?toOrgId=" + f"{expected_org_id}&businessIdentifier={business_identifier}&businessDetails=True" + ) affiliation_invitations_response = client.get(url, headers=headers) affiliation_invitations_dict: dict = json.loads(affiliation_invitations_response.data) - affiliation_invitations = affiliation_invitations_dict['affiliationInvitations'] + affiliation_invitations = affiliation_invitations_dict["affiliationInvitations"] assert len(affiliation_invitations) == 1 # should be only one to org and business identifier match - assert affiliation_invitations[0]['toOrg']['id'] == expected_org_id - assert affiliation_invitations[0]['entity']['businessIdentifier'] == business_identifier + assert affiliation_invitations[0]["toOrg"]["id"] == expected_org_id + assert affiliation_invitations[0]["entity"]["businessIdentifier"] == business_identifier app.config.update(MAX_NUMBER_OF_ORGS=orig_val_max_number_of_orgs) diff --git a/auth-api/tests/unit/api/test_bcol_profiles.py b/auth-api/tests/unit/api/test_bcol_profiles.py index 70b46fc6e3..a2c5e0f4db 100644 --- a/auth-api/tests/unit/api/test_bcol_profiles.py +++ b/auth-api/tests/unit/api/test_bcol_profiles.py @@ -19,8 +19,8 @@ import copy import json +from http import HTTPStatus -from auth_api import status as http_status from auth_api.schemas import utils as schema_utils from auth_api.utils.enums import OrgStatus from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo @@ -32,11 +32,15 @@ def test_bcol_profiles_returns_200(app, client, jwt, session): # pylint:disable claims = 
copy.deepcopy(TestJwtClaims.public_user_role.value) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.post('/api/v1/bcol-profiles', data=json.dumps(TestOrgInfo.bcol_linked().get('bcOnlineCredential')), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/bcol-profiles", + data=json.dumps(TestOrgInfo.bcol_linked().get("bcOnlineCredential")), + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'bconline_response')[0] + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "bconline_response")[0] def test_bcol_id_already_linked(client, jwt, session): @@ -44,18 +48,24 @@ def test_bcol_id_already_linked(client, jwt, session): claims = copy.deepcopy(TestJwtClaims.public_user_role.value) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.post('/api/v1/bcol-profiles', data=json.dumps(TestOrgInfo.bcol_linked().get('bcOnlineCredential')), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/bcol-profiles", + data=json.dumps(TestOrgInfo.bcol_linked().get("bcOnlineCredential")), + headers=headers, + content_type="application/json", + ) bcol_org = factory_org_model(org_info=TestOrgInfo.org3, bcol_info=TestOrgInfo.bcol_linked()) - bcol_org.bcol_account_id = rv.json['accountNumber'] + bcol_org.bcol_account_id = rv.json["accountNumber"] bcol_org.save() - rv_duplicate = client.post('/api/v1/bcol-profiles', - data=json.dumps(TestOrgInfo.bcol_linked().get('bcOnlineCredential')), - headers=headers, - content_type='application/json') - assert rv_duplicate.status_code == http_status.HTTP_409_CONFLICT + rv_duplicate = client.post( + "/api/v1/bcol-profiles", + data=json.dumps(TestOrgInfo.bcol_linked().get("bcOnlineCredential")), + headers=headers, + content_type="application/json", + ) + assert rv_duplicate.status_code == HTTPStatus.CONFLICT def 
test_bcol_id_already_linked_to_rejected(client, jwt, session): @@ -63,16 +73,22 @@ def test_bcol_id_already_linked_to_rejected(client, jwt, session): claims = copy.deepcopy(TestJwtClaims.public_user_role.value) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.post('/api/v1/bcol-profiles', data=json.dumps(TestOrgInfo.bcol_linked().get('bcOnlineCredential')), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/bcol-profiles", + data=json.dumps(TestOrgInfo.bcol_linked().get("bcOnlineCredential")), + headers=headers, + content_type="application/json", + ) bcol_org = factory_org_model(org_info=TestOrgInfo.org3, bcol_info=TestOrgInfo.bcol_linked()) - bcol_org.bcol_account_id = rv.json['accountNumber'] + bcol_org.bcol_account_id = rv.json["accountNumber"] bcol_org.status_code = OrgStatus.REJECTED.value bcol_org.save() - rv_duplicate = client.post('/api/v1/bcol-profiles', - data=json.dumps(TestOrgInfo.bcol_linked().get('bcOnlineCredential')), - headers=headers, - content_type='application/json') - assert rv_duplicate.status_code == http_status.HTTP_200_OK + rv_duplicate = client.post( + "/api/v1/bcol-profiles", + data=json.dumps(TestOrgInfo.bcol_linked().get("bcOnlineCredential")), + headers=headers, + content_type="application/json", + ) + assert rv_duplicate.status_code == HTTPStatus.OK diff --git a/auth-api/tests/unit/api/test_bulk_user.py b/auth-api/tests/unit/api/test_bulk_user.py index 9d392b4ea4..d62c60ff92 100644 --- a/auth-api/tests/unit/api/test_bulk_user.py +++ b/auth-api/tests/unit/api/test_bulk_user.py @@ -18,10 +18,9 @@ """ import json import uuid +from http import HTTPStatus from random import randint - -from auth_api import status as http_status from auth_api.config import get_named_config from auth_api.schemas import utils as schema_utils from auth_api.services.keycloak import KeycloakService @@ -29,86 +28,92 @@ from tests.utilities.factory_scenarios import BulkUserTestScenario, TestJwtClaims, TestOrgInfo 
from tests.utilities.factory_utils import factory_auth_header, factory_invitation_anonymous - KEYCLOAK_SERVICE = KeycloakService() -CONFIG = get_named_config('testing') +CONFIG = get_named_config("testing") def test_add_user(client, jwt, session): # pylint:disable=unused-argument """Assert that a user can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'user_response')[0] + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "user_response")[0] def test_add_user_admin_valid_bcros(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org admin can create members.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_dir_search_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation_anonymous(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation_anonymous(org_id=org_id)), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert dictionary.get('token') is not None - assert rv.status_code == 
http_status.HTTP_201_CREATED + assert dictionary.get("token") is not None + assert rv.status_code == HTTPStatus.CREATED user = { - 'username': 'testuser{}'.format(randint(0, 1000)), - 'password': 'Password@1234', + "username": "testuser{}".format(randint(0, 1000)), + "password": "Password@1234", } - rv = client.post('/api/v1/users/bcros', data=json.dumps(user), - headers={'invitation_token': dictionary.get('token')}, content_type='application/json') + rv = client.post( + "/api/v1/users/bcros", + data=json.dumps(user), + headers={"invitation_token": dictionary.get("token")}, + content_type="application/json", + ) # Login as this user invited_user_token = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': str(uuid.uuid4()), - 'firstname': 'Test', - 'lastname': 'User', - 'preferred_username': 'bcros/{}'.format(user.get('username')), - 'realm_access': { - 'roles': [] - }, - 'roles': [], - 'accessType': 'ANONYMOUS', - 'product_code': ProductCode.DIR_SEARCH.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": str(uuid.uuid4()), + "firstname": "Test", + "lastname": "User", + "preferred_username": "bcros/{}".format(user.get("username")), + "realm_access": {"roles": []}, + "roles": [], + "accessType": "ANONYMOUS", + "product_code": ProductCode.DIR_SEARCH.value, } headers = factory_auth_header(jwt=jwt, claims=invited_user_token) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED # headers = factory_auth_header(jwt=jwt, # claims=TestJwtClaims.anonymous_bcros_role) user_input = BulkUserTestScenario.get_bulk_user1_for_org(org_id) - rv = client.post('/api/v1/bulk/users', headers=headers, - data=json.dumps(user_input), - content_type='application/json') - assert len(rv.json['users']) == 2 - assert schema_utils.validate(rv.json, 'anonymous_user_response')[0] - - 
assert rv.json['users'][0]['httpStatus'] == 201 - assert rv.json['users'][0]['httpStatus'] == 201 - assert rv.json['users'][0]['error'] == '' - assert rv.json['users'][1]['error'] == '' - assert rv.json['users'][0]['username'] == IdpHint.BCROS.value + '/' + user_input['users'][0]['username'] - assert rv.json['users'][1]['username'] == IdpHint.BCROS.value + '/' + user_input['users'][1]['username'] - - rv = client.post('/api/v1/bulk/users', headers=headers, - data=json.dumps(user_input), - content_type='application/json') - - assert len(rv.json['users']) == 2 - assert schema_utils.validate(rv.json, 'anonymous_user_response')[0] - assert rv.json['users'][0]['httpStatus'] == 409 - assert rv.json['users'][1]['httpStatus'] == 409 - assert rv.json['users'][0]['error'] == 'The username is already taken' - assert rv.json['users'][1]['error'] == 'The username is already taken' + rv = client.post( + "/api/v1/bulk/users", headers=headers, data=json.dumps(user_input), content_type="application/json" + ) + assert len(rv.json["users"]) == 2 + assert schema_utils.validate(rv.json, "anonymous_user_response")[0] + + assert rv.json["users"][0]["httpStatus"] == 201 + assert rv.json["users"][0]["httpStatus"] == 201 + assert rv.json["users"][0]["error"] == "" + assert rv.json["users"][1]["error"] == "" + assert rv.json["users"][0]["username"] == IdpHint.BCROS.value + "/" + user_input["users"][0]["username"] + assert rv.json["users"][1]["username"] == IdpHint.BCROS.value + "/" + user_input["users"][1]["username"] + + rv = client.post( + "/api/v1/bulk/users", headers=headers, data=json.dumps(user_input), content_type="application/json" + ) + + assert len(rv.json["users"]) == 2 + assert schema_utils.validate(rv.json, "anonymous_user_response")[0] + assert rv.json["users"][0]["httpStatus"] == 409 + assert rv.json["users"][1]["httpStatus"] == 409 + assert rv.json["users"][0]["error"] == "The username is already taken" + assert rv.json["users"][1]["error"] == "The username is already taken" 
diff --git a/auth-api/tests/unit/api/test_codes.py b/auth-api/tests/unit/api/test_codes.py index d65b5b2804..cd608da557 100644 --- a/auth-api/tests/unit/api/test_codes.py +++ b/auth-api/tests/unit/api/test_codes.py @@ -17,9 +17,9 @@ Test-Suite to ensure that the /entities endpoint is working as expected. """ +from http import HTTPStatus from unittest.mock import patch -from auth_api import status as http_status from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.schemas import utils as schema_utils @@ -28,27 +28,27 @@ def test_get_codes(client, jwt, session): # pylint:disable=unused-argument """Assert that the code type can be fetched.""" - code_type = 'membership_types' - rv = client.get('/api/v1/codes/{}'.format(code_type), content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'codes')[0] + code_type = "membership_types" + rv = client.get("/api/v1/codes/{}".format(code_type), content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "codes")[0] - rv = client.get('/api/v1/codes/{}'.format(code_type.upper()), content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'codes')[0] + rv = client.get("/api/v1/codes/{}".format(code_type.upper()), content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "codes")[0] def test_get_codes_404(client, jwt, session): # pylint:disable=unused-argument """Assert that the code type can not be fetched.""" - rv = client.get('/api/v1/codes/{}'.format('aaaaaaa'), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.get("/api/v1/codes/{}".format("aaaaaaa"), content_type="application/json") + assert rv.status_code == HTTPStatus.NOT_FOUND - rv = client.get('/api/v1/codes/{}'.format(''), 
content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.get("/api/v1/codes/{}".format(""), content_type="application/json") + assert rv.status_code == HTTPStatus.NOT_FOUND def test_get_codes_returns_exception(client, jwt, session): # pylint:disable=unused-argument """Assert that the code type can not be fetched and with expcetion.""" - with patch.object(CodesService, 'fetch_codes', side_effect=BusinessException(Error.UNDEFINED_ERROR, None)): - rv = client.get('/api/v1/codes/{}'.format('membership_type'), content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + with patch.object(CodesService, "fetch_codes", side_effect=BusinessException(Error.UNDEFINED_ERROR, None)): + rv = client.get("/api/v1/codes/{}".format("membership_type"), content_type="application/json") + assert rv.status_code == HTTPStatus.BAD_REQUEST diff --git a/auth-api/tests/unit/api/test_cors_preflight.py b/auth-api/tests/unit/api/test_cors_preflight.py index 543f758b64..a55176c678 100644 --- a/auth-api/tests/unit/api/test_cors_preflight.py +++ b/auth-api/tests/unit/api/test_cors_preflight.py @@ -18,285 +18,292 @@ """ -from auth_api import status as http_status +from http import HTTPStatus def test_preflight_account(app, client, jwt, session): """Assert preflight responses for accounts are correct.""" - rv = client.options('/api/v1/accounts/1/products/1/authorizations', - headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options( + "/api/v1/accounts/1/products/1/authorizations", headers={"Access-Control-Request-Method": "GET"} + ) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_activity_log(app, client, jwt, session): """Assert preflight responses for activity logs are correct.""" - rv = client.options('/api/v1/orgs/1/activity-logs', 
headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/orgs/1/activity-logs", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_affiliation_invitation(app, client, jwt, session): """Assert preflight responses for affiliation invitations are correct.""" - rv = client.options('/api/v1/affiliationInvitations', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, POST') + rv = client.options("/api/v1/affiliationInvitations", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, POST") - rv = client.options('/api/v1/affiliationInvitations/1', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, PATCH') + rv = client.options("/api/v1/affiliationInvitations/1", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, PATCH") - rv = client.options('/api/v1/affiliationInvitations/1/token/ABC', headers={'Access-Control-Request-Method': 'PUT'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'PUT') + rv = client.options("/api/v1/affiliationInvitations/1/token/ABC", headers={"Access-Control-Request-Method": "PUT"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "PUT") - rv = client.options('/api/v1/affiliationInvitations/1/authorization/ACTION', - headers={'Access-Control-Request-Method': 'PATCH'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'PATCH') + rv = 
client.options( + "/api/v1/affiliationInvitations/1/authorization/ACTION", headers={"Access-Control-Request-Method": "PATCH"} + ) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "PATCH") def test_preflight_bcol_profiles(app, client, jwt, session): """Assert preflight responses for bcol profiles are correct.""" - rv = client.options('/api/v1/bcol-profiles', headers={'Access-Control-Request-Method': 'POST'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'POST') + rv = client.options("/api/v1/bcol-profiles", headers={"Access-Control-Request-Method": "POST"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "POST") def test_preflight_bulk_users(app, client, jwt, session): """Assert preflight responses for bcol profiles are correct.""" - rv = client.options('/api/v1/bulk/users', headers={'Access-Control-Request-Method': 'POST'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'POST') + rv = client.options("/api/v1/bulk/users", headers={"Access-Control-Request-Method": "POST"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "POST") def test_preflight_codes(app, client, jwt, session): """Assert preflight responses for codes are correct.""" - rv = client.options('/api/v1/codes/CODETYPE', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/codes/CODETYPE", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_documents(app, client, jwt, session): """Assert preflight responses for documents are correct.""" - rv = client.options('/api/v1/documents/DOCTYPE', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - 
assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/documents/DOCTYPE", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/documents/FILENAME/signatures', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/documents/FILENAME/signatures", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/documents/affidavit', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/documents/affidavit", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_entity(app, client, jwt, session): """Assert preflight responses for entity are correct.""" - rv = client.options('/api/v1/entities', headers={'Access-Control-Request-Method': 'POST'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'POST') + rv = client.options("/api/v1/entities", headers={"Access-Control-Request-Method": "POST"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "POST") - rv = client.options('/api/v1/entities/BUSINESS_IDENTIFIER', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, PATCH') + rv = client.options("/api/v1/entities/BUSINESS_IDENTIFIER", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, PATCH") - rv = 
client.options('/api/v1/entities/BUSINESS_IDENTIFIER/contacts', - headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, POST, PUT') + rv = client.options( + "/api/v1/entities/BUSINESS_IDENTIFIER/contacts", headers={"Access-Control-Request-Method": "GET"} + ) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, POST, PUT") - rv = client.options('/api/v1/entities/BUSINESS_IDENTIFIER/authorizations', - headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options( + "/api/v1/entities/BUSINESS_IDENTIFIER/authorizations", headers={"Access-Control-Request-Method": "GET"} + ) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_invitation(app, client, jwt, session): """Assert preflight responses for invitations are correct.""" - rv = client.options('/api/v1/invitations', headers={'Access-Control-Request-Method': 'POST'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'POST') + rv = client.options("/api/v1/invitations", headers={"Access-Control-Request-Method": "POST"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "POST") - rv = client.options('/api/v1/invitations/1', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, PATCH') + rv = client.options("/api/v1/invitations/1", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, PATCH") - rv = client.options('/api/v1/invitations/tokens/ABC', headers={'Access-Control-Request-Method': 'PUT'}) - assert rv.status_code == http_status.HTTP_200_OK - 
assert_access_control_headers(rv, '*', 'GET, PUT') + rv = client.options("/api/v1/invitations/tokens/ABC", headers={"Access-Control-Request-Method": "PUT"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, PUT") def test_preflight_notifications(app, client, jwt, session): """Assert preflight responses for notifications are correct.""" - rv = client.options('/api/v1/users/1/org/2/notifications', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/users/1/org/2/notifications", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_org(app, client, jwt, session): """Assert preflight responses for org are correct.""" - rv = client.options('/api/v1/orgs', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, POST') + rv = client.options("/api/v1/orgs", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, POST") - rv = client.options('/api/v1/orgs/1', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, PATCH, PUT') + rv = client.options("/api/v1/orgs/1", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, PATCH, PUT") - rv = client.options('/api/v1/orgs/1/login-options', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, POST, PUT') + rv = client.options("/api/v1/orgs/1/login-options", headers={"Access-Control-Request-Method": "GET"}) + 
assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, POST, PUT") - rv = client.options('/api/v1/orgs/1/contacts', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, POST, PUT') + rv = client.options("/api/v1/orgs/1/contacts", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, POST, PUT") - rv = client.options('/api/v1/orgs/1/affiliations', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, POST') + rv = client.options("/api/v1/orgs/1/affiliations", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, POST") - rv = client.options('/api/v1/orgs/affiliation/BUSINESS_IDENTIFIER', - headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options( + "/api/v1/orgs/affiliation/BUSINESS_IDENTIFIER", headers={"Access-Control-Request-Method": "GET"} + ) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/orgs/1/affiliations/BUSINESS_IDENTIFIER', - headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET') + rv = client.options( + "/api/v1/orgs/1/affiliations/BUSINESS_IDENTIFIER", headers={"Access-Control-Request-Method": "GET"} + ) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET") - rv = client.options('/api/v1/orgs/1/members', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - 
assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/orgs/1/members", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/orgs/1/members/1', headers={'Access-Control-Request-Method': 'PATCH'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, PATCH') + rv = client.options("/api/v1/orgs/1/members/1", headers={"Access-Control-Request-Method": "PATCH"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, PATCH") - rv = client.options('/api/v1/orgs/1/invitations', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/orgs/1/invitations", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/orgs/1/admins/affidavits', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/orgs/1/admins/affidavits", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/orgs/1/payment_info', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/orgs/1/payment_info", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_org_api_keys(app, client, jwt, session): """Assert preflight responses for org api keys are correct.""" - rv = 
client.options('/api/v1/orgs/1/api-keys', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, POST') + rv = client.options("/api/v1/orgs/1/api-keys", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, POST") - rv = client.options('/api/v1/orgs/1/api-keys/KEY', headers={'Access-Control-Request-Method': 'DELETE'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE') + rv = client.options("/api/v1/orgs/1/api-keys/KEY", headers={"Access-Control-Request-Method": "DELETE"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE") def test_preflight_org_authorizations(app, client, jwt, session): """Assert preflight responses for org authorizations are correct.""" - rv = client.options('/api/v1/orgs/1/authorizations', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/orgs/1/authorizations", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_org_products(app, client, jwt, session): """Assert preflight responses for org products are correct.""" - rv = client.options('/api/v1/orgs/1/products', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, PATCH, POST') + rv = client.options("/api/v1/orgs/1/products", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, PATCH, POST") def test_preflight_org_permissions(app, client, jwt, session): """Assert preflight responses for org permissions are correct.""" - rv 
= client.options('/api/v1/permissions/ORG_STATUS/MEMBERSHIP_TYPE', - headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options( + "/api/v1/permissions/ORG_STATUS/MEMBERSHIP_TYPE", headers={"Access-Control-Request-Method": "GET"} + ) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_products(app, client, jwt, session): """Assert preflight responses for products are correct.""" - rv = client.options('/api/v1/products', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/products", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_task(app, client, jwt, session): """Assert preflight responses for tasks are correct.""" - rv = client.options('/api/v1/tasks', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/tasks", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/tasks/1', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, PUT') + rv = client.options("/api/v1/tasks/1", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, PUT") def test_preflight_user(app, client, jwt, session): """Assert preflight responses for user are correct.""" - rv = client.options('/api/v1/users', headers={'Access-Control-Request-Method': 'POST'}) - assert rv.status_code == 
http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, POST') + rv = client.options("/api/v1/users", headers={"Access-Control-Request-Method": "POST"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, POST") - rv = client.options('/api/v1/users/bcros', headers={'Access-Control-Request-Method': 'POST'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'POST') + rv = client.options("/api/v1/users/bcros", headers={"Access-Control-Request-Method": "POST"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "POST") - rv = client.options('/api/v1/users/USERNAME/otp', headers={'Access-Control-Request-Method': 'DELETE'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE') + rv = client.options("/api/v1/users/USERNAME/otp", headers={"Access-Control-Request-Method": "DELETE"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE") - rv = client.options('/api/v1/users/USERNAME', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, PATCH') + rv = client.options("/api/v1/users/USERNAME", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, PATCH") - rv = client.options('/api/v1/users/@me', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, PATCH') + rv = client.options("/api/v1/users/@me", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, PATCH") - rv = client.options('/api/v1/users/contacts', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == 
http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'DELETE, GET, POST, PUT') + rv = client.options("/api/v1/users/contacts", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "DELETE, GET, POST, PUT") - rv = client.options('/api/v1/users/orgs', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/users/orgs", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/users/orgs/123/membership', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/users/orgs/123/membership", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") - rv = client.options('/api/v1/users/USERGUID/affidavits', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET, POST') + rv = client.options("/api/v1/users/USERGUID/affidavits", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET, POST") - rv = client.options('/api/v1/users/authorizations', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/users/authorizations", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_simple_org(app, client, jwt, session): """Assert preflight responses for simple org are 
correct.""" - rv = client.options('/api/v1/orgs/simple', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/orgs/simple", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def test_preflight_user_settings(app, client, jwt, session): """Assert preflight responses for user settings are correct.""" - rv = client.options('/api/v1/users/1/settings', headers={'Access-Control-Request-Method': 'GET'}) - assert rv.status_code == http_status.HTTP_200_OK - assert_access_control_headers(rv, '*', 'GET') + rv = client.options("/api/v1/users/1/settings", headers={"Access-Control-Request-Method": "GET"}) + assert rv.status_code == HTTPStatus.OK + assert_access_control_headers(rv, "*", "GET") def assert_access_control_headers(rv, origins: str, methods: str): """Assert access control headers are correct.""" - assert rv.headers['Access-Control-Allow-Origin'] == origins - assert rv.headers['Access-Control-Allow-Methods'] == methods + assert rv.headers["Access-Control-Allow-Origin"] == origins + assert rv.headers["Access-Control-Allow-Methods"] == methods diff --git a/auth-api/tests/unit/api/test_documents.py b/auth-api/tests/unit/api/test_documents.py index ab71cde61e..16c872a07c 100644 --- a/auth-api/tests/unit/api/test_documents.py +++ b/auth-api/tests/unit/api/test_documents.py @@ -17,103 +17,108 @@ Test-Suite to ensure that the /documents endpoint is working as expected. 
""" -from auth_api import status as http_status +from http import HTTPStatus + from auth_api.schemas import utils as schema_utils from tests.utilities.factory_scenarios import TestJwtClaims from tests.utilities.factory_utils import ( - factory_auth_header, factory_document_model, get_tos_latest_version, get_tos_pad_latest_version) + factory_auth_header, + factory_document_model, + get_tos_latest_version, + get_tos_pad_latest_version, +) def test_documents_returns_200(client, jwt, session): # pylint:disable=unused-argument """Assert get documents endpoint returns 200.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/documents/termsofuse', headers=headers, content_type='application/json') + rv = client.get("/api/v1/documents/termsofuse", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert rv.json.get('versionId') == get_tos_latest_version() + assert rv.status_code == HTTPStatus.OK + assert rv.json.get("versionId") == get_tos_latest_version() - rv = client.get('/api/v1/documents/termsofuse_pad', headers=headers, content_type='application/json') + rv = client.get("/api/v1/documents/termsofuse_pad", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert rv.json.get('versionId') == get_tos_pad_latest_version() + assert rv.status_code == HTTPStatus.OK + assert rv.json.get("versionId") == get_tos_pad_latest_version() headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.anonymous_bcros_role) - rv = client.get('/api/v1/documents/termsofuse', headers=headers, content_type='application/json') + rv = client.get("/api/v1/documents/termsofuse", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'document')[0] - assert rv.json.get('versionId') == 'd1' + assert rv.status_code == HTTPStatus.OK + assert 
schema_utils.validate(rv.json, "document")[0] + assert rv.json.get("versionId") == "d1" - rv = client.get('/api/v1/documents/termsofuse_pad', headers=headers, content_type='application/json') + rv = client.get("/api/v1/documents/termsofuse_pad", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert rv.json.get('versionId') == get_tos_pad_latest_version() + assert rv.status_code == HTTPStatus.OK + assert rv.json.get("versionId") == get_tos_pad_latest_version() def test_invalid_documents_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert get documents endpoint returns 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/documents/junk', headers=headers, content_type='application/json') + rv = client.get("/api/v1/documents/junk", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_404_NOT_FOUND - assert schema_utils.validate(rv.json, 'error')[0] - assert rv.json.get('message') == 'The requested invitation could not be found.' + assert rv.status_code == HTTPStatus.NOT_FOUND + assert schema_utils.validate(rv.json, "error")[0] + assert rv.json.get("message") == "The requested invitation could not be found." 
def test_documents_returns_200_for_some_type(client, jwt, session): # pylint:disable=unused-argument """Assert get documents endpoint with different type returns 200.""" - html_content = '' - version_id = '10' - factory_document_model(version_id, 'sometype', html_content) + html_content = "" + version_id = "10" + factory_document_model(version_id, "sometype", html_content) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/documents/sometype', headers=headers, content_type='application/json') + rv = client.get("/api/v1/documents/sometype", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'document')[0] - assert rv.json.get('content') == html_content - assert rv.json.get('versionId') == version_id + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "document")[0] + assert rv.json.get("content") == html_content + assert rv.json.get("versionId") == version_id def test_documents_returns_latest_always(client, jwt, session): # pylint:disable=unused-argument """Assert get documents endpoint returns latest version of document.""" - html_content_1 = '1' - version_id_1 = '20' # putting higher numbers so that version number doesnt collide with existing in db - factory_document_model(version_id_1, 'termsofuse', html_content_1) + html_content_1 = "1" + version_id_1 = "20" # putting higher numbers so that version number doesnt collide with existing in db + factory_document_model(version_id_1, "termsofuse", html_content_1) - html_content_2 = '3' - version_id_2 = f'{get_tos_latest_version()}1' + html_content_2 = "3" + version_id_2 = f"{get_tos_latest_version()}1" # putting higher numbers so that version number doesnt collide with existing in db - factory_document_model(version_id_2, 'termsofuse', html_content_2) + factory_document_model(version_id_2, "termsofuse", html_content_2) headers = 
factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/documents/termsofuse', headers=headers, content_type='application/json') + rv = client.get("/api/v1/documents/termsofuse", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'document')[0] - assert rv.json.get('content') == html_content_2 - assert rv.json.get('versionId') == version_id_2 + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "document")[0] + assert rv.json.get("content") == html_content_2 + assert rv.json.get("versionId") == version_id_2 - version_id_3 = 'd30' # putting higher numbers so that version number doesnt collide with existing in db - factory_document_model(version_id_3, 'termsofuse_directorsearch', html_content_1) + version_id_3 = "d30" # putting higher numbers so that version number doesnt collide with existing in db + factory_document_model(version_id_3, "termsofuse_directorsearch", html_content_1) - version_id_4 = 'd31' # putting higher numbers so that version number doesnt collide with existing in db - factory_document_model(version_id_4, 'termsofuse_directorsearch', html_content_2) + version_id_4 = "d31" # putting higher numbers so that version number doesnt collide with existing in db + factory_document_model(version_id_4, "termsofuse_directorsearch", html_content_2) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.anonymous_bcros_role) - rv = client.get('/api/v1/documents/termsofuse', headers=headers, content_type='application/json') + rv = client.get("/api/v1/documents/termsofuse", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'document')[0] - assert rv.json.get('content') == html_content_2 - assert rv.json.get('versionId') == version_id_4 + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, 
"document")[0] + assert rv.json.get("content") == html_content_2 + assert rv.json.get("versionId") == version_id_4 def test_document_signature_get_returns_200(client, jwt, session): # pylint:disable=unused-argument """Assert get documents/filename/signatures endpoint returns 200.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - file_name = 'test_file.jpeg' - rv = client.get(f'/api/v1/documents/{file_name}/signatures', headers=headers, content_type='application/json') + file_name = "test_file.jpeg" + rv = client.get(f"/api/v1/documents/{file_name}/signatures", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert rv.json.get('key').startswith('Affidavits/') + assert rv.status_code == HTTPStatus.OK + assert rv.json.get("key").startswith("Affidavits/") diff --git a/auth-api/tests/unit/api/test_documents_affidavit.py b/auth-api/tests/unit/api/test_documents_affidavit.py index 085228f7b3..fdc4fe0bd4 100644 --- a/auth-api/tests/unit/api/test_documents_affidavit.py +++ b/auth-api/tests/unit/api/test_documents_affidavit.py @@ -17,13 +17,14 @@ Test-Suite to ensure that the /documents/affidavit endpoint is working as expected. 
""" -from auth_api import status as http_status +from http import HTTPStatus + from auth_api.utils.enums import ContentType def test_affidavit_returns_200(client, jwt, session): # pylint:disable=unused-argument """Assert get affidavit documents endpoint returns 200.""" - rv = client.get('/api/v1/documents/affidavit') - assert rv.headers['Content-Type'] == ContentType.PDF.value - assert rv.status_code == http_status.HTTP_200_OK - assert rv.headers['Content-Disposition'] == 'attachment; filename=affidavit_v1.pdf' + rv = client.get("/api/v1/documents/affidavit") + assert rv.headers["Content-Type"] == ContentType.PDF.value + assert rv.status_code == HTTPStatus.OK + assert rv.headers["Content-Disposition"] == "attachment; filename=affidavit_v1.pdf" diff --git a/auth-api/tests/unit/api/test_entity.py b/auth-api/tests/unit/api/test_entity.py index d220578fae..fb741e3080 100644 --- a/auth-api/tests/unit/api/test_entity.py +++ b/auth-api/tests/unit/api/test_entity.py @@ -19,320 +19,432 @@ import copy import json -import pytest +from http import HTTPStatus from unittest.mock import patch -from auth_api import status as http_status +import pytest + from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.schemas import utils as schema_utils from auth_api.services import Entity as EntityService from tests.utilities.factory_scenarios import TestContactInfo, TestEntityInfo, TestJwtClaims from tests.utilities.factory_utils import ( - factory_affiliation_model, factory_affiliation_model_by_identifier, factory_auth_header, factory_entity_model, - factory_membership_model, factory_org_model, factory_user_model) + factory_affiliation_model, + factory_affiliation_model_by_identifier, + factory_auth_header, + factory_entity_model, + factory_membership_model, + factory_org_model, + factory_user_model, +) def test_add_entity(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity can be POSTed.""" headers = 
factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'business')[0] + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "business")[0] @pytest.mark.parametrize( - 'test_name, legal_type', [ - ('BC Limited Company', 'BC'), - ('BC Community Contribution Company', 'CC'), - ('BC Unlimited Liability Company', 'ULC') - ]) + "test_name, legal_type", + [ + ("BC Limited Company", "BC"), + ("BC Community Contribution Company", "CC"), + ("BC Unlimited Liability Company", "ULC"), + ], +) def test_temp_business_with_subtype(client, jwt, session, test_name, legal_type): # pylint:disable=unused-argument """Assert that a temp business with subtype can be POSTed and retrieved.""" temp_business_json = { - 'businessIdentifier': 'QWERTYUIO', - 'name': 'NR 1234567', - 'corpTypeCode': 'TMP', - 'corpSubTypeCode': legal_type + "businessIdentifier": "QWERTYUIO", + "name": "NR 1234567", + "corpTypeCode": "TMP", + "corpSubTypeCode": legal_type, } headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(temp_business_json), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'business')[0] - - entity_rv = client.get('/api/v1/entities/{}'.format(temp_business_json['businessIdentifier']), - headers=headers, content_type='application/json') - - assert entity_rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(entity_rv.json, 'business')[0] + rv = client.post( + "/api/v1/entities", 
data=json.dumps(temp_business_json), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "business")[0] + + entity_rv = client.get( + "/api/v1/entities/{}".format(temp_business_json["businessIdentifier"]), + headers=headers, + content_type="application/json", + ) + + assert entity_rv.status_code == HTTPStatus.OK + assert schema_utils.validate(entity_rv.json, "business")[0] dictionary = json.loads(entity_rv.data) - assert dictionary['corpSubType']['code'] == legal_type + assert dictionary["corpSubType"]["code"] == legal_type def test_add_entity_invalid_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid entity returns a 400.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.invalid), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.invalid), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_entity_no_auth_returns_401(client, session): # pylint:disable=unused-argument """Assert that POSTing an entity without an auth header returns a 401.""" - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=None, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=None, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_add_entity_invalid_returns_exception(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid entity returns an exception.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.system_role) - 
with patch.object(EntityService, 'save_entity', side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') + with patch.object(EntityService, "save_entity", side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity1), + headers=headers, + content_type="application/json", + ) assert rv.status_code == 400 def test_get_entity(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity can be retrieved via GET.""" headers_system = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv_create = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers_system, content_type='application/json') - assert rv_create.status_code == http_status.HTTP_201_CREATED + rv_create = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity1), + headers=headers_system, + content_type="application/json", + ) + assert rv_create.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.get('/api/v1/entities/{}'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/entities/{}".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'business')[0] + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "business")[0] dictionary = json.loads(rv.data) - assert dictionary['businessIdentifier'] == TestEntityInfo.entity1['businessIdentifier'] + assert dictionary["businessIdentifier"] == TestEntityInfo.entity1["businessIdentifier"] def 
test_get_entity_unauthorized_user_returns_403(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity can be retrieved via GET.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') + client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/entities/{}'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/entities/{}".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_403_FORBIDDEN + assert rv.status_code == HTTPStatus.FORBIDDEN def test_get_entity_no_auth_returns_401(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity cannot be retrieved without an authorization header.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - rv = client.get('/api/v1/entities/{}'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=None, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + rv = client.get( + "/api/v1/entities/{}".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=None, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_get_entity_no_entity_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that 
attempting to retrieve a non-existent entity returns a 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.get('/api/v1/entities/{}'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.get( + "/api/v1/entities/{}".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_add_contact(client, jwt, session): # pylint:disable=unused-argument """Assert that a contact can be added to an entity.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'business')[0] + rv = client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "business")[0] dictionary = json.loads(rv.data) - assert len(dictionary['contacts']) == 1 - assert dictionary['contacts'][0]['email'] == TestContactInfo.contact1['email'] + assert 
len(dictionary["contacts"]) == 1 + assert dictionary["contacts"][0]["email"] == TestContactInfo.contact1["email"] def test_add_contact_invalid_format_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that adding an invalidly formatted contact returns a 400.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - rv = client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.invalid), content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + rv = client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.invalid), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_contact_no_entity_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that adding a contact to a non-existant Entity returns 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_add_contact_duplicate_returns_400(client, jwt, session): # 
pylint:disable=unused-argument """Assert that adding a duplicate contact to an Entity returns 400.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - rv = client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + rv = client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_update_contact(client, jwt, session): # pylint:disable=unused-argument """Assert that a contact can be updated on an entity.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - rv = client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - - rv = 
client.put('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact2), content_type='application/json') - - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'business')[0] + client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + rv = client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED + + rv = client.put( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact2), + content_type="application/json", + ) + + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "business")[0] dictionary = json.loads(rv.data) - assert len(dictionary['contacts']) == 1 - assert dictionary['contacts'][0]['email'] == TestContactInfo.contact2['email'] + assert len(dictionary["contacts"]) == 1 + assert dictionary["contacts"][0]["email"] == TestContactInfo.contact2["email"] def test_update_contact_invalid_format_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that updating with an invalidly formatted contact returns a 400.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - rv = client.put('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, 
data=json.dumps(TestContactInfo.invalid), content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + rv = client.put( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.invalid), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_update_contact_no_entity_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that updating a contact on a non-existant entity returns 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.put('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.put( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_update_contact_missing_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that updating a non-existant contact returns 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - rv = client.put('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, 
data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + rv = client.put( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_update_entity_success(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity can be updated.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - - client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - - rv = client.patch('/api/v1/entities/{}'.format(TestEntityInfo.entity1['businessIdentifier']), - data=json.dumps(TestEntityInfo.entity2), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'business')[0] + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + + client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + + rv = client.patch( + "/api/v1/entities/{}".format(TestEntityInfo.entity1["businessIdentifier"]), + data=json.dumps(TestEntityInfo.entity2), + headers=headers, + content_type="application/json", + ) 
+ assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "business")[0] dictionary = json.loads(rv.data) - assert dictionary['businessIdentifier'] == TestEntityInfo.entity2['businessIdentifier'] + assert dictionary["businessIdentifier"] == TestEntityInfo.entity2["businessIdentifier"] # test business id alone can be updated - rv = client.patch('/api/v1/entities/{}'.format(TestEntityInfo.entity2['businessIdentifier']), - data=json.dumps({'businessIdentifier': 'CPNEW123', 'folioNumber': '123'}), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'business')[0] + rv = client.patch( + "/api/v1/entities/{}".format(TestEntityInfo.entity2["businessIdentifier"]), + data=json.dumps({"businessIdentifier": "CPNEW123", "folioNumber": "123"}), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "business")[0] dictionary = json.loads(rv.data) - assert dictionary['businessIdentifier'] == 'CPNEW123' - assert dictionary['folioNumber'] == '123' + assert dictionary["businessIdentifier"] == "CPNEW123" + assert dictionary["folioNumber"] == "123" def test_update_entity_with_folio_number(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity can be updated.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - - client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert 
rv.status_code == HTTPStatus.CREATED + + client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.patch('/api/v1/entities/{}'.format(TestEntityInfo.entity1['businessIdentifier']), - data=json.dumps(TestEntityInfo.entity_folio_number), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_403_FORBIDDEN + rv = client.patch( + "/api/v1/entities/{}".format(TestEntityInfo.entity1["businessIdentifier"]), + data=json.dumps(TestEntityInfo.entity_folio_number), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.FORBIDDEN user = factory_user_model() org = factory_org_model() factory_membership_model(user.id, org.id) - factory_affiliation_model_by_identifier(TestEntityInfo.entity1['businessIdentifier'], org.id) + factory_affiliation_model_by_identifier(TestEntityInfo.entity1["businessIdentifier"], org.id) claims = copy.deepcopy(TestJwtClaims.public_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.patch('/api/v1/entities/{}'.format(TestEntityInfo.entity1['businessIdentifier']), - data=json.dumps(TestEntityInfo.entity_folio_number), - headers=headers, content_type='application/json') - - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'business')[0] + rv = client.patch( + "/api/v1/entities/{}".format(TestEntityInfo.entity1["businessIdentifier"]), + data=json.dumps(TestEntityInfo.entity_folio_number), + headers=headers, + content_type="application/json", + ) + + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "business")[0] dictionary = json.loads(rv.data) - assert 
dictionary['businessIdentifier'] == TestEntityInfo.entity2['businessIdentifier'] - assert dictionary['folioNumber'] == TestEntityInfo.entity_folio_number['folioNumber'] + assert dictionary["businessIdentifier"] == TestEntityInfo.entity2["businessIdentifier"] + assert dictionary["folioNumber"] == TestEntityInfo.entity_folio_number["folioNumber"] def test_update_entity_failures(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.patch('/api/v1/entities/{}'.format('1234'), - data=json.dumps(TestEntityInfo.entity2), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.patch( + "/api/v1/entities/{}".format("1234"), + data=json.dumps(TestEntityInfo.entity2), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND - rv = client.patch('/api/v1/entities/{}'.format('1234'), - data=json.dumps(TestEntityInfo.entity2), - content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.patch( + "/api/v1/entities/{}".format("1234"), data=json.dumps(TestEntityInfo.entity2), content_type="application/json" + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_authorizations_for_staff_returns_200(client, jwt, session): # pylint:disable=unused-argument """Assert authorizations for staff user returns 200.""" - inc_number = 'tester' + inc_number = "tester" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get(f'/api/v1/entities/{inc_number}/authorizations', - headers=headers, content_type='application/json') + rv = client.get(f"/api/v1/entities/{inc_number}/authorizations", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] + assert 
rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] def test_authorizations_for_affiliated_users_returns_200(client, jwt, session): # pylint:disable=unused-argument @@ -344,15 +456,18 @@ def test_authorizations_for_affiliated_users_returns_200(client, jwt, session): factory_affiliation_model(entity.id, org.id) claims = copy.deepcopy(TestJwtClaims.edit_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/entities/{entity.business_identifier}/authorizations', - headers=headers, content_type='application/json') + rv = client.get( + f"/api/v1/entities/{entity.business_identifier}/authorizations", + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - assert rv.json.get('orgMembership') == 'ADMIN' + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + assert rv.json.get("orgMembership") == "ADMIN" def test_authorizations_for_expanded_result(client, jwt, session): # pylint:disable=unused-argument @@ -364,25 +479,31 @@ def test_authorizations_for_expanded_result(client, jwt, session): # pylint:dis factory_affiliation_model(entity.id, org.id) claims = copy.deepcopy(TestJwtClaims.edit_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/entities/{entity.business_identifier}/authorizations', - headers=headers, content_type='application/json') - - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - assert rv.json.get('orgMembership') == 'ADMIN' - assert rv.json.get('account', None) is None - - rv = 
client.get(f'/api/v1/entities/{entity.business_identifier}/authorizations?expanded=true', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - assert rv.json.get('account') is not None - assert rv.json.get('account').get('name') == org.name - assert rv.json.get('business').get('name') == entity.name - assert rv.json.get('business').get('folioNumber') == entity.folio_number + rv = client.get( + f"/api/v1/entities/{entity.business_identifier}/authorizations", + headers=headers, + content_type="application/json", + ) + + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + assert rv.json.get("orgMembership") == "ADMIN" + assert rv.json.get("account", None) is None + + rv = client.get( + f"/api/v1/entities/{entity.business_identifier}/authorizations?expanded=true", + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + assert rv.json.get("account") is not None + assert rv.json.get("account").get("name") == org.name + assert rv.json.get("business").get("name") == entity.name + assert rv.json.get("business").get("folioNumber") == entity.folio_number def test_authorizations_expanded_for_staff(client, jwt, session): # pylint:disable=unused-argument @@ -394,114 +515,148 @@ def test_authorizations_expanded_for_staff(client, jwt, session): # pylint:disa factory_affiliation_model(entity.id, org.id) claims = copy.deepcopy(TestJwtClaims.edit_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get(f'/api/v1/entities/{entity.business_identifier}/authorizations?expanded=true', - headers=headers, content_type='application/json') + rv = client.get( + 
f"/api/v1/entities/{entity.business_identifier}/authorizations?expanded=true", + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'account_response')[0] - assert rv.json.get('account') is not None - assert rv.json.get('account').get('name') == org.name - assert rv.json.get('business').get('name') == entity.name - assert rv.json.get('business').get('folioNumber') == entity.folio_number + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "account_response")[0] + assert rv.json.get("account") is not None + assert rv.json.get("account").get("name") == org.name + assert rv.json.get("business").get("name") == entity.name + assert rv.json.get("business").get("folioNumber") == entity.folio_number def test_delete_entity(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity can be deleted.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED - rv = client.delete('/api/v1/entities/{}'.format(TestEntityInfo.entity1.get('businessIdentifier')), headers=headers) - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete("/api/v1/entities/{}".format(TestEntityInfo.entity1.get("businessIdentifier")), headers=headers) + assert rv.status_code == HTTPStatus.NO_CONTENT def test_delete_entity_unauthorized(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity cannot be deleted by any role other than system.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = 
client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.delete('/api/v1/entities/{}'.format(TestEntityInfo.entity1.get('businessIdentifier')), headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.delete( + "/api/v1/entities/{}".format(TestEntityInfo.entity1.get("businessIdentifier")), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_add_entity_idempotent(client, jwt, session): # pylint:disable=unused-argument """Assert that an entity can be POSTed with same data.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_202_ACCEPTED - assert schema_utils.validate(rv.json, 'business')[0] + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.ACCEPTED + assert schema_utils.validate(rv.json, "business")[0] def test_reset_passcode_success(client, jwt, session): # 
pylint:disable=unused-argument """Assert that an entity passcode can be reset.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - - client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1.get('businessIdentifier')), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + + client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1.get("businessIdentifier")), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.patch('/api/v1/entities/{}'.format(TestEntityInfo.entity1.get('businessIdentifier')), - data=json.dumps(TestEntityInfo.entity_reset_passcode), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'business')[0] + rv = client.patch( + "/api/v1/entities/{}".format(TestEntityInfo.entity1.get("businessIdentifier")), + data=json.dumps(TestEntityInfo.entity_reset_passcode), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "business")[0] dictionary = json.loads(rv.data) - assert dictionary['businessIdentifier'] == TestEntityInfo.entity1['businessIdentifier'] - assert dictionary['passCodeClaimed'] is False + assert dictionary["businessIdentifier"] == TestEntityInfo.entity1["businessIdentifier"] + assert dictionary["passCodeClaimed"] is False def test_get_entity_contacts(client, jwt, 
session): """Assert that an entity contacts can be retrieved.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv_create = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv_create.status_code == http_status.HTTP_201_CREATED - client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') + rv_create = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv_create.status_code == HTTPStatus.CREATED + client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get(f'/api/v1/entities/{TestEntityInfo.entity1["businessIdentifier"]}/contacts', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get( + f'/api/v1/entities/{TestEntityInfo.entity1["businessIdentifier"]}/contacts', + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK data = json.loads(rv.data) - assert data['email'] != TestContactInfo.contact1['email'] - assert data['email'] == 'fo*@ba*****' - assert 'phone' not in data - assert 'phone_extension' not in data + assert data["email"] != TestContactInfo.contact1["email"] + assert data["email"] == "fo*@ba*****" + assert "phone" not in data + assert "phone_extension" not in data def test_get_entity_authentication(client, jwt, session): """Assert that an entity authentication can be retrieved.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv_create = 
client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') - assert rv_create.status_code == http_status.HTTP_201_CREATED - client.post('/api/v1/entities/{}/contacts'.format(TestEntityInfo.entity1['businessIdentifier']), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') + rv_create = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) + assert rv_create.status_code == HTTPStatus.CREATED + client.post( + "/api/v1/entities/{}/contacts".format(TestEntityInfo.entity1["businessIdentifier"]), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get(f'/api/v1/entities/{TestEntityInfo.entity1["businessIdentifier"]}/authentication', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get( + f'/api/v1/entities/{TestEntityInfo.entity1["businessIdentifier"]}/authentication', + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK data = json.loads(rv.data) - assert data['contactEmail'] != TestContactInfo.contact1['email'] - assert data['contactEmail'] == 'fo*@ba*****' - assert 'hasValidPassCode' in data + assert data["contactEmail"] != TestContactInfo.contact1["email"] + assert data["contactEmail"] == "fo*@ba*****" + assert "hasValidPassCode" in data diff --git a/auth-api/tests/unit/api/test_invitation.py b/auth-api/tests/unit/api/test_invitation.py index 0ccaff2c38..56e16dbdde 100644 --- a/auth-api/tests/unit/api/test_invitation.py +++ b/auth-api/tests/unit/api/test_invitation.py @@ -17,10 +17,10 @@ Test-Suite to ensure that the /invitations endpoint is working as expected. 
""" import json +from http import HTTPStatus import pytest -from auth_api import status as http_status from auth_api.schemas import utils as schema_utils from auth_api.services import Invitation as InvitationService from auth_api.services.keycloak import KeycloakService @@ -32,140 +32,214 @@ KEYCLOAK_SERVICE = KeycloakService() -@pytest.mark.parametrize('org_info, role, claims', [ - (TestOrgInfo.org_regular, 'ADMIN', TestJwtClaims.public_user_role), - (TestOrgInfo.org_regular, 'USER', TestJwtClaims.public_user_role), - (TestOrgInfo.org_regular, 'COORDINATOR', TestJwtClaims.public_user_role), - (TestOrgInfo.org_regular_bceid, 'ADMIN', TestJwtClaims.public_bceid_user), - (TestOrgInfo.org_regular_bceid, 'USER', TestJwtClaims.public_bceid_user), - (TestOrgInfo.org_regular_bceid, 'COORDINATOR', TestJwtClaims.public_bceid_user) -]) +@pytest.mark.parametrize( + "org_info, role, claims", + [ + (TestOrgInfo.org_regular, "ADMIN", TestJwtClaims.public_user_role), + (TestOrgInfo.org_regular, "USER", TestJwtClaims.public_user_role), + (TestOrgInfo.org_regular, "COORDINATOR", TestJwtClaims.public_user_role), + (TestOrgInfo.org_regular_bceid, "ADMIN", TestJwtClaims.public_bceid_user), + (TestOrgInfo.org_regular_bceid, "USER", TestJwtClaims.public_bceid_user), + (TestOrgInfo.org_regular_bceid, "COORDINATOR", TestJwtClaims.public_bceid_user), + ], +) def test_add_invitation(client, jwt, session, keycloak_mock, org_info, role, claims): # pylint:disable=unused-argument """Assert that an invitation can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(org_info), headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post("/api/v1/orgs", data=json.dumps(org_info), headers=headers, content_type="application/json") dictionary = 
json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id=org_id, membership_type=role)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id=org_id, membership_type=role)), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert dictionary.get('token') is not None - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'invitation_response')[0] + assert dictionary.get("token") is not None + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "invitation_response")[0] def test_add_invitation_invalid(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid invitation returns a 400.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id=None)), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id=None)), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_get_invitations_by_id(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an invitation can be retrieved.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", 
data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id=org_id)), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv.data) - invitation_id = invitation_dictionary['id'] - rv = client.get('/api/v1/invitations/{}'.format(invitation_id), headers=headers, content_type='application/json') - assert schema_utils.validate(rv.json, 'invitation_response')[0] - assert rv.status_code == http_status.HTTP_200_OK + invitation_id = invitation_dictionary["id"] + rv = client.get("/api/v1/invitations/{}".format(invitation_id), headers=headers, content_type="application/json") + assert schema_utils.validate(rv.json, "invitation_response")[0] + assert rv.status_code == HTTPStatus.OK def test_delete_invitation(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an invitation can be deleted.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + 
data=json.dumps(factory_invitation(org_id=org_id)), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv.data) - invitation_id = invitation_dictionary['id'] - rv = client.delete('/api/v1/invitations/{}'.format(invitation_id), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + invitation_id = invitation_dictionary["id"] + rv = client.delete("/api/v1/invitations/{}".format(invitation_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK - rv = client.get('/api/v1/invitations/{}'.format(invitation_id), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.get("/api/v1/invitations/{}".format(invitation_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NOT_FOUND dictionary = json.loads(rv.data) - assert dictionary['message'] == 'The requested invitation could not be found.' + assert dictionary["message"] == "The requested invitation could not be found." 
def test_update_invitation(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an invitation can be updated.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id=org_id)), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv.data) - invitation_id = invitation_dictionary['id'] + invitation_id = invitation_dictionary["id"] updated_invitation = {} - rv = client.patch('/api/v1/invitations/{}'.format(invitation_id), data=json.dumps(updated_invitation), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'invitation_response')[0] + rv = client.patch( + "/api/v1/invitations/{}".format(invitation_id), + data=json.dumps(updated_invitation), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "invitation_response")[0] dictionary = json.loads(rv.data) - assert dictionary['status'] == 'PENDING' + assert dictionary["status"] == "PENDING" def test_validate_token(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a token is valid.""" 
headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id=org_id)), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv.data) - invitation_id = invitation_dictionary['id'] + invitation_id = invitation_dictionary["id"] invitation_id_token = InvitationService.generate_confirmation_token(invitation_id) - rv = client.get('/api/v1/invitations/tokens/{}'.format(invitation_id_token), headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - - -@pytest.mark.parametrize('org_info, role, claims, source, exp_status', [ - (TestOrgInfo.org_regular, 'ADMIN', TestJwtClaims.public_account_holder_user, LoginSource.BCSC.value, - Status.PENDING_APPROVAL), - (TestOrgInfo.org_regular, 'USER', TestJwtClaims.public_account_holder_user, LoginSource.BCSC.value, - Status.PENDING_APPROVAL), - (TestOrgInfo.org_regular, 'COORDINATOR', TestJwtClaims.public_account_holder_user, LoginSource.BCSC.value, - Status.PENDING_APPROVAL), - (TestOrgInfo.org_regular_bceid, 'ADMIN', TestJwtClaims.public_bceid_account_holder_user, LoginSource.BCEID.value, - Status.PENDING_STAFF_REVIEW), - (TestOrgInfo.org_regular_bceid, 'USER', TestJwtClaims.public_bceid_account_holder_user, 
LoginSource.BCEID.value, - Status.PENDING_APPROVAL), - (TestOrgInfo.org_regular_bceid, 'COORDINATOR', TestJwtClaims.public_bceid_account_holder_user, - LoginSource.BCEID.value, Status.PENDING_APPROVAL) -]) -def test_accept_public_users_invitation(client, jwt, session, org_info, role, # pylint:disable=unused-argument - claims, source, exp_status): + rv = client.get( + "/api/v1/invitations/tokens/{}".format(invitation_id_token), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK + + +@pytest.mark.parametrize( + "org_info, role, claims, source, exp_status", + [ + ( + TestOrgInfo.org_regular, + "ADMIN", + TestJwtClaims.public_account_holder_user, + LoginSource.BCSC.value, + Status.PENDING_APPROVAL, + ), + ( + TestOrgInfo.org_regular, + "USER", + TestJwtClaims.public_account_holder_user, + LoginSource.BCSC.value, + Status.PENDING_APPROVAL, + ), + ( + TestOrgInfo.org_regular, + "COORDINATOR", + TestJwtClaims.public_account_holder_user, + LoginSource.BCSC.value, + Status.PENDING_APPROVAL, + ), + ( + TestOrgInfo.org_regular_bceid, + "ADMIN", + TestJwtClaims.public_bceid_account_holder_user, + LoginSource.BCEID.value, + Status.PENDING_STAFF_REVIEW, + ), + ( + TestOrgInfo.org_regular_bceid, + "USER", + TestJwtClaims.public_bceid_account_holder_user, + LoginSource.BCEID.value, + Status.PENDING_APPROVAL, + ), + ( + TestOrgInfo.org_regular_bceid, + "COORDINATOR", + TestJwtClaims.public_bceid_account_holder_user, + LoginSource.BCEID.value, + Status.PENDING_APPROVAL, + ), + ], +) +def test_accept_public_users_invitation( + client, jwt, session, org_info, role, claims, source, exp_status # pylint:disable=unused-argument +): """Assert that an invitation can be accepted.""" headers = factory_auth_header(jwt=jwt, claims=claims) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(org_info), headers=headers, content_type='application/json') + 
client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post("/api/v1/orgs", data=json.dumps(org_info), headers=headers, content_type="application/json") dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id=org_id, membership_type=role)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id=org_id, membership_type=role)), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv.data) - invitation_id = invitation_dictionary['id'] + invitation_id = invitation_dictionary["id"] invitation_id_token = InvitationService.generate_confirmation_token(invitation_id) request = KeycloakScenario.create_user_request() @@ -173,57 +247,70 @@ def test_accept_public_users_invitation(client, jwt, session, org_info, role, # user = KEYCLOAK_SERVICE.get_user_by_username(request.user_name) user_id = user.id headers_invitee = factory_auth_header(jwt=jwt, claims=TestJwtClaims.get_test_user(user_id, source=source)) - client.post('/api/v1/users', headers=headers_invitee, content_type='application/json') - rv = client.put('/api/v1/invitations/tokens/{}'.format(invitation_id_token), headers=headers_invitee, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - - rv = client.get(f'/api/v1/orgs/{org_id}/members?status={exp_status.name}', - headers=headers, - content_type='application/json') + client.post("/api/v1/users", headers=headers_invitee, content_type="application/json") + rv = client.put( + "/api/v1/invitations/tokens/{}".format(invitation_id_token), + headers=headers_invitee, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + + rv = client.get( + f"/api/v1/orgs/{org_id}/members?status={exp_status.name}", headers=headers, 
content_type="application/json" + ) dictionary = json.loads(rv.data) - assert len(dictionary['members']) == 1 + assert len(dictionary["members"]) == 1 # Assert that the user got added to the keycloak groups user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_PUBLIC_USERS in groups -@pytest.mark.skip(reason='This is wrong test case') +@pytest.mark.skip(reason="This is wrong test case") def test_accept_gov_account_invitation(client, jwt, session): # pylint:disable=unused-argument """Assert that an invitation can be accepted.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.gov_account_holder_user) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id=org_id)), + headers=headers, + content_type="application/json", + ) invitation_dictionary = json.loads(rv.data) - invitation_id = invitation_dictionary['id'] + invitation_id = invitation_dictionary["id"] invitation_id_token = InvitationService.generate_confirmation_token(invitation_id) request = KeycloakScenario.create_user_request() KEYCLOAK_SERVICE.add_user(request, return_if_exists=True) user = KEYCLOAK_SERVICE.get_user_by_username(request.user_name) user_id = user.id - headers_invitee = 
factory_auth_header(jwt=jwt, claims=TestJwtClaims.get_test_user(user_id, source='IDIR', roles=[])) - client.post('/api/v1/users', headers=headers_invitee, content_type='application/json') - rv = client.put('/api/v1/invitations/tokens/{}'.format(invitation_id_token), headers=headers_invitee, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - - rv = client.get('/api/v1/orgs/{}/members?status=PENDING_APPROVAL'.format(org_id), - headers=headers, - content_type='application/json') + headers_invitee = factory_auth_header(jwt=jwt, claims=TestJwtClaims.get_test_user(user_id, source="IDIR", roles=[])) + client.post("/api/v1/users", headers=headers_invitee, content_type="application/json") + rv = client.put( + "/api/v1/invitations/tokens/{}".format(invitation_id_token), + headers=headers_invitee, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + + rv = client.get( + "/api/v1/orgs/{}/members?status=PENDING_APPROVAL".format(org_id), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert len(dictionary['members']) == 1 + assert len(dictionary["members"]) == 1 # Assert that the user got added to the keycloak groups user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_GOV_ACCOUNT_USERS in groups diff --git a/auth-api/tests/unit/api/test_meta.py b/auth-api/tests/unit/api/test_meta.py deleted file mode 100644 index d8138cdce5..0000000000 --- a/auth-api/tests/unit/api/test_meta.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tests to assure the meta end-point. - -Test-Suite to ensure that the /meta endpoint is working as expected. -""" -from tests import skip_in_pod - - -@skip_in_pod -def test_meta_no_commit_hash(client): - """Assert that the endpoint returns just the services __version__.""" - from auth_api.version import __version__ # pylint: disable=import-outside-toplevel - - rv = client.get('/api/v1/meta/info') - - assert rv.status_code == 200 - assert rv.json == {'API': f'auth_api/{__version__}'} - - -def test_meta_with_commit_hash(monkeypatch, client): - """Assert that the endpoint return __version__ and the last git hash used to build the services image.""" - from auth_api.version import __version__ # pylint: disable=import-outside-toplevel - - commit_hash = 'deadbeef_ha' - monkeypatch.setenv('OPENSHIFT_BUILD_COMMIT', commit_hash) - - rv = client.get('/api/v1/meta/info') - assert rv.status_code == 200 - assert rv.json == {'API': f'auth_api/{__version__}-{commit_hash}'} diff --git a/auth-api/tests/unit/api/test_ops.py b/auth-api/tests/unit/api/test_ops.py index c8aee86854..015241694c 100644 --- a/auth-api/tests/unit/api/test_ops.py +++ b/auth-api/tests/unit/api/test_ops.py @@ -16,29 +16,35 @@ Test-Suite to ensure that the /ops endpoint is working as expected. 
""" +from sqlalchemy.exc import SQLAlchemyError + +from auth_api.models import db def test_ops_healthz_success(client): """Assert that the service is healthy if it can successfully access the database.""" - rv = client.get('/ops/healthz') + rv = client.get("/ops/healthz") assert rv.status_code == 200 - assert rv.json == {'message': 'api is healthy'} + assert rv.json == {"message": "api is healthy"} -def test_ops_healthz_fail(app_request): +def test_ops_healthz_fail(app_request, monkeypatch): """Assert that the service is unhealthy if a connection toThe database cannot be made.""" - app_request.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://does:not@exist:5432/nada' - with app_request.test_client() as client: - rv = client.get('/ops/healthz') + def db_error(_): + raise SQLAlchemyError(1, 2, code="42") + + monkeypatch.setattr(db.session, "execute", db_error) + with app_request.test_client() as client: + rv = client.get("/ops/healthz") assert rv.status_code == 500 - assert rv.json == {'message': 'api is down'} + assert rv.json == {"message": "api is down"} def test_ops_readyz(client): """Asserts that the service is ready to serve.""" - rv = client.get('/ops/readyz') + rv = client.get("/ops/readyz") assert rv.status_code == 200 - assert rv.json == {'message': 'api is ready'} + assert rv.json == {"message": "api is ready"} diff --git a/auth-api/tests/unit/api/test_org.py b/auth-api/tests/unit/api/test_org.py index bbb08a2257..5962fe09b6 100644 --- a/auth-api/tests/unit/api/test_org.py +++ b/auth-api/tests/unit/api/test_org.py @@ -18,13 +18,13 @@ """ import json import uuid +from datetime import datetime +from http import HTTPStatus from unittest.mock import patch import pytest -from datetime import datetime from faker import Faker -from auth_api import status as http_status from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models import Affidavit as AffidavitModel @@ -40,1022 +40,1251 @@ from 
auth_api.services import Task as TaskService from auth_api.services import User as UserService from auth_api.utils.enums import ( - AccessType, AffidavitStatus, CorpType, NRActionCodes, NRStatus, OrgStatus, OrgType, PatchActions, PaymentMethod, - ProductCode, ProductSubscriptionStatus, Status, - SuspensionReasonCode, TaskStatus, TaskRelationshipStatus) + AccessType, + AffidavitStatus, + CorpType, + NRActionCodes, + NRStatus, + OrgStatus, + OrgType, + PatchActions, + PaymentMethod, + ProductCode, + ProductSubscriptionStatus, + Status, + SuspensionReasonCode, + TaskRelationshipStatus, + TaskStatus, +) from auth_api.utils.roles import ADMIN # noqa: I005 from tests.utilities.factory_scenarios import ( - DeleteAffiliationPayload, TestAffidavit, TestAffliationInfo, TestContactInfo, TestEntityInfo, TestJwtClaims, - TestOrgInfo, TestPaymentMethodInfo) + DeleteAffiliationPayload, + TestAffidavit, + TestAffliationInfo, + TestContactInfo, + TestEntityInfo, + TestJwtClaims, + TestOrgInfo, + TestPaymentMethodInfo, +) from tests.utilities.factory_utils import ( - convert_org_to_staff_org, factory_affiliation_model, factory_auth_header, factory_entity_model, factory_invitation, - factory_invitation_anonymous, factory_membership_model, factory_org_model, factory_user_model, - patch_pay_account_delete, patch_pay_account_delete_error) + convert_org_to_staff_org, + factory_affiliation_model, + factory_auth_header, + factory_entity_model, + factory_invitation, + factory_invitation_anonymous, + factory_membership_model, + factory_org_model, + factory_user_model, + patch_pay_account_delete, + patch_pay_account_delete_error, +) FAKE = Faker() -@pytest.mark.parametrize('org_info', [TestOrgInfo.org1, TestOrgInfo.org_onlinebanking, TestOrgInfo.org_with_products, - TestOrgInfo.org_regular, TestOrgInfo.org_with_all_info]) +@pytest.mark.parametrize( + "org_info", + [ + TestOrgInfo.org1, + TestOrgInfo.org_onlinebanking, + TestOrgInfo.org_with_products, + TestOrgInfo.org_regular, + 
TestOrgInfo.org_with_all_info, + ], +) def test_add_org(client, jwt, session, keycloak_mock, org_info): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(org_info), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'org_response')[0] - - -@pytest.mark.parametrize('org_info', [TestOrgInfo.org1, TestOrgInfo.org_onlinebanking, TestOrgInfo.org_with_products, - TestOrgInfo.org_regular, TestOrgInfo.org_with_all_info]) + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post("/api/v1/orgs", data=json.dumps(org_info), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "org_response")[0] + + +@pytest.mark.parametrize( + "org_info", + [ + TestOrgInfo.org1, + TestOrgInfo.org_onlinebanking, + TestOrgInfo.org_with_products, + TestOrgInfo.org_regular, + TestOrgInfo.org_with_all_info, + ], +) def test_add_org_by_anon_user(client, jwt, session, keycloak_mock, org_info): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.anonymous_bcros_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(org_info), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post("/api/v1/orgs", data=json.dumps(org_info), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.UNAUTHORIZED def 
test_add_basic_org_with_pad_throws_error(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - org_info = {'name': 'My Test Org', 'paymentType': 'PAD'} - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(org_info), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + org_info = {"name": "My Test Org", "paymentType": "PAD"} + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post("/api/v1/orgs", data=json.dumps(org_info), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_search_org_by_client(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be searched.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED # Ensure no exception is thrown by including letters in the id. 
headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.get('/api/v1/orgs?id={}'.format('FFF1234'), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs?id={}".format("FFF1234"), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK # system search headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.get('/api/v1/orgs?name={}'.format(TestOrgInfo.org1.get('name')), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get( + "/api/v1/orgs?name={}".format(TestOrgInfo.org1.get("name")), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('name') == TestOrgInfo.org1.get('name') + assert orgs.get("orgs")[0].get("name") == TestOrgInfo.org1.get("name") headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/orgs?name={}'.format(TestOrgInfo.org1.get('name')), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get( + "/api/v1/orgs?name={}".format(TestOrgInfo.org1.get("name")), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK orgs = json.loads(rv.data) assert bool(orgs) is False - rv = client.get('/api/v1/orgs?name={}'.format('notanexistingorgname'), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.get( + "/api/v1/orgs?name={}".format("notanexistingorgname"), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.NO_CONTENT # staff search headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = 
client.get('/api/v1/orgs?name={}'.format(TestOrgInfo.org1.get('name')), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get( + "/api/v1/orgs?name={}".format(TestOrgInfo.org1.get("name")), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('name') == TestOrgInfo.org1.get('name') + assert orgs.get("orgs")[0].get("name") == TestOrgInfo.org1.get("name") headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.get('/api/v1/orgs?status={}'.format('ACTIVE'), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get("/api/v1/orgs?status={}".format("ACTIVE"), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('name') == TestOrgInfo.org1.get('name') + assert orgs.get("orgs")[0].get("name") == TestOrgInfo.org1.get("name") headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.get('/api/v1/orgs?status={}&type={}'.format('ACTIVE', 'REGULAR'), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get( + "/api/v1/orgs?status={}&type={}".format("ACTIVE", "REGULAR"), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('name') == 
TestOrgInfo.org1.get('name') + assert orgs.get("orgs")[0].get("name") == TestOrgInfo.org1.get("name") def test_duplicate_name(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be searched using multiple syntax.""" # Create active org headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - name = TestOrgInfo.org1.get('name') - rv = client.get(f'/api/v1/orgs?validateName=true&name={name}', - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + name = TestOrgInfo.org1.get("name") + rv = client.get(f"/api/v1/orgs?validateName=true&name={name}", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK + assert rv.status_code == HTTPStatus.OK # not existing brnach name ; so 204 - rv = client.get(f'/api/v1/orgs?validateName=true&name={name}&branchName=foo', - headers=headers, content_type='application/json') + rv = client.get( + f"/api/v1/orgs?validateName=true&name={name}&branchName=foo", headers=headers, content_type="application/json" + ) - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + assert rv.status_code == HTTPStatus.NO_CONTENT # empty brnach name; so 200 - rv = client.get(f'/api/v1/orgs?validateName=true&name={name}&branchName=', - headers=headers, content_type='application/json') + rv = client.get( + f"/api/v1/orgs?validateName=true&name={name}&branchName=", headers=headers, content_type="application/json" + ) - assert 
rv.status_code == http_status.HTTP_200_OK + assert rv.status_code == HTTPStatus.OK # does not conflict with rejected accounts rejected_org = factory_org_model(org_info=TestOrgInfo.org2) rejected_org.status_code = OrgStatus.REJECTED.value rejected_org.save() - rv = client.get(f'/api/v1/orgs?validateName=true&name={rejected_org.name}&branchName=', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.get( + f"/api/v1/orgs?validateName=true&name={rejected_org.name}&branchName=", + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NO_CONTENT def test_search_org_by_client_multiple_status(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be searched using multiple syntax.""" # Create active org headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED # create suspended org public_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=public_headers, content_type='application/json') + client.post("/api/v1/users", headers=public_headers, content_type="application/json") - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), - headers=public_headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + org_response = 
client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=public_headers, + content_type="application/json", + ) + assert org_response.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'statusCode': OrgStatus.SUSPENDED.value, - 'suspensionReasonCode': SuspensionReasonCode.OWNER_CHANGE.name}), - headers=headers, content_type='application/json') - assert org_patch_response.json.get('orgStatus') == OrgStatus.SUSPENDED.value + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps( + {"statusCode": OrgStatus.SUSPENDED.value, "suspensionReasonCode": SuspensionReasonCode.OWNER_CHANGE.name} + ), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.json.get("orgStatus") == OrgStatus.SUSPENDED.value headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=headers, content_type='application/json') - document_signature = client.get('/api/v1/documents/test.jpeg/signatures', headers=headers, - content_type='application/json') - doc_key = document_signature.json.get('key') - client.post('/api/v1/users/{}/affidavits'.format(TestJwtClaims.public_user_role.get('sub')), - headers=headers, - data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), - content_type='application/json') - - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address(name='foobar1')), - headers=headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + client.post("/api/v1/users", headers=headers, content_type="application/json") + document_signature = client.get( + "/api/v1/documents/test.jpeg/signatures", headers=headers, 
content_type="application/json" + ) + doc_key = document_signature.json.get("key") + client.post( + "/api/v1/users/{}/affidavits".format(TestJwtClaims.public_user_role.get("sub")), + headers=headers, + data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), + content_type="application/json", + ) + + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address(name="foobar1")), + headers=headers, + content_type="application/json", + ) + assert org_response.status_code == HTTPStatus.CREATED # staff search headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.get('/api/v1/orgs', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get("/api/v1/orgs", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('total') == 3 + assert orgs.get("total") == 3 - rv = client.get('/api/v1/orgs?status=ACTIVE&status=SUSPENDED', - headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs?status=ACTIVE&status=SUSPENDED", headers=headers, content_type="application/json") orgs = json.loads(rv.data) - assert orgs.get('total') == 2 + assert orgs.get("total") == 2 - rv = client.get('/api/v1/orgs?status=ACTIVE&status=SUSPENDED&status=PENDING_STAFF_REVIEW', - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/orgs?status=ACTIVE&status=SUSPENDED&status=PENDING_STAFF_REVIEW", + headers=headers, + content_type="application/json", + ) orgs = json.loads(rv.data) - assert orgs.get('total') == 3 + assert orgs.get("total") == 3 - rv = client.get('/api/v1/orgs?status=ACTIVE&status=SUSPENDED&status=PENDING_STAFF_REVIEW&status=ABCS', - headers=headers, 
content_type='application/json') + rv = client.get( + "/api/v1/orgs?status=ACTIVE&status=SUSPENDED&status=PENDING_STAFF_REVIEW&status=ABCS", + headers=headers, + content_type="application/json", + ) orgs = json.loads(rv.data) - assert orgs.get('total') == 3 + assert orgs.get("total") == 3 - rv = client.get('/api/v1/orgs?status=PENDING_STAFF_REVIEW', - headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs?status=PENDING_STAFF_REVIEW", headers=headers, content_type="application/json") orgs = json.loads(rv.data) - assert orgs.get('total') == 1 + assert orgs.get("total") == 1 def test_search_org_for_dir_search(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be searched.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_dir_search_role) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) # staff search with manage account role gets both ORG headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.get('/api/v1/orgs', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert 
schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get("/api/v1/orgs", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert len(orgs.get('orgs')) == 2 + assert len(orgs.get("orgs")) == 2 # staff search with staff_admin_role gets both ORG headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.get('/api/v1/orgs', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get("/api/v1/orgs", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert len(orgs.get('orgs')) == 2 + assert len(orgs.get("orgs")) == 2 # staff search with out manage account role gets only normal org headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_view_accounts_role) - rv = client.get('/api/v1/orgs', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get("/api/v1/orgs", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert len(orgs.get('orgs')) == 1 + assert len(orgs.get("orgs")) == 1 def test_add_govm_org_staff_admin(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_govm), - headers=headers, 
content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_govm), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - assert dictionary['accessType'] == AccessType.GOVM.value - assert dictionary['orgType'] == OrgType.PREMIUM.value - assert dictionary['orgStatus'] == OrgStatus.PENDING_INVITE_ACCEPT.value - assert schema_utils.validate(rv.json, 'org_response')[0] + assert dictionary["accessType"] == AccessType.GOVM.value + assert dictionary["orgType"] == OrgType.PREMIUM.value + assert dictionary["orgStatus"] == OrgStatus.PENDING_INVITE_ACCEPT.value + assert schema_utils.validate(rv.json, "org_response")[0] def test_add_govm_full_flow(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_govm), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_govm), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - assert dictionary.get('branchName') == TestOrgInfo.org_govm.get('branchName') - org_id = dictionary['id'] + assert dictionary.get("branchName") == TestOrgInfo.org_govm.get("branchName") + org_id = dictionary["id"] # Invite a user to the org - rv = client.post('/api/v1/invitations', - data=json.dumps(factory_invitation(org_id, 'abc123@email.com', membership_type=ADMIN)), - headers=headers, content_type='application/json') + rv = 
client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id, "abc123@email.com", membership_type=ADMIN)), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) - invitation_id = dictionary['id'] + invitation_id = dictionary["id"] invitation_id_token = InvitationService.generate_confirmation_token(invitation_id) # Get pending members for the org as invitee and assert length of 1 - rv = client.get('/api/v1/orgs/{}/members?status=ACTIVE'.format(org_id), headers=headers) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}/members?status=ACTIVE".format(org_id), headers=headers) + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert not dictionary # Create/login as invited user headers_invited = factory_auth_header(jwt=jwt, claims=TestJwtClaims.gov_account_holder_user) - rv = client.post('/api/v1/users', headers=headers_invited, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers_invited, content_type="application/json") # Accept invite as invited user - rv = client.put('/api/v1/invitations/tokens/{}'.format(invitation_id_token), - headers=headers_invited, content_type='application/json') + rv = client.put( + "/api/v1/invitations/tokens/{}".format(invitation_id_token), + headers=headers_invited, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert dictionary['status'] == 'ACCEPTED' + assert dictionary["status"] == "ACCEPTED" # Get ACTIVE members for the org as invitee and assert length of 1 - rv = client.get('/api/v1/orgs/{}/members?status=ACTIVE'.format(org_id), headers=headers) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}/members?status=ACTIVE".format(org_id), headers=headers) + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert 
dictionary['members'] - assert len(dictionary['members']) == 1 + assert dictionary["members"] + assert len(dictionary["members"]) == 1 update_org_payload = { - 'mailingAddress': { - 'city': 'Innisfail', - 'country': 'CA', - 'region': 'AB', - 'postalCode': 'T4G 1P5', - 'street': 'D-4619 45 Ave', - 'streetAdditional': '' + "mailingAddress": { + "city": "Innisfail", + "country": "CA", + "region": "AB", + "postalCode": "T4G 1P5", + "street": "D-4619 45 Ave", + "streetAdditional": "", }, - 'paymentInfo': { - 'revenueAccount': { - 'projectCode': '100', - 'responsibilityCentre': '100', - 'serviceLine': '100', - 'stob': '100' - } + "paymentInfo": { + "revenueAccount": {"projectCode": "100", "responsibilityCentre": "100", "serviceLine": "100", "stob": "100"} }, - 'productSubscriptions': [ - { - 'productCode': 'VS' - }, - { - 'productCode': 'BCA' - } - ] + "productSubscriptions": [{"productCode": "VS"}, {"productCode": "BCA"}], } - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps(update_org_payload), - headers=headers_invited, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.put( + "/api/v1/orgs/{}".format(org_id), + data=json.dumps(update_org_payload), + headers=headers_invited, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK - rv_products = client.get(f'/api/v1/orgs/{org_id}/products', - headers=headers_invited, content_type='application/json') + rv_products = client.get( + f"/api/v1/orgs/{org_id}/products", headers=headers_invited, content_type="application/json" + ) list_products = json.loads(rv_products.data) - vs_product = next(x for x in list_products if x.get('code') == 'VS') - assert vs_product.get('subscriptionStatus') == 'ACTIVE' + vs_product = next(x for x in list_products if x.get("code") == "VS") + assert vs_product.get("subscriptionStatus") == "ACTIVE" def test_add_anonymous_org_staff_admin(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument 
"""Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - assert dictionary['accessType'] == 'ANONYMOUS' - assert schema_utils.validate(rv.json, 'org_response')[0] + assert dictionary["accessType"] == "ANONYMOUS" + assert schema_utils.validate(rv.json, "org_response")[0] def test_add_govm_org_by_user_exception(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_govm), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_govm), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_add_anonymous_org_by_user_exception(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - 
rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED -def test_add_org_staff_admin_anonymous_not_passed(client, jwt, session, - keycloak_mock): # pylint:disable=unused-argument +def test_add_org_staff_admin_anonymous_not_passed( + client, jwt, session, keycloak_mock +): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps({'name': 'My Test Org', 'accessType': AccessType.ANONYMOUS.value}), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", + data=json.dumps({"name": "My Test Org", "accessType": AccessType.ANONYMOUS.value}), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - assert dictionary['accessType'] == 'ANONYMOUS' + assert dictionary["accessType"] == "ANONYMOUS" def test_add_org_staff_admin_any_number_of_orgs(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - 
headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org2), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org3), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org4), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org5), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org2), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org3), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org4), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org5), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED def test_add_org_multiple(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.But in limited number.""" headers = factory_auth_header(jwt=jwt, 
claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv1 = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv1.status_code == http_status.HTTP_201_CREATED - rv2 = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org2), - headers=headers, content_type='application/json') - assert rv2.status_code == http_status.HTTP_201_CREATED - rv3 = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org3), - headers=headers, content_type='application/json') - assert rv3.status_code == http_status.HTTP_201_CREATED - rv4 = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org4), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv1 = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv1.status_code == HTTPStatus.CREATED + rv2 = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org2), headers=headers, content_type="application/json" + ) + assert rv2.status_code == HTTPStatus.CREATED + rv3 = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org3), headers=headers, content_type="application/json" + ) + assert rv3.status_code == HTTPStatus.CREATED + rv4 = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org4), headers=headers, content_type="application/json" + ) # max number of orgs reached. 
- assert rv4.status_code == http_status.HTTP_400_BAD_REQUEST + assert rv4.status_code == HTTPStatus.BAD_REQUEST def test_add_same_org_409(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED, 'created first org' - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_409_CONFLICT, 'not able to create duplicates org' + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED, "created first org" + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CONFLICT, "not able to create duplicates org" def test_add_org_invalid_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid org returns a 400.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.invalid), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.invalid), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_org_invalid_space_returns_400(client, jwt, session): # 
pylint:disable=unused-argument """Assert that POSTing an invalid org returns a 400.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.invalid_name_space), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.invalid_name_space), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_org_invalid_spaces_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid org returns a 400.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.invalid_name_spaces), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.invalid_name_spaces), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_org_invalid_end_space_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid org returns a 400.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.invalid_name_end_space), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.invalid_name_end_space), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_org_invalid_start_space_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid org returns a 400.""" headers = 
factory_auth_header(jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.invalid_name_start_space), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.invalid_name_start_space), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_org_invalid_returns_401(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid org returns a 401.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.view_role) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_add_org_normal_staff_invalid_returns_401(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid org returns a 401.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.staff_role) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_add_org_invalid_user_returns_401(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an org with invalid user returns a 401.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.public_user_role) - with patch.object(UserService, 'find_by_jwt_token', return_value=None): - rv = client.post('/api/v1/orgs', 
data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + with patch.object(UserService, "find_by_jwt_token", return_value=None): + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_add_org_invalid_returns_exception(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing an invalid org returns an exception.""" headers = factory_auth_header(jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") - with patch.object(OrgService, 'create_org', side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + with patch.object(OrgService, "create_org", side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) assert rv.status_code == 400 - assert schema_utils.validate(rv.json, 'exception')[0] + assert schema_utils.validate(rv.json, "exception")[0] def test_get_org(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be retrieved via GET.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", 
data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - rv = client.get('/api/v1/orgs/{}'.format(org_id), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'org_response')[0] + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "org_response")[0] dictionary = json.loads(rv.data) - assert dictionary['id'] == org_id + assert dictionary["id"] == org_id def test_get_org_no_auth_returns_401(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org cannot be retrieved without an authorization header.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.get('/api/v1/orgs/{}'.format(org_id), - headers=None, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + org_id = dictionary["id"] + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=None, content_type="application/json") + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_get_org_no_org_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that attempting to 
retrieve a non-existent org returns a 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - rv = client.get('/api/v1/orgs/{}'.format(999), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.get("/api/v1/orgs/{}".format(999), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NOT_FOUND def test_update_org_duplicate_branch_name(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be updated via PUT.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] # assert updating branch name works new_branch_name = FAKE.name() - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps({'branchName': new_branch_name}), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - rv = client.get(f'/api/v1/orgs/{org_id}', headers=headers, - content_type='application/json') + rv = client.put( + "/api/v1/orgs/{}".format(org_id), + data=json.dumps({"branchName": new_branch_name}), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + rv = client.get(f"/api/v1/orgs/{org_id}", headers=headers, content_type="application/json") dictionary = json.loads(rv.data) - assert dictionary.get('branchName') == new_branch_name - rv = client.post('/api/v1/orgs', 
data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + assert dictionary.get("branchName") == new_branch_name + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - new_org_id = dictionary['id'] + new_org_id = dictionary["id"] # assert updating branch name to same name doesnt work - rv = client.put('/api/v1/orgs/{}'.format(new_org_id), data=json.dumps({'branchName': new_branch_name}), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_409_CONFLICT + rv = client.put( + "/api/v1/orgs/{}".format(new_org_id), + data=json.dumps({"branchName": new_branch_name}), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CONFLICT def test_update_org(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be updated via PUT.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] # get created org and assert there is no contacts - rv = client.get(f'/api/v1/orgs/{org_id}/contacts', headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get(f"/api/v1/orgs/{org_id}/contacts", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) # assert no contacts - assert 
len(dictionary.get('contacts')) == 0 + assert len(dictionary.get("contacts")) == 0 # assert updating org name works alrite name = FAKE.name() - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps({'name': name}), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - rv = client.get(f'/api/v1/orgs/{org_id}', headers=headers, - content_type='application/json') + rv = client.put( + "/api/v1/orgs/{}".format(org_id), + data=json.dumps({"name": name}), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + rv = client.get(f"/api/v1/orgs/{org_id}", headers=headers, content_type="application/json") dictionary = json.loads(rv.data) - assert dictionary.get('name') == name + assert dictionary.get("name") == name # update mailing address org_with_mailing_address = TestOrgInfo.update_org_with_mailing_address() - rv = client.put(f'/api/v1/orgs/{org_id}', data=json.dumps(org_with_mailing_address), headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.put( + f"/api/v1/orgs/{org_id}", + data=json.dumps(org_with_mailing_address), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK - rv = client.get(f'/api/v1/orgs/{org_id}/contacts', headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get(f"/api/v1/orgs/{org_id}/contacts", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK - assert schema_utils.validate(rv.json, 'contacts')[0] + assert schema_utils.validate(rv.json, "contacts")[0] dictionary = json.loads(rv.data) - actual_mailing_address = org_with_mailing_address.get('mailingAddress') - assert actual_mailing_address.get('city') == dictionary['contacts'][0].get('city') - assert actual_mailing_address.get('postalCode') == dictionary['contacts'][0].get('postalCode') 
+ actual_mailing_address = org_with_mailing_address.get("mailingAddress") + assert actual_mailing_address.get("city") == dictionary["contacts"][0].get("city") + assert actual_mailing_address.get("postalCode") == dictionary["contacts"][0].get("postalCode") # Update other org details all_org_info = TestOrgInfo.update_org_with_all_info - rv = client.put(f'/api/v1/orgs/{org_id}', data=json.dumps(all_org_info), headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert rv.json.get('businessType') == all_org_info['businessType'] - assert rv.json.get('businessSize') == all_org_info['businessSize'] - assert rv.json.get('isBusinessAccount') == all_org_info['isBusinessAccount'] + rv = client.put( + f"/api/v1/orgs/{org_id}", data=json.dumps(all_org_info), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK + assert rv.json.get("businessType") == all_org_info["businessType"] + assert rv.json.get("businessSize") == all_org_info["businessSize"] + assert rv.json.get("isBusinessAccount") == all_org_info["isBusinessAccount"] def test_update_org_payment_method_for_basic_org(client, jwt, session, keycloak_mock): """Assert that an orgs payment details can be updated via PUT.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] new_payment_method = TestPaymentMethodInfo.get_payment_method_input(PaymentMethod.ONLINE_BANKING) - rv = client.put(f'/api/v1/orgs/{org_id}', 
data=json.dumps(new_payment_method), headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.put( + f"/api/v1/orgs/{org_id}", data=json.dumps(new_payment_method), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK - new_payment_method = {'paymentInfo': {'paymentMethod': PaymentMethod.BCOL.value}} - rv = client.put(f'/api/v1/orgs/{org_id}', data=json.dumps(new_payment_method), headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST, 'Assert BCOL cant be used for Basic Account' + new_payment_method = {"paymentInfo": {"paymentMethod": PaymentMethod.BCOL.value}} + rv = client.put( + f"/api/v1/orgs/{org_id}", data=json.dumps(new_payment_method), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST, "Assert BCOL cant be used for Basic Account" def test_upgrade_anon_org_fail(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be updated via PUT.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - assert rv.status_code == http_status.HTTP_201_CREATED - assert rv.json.get('orgType') == OrgType.BASIC.value - assert rv.json.get('name') == TestOrgInfo.org_anonymous.get('name') + assert rv.status_code == HTTPStatus.CREATED + assert rv.json.get("orgType") == OrgType.BASIC.value + assert rv.json.get("name") == 
TestOrgInfo.org_anonymous.get("name") - org_id = dictionary['id'] + org_id = dictionary["id"] # upgrade with same data premium_info = TestOrgInfo.bcol_linked() - premium_info['typeCode'] = OrgType.PREMIUM.value + premium_info["typeCode"] = OrgType.PREMIUM.value - rv = client.put('/api/v1/orgs/{}?action=UPGRADE'.format(org_id), - data=json.dumps(premium_info), headers=headers, - content_type='application/json') + rv = client.put( + "/api/v1/orgs/{}?action=UPGRADE".format(org_id), + data=json.dumps(premium_info), + headers=headers, + content_type="application/json", + ) # FRCR review change.Staff cant change org details - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_update_premium_org(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be updated via PUT.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.bcol_linked()), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert rv.json.get('orgType') == OrgType.PREMIUM.value + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.bcol_linked()), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + assert rv.json.get("orgType") == OrgType.PREMIUM.value dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] # Update with same data - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps(TestOrgInfo.update_bcol_linked()), - headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'org_response')[0] + 
rv = client.put( + "/api/v1/orgs/{}".format(org_id), + data=json.dumps(TestOrgInfo.update_bcol_linked()), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "org_response")[0] def test_update_org_type_to_staff_fails(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that org type doesn't get updated.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.bcol_linked()), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert rv.json.get('orgType') == OrgType.PREMIUM.value + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.bcol_linked()), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + assert rv.json.get("orgType") == OrgType.PREMIUM.value dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] data = TestOrgInfo.update_bcol_linked() - data['typeCode'] = OrgType.SBC_STAFF.value + data["typeCode"] = OrgType.SBC_STAFF.value - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps(data), - headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.put( + "/api/v1/orgs/{}".format(org_id), data=json.dumps(data), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK - rv = client.get('/api/v1/orgs/{}'.format(org_id), headers=headers, content_type='application/json') - assert rv.json.get('orgType') == OrgType.PREMIUM.value + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert 
rv.json.get("orgType") == OrgType.PREMIUM.value - data['typeCode'] = OrgType.STAFF.value - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps(data), - headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + data["typeCode"] = OrgType.STAFF.value + rv = client.put( + "/api/v1/orgs/{}".format(org_id), data=json.dumps(data), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK - rv = client.get('/api/v1/orgs/{}'.format(org_id), headers=headers, content_type='application/json') - assert rv.json.get('orgType') == OrgType.PREMIUM.value + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.json.get("orgType") == OrgType.PREMIUM.value def test_get_org_payment_settings(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be updated via PUT.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.bcol_linked()), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.bcol_linked()), headers=headers, content_type="application/json" + ) - assert schema_utils.validate(rv.json, 'org_response')[0] - assert rv.status_code == http_status.HTTP_201_CREATED - assert rv.json.get('orgType') == OrgType.PREMIUM.value + assert schema_utils.validate(rv.json, "org_response")[0] + assert rv.status_code == HTTPStatus.CREATED + assert rv.json.get("orgType") == OrgType.PREMIUM.value headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = 
client.get('/api/v1/orgs/{}/contacts'.format(org_id), headers=headers) - assert schema_utils.validate(rv.json, 'contacts')[0] + org_id = dictionary["id"] + rv = client.get("/api/v1/orgs/{}/contacts".format(org_id), headers=headers) + assert schema_utils.validate(rv.json, "contacts")[0] def test_update_org_returns_400(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can not be updated and return 400 error via PUT.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps(TestOrgInfo.invalid), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.put( + "/api/v1/orgs/{}".format(org_id), + data=json.dumps(TestOrgInfo.invalid), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_update_org_no_org_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that attempting to update a non-existent org returns a 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.put('/api/v1/orgs/{}'.format(999), data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.put( + "/api/v1/orgs/{}".format(999), + data=json.dumps(TestOrgInfo.org1), + 
headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_update_org_returns_exception(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that attempting to update a non-existent org returns an exception.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - with patch.object(OrgService, 'update_org', side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + with patch.object(OrgService, "update_org", side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): + rv = client.put( + "/api/v1/orgs/{}".format(org_id), + data=json.dumps(TestOrgInfo.org1), + headers=headers, + content_type="application/json", + ) assert rv.status_code == 400 - assert schema_utils.validate(rv.json, 'exception')[0] + assert schema_utils.validate(rv.json, "exception")[0] def test_add_contact(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a contact can be added to an org.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", 
headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - assert dictionary['email'] == TestContactInfo.contact1['email'] - assert schema_utils.validate(rv.json, 'contact_response')[0] + assert dictionary["email"] == TestContactInfo.contact1["email"] + assert schema_utils.validate(rv.json, "contact_response")[0] def test_add_contact_invalid_format_returns_400(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that adding an invalidly formatted contact returns a 400.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.invalid), content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + 
"/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.invalid), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_contact_valid_email_returns_201(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that adding an valid formatted contact with special characters in email returns a 201.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.email_valid), content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'contacts')[0] + rv = client.post( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.email_valid), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "contacts")[0] def test_add_contact_no_org_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that adding a contact to a non-existant org returns 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/orgs/{}/contacts'.format(99), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + 
rv = client.post( + "/api/v1/orgs/{}/contacts".format(99), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_add_contact_duplicate_returns_400(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that adding a duplicate contact to an org returns 400.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - rv = client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + client.post( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + rv = client.post( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_update_contact(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a contact can be updated on an org.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', 
headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED - rv = client.put('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact2), content_type='application/json') + rv = client.put( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact2), + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'contact_response')[0] + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "contact_response")[0] dictionary = json.loads(rv.data) - assert dictionary['email'] == TestContactInfo.contact2['email'] + assert dictionary["email"] == TestContactInfo.contact2["email"] -def test_update_contact_invalid_format_returns_400(client, jwt, session, - keycloak_mock): # pylint:disable=unused-argument +def test_update_contact_invalid_format_returns_400( + client, jwt, session, keycloak_mock +): # pylint:disable=unused-argument """Assert that updating with an invalidly formatted contact returns a 400.""" headers = factory_auth_header(jwt=jwt, 
claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - rv = client.put('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.invalid), content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + client.post( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + rv = client.put( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.invalid), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST -def test_update_contact_valid_email_format_returns_200(client, jwt, session, - keycloak_mock): # pylint:disable=unused-argument +def test_update_contact_valid_email_format_returns_200( + client, jwt, session, keycloak_mock +): # pylint:disable=unused-argument """Assert that updating with an validly formatted contact with special characters in email returns a 200.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", 
headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - rv = client.put('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.email_valid), content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'contact_response')[0] + client.post( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + rv = client.put( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.email_valid), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "contact_response")[0] def test_update_contact_no_org_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that updating a contact on a non-existant entity returns 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.put('/api/v1/orgs/{}/contacts'.format(99), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.put( + "/api/v1/orgs/{}/contacts".format(99), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_update_contact_missing_returns_404(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that updating a non-existant contact returns 404.""" headers = 
factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.put('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.put( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_delete_contact(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a contact can be deleted on an org.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + 
"/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED - rv = client.delete('/api/v1/orgs/{}/contacts'.format(org_id), - headers=headers, data=json.dumps(TestContactInfo.contact2), content_type='application/json') + rv = client.delete( + "/api/v1/orgs/{}/contacts".format(org_id), + headers=headers, + data=json.dumps(TestContactInfo.contact2), + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'contact_response')[0] + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "contact_response")[0] - rv = client.get('/api/v1/orgs/{}/contacts'.format(org_id), headers=headers) + rv = client.get("/api/v1/orgs/{}/contacts".format(org_id), headers=headers) dictionary = None dictionary = json.loads(rv.data) - assert len(dictionary['contacts']) == 0 + assert len(dictionary["contacts"]) == 0 def test_delete_contact_no_org_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that deleting a contact on a non-existant entity returns 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.delete('/api/v1/orgs/{}/contacts'.format(99), - headers=headers, data=json.dumps(TestContactInfo.contact1), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.delete( + "/api/v1/orgs/{}/contacts".format(99), + headers=headers, + data=json.dumps(TestContactInfo.contact1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_delete_contact_returns_exception(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that attempting to delete an org returns an exception.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = 
client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - with patch.object(OrgService, 'delete_contact', side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): - rv = client.delete('/api/v1/orgs/{}/contacts'.format(org_id), headers=headers, content_type='application/json') + with patch.object(OrgService, "delete_contact", side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): + rv = client.delete("/api/v1/orgs/{}/contacts".format(org_id), headers=headers, content_type="application/json") assert rv.status_code == 400 - assert schema_utils.validate(rv.json, 'exception')[0] + assert schema_utils.validate(rv.json, "exception")[0] def test_get_members(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a list of members for an org can be retrieved.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.get('/api/v1/orgs/{}/members'.format(org_id), - headers=headers, content_type='application/json') + rv = 
client.get("/api/v1/orgs/{}/members".format(org_id), headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'members')[0] + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "members")[0] dictionary = json.loads(rv.data) - assert dictionary['members'] - assert len(dictionary['members']) == 1 - assert dictionary['members'][0]['membershipTypeCode'] == 'ADMIN' + assert dictionary["members"] + assert len(dictionary["members"]) == 1 + assert dictionary["members"][0]["membershipTypeCode"] == "ADMIN" def test_delete_org(client, jwt, session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument @@ -1065,17 +1294,17 @@ def test_delete_org(client, jwt, session, keycloak_mock, monkeypatch): # pylint org_payload = TestOrgInfo.org_with_all_info headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(org_payload), headers=headers, content_type='application/json') - org_id = rv.json.get('id') - rv = client.delete('/api/v1/orgs/{}'.format(org_id), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post("/api/v1/orgs", data=json.dumps(org_payload), headers=headers, content_type="application/json") + org_id = rv.json.get("id") + rv = client.delete("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NO_CONTENT # 2 - Verify orgs with affiliations can be deleted and assert passcode is reset. # 3 - Verify orgs with members and other admins can be deleted. 
- org_payload['name'] = FAKE.name() - rv = client.post('/api/v1/orgs', data=json.dumps(org_payload), headers=headers, content_type='application/json') - org_id = rv.json.get('id') + org_payload["name"] = FAKE.name() + rv = client.post("/api/v1/orgs", data=json.dumps(org_payload), headers=headers, content_type="application/json") + org_id = rv.json.get("id") entity = factory_entity_model() entity_id = entity.id passcode = entity.pass_code @@ -1083,8 +1312,8 @@ def test_delete_org(client, jwt, session, keycloak_mock, monkeypatch): # pylint member_user = factory_user_model() factory_membership_model(member_user.id, org_id=org_id) - rv = client.delete('/api/v1/orgs/{}'.format(org_id), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NO_CONTENT assert EntityModel.find_by_id(entity_id).pass_code != passcode assert AffiliationModel.find_by_id(affiliation.id) is None for membership in MembershipModel.find_members_by_org_id(org_id): @@ -1092,21 +1321,25 @@ def test_delete_org(client, jwt, session, keycloak_mock, monkeypatch): # pylint # 3 - Verify bceid orgs can be deleted and assert the affidavit is INACTIVE. 
headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - user = client.post('/api/v1/users', headers=headers, content_type='application/json') + user = client.post("/api/v1/users", headers=headers, content_type="application/json") affidavit: AffidavitModel = AffidavitModel( document_id=str(uuid.uuid4()), - issuer='TEST', + issuer="TEST", status_code=AffidavitStatus.APPROVED.value, - user_id=user.json.get('id') + user_id=user.json.get("id"), ).save() affidavit_id = affidavit.id - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.bceid_org_with_all_info), headers=headers, - content_type='application/json') - org_id = org_response.json.get('id') - rv = client.delete('/api/v1/orgs/{}'.format(org_id), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.bceid_org_with_all_info), + headers=headers, + content_type="application/json", + ) + org_id = org_response.json.get("id") + rv = client.delete("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NO_CONTENT assert AffidavitModel.find_by_id(affidavit_id).status_code == AffidavitStatus.INACTIVE.value @@ -1115,464 +1348,596 @@ def test_delete_org_failures(client, jwt, session, keycloak_mock, monkeypatch): patch_pay_account_delete_error(monkeypatch) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = 
dictionary['id'] - rv = client.delete('/api/v1/orgs/{}'.format(org_id), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST - assert rv.json.get('code') == 'OUTSTANDING_CREDIT' + org_id = dictionary["id"] + rv = client.delete("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.BAD_REQUEST + assert rv.json.get("code") == "OUTSTANDING_CREDIT" def test_get_invitations(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a list of invitations for an org can be retrieved.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id, 'abc123@email.com')), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id, "abc123@email.com")), + headers=headers, + content_type="application/json", + ) - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id, 'xyz456@email.com')), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id, "xyz456@email.com")), + headers=headers, + content_type="application/json", + ) - rv = client.get('/api/v1/orgs/{}/invitations'.format(org_id), - headers=headers, content_type='application/json') + 
rv = client.get("/api/v1/orgs/{}/invitations".format(org_id), headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'invitations')[0] + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "invitations")[0] dictionary = json.loads(rv.data) - assert dictionary['invitations'] - assert len(dictionary['invitations']) == 2 - assert dictionary['invitations'][0]['recipientEmail'] == 'abc123@email.com' - assert dictionary['invitations'][1]['recipientEmail'] == 'xyz456@email.com' + assert dictionary["invitations"] + assert len(dictionary["invitations"]) == 2 + assert dictionary["invitations"][0]["recipientEmail"] == "abc123@email.com" + assert dictionary["invitations"][1]["recipientEmail"] == "xyz456@email.com" def test_update_anon_org(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be updated via PUT.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - dictionary = json.loads(rv.data) - assert dictionary['accessType'] == 'ANONYMOUS' - org_id = dictionary['id'] - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps({'name': 'helo2'}), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + dictionary = json.loads(rv.data) + assert dictionary["accessType"] == "ANONYMOUS" + org_id = dictionary["id"] + rv = client.put( + 
"/api/v1/orgs/{}".format(org_id), + data=json.dumps({"name": "helo2"}), + headers=headers, + content_type="application/json", + ) # FRCR review changes..staff cant change org details - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + assert rv.status_code == HTTPStatus.UNAUTHORIZED public_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.put('/api/v1/orgs/{}'.format(org_id), data=json.dumps({'name': 'helo2'}), - headers=public_headers, content_type='application/json') + rv = client.put( + "/api/v1/orgs/{}".format(org_id), + data=json.dumps({"name": "helo2"}), + headers=public_headers, + content_type="application/json", + ) # not an admin/owner..so unauthorized will be thrown when trying to access it - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_update_member(client, jwt, session, auth_mock, keycloak_mock): # pylint:disable=unused-argument """Assert that a member of an org can have their role updated.""" # Set up: create/login user, create org headers_invitee = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers_invitee, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers_invitee, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers_invitee, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers_invitee, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] # Invite a user to the org - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation(org_id, 'abc123@email.com')), - headers=headers_invitee, content_type='application/json') + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id, 
"abc123@email.com")), + headers=headers_invitee, + content_type="application/json", + ) dictionary = json.loads(rv.data) - invitation_id = dictionary['id'] + invitation_id = dictionary["id"] invitation_id_token = InvitationService.generate_confirmation_token(invitation_id) # Create/login as invited user headers_invited = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_role_2) - rv = client.post('/api/v1/users', headers=headers_invited, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers_invited, content_type="application/json") # Accept invite as invited user - rv = client.put('/api/v1/invitations/tokens/{}'.format(invitation_id_token), - headers=headers_invited, content_type='application/json') + rv = client.put( + "/api/v1/invitations/tokens/{}".format(invitation_id_token), + headers=headers_invited, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert dictionary['status'] == 'ACCEPTED' + assert dictionary["status"] == "ACCEPTED" # Get pending members for the org as invitee and assert length of 1 - rv = client.get('/api/v1/orgs/{}/members?status=PENDING_APPROVAL'.format(org_id), headers=headers_invitee) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}/members?status=PENDING_APPROVAL".format(org_id), headers=headers_invitee) + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert dictionary['members'] - assert len(dictionary['members']) == 1 + assert dictionary["members"] + assert len(dictionary["members"]) == 1 # Find the pending member - new_member = dictionary['members'][0] - assert new_member['membershipTypeCode'] == 'USER' - member_id = new_member['id'] + new_member = dictionary["members"][0] + assert new_member["membershipTypeCode"] == "USER" + member_id = new_member["id"] # Update the new member - rv = 
client.patch('/api/v1/orgs/{}/members/{}'.format(org_id, member_id), headers=headers_invitee, - data=json.dumps({'role': 'COORDINATOR'}), content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'membership')[0] + rv = client.patch( + "/api/v1/orgs/{}/members/{}".format(org_id, member_id), + headers=headers_invitee, + data=json.dumps({"role": "COORDINATOR"}), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "membership")[0] dictionary = json.loads(rv.data) - assert dictionary['membershipTypeCode'] == 'COORDINATOR' + assert dictionary["membershipTypeCode"] == "COORDINATOR" def test_add_affiliation(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a contact can be added to an org.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), headers=headers, - data=json.dumps(TestAffliationInfo.affiliation3), content_type='application/json') - assert 
rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'affiliation_response')[0] + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + headers=headers, + data=json.dumps(TestAffliationInfo.affiliation3), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "affiliation_response")[0] dictionary = json.loads(rv.data) - assert dictionary['organization']['id'] == org_id + assert dictionary["organization"]["id"] == org_id -def test_add_affiliation_invalid_format_returns_400(client, jwt, session, - keycloak_mock): # pylint:disable=unused-argument +def test_add_affiliation_invalid_format_returns_400( + client, jwt, session, keycloak_mock +): # pylint:disable=unused-argument """Assert that adding an invalidly formatted affiliations returns a 400.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), - headers=headers, data=json.dumps(TestAffliationInfo.invalid), content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + headers=headers, + data=json.dumps(TestAffliationInfo.invalid), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_affiliation_no_org_returns_404(client, jwt, session): # 
pylint:disable=unused-argument """Assert that adding a contact to a non-existant org returns 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/orgs/{}/affiliations'.format(99), headers=headers, - data=json.dumps(TestAffliationInfo.affiliation1), content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(99), + headers=headers, + data=json.dumps(TestAffliationInfo.affiliation1), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_add_affiliation_returns_exception(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that attempting to delete an affiliation returns an exception.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity1), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/entities", data=json.dumps(TestEntityInfo.entity1), headers=headers, content_type="application/json" + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - dictionary = json.loads(rv.data) - org_id = dictionary['id'] - - with patch.object(AffiliationService, 'create_affiliation', - side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None)): - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), - data=json.dumps(TestAffliationInfo.affiliation1), - headers=headers, - content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, 
content_type="application/json" + ) + dictionary = json.loads(rv.data) + org_id = dictionary["id"] + + with patch.object( + AffiliationService, "create_affiliation", side_effect=BusinessException(Error.DATA_ALREADY_EXISTS, None) + ): + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + data=json.dumps(TestAffliationInfo.affiliation1), + headers=headers, + content_type="application/json", + ) assert rv.status_code == 400 - assert schema_utils.validate(rv.json, 'exception')[0] + assert schema_utils.validate(rv.json, "exception")[0] def test_add_new_business_affiliation_staff(client, jwt, session, keycloak_mock, nr_mock): """Assert that an affiliation can be added by staff.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] convert_org_to_staff_org(org_id, OrgType.SBC_STAFF.value) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_business) - rv = client.post('/api/v1/orgs/{}/affiliations?newBusiness=true'.format(org_id), headers=headers, - data=json.dumps(TestAffliationInfo.new_business_affiliation), content_type='application/json') - assert 
rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'affiliation_response')[0] - certified_by_name = TestAffliationInfo.new_business_affiliation['certifiedByName'] + rv = client.post( + "/api/v1/orgs/{}/affiliations?newBusiness=true".format(org_id), + headers=headers, + data=json.dumps(TestAffliationInfo.new_business_affiliation), + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "affiliation_response")[0] + certified_by_name = TestAffliationInfo.new_business_affiliation["certifiedByName"] dictionary = json.loads(rv.data) - assert dictionary['organization']['id'] == org_id - assert dictionary['certifiedByName'] == certified_by_name + assert dictionary["organization"]["id"] == org_id + assert dictionary["certifiedByName"] == certified_by_name - rv = client.get('/api/v1/orgs/{}/affiliations'.format(org_id), headers=headers) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}/affiliations".format(org_id), headers=headers) + assert rv.status_code == HTTPStatus.OK - assert schema_utils.validate(rv.json, 'affiliations_response')[0] + assert schema_utils.validate(rv.json, "affiliations_response")[0] affiliations = json.loads(rv.data) - assert affiliations['entities'][0]['affiliations'][0]['certifiedByName'] == certified_by_name + assert affiliations["entities"][0]["affiliations"][0]["certifiedByName"] == certified_by_name def test_get_affiliation(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a list of affiliation for an org can be retrieved.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.name_request), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.name_request), + headers=headers, + content_type="application/json", + 
) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), - data=json.dumps(TestAffliationInfo.nr_affiliation), - headers=headers, - content_type='application/json') + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + data=json.dumps(TestAffliationInfo.nr_affiliation), + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_201_CREATED + assert rv.status_code == HTTPStatus.CREATED - business_identifier = TestAffliationInfo.nr_affiliation['businessIdentifier'] + business_identifier = TestAffliationInfo.nr_affiliation["businessIdentifier"] - rv = client.get(f'/api/v1/orgs/{org_id}/affiliations/{business_identifier}', headers=headers) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get(f"/api/v1/orgs/{org_id}/affiliations/{business_identifier}", headers=headers) + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert dictionary['business']['businessIdentifier'] == business_identifier + assert dictionary["business"]["businessIdentifier"] == business_identifier def test_get_affiliation_without_authrized(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a list of affiliation for an org can be retrieved.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv = client.post('/api/v1/entities', 
data=json.dumps(TestEntityInfo.name_request), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.name_request), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), - data=json.dumps(TestAffliationInfo.nr_affiliation), - headers=headers, - content_type='application/json') + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + data=json.dumps(TestAffliationInfo.nr_affiliation), + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_201_CREATED + assert rv.status_code == HTTPStatus.CREATED - business_identifier = TestAffliationInfo.nr_affiliation['businessIdentifier'] + business_identifier = TestAffliationInfo.nr_affiliation["businessIdentifier"] headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.anonymous_bcros_role) - rv = client.get(f'/api/v1/orgs/{org_id}/affiliations/{business_identifier}', headers=headers) - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.get(f"/api/v1/orgs/{org_id}/affiliations/{business_identifier}", headers=headers) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_get_affiliations(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that a list of affiliation for an org can be 
retrieved.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock), - headers=headers, content_type='application/json') - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock2), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock), + headers=headers, + content_type="application/json", + ) + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock2), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), - data=json.dumps(TestAffliationInfo.affiliation3), - headers=headers, - content_type='application/json') - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), - data=json.dumps(TestAffliationInfo.affiliation4), - headers=headers, - content_type='application/json') + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + data=json.dumps(TestAffliationInfo.affiliation3), + headers=headers, + content_type="application/json", + ) + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + data=json.dumps(TestAffliationInfo.affiliation4), + headers=headers, + content_type="application/json", + ) - rv = 
client.get('/api/v1/orgs/{}/affiliations'.format(org_id), headers=headers) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}/affiliations".format(org_id), headers=headers) + assert rv.status_code == HTTPStatus.OK - assert schema_utils.validate(rv.json, 'affiliations_response')[0] + assert schema_utils.validate(rv.json, "affiliations_response")[0] affiliations = json.loads(rv.data) # Result is sorted desc order of created date - assert affiliations['entities'][1]['businessIdentifier'] == TestEntityInfo.entity_lear_mock['businessIdentifier'] - assert affiliations['entities'][0]['businessIdentifier'] == TestEntityInfo.entity_lear_mock2['businessIdentifier'] + assert affiliations["entities"][1]["businessIdentifier"] == TestEntityInfo.entity_lear_mock["businessIdentifier"] + assert affiliations["entities"][0]["businessIdentifier"] == TestEntityInfo.entity_lear_mock2["businessIdentifier"] def test_search_orgs_for_affiliation(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that search org with affiliation works.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock), - headers=headers, content_type='application/json') + client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + 
org_id = dictionary["id"] - client.post('/api/v1/orgs/{}/affiliations'.format(org_id), headers=headers, - data=json.dumps(TestAffliationInfo.affiliation3), content_type='application/json') + client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + headers=headers, + data=json.dumps(TestAffliationInfo.affiliation3), + content_type="application/json", + ) # Create a system token headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.get('/api/v1/orgs?affiliation={}'.format(TestAffliationInfo.affiliation3.get('businessIdentifier')), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'orgs_response')[0] + rv = client.get( + "/api/v1/orgs?affiliation={}".format(TestAffliationInfo.affiliation3.get("businessIdentifier")), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "orgs_response")[0] orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('name') == TestOrgInfo.org1.get('name') + assert orgs.get("orgs")[0].get("name") == TestOrgInfo.org1.get("name") -def test_unauthorized_search_orgs_for_affiliation(client, jwt, session, - keycloak_mock): # pylint:disable=unused-argument +def test_unauthorized_search_orgs_for_affiliation( + client, jwt, session, keycloak_mock +): # pylint:disable=unused-argument """Assert that search org with affiliation works.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock), - headers=headers, content_type='application/json') + client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, 
content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - client.post('/api/v1/orgs/{}/affiliations'.format(org_id), headers=headers, - data=json.dumps(TestAffliationInfo.affiliation3), content_type='application/json') + client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + headers=headers, + data=json.dumps(TestAffliationInfo.affiliation3), + content_type="application/json", + ) # Create a system token headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.edit_user_role) - rv = client.get('/api/v1/orgs?affiliation={}'.format(TestAffliationInfo.affiliation3.get('businessIdentifier')), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.get( + "/api/v1/orgs?affiliation={}".format(TestAffliationInfo.affiliation3.get("businessIdentifier")), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_add_bcol_linked_org(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.bcol_linked()), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'org_response')[0] - assert rv.json.get('orgType') == OrgType.PREMIUM.value - assert rv.json.get('name') == 
TestOrgInfo.bcol_linked()['name'] + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.bcol_linked()), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "org_response")[0] + assert rv.json.get("orgType") == OrgType.PREMIUM.value + assert rv.json.get("name") == TestOrgInfo.bcol_linked()["name"] # assert user have access to VS, as this bcol linked user have VS access - org_id = rv.json.get('id') - rv = client.get('/api/v1/orgs/{}/products?includeInternal=false'.format(org_id), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + org_id = rv.json.get("id") + rv = client.get( + "/api/v1/orgs/{}/products?includeInternal=false".format(org_id), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK has_vs_access: bool = False for product in json.loads(rv.data): - if product.get('code') == ProductCode.VS.value: - has_vs_access = product.get('subscriptionStatus') == ProductSubscriptionStatus.ACTIVE.value - assert has_vs_access, 'test vs access' + if product.get("code") == ProductCode.VS.value: + has_vs_access = product.get("subscriptionStatus") == ProductSubscriptionStatus.ACTIVE.value + assert has_vs_access, "test vs access" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - org_search_response = client.get(f"/api/v1/orgs?name={TestOrgInfo.bcol_linked()['name']}", - headers=headers, content_type='application/json') + org_search_response = client.get( + f"/api/v1/orgs?name={TestOrgInfo.bcol_linked()['name']}", headers=headers, content_type="application/json" + ) - assert len(org_search_response.json.get('orgs')) == 1 - assert org_search_response.status_code == http_status.HTTP_200_OK + assert len(org_search_response.json.get("orgs")) == 1 + assert 
org_search_response.status_code == HTTPStatus.OK orgs = json.loads(org_search_response.data) - assert orgs.get('orgs')[0].get('name') == TestOrgInfo.bcol_linked()['name'] - account_id = orgs.get('orgs')[0].get('bcolAccountId') + assert orgs.get("orgs")[0].get("name") == TestOrgInfo.bcol_linked()["name"] + account_id = orgs.get("orgs")[0].get("bcolAccountId") # do a search with bcol account id and name org_search_response = client.get( f"/api/v1/orgs?name={TestOrgInfo.bcol_linked()['name']}&bcolAccountId={account_id}", - headers=headers, content_type='application/json') + headers=headers, + content_type="application/json", + ) orgs = json.loads(org_search_response.data) - assert orgs.get('orgs')[0].get('name') == TestOrgInfo.bcol_linked()['name'] - new_account_id = orgs.get('orgs')[0].get('bcolAccountId') + assert orgs.get("orgs")[0].get("name") == TestOrgInfo.bcol_linked()["name"] + new_account_id = orgs.get("orgs")[0].get("bcolAccountId") assert account_id == new_account_id -def test_add_bcol_linked_org_failure_mailing_address(client, jwt, session, - keycloak_mock): # pylint:disable=unused-argument +def test_add_bcol_linked_org_failure_mailing_address( + client, jwt, session, keycloak_mock +): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.bcol_linked_incomplete_mailing_addrees()), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.bcol_linked_incomplete_mailing_addrees()), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def 
test_add_bcol_linked_org_different_name(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.bcol_linked_different_name()), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - - -@pytest.mark.parametrize('test_name, nr_status, payment_status, error', [ - ('NR_Approved', NRStatus.APPROVED.value, 'COMPLETED', None), - ('NR_Draft', NRStatus.DRAFT.value, 'COMPLETED', None), - ('NR_Draft', NRStatus.DRAFT.value, 'REJECTED', Error.NR_NOT_PAID), - ('NR_Consumed', NRStatus.CONSUMED.value, 'COMPLETED', Error.NR_INVALID_STATUS) -]) -def test_new_business_affiliation(client, jwt, session, keycloak_mock, mocker, test_name, nr_status, payment_status, - error): + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.bcol_linked_different_name()), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED + + +@pytest.mark.parametrize( + "test_name, nr_status, payment_status, error", + [ + ("NR_Approved", NRStatus.APPROVED.value, "COMPLETED", None), + ("NR_Draft", NRStatus.DRAFT.value, "COMPLETED", None), + ("NR_Draft", NRStatus.DRAFT.value, "REJECTED", Error.NR_NOT_PAID), + ("NR_Consumed", NRStatus.CONSUMED.value, "COMPLETED", Error.NR_INVALID_STATUS), + ], +) +def test_new_business_affiliation( + client, jwt, session, keycloak_mock, mocker, test_name, nr_status, payment_status, error +): """Assert that an NR can be affiliated to an org.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = 
client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] nr_response = { - 'applicants': { - 'emailAddress': '', - 'phoneNumber': TestAffliationInfo.nr_affiliation['phone'], + "applicants": { + "emailAddress": "", + "phoneNumber": TestAffliationInfo.nr_affiliation["phone"], }, - 'names': [{ - 'name': 'TEST INC.', - 'state': nr_status - }], - 'state': nr_status, - 'requestTypeCd': 'BC' + "names": [{"name": "TEST INC.", "state": nr_status}], + "state": nr_status, + "requestTypeCd": "BC", } - payment_response = { - 'invoices': [{ - 'statusCode': payment_status - }] - } + payment_response = {"invoices": [{"statusCode": payment_status}]} - mocker.patch('auth_api.services.affiliation.Affiliation._get_nr_details', return_value=nr_response) + mocker.patch("auth_api.services.affiliation.Affiliation._get_nr_details", return_value=nr_response) - mocker.patch('auth_api.services.affiliation.Affiliation.get_nr_payment_details', - return_value=payment_response) + mocker.patch("auth_api.services.affiliation.Affiliation.get_nr_payment_details", return_value=payment_response) - rv = client.post('/api/v1/orgs/{}/affiliations?newBusiness=true'.format(org_id), headers=headers, - data=json.dumps(TestAffliationInfo.nr_affiliation), content_type='application/json') + rv = client.post( + "/api/v1/orgs/{}/affiliations?newBusiness=true".format(org_id), + headers=headers, + data=json.dumps(TestAffliationInfo.nr_affiliation), + content_type="application/json", + ) if error is None: - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'affiliation_response')[0] + assert rv.status_code == 
HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "affiliation_response")[0] dictionary = json.loads(rv.data) - assert dictionary['organization']['id'] == org_id - assert dictionary['business']['businessIdentifier'] == TestAffliationInfo.nr_affiliation['businessIdentifier'] + assert dictionary["organization"]["id"] == org_id + assert dictionary["business"]["businessIdentifier"] == TestAffliationInfo.nr_affiliation["businessIdentifier"] else: assert rv.status_code == error.status_code - assert rv.json['message'] == error.message + assert rv.json["message"] == error.message def test_get_org_admin_affidavits(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument @@ -1583,26 +1948,36 @@ def test_get_org_admin_affidavits(client, jwt, session, keycloak_mock): # pylin # 4. Create Org # 5. Get the affidavit as a bcol admin headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=headers, content_type='application/json') - document_signature = client.get('/api/v1/documents/test.jpeg/signatures', headers=headers, - content_type='application/json') - doc_key = document_signature.json.get('key') - affidavit_response = client.post('/api/v1/users/{}/affidavits'.format(TestJwtClaims.public_user_role.get('sub')), - headers=headers, - data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), - content_type='application/json') - - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), headers=headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + client.post("/api/v1/users", headers=headers, content_type="application/json") + document_signature = client.get( + "/api/v1/documents/test.jpeg/signatures", headers=headers, content_type="application/json" + ) + doc_key = document_signature.json.get("key") + affidavit_response = client.post( + 
"/api/v1/users/{}/affidavits".format(TestJwtClaims.public_user_role.get("sub")), + headers=headers, + data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), + content_type="application/json", + ) + + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=headers, + content_type="application/json", + ) + assert org_response.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - staff_response = client.get('/api/v1/orgs/{}/admins/affidavits'.format(org_response.json.get('id')), - headers=headers, content_type='application/json') + staff_response = client.get( + "/api/v1/orgs/{}/admins/affidavits".format(org_response.json.get("id")), + headers=headers, + content_type="application/json", + ) - assert schema_utils.validate(staff_response.json, 'affidavit_response')[0] - assert staff_response.json.get('documentId') == doc_key - assert staff_response.json.get('id') == affidavit_response.json.get('id') + assert schema_utils.validate(staff_response.json, "affidavit_response")[0] + assert staff_response.json.get("documentId") == doc_key + assert staff_response.json.get("id") == affidavit_response.json.get("id") def test_approve_org_with_pending_affidavits(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument @@ -1613,61 +1988,76 @@ def test_approve_org_with_pending_affidavits(client, jwt, session, keycloak_mock # 4. Create Org # 5. 
Get the affidavit as a bcol admin headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") # POST a contact to test user - client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact1), - headers=headers, content_type='application/json') - - document_signature = client.get('/api/v1/documents/test.jpeg/signatures', headers=headers, - content_type='application/json') - doc_key = document_signature.json.get('key') - affidavit_response = client.post('/api/v1/users/{}/affidavits'.format(TestJwtClaims.public_user_role.get('sub')), - headers=headers, - data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), - content_type='application/json') + client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact1), + headers=headers, + content_type="application/json", + ) - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), headers=headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + document_signature = client.get( + "/api/v1/documents/test.jpeg/signatures", headers=headers, content_type="application/json" + ) + doc_key = document_signature.json.get("key") + affidavit_response = client.post( + "/api/v1/users/{}/affidavits".format(TestJwtClaims.public_user_role.get("sub")), + headers=headers, + data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), + content_type="application/json", + ) - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=headers, + content_type="application/json", ) + assert org_response.status_code == HTTPStatus.CREATED + + task_search = 
TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) tasks = TaskService.fetch_tasks(task_search) - fetched_task = tasks['tasks'][0] + fetched_task = tasks["tasks"][0] update_task_payload = { - 'status': TaskStatus.COMPLETED.value, - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value + "status": TaskStatus.COMPLETED.value, + "relationshipStatus": TaskRelationshipStatus.ACTIVE.value, } headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.put('/api/v1/tasks/{}'.format(fetched_task['id']), - data=json.dumps(update_task_payload), - headers=headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(fetched_task["id"]), + data=json.dumps(update_task_payload), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - rv = client.get('/api/v1/orgs/{}'.format(org_response.json.get('id')), - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/orgs/{}".format(org_response.json.get("id")), headers=headers, content_type="application/json" + ) - assert rv.json.get('orgStatus') == OrgStatus.ACTIVE.value + assert rv.json.get("orgStatus") == OrgStatus.ACTIVE.value headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - staff_response = client.get('/api/v1/orgs/{}/admins/affidavits'.format(org_response.json.get('id')), - headers=headers, content_type='application/json') + staff_response = client.get( + "/api/v1/orgs/{}/admins/affidavits".format(org_response.json.get("id")), + headers=headers, + content_type="application/json", + ) - assert schema_utils.validate(staff_response.json, 'affidavit_response')[0] - assert staff_response.json.get('documentId') == doc_key - assert staff_response.json.get('id') == affidavit_response.json.get('id') - assert staff_response.json.get('status') == AffidavitStatus.APPROVED.value + assert schema_utils.validate(staff_response.json, 
"affidavit_response")[0] + assert staff_response.json.get("documentId") == doc_key + assert staff_response.json.get("id") == affidavit_response.json.get("id") + assert staff_response.json.get("status") == AffidavitStatus.APPROVED.value -@pytest.mark.skip(reason='Fix this later') -def test_approve_org_with_pending_affidavits_duplicate_affidavit(client, jwt, session, - keycloak_mock): # pylint:disable=unused-argument +@pytest.mark.skip(reason="Fix this later") +def test_approve_org_with_pending_affidavits_duplicate_affidavit( + client, jwt, session, keycloak_mock +): # pylint:disable=unused-argument """Assert that staff admin can approve pending affidavits.""" # 1. Create User # 2. Get document signed link @@ -1675,72 +2065,88 @@ def test_approve_org_with_pending_affidavits_duplicate_affidavit(client, jwt, se # 4. Create Org # 5. Get the affidavit as a bcol admin headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") # POST a contact to test user - client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact1), - headers=headers, content_type='application/json') - - document_signature = client.get('/api/v1/documents/test.jpeg/signatures', headers=headers, - content_type='application/json') - doc_key = document_signature.json.get('key') - new_contact_email = 'test@test.com' - issuer = 'New_Issuer' - - client.post('/api/v1/users/{}/affidavits'.format(TestJwtClaims.public_user_role.get('sub')), - headers=headers, - data=json.dumps(TestAffidavit.get_test_affidavit_with_contact( - doc_id=doc_key)), - content_type='application/json') - document_signature2 = client.get('/api/v1/documents/test2.jpeg/signatures', headers=headers, - content_type='application/json') - doc_key2 = document_signature2.json.get('key') - affidavit_response_second_time = client.post( - 
'/api/v1/users/{}/affidavits'.format(TestJwtClaims.public_user_role.get('sub')), + client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact1), headers=headers, - data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key2, email=new_contact_email, - issuer=issuer)), - content_type='application/json') + content_type="application/json", + ) - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), headers=headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + document_signature = client.get( + "/api/v1/documents/test.jpeg/signatures", headers=headers, content_type="application/json" + ) + doc_key = document_signature.json.get("key") + new_contact_email = "test@test.com" + issuer = "New_Issuer" + + client.post( + "/api/v1/users/{}/affidavits".format(TestJwtClaims.public_user_role.get("sub")), + headers=headers, + data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), + content_type="application/json", + ) + document_signature2 = client.get( + "/api/v1/documents/test2.jpeg/signatures", headers=headers, content_type="application/json" + ) + doc_key2 = document_signature2.json.get("key") + affidavit_response_second_time = client.post( + "/api/v1/users/{}/affidavits".format(TestJwtClaims.public_user_role.get("sub")), + headers=headers, + data=json.dumps( + TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key2, email=new_contact_email, issuer=issuer) + ), + content_type="application/json", + ) - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=headers, + content_type="application/json", ) + assert org_response.status_code == HTTPStatus.CREATED + + task_search = TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) tasks = 
TaskService.fetch_tasks(task_search) - fetched_task = tasks['tasks'][0] + fetched_task = tasks["tasks"][0] update_task_payload = { - 'status': TaskStatus.COMPLETED.value, - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value + "status": TaskStatus.COMPLETED.value, + "relationshipStatus": TaskRelationshipStatus.ACTIVE.value, } headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.put('/api/v1/tasks/{}'.format(fetched_task['id']), - data=json.dumps(update_task_payload), - headers=headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(fetched_task["id"]), + data=json.dumps(update_task_payload), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - rv = client.get('/api/v1/orgs/{}'.format(org_response.json.get('id')), - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/orgs/{}".format(org_response.json.get("id")), headers=headers, content_type="application/json" + ) - assert rv.json.get('orgStatus') == OrgStatus.ACTIVE.value + assert rv.json.get("orgStatus") == OrgStatus.ACTIVE.value headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - affidavit_staff_response = client.get('/api/v1/orgs/{}/admins/affidavits'.format(org_response.json.get('id')), - headers=headers, content_type='application/json') + affidavit_staff_response = client.get( + "/api/v1/orgs/{}/admins/affidavits".format(org_response.json.get("id")), + headers=headers, + content_type="application/json", + ) - assert schema_utils.validate(affidavit_staff_response.json, 'affidavit_response')[0] - assert affidavit_staff_response.json.get('documentId') == doc_key2 - assert affidavit_staff_response.json.get('id') == affidavit_response_second_time.json.get('id') - assert affidavit_staff_response.json.get('status') == AffidavitStatus.APPROVED.value - assert 
affidavit_staff_response.json.get('issuer') == issuer - assert affidavit_staff_response.json.get('contacts')[0].get('email') == new_contact_email + assert schema_utils.validate(affidavit_staff_response.json, "affidavit_response")[0] + assert affidavit_staff_response.json.get("documentId") == doc_key2 + assert affidavit_staff_response.json.get("id") == affidavit_response_second_time.json.get("id") + assert affidavit_staff_response.json.get("status") == AffidavitStatus.APPROVED.value + assert affidavit_staff_response.json.get("issuer") == issuer + assert affidavit_staff_response.json.get("contacts")[0].get("email") == new_contact_email def test_suspend_unsuspend(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument @@ -1751,68 +2157,90 @@ def test_suspend_unsuspend(client, jwt, session, keycloak_mock): # pylint:disab # 4. Create Org # 5. Get the affidavit as a bcol admin public_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=public_headers, content_type='application/json') + client.post("/api/v1/users", headers=public_headers, content_type="application/json") - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), - headers=public_headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=public_headers, + content_type="application/json", + ) + assert org_response.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'statusCode': OrgStatus.SUSPENDED.value, - 'suspensionReasonCode': SuspensionReasonCode.OWNER_CHANGE.name}), - headers=headers, content_type='application/json') - assert 
org_patch_response.json.get('orgStatus') == OrgStatus.SUSPENDED.value - assert org_patch_response.json.get('suspensionReasonCode') == SuspensionReasonCode.OWNER_CHANGE.name + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps( + {"statusCode": OrgStatus.SUSPENDED.value, "suspensionReasonCode": SuspensionReasonCode.OWNER_CHANGE.name} + ), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.json.get("orgStatus") == OrgStatus.SUSPENDED.value + assert org_patch_response.json.get("suspensionReasonCode") == SuspensionReasonCode.OWNER_CHANGE.name - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'statusCode': OrgStatus.ACTIVE.value}), - headers=headers, content_type='application/json') - assert org_patch_response.json.get('orgStatus') == OrgStatus.ACTIVE.value + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps({"statusCode": OrgStatus.ACTIVE.value}), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.json.get("orgStatus") == OrgStatus.ACTIVE.value # public user suspending/unsuspend shud give back error - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'statusCode': OrgStatus.SUSPENDED.value}), - headers=public_headers, content_type='application/json') - assert org_patch_response.status_code == http_status.HTTP_401_UNAUTHORIZED + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps({"statusCode": OrgStatus.SUSPENDED.value}), + headers=public_headers, + content_type="application/json", + ) + assert org_patch_response.status_code == HTTPStatus.UNAUTHORIZED - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'statusCode': OrgStatus.ACTIVE.value}), - 
headers=public_headers, content_type='application/json') - assert org_patch_response.status_code == http_status.HTTP_401_UNAUTHORIZED + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps({"statusCode": OrgStatus.ACTIVE.value}), + headers=public_headers, + content_type="application/json", + ) + assert org_patch_response.status_code == HTTPStatus.UNAUTHORIZED def test_org_suspended_reason(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be retrieved via GET.""" public_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=public_headers, content_type='application/json') + client.post("/api/v1/users", headers=public_headers, content_type="application/json") - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), - headers=public_headers, - content_type='application/json') + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=public_headers, + content_type="application/json", + ) dictionary = json.loads(org_response.data) - org_id = dictionary['id'] - assert org_response.status_code == http_status.HTTP_201_CREATED + org_id = dictionary["id"] + assert org_response.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'statusCode': OrgStatus.SUSPENDED.value, - 'suspensionReasonCode': SuspensionReasonCode.OWNER_CHANGE.name}), - headers=headers, content_type='application/json') - assert org_patch_response.json.get('orgStatus') == OrgStatus.SUSPENDED.value - assert org_patch_response.json.get('suspensionReasonCode') == SuspensionReasonCode.OWNER_CHANGE.name + org_patch_response = client.patch( + 
"/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps( + {"statusCode": OrgStatus.SUSPENDED.value, "suspensionReasonCode": SuspensionReasonCode.OWNER_CHANGE.name} + ), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.json.get("orgStatus") == OrgStatus.SUSPENDED.value + assert org_patch_response.json.get("suspensionReasonCode") == SuspensionReasonCode.OWNER_CHANGE.name headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - rv = client.get('/api/v1/orgs/{}'.format(org_id), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'org_response')[0] + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "org_response")[0] dictionary = json.loads(rv.data) - assert dictionary['suspensionReasonCode'] == SuspensionReasonCode.OWNER_CHANGE.name + assert dictionary["suspensionReasonCode"] == SuspensionReasonCode.OWNER_CHANGE.name def test_search_orgs_with_pending_affidavits(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument @@ -1823,281 +2251,343 @@ def test_search_orgs_with_pending_affidavits(client, jwt, session, keycloak_mock # 4. Create Org # 5. 
Get the affidavit as a bcol admin headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=headers, content_type='application/json') - document_signature = client.get('/api/v1/documents/test.jpeg/signatures', headers=headers, - content_type='application/json') - doc_key = document_signature.json.get('key') - client.post('/api/v1/users/{}/affidavits'.format(TestJwtClaims.public_user_role.get('sub')), - headers=headers, - data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), - content_type='application/json') - - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), headers=headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + client.post("/api/v1/users", headers=headers, content_type="application/json") + document_signature = client.get( + "/api/v1/documents/test.jpeg/signatures", headers=headers, content_type="application/json" + ) + doc_key = document_signature.json.get("key") + client.post( + "/api/v1/users/{}/affidavits".format(TestJwtClaims.public_user_role.get("sub")), + headers=headers, + data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), + content_type="application/json", + ) + + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=headers, + content_type="application/json", + ) + assert org_response.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_search_response = client.get('/api/v1/orgs?status=PENDING_STAFF_REVIEW', - headers=headers, content_type='application/json') + org_search_response = client.get( + "/api/v1/orgs?status=PENDING_STAFF_REVIEW", headers=headers, content_type="application/json" + ) - assert len(org_search_response.json.get('orgs')) == 1 - assert schema_utils.validate(org_search_response.json, 
'paged_response')[0] + assert len(org_search_response.json.get("orgs")) == 1 + assert schema_utils.validate(org_search_response.json, "paged_response")[0] def test_search_org_pagination(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that pagination works.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org2), - headers=headers, content_type='application/json') - client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org3), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + client.post("/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json") + client.post("/api/v1/orgs", data=json.dumps(TestOrgInfo.org2), headers=headers, content_type="application/json") + client.post("/api/v1/orgs", data=json.dumps(TestOrgInfo.org3), headers=headers, content_type="application/json") # staff search headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.get('/api/v1/orgs?page=1&limit=10', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get("/api/v1/orgs?page=1&limit=10", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('total') == 3 - assert len(orgs.get('orgs')) == 3 + assert orgs.get("total") == 3 + assert len(orgs.get("orgs")) == 3 - rv = client.get('/api/v1/orgs?page=1&limit=2', - headers=headers, 
content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get("/api/v1/orgs?page=1&limit=2", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('total') == 3 - assert len(orgs.get('orgs')) == 2 + assert orgs.get("total") == 3 + assert len(orgs.get("orgs")) == 2 def test_search_org_invitations(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that pagination works.""" # Create 2 anonymous org invitations headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_dir_search_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - client.post('/api/v1/invitations', data=json.dumps(factory_invitation_anonymous(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation_anonymous(org_id=org_id)), + headers=headers, + content_type="application/json", + ) - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous_2), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous_2), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - client.post('/api/v1/invitations', 
data=json.dumps(factory_invitation_anonymous(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation_anonymous(org_id=org_id)), + headers=headers, + content_type="application/json", + ) # staff search - rv = client.get('/api/v1/orgs?status={}'.format(OrgStatus.PENDING_ACTIVATION.value), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get( + "/api/v1/orgs?status={}".format(OrgStatus.PENDING_ACTIVATION.value), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert len(orgs.get('orgs')) == 2 - assert len(orgs.get('orgs')[0].get('invitations')) == 1 + assert len(orgs.get("orgs")) == 2 + assert len(orgs.get("orgs")[0].get("invitations")) == 1 def test_delete_affiliation_no_payload(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an affiliation for an org can be removed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", 
data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), - data=json.dumps(TestAffliationInfo.affiliation3), - headers=headers, - content_type='application/json') + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + data=json.dumps(TestAffliationInfo.affiliation3), + headers=headers, + content_type="application/json", + ) - rv = client.get('/api/v1/orgs/{}/affiliations'.format(org_id), headers=headers) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}/affiliations".format(org_id), headers=headers) + assert rv.status_code == HTTPStatus.OK - assert schema_utils.validate(rv.json, 'affiliations_response')[0] + assert schema_utils.validate(rv.json, "affiliations_response")[0] affiliations = json.loads(rv.data) # Result is sorted desc order of created date - assert affiliations['entities'][0]['businessIdentifier'] == TestEntityInfo.entity_lear_mock['businessIdentifier'] + assert affiliations["entities"][0]["businessIdentifier"] == TestEntityInfo.entity_lear_mock["businessIdentifier"] - affiliation_id = affiliations['entities'][0]['businessIdentifier'] + affiliation_id = affiliations["entities"][0]["businessIdentifier"] headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - da = client.delete('/api/v1/orgs/{org_id}/affiliations/{affiliation_id}'.format(org_id=org_id, - affiliation_id=affiliation_id), - headers=headers, - content_type='application/json') - assert da.status_code == http_status.HTTP_200_OK + da = client.delete( + "/api/v1/orgs/{org_id}/affiliations/{affiliation_id}".format(org_id=org_id, affiliation_id=affiliation_id), + headers=headers, + content_type="application/json", + ) + assert da.status_code == HTTPStatus.OK def test_delete_affiliation_payload_no_mail(client, jwt, session, keycloak_mock): # 
pylint:disable=unused-argument """Assert that an affiliation for an org can be removed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - rv = client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] - rv = client.post('/api/v1/orgs/{}/affiliations'.format(org_id), - data=json.dumps(TestAffliationInfo.affiliation3), - headers=headers, - content_type='application/json') + rv = client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + data=json.dumps(TestAffliationInfo.affiliation3), + headers=headers, + content_type="application/json", + ) - rv = client.get('/api/v1/orgs/{}/affiliations'.format(org_id), headers=headers) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}/affiliations".format(org_id), headers=headers) + assert rv.status_code == HTTPStatus.OK - assert schema_utils.validate(rv.json, 'affiliations_response')[0] + assert schema_utils.validate(rv.json, "affiliations_response")[0] affiliations = json.loads(rv.data) # Result is sorted desc order of created date - assert affiliations['entities'][0]['businessIdentifier'] == TestEntityInfo.entity_lear_mock['businessIdentifier'] + assert 
affiliations["entities"][0]["businessIdentifier"] == TestEntityInfo.entity_lear_mock["businessIdentifier"] - affiliation_id = affiliations['entities'][0]['businessIdentifier'] + affiliation_id = affiliations["entities"][0]["businessIdentifier"] headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - da = client.delete('/api/v1/orgs/{org_id}/affiliations/{affiliation_id}'.format(org_id=org_id, - affiliation_id=affiliation_id), - headers=headers, - data=json.dumps(DeleteAffiliationPayload.delete_affiliation2), - content_type='application/json') - assert da.status_code == http_status.HTTP_200_OK + da = client.delete( + "/api/v1/orgs/{org_id}/affiliations/{affiliation_id}".format(org_id=org_id, affiliation_id=affiliation_id), + headers=headers, + data=json.dumps(DeleteAffiliationPayload.delete_affiliation2), + content_type="application/json", + ) + assert da.status_code == HTTPStatus.OK def test_org_patch_validate_request_json(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Validate patch org endpoints based on different input.""" public_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=public_headers, content_type='application/json') + client.post("/api/v1/users", headers=public_headers, content_type="application/json") - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), - headers=public_headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=public_headers, + content_type="application/json", + ) + assert org_response.status_code == HTTPStatus.CREATED # Validate public Bcol user cannot do patch - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'statusCode': 
OrgStatus.SUSPENDED.value, - 'suspensionReasonCode': SuspensionReasonCode.OWNER_CHANGE.name}), - headers=public_headers, content_type='application/json') - assert org_patch_response.status_code == http_status.HTTP_401_UNAUTHORIZED + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps( + {"statusCode": OrgStatus.SUSPENDED.value, "suspensionReasonCode": SuspensionReasonCode.OWNER_CHANGE.name} + ), + headers=public_headers, + content_type="application/json", + ) + assert org_patch_response.status_code == HTTPStatus.UNAUTHORIZED # Validate patch - update status fails if it is missing one of json properties headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'statusCode': OrgStatus.SUSPENDED.value}), - headers=headers, content_type='application/json') - assert org_patch_response.status_code == http_status.HTTP_400_BAD_REQUEST + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps({"statusCode": OrgStatus.SUSPENDED.value}), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.status_code == HTTPStatus.BAD_REQUEST headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'suspensionReasonCode': SuspensionReasonCode.OWNER_CHANGE.name}), - headers=headers, content_type='application/json') - assert org_patch_response.status_code == http_status.HTTP_400_BAD_REQUEST + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps({"suspensionReasonCode": SuspensionReasonCode.OWNER_CHANGE.name}), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.status_code == HTTPStatus.BAD_REQUEST # 
Validate patch - update access type fails if it is missing one of json properties headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'action': PatchActions.UPDATE_ACCESS_TYPE.value, - 'accessType': AccessType.GOVM.value}), - headers=headers, content_type='application/json') - assert org_patch_response.status_code == http_status.HTTP_400_BAD_REQUEST + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps({"action": PatchActions.UPDATE_ACCESS_TYPE.value, "accessType": AccessType.GOVM.value}), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.status_code == HTTPStatus.BAD_REQUEST headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'action': PatchActions.UPDATE_ACCESS_TYPE.value}), - headers=headers, content_type='application/json') - assert org_patch_response.status_code == http_status.HTTP_400_BAD_REQUEST + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps({"action": PatchActions.UPDATE_ACCESS_TYPE.value}), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.status_code == HTTPStatus.BAD_REQUEST def test_org_patch_access_type(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert patch Org endpoint for access type.""" public_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=public_headers, content_type='application/json') + client.post("/api/v1/users", headers=public_headers, content_type="application/json") - org_response = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_with_mailing_address()), - 
headers=public_headers, - content_type='application/json') - assert org_response.status_code == http_status.HTTP_201_CREATED + org_response = client.post( + "/api/v1/orgs", + data=json.dumps(TestOrgInfo.org_with_mailing_address()), + headers=public_headers, + content_type="application/json", + ) + assert org_response.status_code == HTTPStatus.CREATED headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.bcol_admin_role) - org_patch_response = client.patch('/api/v1/orgs/{}'.format(org_response.json.get('id')), - data=json.dumps({'action': PatchActions.UPDATE_ACCESS_TYPE.value, - 'accessType': AccessType.GOVN.value}), - headers=headers, content_type='application/json') - assert org_patch_response.json.get('accessType') == AccessType.GOVN.value + org_patch_response = client.patch( + "/api/v1/orgs/{}".format(org_response.json.get("id")), + data=json.dumps({"action": PatchActions.UPDATE_ACCESS_TYPE.value, "accessType": AccessType.GOVN.value}), + headers=headers, + content_type="application/json", + ) + assert org_patch_response.json.get("accessType") == AccessType.GOVN.value def test_search_org_govm(client, jwt, session, monkeypatch): # pylint:disable=unused-argument """Create org_govm, find it in the search.""" # Set up: create/login user, create org headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") # Create govm organization. 
- rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_govm), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_govm), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] # Invite a user to the org - rv = client.post('/api/v1/invitations', - data=json.dumps(factory_invitation(org_id, 'abc123@email.com', membership_type=ADMIN)), - headers=headers, content_type='application/json') + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation(org_id, "abc123@email.com", membership_type=ADMIN)), + headers=headers, + content_type="application/json", + ) # Fetch PENDING_ACTIVATION for govm. - rv = client.get('/api/v1/orgs?status=PENDING_ACTIVATION', headers=headers) - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs?status=PENDING_ACTIVATION", headers=headers) + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert dictionary['orgs'] - assert len(dictionary['orgs']) == 1 + assert dictionary["orgs"] + assert len(dictionary["orgs"]) == 1 # 1 - assert org can be deleted without any dependencies like members or business affiliations. patch_pay_account_delete(monkeypatch) # Delete PENDING_INVITE_ACCEPT org. 
- rv = client.delete('/api/v1/orgs/{}'.format(org_id), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NO_CONTENT def test_new_active_search(client, jwt, session, keycloak_mock): """Check for id, accessType , orgType, decisionMadeBy.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_premium), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_premium), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - org_id_1 = dictionary['id'] - decision_made_by = 'barney' + org_id_1 = dictionary["id"] + decision_made_by = "barney" org: OrgModel = OrgModel.find_by_org_id(org_id_1) org.decision_made_by = decision_made_by org.status_code = OrgStatus.ACTIVE.value org.save() - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_regular), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_govm), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_regular), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_govm), headers=headers, 
content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] org: OrgModel = OrgModel.find_by_org_id(org_id) org.status_code = OrgStatus.ACTIVE.value org.save() @@ -2106,152 +2596,193 @@ def test_new_active_search(client, jwt, session, keycloak_mock): headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) # # Fetch by Id - rv = client.get(f'/api/v1/orgs?id={org_id_1}&status=ACTIVE', headers=headers) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get(f"/api/v1/orgs?id={org_id_1}&status=ACTIVE", headers=headers) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('id') == org_id_1 + assert orgs.get("orgs")[0].get("id") == org_id_1 # Fetch by accessType - rv = client.get(f'/api/v1/orgs?accessType={AccessType.GOVM.value}&status=ACTIVE', headers=headers) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get(f"/api/v1/orgs?accessType={AccessType.GOVM.value}&status=ACTIVE", headers=headers) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('accessType') == AccessType.GOVM.value + assert orgs.get("orgs")[0].get("accessType") == AccessType.GOVM.value # Fetch by orgType - rv = client.get(f'/api/v1/orgs?orgType={OrgType.PREMIUM.value}&status=ACTIVE', headers=headers) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get(f"/api/v1/orgs?orgType={OrgType.PREMIUM.value}&status=ACTIVE", headers=headers) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, 
"paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('orgType') == OrgType.PREMIUM.value + assert orgs.get("orgs")[0].get("orgType") == OrgType.PREMIUM.value # Fetch by decisionMadeBy - rv = client.get(f'/api/v1/orgs?decisionMadeBy={decision_made_by}&status=ACTIVE', headers=headers) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'paged_response')[0] + rv = client.get(f"/api/v1/orgs?decisionMadeBy={decision_made_by}&status=ACTIVE", headers=headers) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "paged_response")[0] orgs = json.loads(rv.data) - assert orgs.get('orgs')[0].get('decisionMadeBy') == decision_made_by - - -@pytest.mark.parametrize('test_name, businesses, drafts, drafts_with_nrs, nrs, dates', [ - ('businesses_only', [('BC1234567', CorpType.BC.value), ('BC1234566', CorpType.BC.value)], [], [], [], []), - ('drafts_only', [], - [('T12dfhsff1', CorpType.BC.value, CorpType.TMP.value), - ('T12dfhsff2', CorpType.GP.value, CorpType.RTMP.value)], - [], [], []), - ('nrs_only', [], [], [], [('NR 1234567', 'NEW'), ('NR 1234566', 'NEW')], []), - ('drafts_with_nrs', [], [], - [('T12dfhsff1', CorpType.BC.value, CorpType.TMP.value, 'NR 1234567'), - ('T12dfhsff2', CorpType.GP.value, CorpType.RTMP.value, 'NR 1234566')], - [('NR 1234567', 'AML'), ('NR 1234566', 'AML')], []), - ('affiliations_order', [], [], - [], [('abcde1', CorpType.BC.value, 'NR 123456'), - ('abcde2', CorpType.BC.value, 'NR 123457'), - ('abcde3', CorpType.BC.value, 'NR 123458'), - ('abcde4', CorpType.BC.value, 'NR 123459')], - [datetime(2021, 1, 1), datetime(2022, 2, 1), datetime(2022, 3, 1), datetime(2023, 2, 1)]), - ('all', [('BC1234567', CorpType.BC.value), ('BC1234566', CorpType.BC.value)], - [('T12dfhsff1', CorpType.BC.value, CorpType.TMP.value), - ('T12dfhsff2', CorpType.GP.value, CorpType.RTMP.value)], - [('T12dfhsff3', CorpType.BC.value, CorpType.TMP.value, 'NR 1234567'), - ('T12dfhsff4', 
CorpType.GP.value, CorpType.RTMP.value, 'NR 1234566')], - [('NR 1234567', 'AML'), ('NR 1234566', 'AML'), ('NR 1234565', 'AML')], - [datetime(2021, 1, 1), datetime(2022, 2, 1)]) -]) -def test_get_org_affiliations(client, jwt, session, keycloak_mock, mocker, - test_name, businesses, drafts, drafts_with_nrs, nrs, dates): + assert orgs.get("orgs")[0].get("decisionMadeBy") == decision_made_by + + +@pytest.mark.parametrize( + "test_name, businesses, drafts, drafts_with_nrs, nrs, dates", + [ + ("businesses_only", [("BC1234567", CorpType.BC.value), ("BC1234566", CorpType.BC.value)], [], [], [], []), + ( + "drafts_only", + [], + [ + ("T12dfhsff1", CorpType.BC.value, CorpType.TMP.value), + ("T12dfhsff2", CorpType.GP.value, CorpType.RTMP.value), + ], + [], + [], + [], + ), + ("nrs_only", [], [], [], [("NR 1234567", "NEW"), ("NR 1234566", "NEW")], []), + ( + "drafts_with_nrs", + [], + [], + [ + ("T12dfhsff1", CorpType.BC.value, CorpType.TMP.value, "NR 1234567"), + ("T12dfhsff2", CorpType.GP.value, CorpType.RTMP.value, "NR 1234566"), + ], + [("NR 1234567", "AML"), ("NR 1234566", "AML")], + [], + ), + ( + "affiliations_order", + [], + [], + [], + [ + ("abcde1", CorpType.BC.value, "NR 123456"), + ("abcde2", CorpType.BC.value, "NR 123457"), + ("abcde3", CorpType.BC.value, "NR 123458"), + ("abcde4", CorpType.BC.value, "NR 123459"), + ], + [datetime(2021, 1, 1), datetime(2022, 2, 1), datetime(2022, 3, 1), datetime(2023, 2, 1)], + ), + ( + "all", + [("BC1234567", CorpType.BC.value), ("BC1234566", CorpType.BC.value)], + [ + ("T12dfhsff1", CorpType.BC.value, CorpType.TMP.value), + ("T12dfhsff2", CorpType.GP.value, CorpType.RTMP.value), + ], + [ + ("T12dfhsff3", CorpType.BC.value, CorpType.TMP.value, "NR 1234567"), + ("T12dfhsff4", CorpType.GP.value, CorpType.RTMP.value, "NR 1234566"), + ], + [("NR 1234567", "AML"), ("NR 1234566", "AML"), ("NR 1234565", "AML")], + [datetime(2021, 1, 1), datetime(2022, 2, 1)], + ), + ], +) +def test_get_org_affiliations( + client, jwt, session, 
keycloak_mock, mocker, test_name, businesses, drafts, drafts_with_nrs, nrs, dates +): """Assert details of affiliations for an org are returned.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) # setup org - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] # setup mocks - businesses_details = [{ - 'adminFreeze': False, - 'goodStanding': True, - 'identifier': data[0], - 'legalName': 'KIALS BUSINESS NAME CORP.', - 'legalType': data[1], - 'state': 'ACTIVE', - 'taxId': '123' - } for data in businesses] - - drafts_details = [{ - 'identifier': data[0], - 'legalType': data[1], - 'draftType': data[2], - } for data in drafts] - - drafts_with_nr_details = [{ - 'identifier': data[0], - 'legalType': data[1], - 'draftType': data[2], - 'nrNumber': data[3] - } for data in drafts_with_nrs] - - nrs_details = [{ - 'actions': [], - 'applicants': { - 'emailAddress': '1@1.com', - 'phoneNumber': '1234567890', - }, - 'names': [{ - 'name': f'TEST INC. 
{nr}', - 'state': 'APPROVED' - }], - 'stateCd': 'APPROVED', - 'requestTypeCd': 'BC', - 'request_action_cd': nr[1], - 'nrNum': nr - } for nr in nrs] + businesses_details = [ + { + "adminFreeze": False, + "goodStanding": True, + "identifier": data[0], + "legalName": "KIALS BUSINESS NAME CORP.", + "legalType": data[1], + "state": "ACTIVE", + "taxId": "123", + } + for data in businesses + ] + + drafts_details = [ + { + "identifier": data[0], + "legalType": data[1], + "draftType": data[2], + } + for data in drafts + ] + + drafts_with_nr_details = [ + {"identifier": data[0], "legalType": data[1], "draftType": data[2], "nrNumber": data[3]} + for data in drafts_with_nrs + ] + + nrs_details = [ + { + "actions": [], + "applicants": { + "emailAddress": "1@1.com", + "phoneNumber": "1234567890", + }, + "names": [{"name": f"TEST INC. {nr}", "state": "APPROVED"}], + "stateCd": "APPROVED", + "requestTypeCd": "BC", + "request_action_cd": nr[1], + "nrNum": nr, + } + for nr in nrs + ] # Add dates to nrs_details for i, date in enumerate(dates): if i < len(nrs_details): - nrs_details[i]['created'] = date.isoformat() + nrs_details[i]["created"] = date.isoformat() entities_response = { - 'businessEntities': businesses_details, - 'draftEntities': drafts_details + drafts_with_nr_details + "businessEntities": businesses_details, + "draftEntities": drafts_details + drafts_with_nr_details, } nrs_response = nrs_details # mock function that calls namex / lear - mocker.patch('auth_api.services.rest_service.RestService.call_posts_in_parallel', - return_value=[entities_response, nrs_response]) - mocker.patch('auth_api.services.rest_service.RestService.get_service_account_token', - return_value='token') + mocker.patch( + "auth_api.services.rest_service.RestService.call_posts_in_parallel", + return_value=[entities_response, nrs_response], + ) + mocker.patch("auth_api.services.rest_service.RestService.get_service_account_token", return_value="token") - rv = 
client.get('/api/v1/orgs/{}/affiliations?new=true'.format(org_id), - headers=headers, - content_type='application/json') + rv = client.get( + "/api/v1/orgs/{}/affiliations?new=true".format(org_id), headers=headers, content_type="application/json" + ) - assert rv.status_code == http_status.HTTP_200_OK - assert rv.json.get('entities', None) and isinstance(rv.json['entities'], list) - assert len(rv.json['entities']) == len(businesses) + len(drafts) + len(nrs) + assert rv.status_code == HTTPStatus.OK + assert rv.json.get("entities", None) and isinstance(rv.json["entities"], list) + assert len(rv.json["entities"]) == len(businesses) + len(drafts) + len(nrs) drafts_nr_numbers = [data[3] for data in drafts_with_nrs] - for entity in rv.json['entities']: - if entity['legalType'] == CorpType.NR.value: - assert entity['nameRequest']['nrNum'] not in drafts_nr_numbers - - if draft_type := entity.get('draftType', None): - expected = CorpType.RTMP.value if entity['legalType'] in [ - CorpType.SP.value, CorpType.GP.value] else CorpType.TMP.value - if entity.get('nameRequest', {}).get('requestActionCd') == NRActionCodes.AMALGAMATE.value: + for entity in rv.json["entities"]: + if entity["legalType"] == CorpType.NR.value: + assert entity["nameRequest"]["nrNum"] not in drafts_nr_numbers + + if draft_type := entity.get("draftType", None): + expected = ( + CorpType.RTMP.value + if entity["legalType"] in [CorpType.SP.value, CorpType.GP.value] + else CorpType.TMP.value + ) + if entity.get("nameRequest", {}).get("requestActionCd") == NRActionCodes.AMALGAMATE.value: expected = CorpType.ATMP.value assert draft_type == expected # Assert that the entities are sorted in descending order of creation dates - if test_name == 'affiliations_order' and len(dates) > 0: - created_order = [affiliation['nameRequest'].get('created') for affiliation in rv.json['entities']] + if test_name == "affiliations_order" and len(dates) > 0: + created_order = [affiliation["nameRequest"].get("created") for affiliation 
in rv.json["entities"]] dates.sort() date_iso = [date.isoformat() for date in dates] assert date_iso == created_order @@ -2261,72 +2792,82 @@ def _create_orgs_entities_and_affiliations(client, jwt, count): created_orgs = [] for i in range(0, count): headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.passcode) - client.post('/api/v1/entities', data=json.dumps(TestEntityInfo.entity_lear_mock), - headers=headers, content_type='application/json') + client.post( + "/api/v1/entities", + data=json.dumps(TestEntityInfo.entity_lear_mock), + headers=headers, + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") new_org = TestOrgInfo.org_details.copy() - new_org['name'] = new_org['name'] + ' ' + str(i) - new_org['branchName'] = 'branch-for-' + new_org['name'] - rv = client.post('/api/v1/orgs', data=json.dumps(new_org), - headers=headers, content_type='application/json') + new_org["name"] = new_org["name"] + " " + str(i) + new_org["branchName"] = "branch-for-" + new_org["name"] + rv = client.post("/api/v1/orgs", data=json.dumps(new_org), headers=headers, content_type="application/json") dictionary = json.loads(rv.data) - org_id = dictionary['id'] - new_org['id'] = org_id + org_id = dictionary["id"] + new_org["id"] = org_id created_orgs.append(new_org) - client.post('/api/v1/orgs/{}/affiliations'.format(org_id), headers=headers, - data=json.dumps(TestAffliationInfo.affiliation3), content_type='application/json') + client.post( + "/api/v1/orgs/{}/affiliations".format(org_id), + headers=headers, + data=json.dumps(TestAffliationInfo.affiliation3), + content_type="application/json", + ) return created_orgs -@pytest.mark.parametrize('expected_http_status, entries_count', - [ - (http_status.HTTP_200_OK, 1), - (http_status.HTTP_200_OK, 3), - 
(http_status.HTTP_200_OK, 0), - ], - ids=[ - 'Assert that fetching orgs filtered by business identifier works, single entry.', - 'Assert that fetching orgs filtered by business identifier works, multiple entries.', - 'Assert that fetching orgs filtered by business identifier works, no entry', - ] - ) -def test_get_orgs_by_affiliation(client, jwt, session, keycloak_mock, - expected_http_status, entries_count): +@pytest.mark.parametrize( + "expected_http_status, entries_count", + [ + (HTTPStatus.OK, 1), + (HTTPStatus.OK, 3), + (HTTPStatus.OK, 0), + ], + ids=[ + "Assert that fetching orgs filtered by business identifier works, single entry.", + "Assert that fetching orgs filtered by business identifier works, multiple entries.", + "Assert that fetching orgs filtered by business identifier works, no entry", + ], +) +def test_get_orgs_by_affiliation(client, jwt, session, keycloak_mock, expected_http_status, entries_count): """Assert that api call returns affiliated orgs.""" created_orgs = _create_orgs_entities_and_affiliations(client, jwt, entries_count) # Create a system token headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.get('/api/v1/orgs/affiliation/{}'.format(TestAffliationInfo.affiliation3.get('businessIdentifier')), - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/orgs/affiliation/{}".format(TestAffliationInfo.affiliation3.get("businessIdentifier")), + headers=headers, + content_type="application/json", + ) response = json.loads(rv.data) assert rv.status_code == expected_http_status - assert schema_utils.validate(rv.json, 'orgs_response')[1] - assert len(response.get('orgsDetails')) == entries_count - orgs_details = response.get('orgsDetails') + assert schema_utils.validate(rv.json, "orgs_response")[1] + assert len(response.get("orgsDetails")) == entries_count + orgs_details = response.get("orgsDetails") for co in created_orgs: - names = [od['name'] for od in orgs_details] - branches = 
[od['branchName'] for od in orgs_details] - assert co['name'] in names - assert co['branchName'] in branches + names = [od["name"] for od in orgs_details] + branches = [od["branchName"] for od in orgs_details] + assert co["name"] in names + assert co["branchName"] in branches for od in orgs_details: - assert 'name' in od - assert 'branchName' in od - assert 'uuid' in od - assert 'id' not in od + assert "name" in od + assert "branchName" in od + assert "uuid" in od + assert "id" not in od def test_get_orgs_by_affiliation_filtering_out_staff_orgs(app, client, jwt, session, keycloak_mock): """Assert that fetching orgs by affiliation do not return staff orgs.""" - orig_val_max_number_of_orgs = app.config.get('MAX_NUMBER_OF_ORGS') + orig_val_max_number_of_orgs = app.config.get("MAX_NUMBER_OF_ORGS") app.config.update(MAX_NUMBER_OF_ORGS=10) create_org_count = 6 @@ -2336,54 +2877,58 @@ def test_get_orgs_by_affiliation_filtering_out_staff_orgs(app, client, jwt, sess org3 = created_orgs[2] org5 = created_orgs[4] - convert_org_to_staff_org(org3['id'], OrgType.SBC_STAFF.value) - convert_org_to_staff_org(org5['id'], OrgType.STAFF.value) + convert_org_to_staff_org(org3["id"], OrgType.SBC_STAFF.value) + convert_org_to_staff_org(org5["id"], OrgType.STAFF.value) - staff_org_names = [org3['name'], org5['name']] + staff_org_names = [org3["name"], org5["name"]] expected_org_count = create_org_count - len(staff_org_names) # Create a system token headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.get('/api/v1/orgs/affiliation/{}'.format(TestAffliationInfo.affiliation3.get('businessIdentifier')), - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/orgs/affiliation/{}".format(TestAffliationInfo.affiliation3.get("businessIdentifier")), + headers=headers, + content_type="application/json", + ) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'orgs_response')[1] + assert rv.status_code 
== HTTPStatus.OK + assert schema_utils.validate(rv.json, "orgs_response")[1] response = json.loads(rv.data) - assert len(response.get('orgsDetails')) == expected_org_count # without org 3 and 5 - orgs_details = response.get('orgsDetails') + assert len(response.get("orgsDetails")) == expected_org_count # without org 3 and 5 + orgs_details = response.get("orgsDetails") for od in orgs_details: - assert od['name'] not in staff_org_names + assert od["name"] not in staff_org_names def test_update_org_api_access(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be searched.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] + org_id = dictionary["id"] headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - rv = client.get('/api/v1/orgs/{}'.format(org_id), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'org_response')[0] + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "org_response")[0] dictionary = json.loads(rv.data) - assert dictionary['id'] == org_id - assert dictionary['hasApiAccess'] is False + assert dictionary["id"] == org_id + assert dictionary["hasApiAccess"] is False headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - 
client.patch('/api/v1/orgs/{}'.format(org_id), - data=json.dumps({'hasApiAccess': True, - 'action': PatchActions.UPDATE_API_ACCESS.value}), - headers=headers, content_type='application/json') - rv = client.get('/api/v1/orgs/{}'.format(org_id), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - dictionary = json.loads(rv.data) - assert dictionary['hasApiAccess'] is True + client.patch( + "/api/v1/orgs/{}".format(org_id), + data=json.dumps({"hasApiAccess": True, "action": PatchActions.UPDATE_API_ACCESS.value}), + headers=headers, + content_type="application/json", + ) + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + dictionary = json.loads(rv.data) + assert dictionary["hasApiAccess"] is True diff --git a/auth-api/tests/unit/api/test_org_api_keys.py b/auth-api/tests/unit/api/test_org_api_keys.py index fa2b7e8abd..ae1f47b5ca 100644 --- a/auth-api/tests/unit/api/test_org_api_keys.py +++ b/auth-api/tests/unit/api/test_org_api_keys.py @@ -18,8 +18,8 @@ """ import json +from http import HTTPStatus -from auth_api import status as http_status from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo from tests.utilities.factory_utils import factory_auth_header @@ -28,93 +28,100 @@ def test_create_api_keys(client, jwt, session, keycloak_mock, monkeypatch): # p """Assert that api keys can be generated.""" # First create an account headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert not rv.json.get('hasApiAccess') - org_id = rv.json.get('id') + rv = client.post("/api/v1/users", headers=headers, 
content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED + assert not rv.json.get("hasApiAccess") + org_id = rv.json.get("id") # Create a system token and create an API key for this account. headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) # Patch to return has_consumer as False, so that it would create a new consumer. - monkeypatch.setattr('auth_api.services.api_gateway.ApiGateway._consumer_exists', lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.api_gateway.ApiGateway._consumer_exists", lambda *args, **kwargs: None) def get_pay_account_mock(org, user): - return { - 'paymentMethod': 'PAD' - } + return {"paymentMethod": "PAD"} - monkeypatch.setattr('auth_api.services.api_gateway.ApiGateway._get_pay_account', get_pay_account_mock) - monkeypatch.setattr('auth_api.services.api_gateway.ApiGateway._create_sandbox_pay_account', - lambda *args, **kwargs: None) + monkeypatch.setattr("auth_api.services.api_gateway.ApiGateway._get_pay_account", get_pay_account_mock) + monkeypatch.setattr( + "auth_api.services.api_gateway.ApiGateway._create_sandbox_pay_account", lambda *args, **kwargs: None + ) - rv = client.post(f'/api/v1/orgs/{org_id}/api-keys', headers=headers, content_type='application/json', - data=json.dumps({ - 'environment': 'sandbox', - 'keyName': 'TEST' - })) - assert rv.json['consumer']['consumerKey'] + rv = client.post( + f"/api/v1/orgs/{org_id}/api-keys", + headers=headers, + content_type="application/json", + data=json.dumps({"environment": "sandbox", "keyName": "TEST"}), + ) + assert rv.json["consumer"]["consumerKey"] headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get(f'/api/v1/orgs/{org_id}', headers=headers, content_type='application/json') - assert rv.json.get('hasApiAccess') + rv = client.get(f"/api/v1/orgs/{org_id}", 
headers=headers, content_type="application/json") + assert rv.json.get("hasApiAccess") def test_list_api_keys(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that api keys can be listed.""" # First create an account user_header = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - client.post('/api/v1/users', headers=user_header, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), headers=user_header, - content_type='application/json') - org_id = rv.json.get('id') + client.post("/api/v1/users", headers=user_header, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=user_header, content_type="application/json" + ) + org_id = rv.json.get("id") # Create a system token and create an API key for this account. headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post(f'/api/v1/orgs/{org_id}/api-keys', headers=headers, content_type='application/json', - data=json.dumps({ - 'environment': 'dev', - 'keyName': 'TEST' - })) + rv = client.post( + f"/api/v1/orgs/{org_id}/api-keys", + headers=headers, + content_type="application/json", + data=json.dumps({"environment": "dev", "keyName": "TEST"}), + ) - rv = client.post(f'/api/v1/orgs/{org_id}/api-keys', headers=headers, content_type='application/json', - data=json.dumps({ - 'environment': 'dev', - 'keyName': 'TEST 2' - })) + rv = client.post( + f"/api/v1/orgs/{org_id}/api-keys", + headers=headers, + content_type="application/json", + data=json.dumps({"environment": "dev", "keyName": "TEST 2"}), + ) - rv = client.get(f'/api/v1/orgs/{org_id}/api-keys', headers=headers, content_type='application/json') - assert rv.json['consumer']['consumerKey'] + rv = client.get(f"/api/v1/orgs/{org_id}/api-keys", headers=headers, content_type="application/json") + assert rv.json["consumer"]["consumerKey"] - rv = 
client.get(f'/api/v1/orgs/{org_id}/api-keys', headers=user_header, content_type='application/json') - assert rv.json['consumer']['consumerKey'] + rv = client.get(f"/api/v1/orgs/{org_id}/api-keys", headers=user_header, content_type="application/json") + assert rv.json["consumer"]["consumerKey"] def test_revoke_api_key(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that api keys can be revoked.""" # First create an account user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_account_holder_user) - rv = client.post('/api/v1/users', headers=user_headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), headers=user_headers, - content_type='application/json') - org_id = rv.json.get('id') + rv = client.post("/api/v1/users", headers=user_headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=user_headers, content_type="application/json" + ) + org_id = rv.json.get("id") # Create a system token and create an API key for this account. 
headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv = client.post(f'/api/v1/orgs/{org_id}/api-keys', headers=headers, content_type='application/json', - data=json.dumps({ - 'environment': 'dev', - 'keyName': 'TEST' - })) + rv = client.post( + f"/api/v1/orgs/{org_id}/api-keys", + headers=headers, + content_type="application/json", + data=json.dumps({"environment": "dev", "keyName": "TEST"}), + ) - rv = client.get(f'/api/v1/orgs/{org_id}/api-keys', headers=headers, content_type='application/json') - key = rv.json['consumer']['consumerKey'][0]['apiKey'] + rv = client.get(f"/api/v1/orgs/{org_id}/api-keys", headers=headers, content_type="application/json") + key = rv.json["consumer"]["consumerKey"][0]["apiKey"] - rv = client.delete(f'/api/v1/orgs/{org_id}/api-keys/{key}', headers=headers, content_type='application/json') + rv = client.delete(f"/api/v1/orgs/{org_id}/api-keys/{key}", headers=headers, content_type="application/json") assert rv.status_code == 200 # Revoke an invalid key - rv = client.delete(f'/api/v1/orgs/{org_id}/api-keys/{key}-INVALID', headers=user_headers, - content_type='application/json') + rv = client.delete( + f"/api/v1/orgs/{org_id}/api-keys/{key}-INVALID", headers=user_headers, content_type="application/json" + ) assert rv.status_code == 404 diff --git a/auth-api/tests/unit/api/test_org_authorisations.py b/auth-api/tests/unit/api/test_org_authorisations.py index 47510164ad..ce3700ee44 100644 --- a/auth-api/tests/unit/api/test_org_authorisations.py +++ b/auth-api/tests/unit/api/test_org_authorisations.py @@ -18,8 +18,8 @@ """ import json +from http import HTTPStatus -from auth_api import status as http_status from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo, TestOrgProductsInfo from tests.utilities.factory_utils import factory_auth_header @@ -27,73 +27,81 @@ def test_add_org(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = 
factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED orgs = json.loads(rv.data) - id = orgs.get('id') - rv = client.get(f'/api/v1/orgs/{id}/authorizations', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + id = orgs.get("id") + rv = client.get(f"/api/v1/orgs/{id}/authorizations", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK org_authorisations = json.loads(rv.data) - assert org_authorisations.get('orgMembership') == 'ADMIN' - assert org_authorisations.get('roles') + assert org_authorisations.get("orgMembership") == "ADMIN" + assert org_authorisations.get("roles") # NR should have all access since its internal - headers.update({'Product-Code': 'NRO'}) - rv = client.get(f'/api/v1/orgs/{id}/authorizations', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + headers.update({"Product-Code": "NRO"}) + rv = client.get(f"/api/v1/orgs/{id}/authorizations", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK org_authorisations_by_nro = json.loads(rv.data) - assert org_authorisations_by_nro.get('orgMembership') == 'ADMIN', 'NR should get admin access' - assert org_authorisations_by_nro.get('roles') + assert org_authorisations_by_nro.get("orgMembership") == "ADMIN", "NR should get admin access" + assert org_authorisations_by_nro.get("roles") # vital stats 
shouldn't get any access since its partner - headers.update({'Product-Code': 'VS'}) - rv = client.get(f'/api/v1/orgs/{id}/authorizations', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + headers.update({"Product-Code": "VS"}) + rv = client.get(f"/api/v1/orgs/{id}/authorizations", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK org_authorisations_by_vs = json.loads(rv.data) - assert len(org_authorisations_by_vs.get('roles')) == 0 + assert len(org_authorisations_by_vs.get("roles")) == 0 def test_ppr_auth(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that accounts get PPR authorization.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) # Create a basic account - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED orgs = json.loads(rv.data) - id = orgs.get('id') + id = orgs.get("id") # Try to add PPR as a product which doesn't add product as account is BASIC. - client.post(f'/api/v1/orgs/{id}/products', - data=json.dumps(TestOrgProductsInfo.org_products1), - headers=headers, content_type='application/json') + client.post( + f"/api/v1/orgs/{id}/products", + data=json.dumps(TestOrgProductsInfo.org_products1), + headers=headers, + content_type="application/json", + ) # Check PPR access and assert no roles are returned. 
- rv = client.get(f'/api/v1/accounts/{id}/products/PPR/authorizations', - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get( + f"/api/v1/accounts/{id}/products/PPR/authorizations", headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK org_authorisations = json.loads(rv.data) - assert len(org_authorisations.get('roles')) == 0 + assert len(org_authorisations.get("roles")) == 0 # Create a PREMIUM account - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.bcol_linked()), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.bcol_linked()), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED orgs = json.loads(rv.data) - id = orgs.get('id') + id = orgs.get("id") # Try to add PPR as a product which adds product as account is PREMIUM. - client.post(f'/api/v1/orgs/{id}/products', - data=json.dumps(TestOrgProductsInfo.org_products1), - headers=headers, content_type='application/json') + client.post( + f"/api/v1/orgs/{id}/products", + data=json.dumps(TestOrgProductsInfo.org_products1), + headers=headers, + content_type="application/json", + ) # Check PPR access and assert no roles are returned. 
- rv = client.get(f'/api/v1/accounts/{id}/products/PPR/authorizations', headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get( + f"/api/v1/accounts/{id}/products/PPR/authorizations", headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.OK org_authorisations = json.loads(rv.data) - assert len(org_authorisations.get('roles')) > 0 + assert len(org_authorisations.get("roles")) > 0 diff --git a/auth-api/tests/unit/api/test_org_products.py b/auth-api/tests/unit/api/test_org_products.py index 97c2eb4b57..8757f130e5 100644 --- a/auth-api/tests/unit/api/test_org_products.py +++ b/auth-api/tests/unit/api/test_org_products.py @@ -18,10 +18,10 @@ """ import json +from http import HTTPStatus import pytest -from auth_api import status as http_status from auth_api.schemas import utils as schema_utils from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo, TestOrgProductsInfo from tests.utilities.factory_utils import factory_auth_header @@ -30,317 +30,400 @@ def test_add_multiple_org_products(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(TestOrgProductsInfo.org_products2), - 
headers=headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(TestOrgProductsInfo.org_products2), + headers=headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] def test_add_single_org_product(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(TestOrgProductsInfo.org_products1), - headers=headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(TestOrgProductsInfo.org_products1), + headers=headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] def 
test_add_single_org_product_vs(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(TestOrgProductsInfo.org_products_vs), - headers=headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] - - rv_products = client.get(f"/api/v1/orgs/{dictionary.get('id')}/products", headers=headers, - content_type='application/json') + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(TestOrgProductsInfo.org_products_vs), + headers=headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] + + rv_products = client.get( + f"/api/v1/orgs/{dictionary.get('id')}/products", headers=headers, content_type="application/json" + ) list_products = json.loads(rv_products.data) - vs_product = next(prod for prod in list_products if prod.get('code') == 'VS') - assert vs_product.get('subscriptionStatus') == 'PENDING_STAFF_REVIEW' + vs_product = next(prod for prod in list_products if prod.get("code") == "VS") + 
assert vs_product.get("subscriptionStatus") == "PENDING_STAFF_REVIEW" def test_dir_search_doesnt_get_any_product(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert dir search doesnt get any active product subscriptions.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) - client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - assert dictionary['accessType'] == 'ANONYMOUS' - assert schema_utils.validate(rv.json, 'org_response')[0] + assert dictionary["accessType"] == "ANONYMOUS" + assert schema_utils.validate(rv.json, "org_response")[0] - rv_products = client.get(f"/api/v1/orgs/{dictionary.get('id')}/products", headers=headers, - content_type='application/json') + rv_products = client.get( + f"/api/v1/orgs/{dictionary.get('id')}/products", headers=headers, content_type="application/json" + ) list_products = json.loads(rv_products.data) - assert len([x for x in list_products if x.get('subscriptionStatus') != 'NOT_SUBSCRIBED']) == 0 + assert len([x for x in list_products if x.get("subscriptionStatus") != "NOT_SUBSCRIBED"]) == 0 def test_new_dir_search_can_be_returned(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert new dir search product subscriptions can be subscribed to via system admin / returned via org user.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - 
rv = client.post('/api/v1/orgs', - data=json.dumps(TestOrgInfo.org1), - headers=headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org1), headers=headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) system_admin_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_admin_role) - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(TestOrgProductsInfo.org_products_nds), - headers=system_admin_headers, - content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - rv_products = client.get(f"/api/v1/orgs/{dictionary.get('id')}/products?include_hidden=true", - headers=headers, - content_type='application/json') + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(TestOrgProductsInfo.org_products_nds), + headers=system_admin_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + rv_products = client.get( + f"/api/v1/orgs/{dictionary.get('id')}/products?include_hidden=true", + headers=headers, + content_type="application/json", + ) list_products = json.loads(rv_products.data) - nds_product = next(prod for prod in list_products if prod.get('code') == 'NDS') - assert nds_product.get('subscriptionStatus') == 'ACTIVE' + nds_product = next(prod for prod in list_products if prod.get("code") == "NDS") + assert nds_product.get("subscriptionStatus") == "ACTIVE" -def assert_product_parent_and_child_statuses(client, jwt, org_id, - parent_code, parent_status, child_code, child_status): +def assert_product_parent_and_child_statuses(client, jwt, org_id, parent_code, parent_status, child_code, child_status): """Assert that an 
organizations parent product code and child product code have the expected statuses.""" staff_view_account_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_view_accounts_role) - rv_products = client.get(f'/api/v1/orgs/{org_id}/products', headers=staff_view_account_headers, - content_type='application/json') + rv_products = client.get( + f"/api/v1/orgs/{org_id}/products", headers=staff_view_account_headers, content_type="application/json" + ) list_products = json.loads(rv_products.data) - mhr_product = next(prod for prod in list_products - if prod.get('code') == child_code) + mhr_product = next(prod for prod in list_products if prod.get("code") == child_code) - parent_mhr_product = next(prod for prod in list_products if prod.get('code') == parent_code) + parent_mhr_product = next(prod for prod in list_products if prod.get("code") == parent_code) - assert mhr_product.get('subscriptionStatus') == child_status - assert parent_mhr_product.get('subscriptionStatus') == parent_status + assert mhr_product.get("subscriptionStatus") == child_status + assert parent_mhr_product.get("subscriptionStatus") == parent_status -@pytest.mark.parametrize('test_name, org_product_info', [ - ('lawyer_notary', TestOrgProductsInfo.mhr_qs_lawyer_and_notaries), - ('home_manufacturers', TestOrgProductsInfo.mhr_qs_home_manufacturers), - ('home_dealers', TestOrgProductsInfo.mhr_qs_home_dealers), - ('system_no_approval', TestOrgProductsInfo.mhr_qs_home_manufacturers) -]) -def test_add_single_org_product_mhr_qualified_supplier_approve(client, jwt, session, keycloak_mock, - test_name, org_product_info): +@pytest.mark.parametrize( + "test_name, org_product_info", + [ + ("lawyer_notary", TestOrgProductsInfo.mhr_qs_lawyer_and_notaries), + ("home_manufacturers", TestOrgProductsInfo.mhr_qs_home_manufacturers), + ("home_dealers", TestOrgProductsInfo.mhr_qs_home_dealers), + ("system_no_approval", TestOrgProductsInfo.mhr_qs_home_manufacturers), + ], +) +def 
test_add_single_org_product_mhr_qualified_supplier_approve( + client, jwt, session, keycloak_mock, test_name, org_product_info +): """Assert that MHR sub products subscriptions can be created and approved.""" # setup user and org staff_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - - rv = client.post('/api/v1/users', headers=user_headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_premium), - headers=user_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=user_headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_premium), headers=user_headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) - if test_name == 'system_no_approval': + if test_name == "system_no_approval": user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.system_role) - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(org_product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] - - subscription_status = 'ACTIVE' if test_name == 'system_no_approval' else 'PENDING_STAFF_REVIEW' - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', subscription_status, - org_product_info['subscriptions'][0]['productCode'], - subscription_status) - if test_name == 'system_no_approval': + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(org_product_info), + headers=user_headers, + content_type="application/json", + ) + assert 
rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] + + subscription_status = "ACTIVE" if test_name == "system_no_approval" else "PENDING_STAFF_REVIEW" + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + subscription_status, + org_product_info["subscriptions"][0]["productCode"], + subscription_status, + ) + + if test_name == "system_no_approval": return - rv = client.get('/api/v1/tasks', headers=staff_headers, content_type='application/json') - + rv = client.get("/api/v1/tasks", headers=staff_headers, content_type="application/json") item_list = rv.json - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - assert len(item_list['tasks']) == 1 + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + assert len(item_list["tasks"]) == 1 - task = item_list['tasks'][0] - assert task['relationshipStatus'] == 'PENDING_STAFF_REVIEW' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == org_product_info['subscriptions'][0]['externalSourceId'] + task = item_list["tasks"][0] + assert task["relationshipStatus"] == "PENDING_STAFF_REVIEW" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == org_product_info["subscriptions"][0]["externalSourceId"] - rv = client.put('/api/v1/tasks/{}'.format(task['id']), - data=json.dumps({'relationshipStatus': 'ACTIVE'}), - headers=staff_headers, content_type='application/json') + # Approve task + rv = client.put( + "/api/v1/tasks/{}".format(task["id"]), + data=json.dumps({"relationshipStatus": "ACTIVE"}), + headers=staff_headers, + content_type="application/json", + ) task = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert 
task['relationshipStatus'] == 'ACTIVE' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == org_product_info['subscriptions'][0]['externalSourceId'] - - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'ACTIVE', - org_product_info['subscriptions'][0]['productCode'], 'ACTIVE') - - -@pytest.mark.parametrize('org_product_info', [ - TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, - TestOrgProductsInfo.mhr_qs_home_manufacturers, - TestOrgProductsInfo.mhr_qs_home_dealers -]) + assert rv.status_code == HTTPStatus.OK + assert task["relationshipStatus"] == "ACTIVE" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == org_product_info["subscriptions"][0]["externalSourceId"] + + # MHR parent and sub product should be active + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "ACTIVE", + org_product_info["subscriptions"][0]["productCode"], + "ACTIVE", + ) + + +@pytest.mark.parametrize( + "org_product_info", + [ + TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, + TestOrgProductsInfo.mhr_qs_home_manufacturers, + TestOrgProductsInfo.mhr_qs_home_dealers, + ], +) def test_add_single_org_product_mhr_qualified_supplier_reject(client, jwt, session, keycloak_mock, org_product_info): """Assert that MHR sub products subscriptions can be created and rejected with no pre-existing subscriptions.""" # setup user and org staff_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=user_headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_premium), - headers=user_headers, content_type='application/json') - assert rv.status_code == 
http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=user_headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_premium), headers=user_headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) # Create product subscription - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(org_product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(org_product_info), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] # Fetch org products and validate subscription status - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'PENDING_STAFF_REVIEW', - org_product_info['subscriptions'][0]['productCode'], - 'PENDING_STAFF_REVIEW') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "PENDING_STAFF_REVIEW", + org_product_info["subscriptions"][0]["productCode"], + "PENDING_STAFF_REVIEW", + ) # Should show up as a review task for staff - rv = client.get('/api/v1/tasks', headers=staff_headers, content_type='application/json') + rv = client.get("/api/v1/tasks", headers=staff_headers, content_type="application/json") item_list = rv.json - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - assert len(item_list['tasks']) == 1 + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + assert 
len(item_list["tasks"]) == 1 - task = item_list['tasks'][0] - assert task['relationshipStatus'] == 'PENDING_STAFF_REVIEW' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == org_product_info['subscriptions'][0]['externalSourceId'] + task = item_list["tasks"][0] + assert task["relationshipStatus"] == "PENDING_STAFF_REVIEW" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == org_product_info["subscriptions"][0]["externalSourceId"] # Reject task - rv = client.put('/api/v1/tasks/{}'.format(task['id']), - data=json.dumps({'relationshipStatus': 'REJECTED'}), - headers=staff_headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(task["id"]), + data=json.dumps({"relationshipStatus": "REJECTED"}), + headers=staff_headers, + content_type="application/json", + ) task = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert task['relationshipStatus'] == 'REJECTED' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == org_product_info['subscriptions'][0]['externalSourceId'] + assert rv.status_code == HTTPStatus.OK + assert task["relationshipStatus"] == "REJECTED" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == org_product_info["subscriptions"][0]["externalSourceId"] # MHR parent and sub product should be rejected - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'REJECTED', - org_product_info['subscriptions'][0]['productCode'], 'REJECTED') - - -@pytest.mark.parametrize('org_product_info', [ - TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, - TestOrgProductsInfo.mhr_qs_home_manufacturers, - TestOrgProductsInfo.mhr_qs_home_dealers -]) + 
assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "REJECTED", + org_product_info["subscriptions"][0]["productCode"], + "REJECTED", + ) + + +@pytest.mark.parametrize( + "org_product_info", + [ + TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, + TestOrgProductsInfo.mhr_qs_home_manufacturers, + TestOrgProductsInfo.mhr_qs_home_dealers, + ], +) def test_add_single_org_product_mhr_qualified_supplier_reject2(client, jwt, session, keycloak_mock, org_product_info): """Assert that MHR sub products subscriptions can be created and rejected when a parent product already exists.""" # setup user and org staff_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=user_headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_premium), - headers=user_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=user_headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_premium), headers=user_headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) # Create parent product subscription - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(TestOrgProductsInfo.mhr), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(TestOrgProductsInfo.mhr), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == 
HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] # Fetch org products and validate subscription status - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'ACTIVE', - org_product_info['subscriptions'][0]['productCode'], - 'NOT_SUBSCRIBED') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "ACTIVE", + org_product_info["subscriptions"][0]["productCode"], + "NOT_SUBSCRIBED", + ) # Create sub product subscription - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(org_product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(org_product_info), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] # Fetch org products and validate subscription status - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'ACTIVE', - org_product_info['subscriptions'][0]['productCode'], - 'PENDING_STAFF_REVIEW') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "ACTIVE", + org_product_info["subscriptions"][0]["productCode"], + "PENDING_STAFF_REVIEW", + ) # Should show up as a review task for staff - rv = client.get('/api/v1/tasks', headers=staff_headers, content_type='application/json') + rv = client.get("/api/v1/tasks", headers=staff_headers, content_type="application/json") item_list = rv.json - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - 
assert len(item_list['tasks']) == 1 + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + assert len(item_list["tasks"]) == 1 - task = item_list['tasks'][0] - assert task['relationshipStatus'] == 'PENDING_STAFF_REVIEW' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == org_product_info['subscriptions'][0]['externalSourceId'] + task = item_list["tasks"][0] + assert task["relationshipStatus"] == "PENDING_STAFF_REVIEW" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == org_product_info["subscriptions"][0]["externalSourceId"] # Reject task - rv = client.put('/api/v1/tasks/{}'.format(task['id']), - data=json.dumps({'relationshipStatus': 'REJECTED'}), - headers=staff_headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(task["id"]), + data=json.dumps({"relationshipStatus": "REJECTED"}), + headers=staff_headers, + content_type="application/json", + ) task = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert task['relationshipStatus'] == 'REJECTED' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == org_product_info['subscriptions'][0]['externalSourceId'] + assert rv.status_code == HTTPStatus.OK + assert task["relationshipStatus"] == "REJECTED" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == org_product_info["subscriptions"][0]["externalSourceId"] # MHR parent and sub product should be rejected - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'ACTIVE', - org_product_info['subscriptions'][0]['productCode'], 'REJECTED') + assert_product_parent_and_child_statuses( + client, + jwt, + 
dictionary.get("id"), + "MHR", + "ACTIVE", + org_product_info["subscriptions"][0]["productCode"], + "REJECTED", + ) def test_add_org_product_mhr_qualified_supplier_reject_approve(client, jwt, session, keycloak_mock): @@ -348,176 +431,225 @@ def test_add_org_product_mhr_qualified_supplier_reject_approve(client, jwt, sess # setup user and org staff_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=user_headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_premium), - headers=user_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=user_headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_premium), headers=user_headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) qsln_product_info = TestOrgProductsInfo.mhr_qs_lawyer_and_notaries qshm_product_info = TestOrgProductsInfo.mhr_qs_home_manufacturers # Create first sub product subscription - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(qsln_product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(qsln_product_info), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] # Fetch org products and validate subscription status - 
assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'PENDING_STAFF_REVIEW', - qsln_product_info['subscriptions'][0]['productCode'], - 'PENDING_STAFF_REVIEW') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "PENDING_STAFF_REVIEW", + qsln_product_info["subscriptions"][0]["productCode"], + "PENDING_STAFF_REVIEW", + ) # Should show up as a review task for staff - rv = client.get('/api/v1/tasks', headers=staff_headers, content_type='application/json') + rv = client.get("/api/v1/tasks", headers=staff_headers, content_type="application/json") item_list = rv.json - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - assert len(item_list['tasks']) == 1 + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + assert len(item_list["tasks"]) == 1 - task = item_list['tasks'][0] - assert task['relationshipStatus'] == 'PENDING_STAFF_REVIEW' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == qsln_product_info['subscriptions'][0]['externalSourceId'] + task = item_list["tasks"][0] + assert task["relationshipStatus"] == "PENDING_STAFF_REVIEW" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == qsln_product_info["subscriptions"][0]["externalSourceId"] # Reject task - rv = client.put('/api/v1/tasks/{}'.format(task['id']), - data=json.dumps({'relationshipStatus': 'REJECTED'}), - headers=staff_headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(task["id"]), + data=json.dumps({"relationshipStatus": "REJECTED"}), + headers=staff_headers, + content_type="application/json", + ) task = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert task['relationshipStatus'] == 'REJECTED' - 
assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == qsln_product_info['subscriptions'][0]['externalSourceId'] + assert rv.status_code == HTTPStatus.OK + assert task["relationshipStatus"] == "REJECTED" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == qsln_product_info["subscriptions"][0]["externalSourceId"] # MHR parent and sub product should be rejected - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'REJECTED', - qsln_product_info['subscriptions'][0]['productCode'], 'REJECTED') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "REJECTED", + qsln_product_info["subscriptions"][0]["productCode"], + "REJECTED", + ) # Create second sub product subscription - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(qshm_product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(qshm_product_info), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] # Fetch org products and validate subscription status - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'PENDING_STAFF_REVIEW', - qshm_product_info['subscriptions'][0]['productCode'], - 'PENDING_STAFF_REVIEW') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "PENDING_STAFF_REVIEW", + 
qshm_product_info["subscriptions"][0]["productCode"], + "PENDING_STAFF_REVIEW", + ) # Should show up as a review task for staff - rv = client.get('/api/v1/tasks', headers=staff_headers, content_type='application/json') + rv = client.get("/api/v1/tasks", headers=staff_headers, content_type="application/json") item_list = rv.json - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - assert len(item_list['tasks']) == 2 + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + assert len(item_list["tasks"]) == 2 - task = item_list['tasks'][1] - assert task['relationshipStatus'] == 'PENDING_STAFF_REVIEW' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == qshm_product_info['subscriptions'][0]['externalSourceId'] + task = item_list["tasks"][1] + assert task["relationshipStatus"] == "PENDING_STAFF_REVIEW" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == qshm_product_info["subscriptions"][0]["externalSourceId"] # Approve task - rv = client.put('/api/v1/tasks/{}'.format(task['id']), - data=json.dumps({'relationshipStatus': 'ACTIVE'}), - headers=staff_headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(task["id"]), + data=json.dumps({"relationshipStatus": "ACTIVE"}), + headers=staff_headers, + content_type="application/json", + ) task = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert task['relationshipStatus'] == 'ACTIVE' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == qshm_product_info['subscriptions'][0]['externalSourceId'] + assert rv.status_code == HTTPStatus.OK + assert task["relationshipStatus"] == "ACTIVE" + assert task["relationshipType"] 
== "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == qshm_product_info["subscriptions"][0]["externalSourceId"] # MHR parent and sub product should be approved - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'ACTIVE', - qshm_product_info['subscriptions'][0]['productCode'], 'ACTIVE') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "ACTIVE", + qshm_product_info["subscriptions"][0]["productCode"], + "ACTIVE", + ) def test_org_product_resubmission_invalid(client, jwt, session, keycloak_mock): """Assert that product subscription re-submission returns invalid for unsupported products.""" # setup user and org user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=user_headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_premium), - headers=user_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=user_headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_premium), headers=user_headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) product_info = TestOrgProductsInfo.org_products_vs - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] - - rv_products = client.get(f"/api/v1/orgs/{dictionary.get('id')}/products", headers=user_headers, - content_type='application/json') + rv_products = client.post( + 
f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(product_info), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] + + rv_products = client.get( + f"/api/v1/orgs/{dictionary.get('id')}/products", headers=user_headers, content_type="application/json" + ) list_products = json.loads(rv_products.data) - product = next(prod for prod in list_products if - prod.get('code') == product_info['subscriptions'][0]['productCode']) - assert product.get('subscriptionStatus') == 'PENDING_STAFF_REVIEW' + product = next( + prod for prod in list_products if prod.get("code") == product_info["subscriptions"][0]["productCode"] + ) + assert product.get("subscriptionStatus") == "PENDING_STAFF_REVIEW" # Should return bad request for invalid product for products without can_resubmit flag True - rv_products = client.patch(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(product_info), - headers=user_headers, content_type='application/json') - - assert rv_products.status_code == http_status.HTTP_400_BAD_REQUEST + rv_products = client.patch( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(product_info), + headers=user_headers, + content_type="application/json", + ) + + assert rv_products.status_code == HTTPStatus.BAD_REQUEST error = rv_products.json - assert error['message'] == 'Product is not valid for re-submission.' + assert error["message"] == "Product is not valid for re-submission." 
def test_org_product_resubmission_state_invalid(client, jwt, session, keycloak_mock): """Assert that product subscription re-submission returns invalid state.""" # setup user and org user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=user_headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_premium), - headers=user_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=user_headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_premium), headers=user_headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) product_info = TestOrgProductsInfo.mhr_qs_lawyer_and_notaries - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] - - rv_products = client.get(f"/api/v1/orgs/{dictionary.get('id')}/products", headers=user_headers, - content_type='application/json') + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(product_info), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] + + rv_products = client.get( + f"/api/v1/orgs/{dictionary.get('id')}/products", headers=user_headers, content_type="application/json" + ) list_products = json.loads(rv_products.data) - product = next(prod for prod in list_products if - prod.get('code') == 
product_info['subscriptions'][0]['productCode']) - assert product.get('subscriptionStatus') == 'PENDING_STAFF_REVIEW' + product = next( + prod for prod in list_products if prod.get("code") == product_info["subscriptions"][0]["productCode"] + ) + assert product.get("subscriptionStatus") == "PENDING_STAFF_REVIEW" # Should return bad request for invalid product for products not in REJECTED state - rv_products = client.patch(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(product_info), - headers=user_headers, content_type='application/json') - - assert rv_products.status_code == http_status.HTTP_400_BAD_REQUEST + rv_products = client.patch( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(product_info), + headers=user_headers, + content_type="application/json", + ) + + assert rv_products.status_code == HTTPStatus.BAD_REQUEST error = rv_products.json - assert error['message'] == 'Product is not in a valid state for re-submission.' + assert error["message"] == "Product is not in a valid state for re-submission." 
def test_org_product_resubmission(client, jwt, session, keycloak_mock): @@ -525,102 +657,167 @@ def test_org_product_resubmission(client, jwt, session, keycloak_mock): # setup user and org staff_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=user_headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_premium), - headers=user_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=user_headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_premium), headers=user_headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.CREATED dictionary = json.loads(rv.data) qsln_product_info = TestOrgProductsInfo.mhr_qs_lawyer_and_notaries # Create first sub product subscription - rv_products = client.post(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(qsln_product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(qsln_product_info), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] # Fetch org products and validate subscription status - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'PENDING_STAFF_REVIEW', - qsln_product_info['subscriptions'][0]['productCode'], - 'PENDING_STAFF_REVIEW') + assert_product_parent_and_child_statuses( + 
client, + jwt, + dictionary.get("id"), + "MHR", + "PENDING_STAFF_REVIEW", + qsln_product_info["subscriptions"][0]["productCode"], + "PENDING_STAFF_REVIEW", + ) # Should show up as a review task for staff - rv = client.get('/api/v1/tasks', headers=staff_headers, content_type='application/json') + rv = client.get("/api/v1/tasks", headers=staff_headers, content_type="application/json") item_list = rv.json - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - assert len(item_list['tasks']) == 1 + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + assert len(item_list["tasks"]) == 1 - task = item_list['tasks'][0] - assert task['relationshipStatus'] == 'PENDING_STAFF_REVIEW' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == qsln_product_info['subscriptions'][0]['externalSourceId'] + task = item_list["tasks"][0] + assert task["relationshipStatus"] == "PENDING_STAFF_REVIEW" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == qsln_product_info["subscriptions"][0]["externalSourceId"] # Reject task - rv = client.put('/api/v1/tasks/{}'.format(task['id']), - data=json.dumps({'relationshipStatus': 'REJECTED'}), - headers=staff_headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(task["id"]), + data=json.dumps({"relationshipStatus": "REJECTED"}), + headers=staff_headers, + content_type="application/json", + ) task = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert task['relationshipStatus'] == 'REJECTED' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == qsln_product_info['subscriptions'][0]['externalSourceId'] + assert rv.status_code == HTTPStatus.OK + 
assert task["relationshipStatus"] == "REJECTED" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == qsln_product_info["subscriptions"][0]["externalSourceId"] # MHR parent and sub product should be rejected - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'REJECTED', - qsln_product_info['subscriptions'][0]['productCode'], 'REJECTED') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "REJECTED", + qsln_product_info["subscriptions"][0]["productCode"], + "REJECTED", + ) # Resubmit sub product subscription - rv_products = client.patch(f"/api/v1/orgs/{dictionary.get('id')}/products", - data=json.dumps(qsln_product_info), - headers=user_headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.patch( + f"/api/v1/orgs/{dictionary.get('id')}/products", + data=json.dumps(qsln_product_info), + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.OK + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] # Fetch org products and validate subscription status - assert_product_parent_and_child_statuses(client, jwt, - dictionary.get('id'), - 'MHR', 'PENDING_STAFF_REVIEW', - qsln_product_info['subscriptions'][0]['productCode'], - 'PENDING_STAFF_REVIEW') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "PENDING_STAFF_REVIEW", + qsln_product_info["subscriptions"][0]["productCode"], + "PENDING_STAFF_REVIEW", + ) # Should show up as a review task for staff - rv = client.get('/api/v1/tasks', headers=staff_headers, content_type='application/json') + rv = client.get("/api/v1/tasks", headers=staff_headers, 
content_type="application/json") item_list = rv.json - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - assert len(item_list['tasks']) == 1 + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + assert len(item_list["tasks"]) == 1 - task = item_list['tasks'][0] - assert task['relationshipStatus'] == 'PENDING_STAFF_REVIEW' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == qsln_product_info['subscriptions'][0]['externalSourceId'] + task = item_list["tasks"][0] + assert task["relationshipStatus"] == "PENDING_STAFF_REVIEW" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == qsln_product_info["subscriptions"][0]["externalSourceId"] # Approve task - rv = client.put('/api/v1/tasks/{}'.format(task['id']), - data=json.dumps({'relationshipStatus': 'ACTIVE'}), - headers=staff_headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(task["id"]), + data=json.dumps({"relationshipStatus": "ACTIVE"}), + headers=staff_headers, + content_type="application/json", + ) task = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert task['relationshipStatus'] == 'ACTIVE' - assert task['relationshipType'] == 'PRODUCT' - assert task['action'] == 'QUALIFIED_SUPPLIER_REVIEW' - assert task['externalSourceId'] == qsln_product_info['subscriptions'][0]['externalSourceId'] + assert rv.status_code == HTTPStatus.OK + assert task["relationshipStatus"] == "ACTIVE" + assert task["relationshipType"] == "PRODUCT" + assert task["action"] == "QUALIFIED_SUPPLIER_REVIEW" + assert task["externalSourceId"] == qsln_product_info["subscriptions"][0]["externalSourceId"] # MHR parent and sub product should be approved - assert_product_parent_and_child_statuses(client, jwt, - 
dictionary.get('id'), - 'MHR', 'ACTIVE', - qsln_product_info['subscriptions'][0]['productCode'], 'ACTIVE') + assert_product_parent_and_child_statuses( + client, + jwt, + dictionary.get("id"), + "MHR", + "ACTIVE", + qsln_product_info["subscriptions"][0]["productCode"], + "ACTIVE", + ) + + +def test_get_org_products_validation_error(client, jwt, session, keycloak_mock): + """Assert that MHR sub products subscriptions can be created and approved.""" + # setup user and org + user_headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) + + rv_products = client.get( + "/api/v1/orgs/None/products", + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.BAD_REQUEST + + rv_products = client.get( + "/api/v1/orgs/A1234/products", + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.BAD_REQUEST + + rv_products = client.get( + "/api/v1/orgs/-1/products", + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.BAD_REQUEST + + rv_products = client.get( + "/api/v1/orgs//products", + headers=user_headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.NOT_FOUND diff --git a/auth-api/tests/unit/api/test_permissions.py b/auth-api/tests/unit/api/test_permissions.py index 613d79792f..8811008267 100644 --- a/auth-api/tests/unit/api/test_permissions.py +++ b/auth-api/tests/unit/api/test_permissions.py @@ -17,8 +17,8 @@ Test-Suite to ensure that the /permissions endpoint is working as expected. 
""" import json +from http import HTTPStatus -from auth_api import status as http_status from tests.utilities.factory_scenarios import TestJwtClaims from tests.utilities.factory_utils import factory_auth_header @@ -26,18 +26,17 @@ def test_permissions_returns_200(client, jwt, session): # pylint:disable=unused-argument """Assert get permissions endpoint returns 200.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/permissions/active/admin?case=upper', headers=headers, content_type='application/json') + rv = client.get("/api/v1/permissions/active/admin?case=upper", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - present = 'CHANGE_ORG_NAME' in dictionary + present = "CHANGE_ORG_NAME" in dictionary assert present is True """Assert get permissions endpoint returns 200.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/permissions/foo/bar', headers=headers, content_type='application' - '/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/permissions/foo/bar", headers=headers, content_type="application" "/json") + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) assert len(dictionary) == 0 @@ -45,9 +44,9 @@ def test_permissions_returns_200(client, jwt, session): # pylint:disable=unused def test_returns_empty_string_permissions(client, jwt, session): # pylint:disable=unused-argument """Assert get permissions endpoint returns 200.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/permissions/active/admin?case=upper', headers=headers, content_type='application/json') + rv = client.get("/api/v1/permissions/active/admin?case=upper", headers=headers, content_type="application/json") - assert rv.status_code == 
http_status.HTTP_200_OK + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - present = 'VIEW_USER_LOGINSOURCE' in dictionary + present = "VIEW_USER_LOGINSOURCE" in dictionary assert present is True diff --git a/auth-api/tests/unit/api/test_product.py b/auth-api/tests/unit/api/test_product.py index f2ac2fcd82..3399654c38 100644 --- a/auth-api/tests/unit/api/test_product.py +++ b/auth-api/tests/unit/api/test_product.py @@ -23,16 +23,16 @@ def test_get_all_products(client, session): # pylint:disable=unused-argument """Assert that an org can be retrieved via GET.""" - rv = client.get('/api/v1/products') + rv = client.get("/api/v1/products") item_list = json.loads(rv.data) - assert schema_utils.validate(item_list, 'products')[0] + assert schema_utils.validate(item_list, "products")[0] # assert the structure is correct by checking for name, description properties in each element - mhr_sub_prods = ['MHR_QSLN', 'MHR_QSHM', 'MHR_QSHD'] + mhr_sub_prods = ["MHR_QSLN", "MHR_QSHM", "MHR_QSHD"] for item in item_list: - assert item['code'] and item['description'] - if item['code'] in mhr_sub_prods: - assert item['parentCode'] == 'MHR' - assert item['keycloak_group'] + assert item["code"] and item["description"] + if item["code"] in mhr_sub_prods: + assert item["parentCode"] == "MHR" + assert item["keycloak_group"] else: - assert not item.get('parentCode') + assert not item.get("parentCode") diff --git a/auth-api/tests/unit/api/test_reset.py b/auth-api/tests/unit/api/test_reset.py deleted file mode 100644 index b481a613a9..0000000000 --- a/auth-api/tests/unit/api/test_reset.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tests to verify the reset API end-point. - -Test-Suite to ensure that the /tester/reset endpoint is working as expected. -""" -import json -from unittest.mock import patch - -from auth_api import status as http_status -from auth_api.exceptions import BusinessException -from auth_api.exceptions.errors import Error -from auth_api.services import ResetTestData as ResetDataService -from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo -from tests.utilities.factory_utils import factory_auth_header - - -def test_reset(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument - """Assert the endpoint can reset the test data.""" - headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.tester_role) - rv = client.post('/test/reset', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT - - -def test_reset_unauthorized(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument - """Assert the endpoint get a unauthorized error if don't have tester role.""" - headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', 
data=json.dumps(TestOrgInfo.org1), - headers=headers, content_type='application/json') - headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/test/reset', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED - - -def test_reset_returns_exception(client, jwt, session): # pylint:disable=unused-argument - """Assert that the code type can not be fetched and with expcetion.""" - with patch.object(ResetDataService, 'reset', side_effect=BusinessException(Error.UNDEFINED_ERROR, None)): - headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.tester_role) - rv = client.post('/test/reset', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST diff --git a/auth-api/tests/unit/api/test_simple_org.py b/auth-api/tests/unit/api/test_simple_org.py index 32edf1def4..a3a183391d 100644 --- a/auth-api/tests/unit/api/test_simple_org.py +++ b/auth-api/tests/unit/api/test_simple_org.py @@ -16,201 +16,212 @@ Test-Suite to ensure that the /orgs endpoint is working as expected. 
""" +from http import HTTPStatus + from faker import Faker -from auth_api import status as http_status from auth_api.models import Org as OrgModel from auth_api.utils.enums import OrgStatus, OrgType from tests.utilities.factory_scenarios import TestJwtClaims from tests.utilities.factory_utils import factory_auth_header - FAKE = Faker() def assert_simple_org(result_dict: dict, org: OrgModel): """Assert simple org result.""" - assert result_dict['id'] == org.id - assert result_dict['name'] == org.name - assert result_dict['branchName'] == org.branch_name + assert result_dict["id"] == org.id + assert result_dict["name"] == org.name + assert result_dict["branchName"] == org.branch_name def test_simple_org_search(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an org can be searched using multiple syntax.""" - org_inactive = OrgModel(name='INACTIVE TST ORG', - branch_name='INACTIVE TST BRANCH NAME', - type_code=OrgType.PREMIUM.value, - status_code=OrgStatus.INACTIVE.value).save() - - org_branch_1 = OrgModel(name='TST ORG NAME 1', - branch_name='TST BRANCH 1', - type_code=OrgType.PREMIUM.value, - status_code=OrgStatus.ACTIVE.value).save() - - org_branch_2 = OrgModel(name='TST ORG NAME 2', - branch_name='TST branch 2', - type_code=OrgType.PREMIUM.value, - status_code=OrgStatus.ACTIVE.value).save() - - org_no_branch_1 = OrgModel(name='TST ORG NO BRANCH Name 1', - type_code=OrgType.PREMIUM.value, - status_code=OrgStatus.ACTIVE.value).save() - - org_no_branch_2 = OrgModel(name='TST ORG NO BRANCH name 2', - type_code=OrgType.PREMIUM.value, - status_code=OrgStatus.ACTIVE.value).save() + org_inactive = OrgModel( + name="INACTIVE TST ORG", + branch_name="INACTIVE TST BRANCH NAME", + type_code=OrgType.PREMIUM.value, + status_code=OrgStatus.INACTIVE.value, + ).save() + + org_branch_1 = OrgModel( + name="TST ORG NAME 1", + branch_name="TST BRANCH 1", + type_code=OrgType.PREMIUM.value, + status_code=OrgStatus.ACTIVE.value, + ).save() + + 
org_branch_2 = OrgModel( + name="TST ORG NAME 2", + branch_name="TST branch 2", + type_code=OrgType.PREMIUM.value, + status_code=OrgStatus.ACTIVE.value, + ).save() + + org_no_branch_1 = OrgModel( + name="TST ORG NO BRANCH Name 1", type_code=OrgType.PREMIUM.value, status_code=OrgStatus.ACTIVE.value + ).save() + + org_no_branch_2 = OrgModel( + name="TST ORG NO BRANCH name 2", type_code=OrgType.PREMIUM.value, status_code=OrgStatus.ACTIVE.value + ).save() headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.manage_eft_role) - rv = client.get(f'/api/v1/orgs/simple?statuses={OrgStatus.INACTIVE.value}', - headers=headers, content_type='application/json') + rv = client.get( + f"/api/v1/orgs/simple?statuses={OrgStatus.INACTIVE.value}", headers=headers, content_type="application/json" + ) result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 1 - assert result['total'] == 1 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_inactive) - - rv = client.get(f'/api/v1/orgs/simple?statuses={OrgStatus.ACTIVE.value}&excludeStatuses=true', - headers=headers, content_type='application/json') + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + assert result["page"] == 1 + assert result["total"] == 1 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_inactive) + + rv = client.get( + f"/api/v1/orgs/simple?statuses={OrgStatus.ACTIVE.value}&excludeStatuses=true", + headers=headers, + content_type="application/json", + ) result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 1 - assert result['total'] == 1 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_inactive) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + 
assert result["page"] == 1 + assert result["total"] == 1 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_inactive) # Assert default search - returns active orgs - rv = client.get('/api/v1/orgs/simple', headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs/simple", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 4 - assert result['page'] == 1 - assert result['total'] == 4 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_branch_1) - assert_simple_org(result['items'][1], org_branch_2) - assert_simple_org(result['items'][2], org_no_branch_1) - assert_simple_org(result['items'][3], org_no_branch_2) - - rv = client.get(f'/api/v1/orgs/simple?id={org_no_branch_1.id}', headers=headers, content_type='application/json') + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 4 + assert result["page"] == 1 + assert result["total"] == 4 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_branch_1) + assert_simple_org(result["items"][1], org_branch_2) + assert_simple_org(result["items"][2], org_no_branch_1) + assert_simple_org(result["items"][3], org_no_branch_2) + + rv = client.get(f"/api/v1/orgs/simple?id={org_no_branch_1.id}", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 1 - assert result['total'] == 1 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_no_branch_1) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + assert result["page"] == 1 + assert result["total"] == 1 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_no_branch_1) - rv = 
client.get(f'/api/v1/orgs/simple?id={org_no_branch_1.id}', headers=headers, content_type='application/json') + rv = client.get(f"/api/v1/orgs/simple?id={org_no_branch_1.id}", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 1 - assert result['total'] == 1 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_no_branch_1) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + assert result["page"] == 1 + assert result["total"] == 1 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_no_branch_1) - rv = client.get('/api/v1/orgs/simple?name=Name 2', headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs/simple?name=Name 2", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 2 - assert result['page'] == 1 - assert result['total'] == 2 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_branch_2) - assert_simple_org(result['items'][1], org_no_branch_2) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 2 + assert result["page"] == 1 + assert result["total"] == 2 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_branch_2) + assert_simple_org(result["items"][1], org_no_branch_2) - rv = client.get('/api/v1/orgs/simple?branchName=branch', headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs/simple?branchName=branch", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 2 - assert result['page'] == 1 - assert result['total'] == 2 - assert 
result['limit'] == 10 - assert_simple_org(result['items'][0], org_branch_1) - assert_simple_org(result['items'][1], org_branch_2) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 2 + assert result["page"] == 1 + assert result["total"] == 2 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_branch_1) + assert_simple_org(result["items"][1], org_branch_2) - rv = client.get('/api/v1/orgs/simple?branchName=ch 1', headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs/simple?branchName=ch 1", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 1 - assert result['total'] == 1 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_branch_1) - - rv = client.get(f'/api/v1/orgs/simple?searchText={org_no_branch_2.id}', headers=headers, - content_type='application/json') + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + assert result["page"] == 1 + assert result["total"] == 1 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_branch_1) + + rv = client.get( + f"/api/v1/orgs/simple?searchText={org_no_branch_2.id}", headers=headers, content_type="application/json" + ) result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 1 - assert result['total'] == 1 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_no_branch_2) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + assert result["page"] == 1 + assert result["total"] == 1 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_no_branch_2) - rv = client.get('/api/v1/orgs/simple?searchText=name 
1', headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs/simple?searchText=name 1", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 2 - assert result['page'] == 1 - assert result['total'] == 2 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_branch_1) - assert_simple_org(result['items'][1], org_no_branch_1) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 2 + assert result["page"] == 1 + assert result["total"] == 2 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_branch_1) + assert_simple_org(result["items"][1], org_no_branch_1) - rv = client.get('/api/v1/orgs/simple?searchText=ch 1', headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs/simple?searchText=ch 1", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 1 - assert result['total'] == 1 - assert result['limit'] == 10 - assert_simple_org(result['items'][0], org_branch_1) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + assert result["page"] == 1 + assert result["total"] == 1 + assert result["limit"] == 10 + assert_simple_org(result["items"][0], org_branch_1) - rv = client.get('/api/v1/orgs/simple?page=1&limit=1', headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs/simple?page=1&limit=1", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 1 - assert result['total'] == 4 - assert result['limit'] == 1 - assert_simple_org(result['items'][0], 
org_branch_1) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + assert result["page"] == 1 + assert result["total"] == 4 + assert result["limit"] == 1 + assert_simple_org(result["items"][0], org_branch_1) - rv = client.get('/api/v1/orgs/simple?page=2&limit=1', headers=headers, content_type='application/json') + rv = client.get("/api/v1/orgs/simple?page=2&limit=1", headers=headers, content_type="application/json") result = rv.json - assert rv.status_code == http_status.HTTP_200_OK - assert result['items'] - assert len(result['items']) == 1 - assert result['page'] == 2 - assert result['total'] == 4 - assert result['limit'] == 1 - assert_simple_org(result['items'][0], org_branch_2) + assert rv.status_code == HTTPStatus.OK + assert result["items"] + assert len(result["items"]) == 1 + assert result["page"] == 2 + assert result["total"] == 4 + assert result["limit"] == 1 + assert_simple_org(result["items"][0], org_branch_2) diff --git a/auth-api/tests/unit/api/test_task.py b/auth-api/tests/unit/api/test_task.py index 591ea12279..8158ebee9e 100644 --- a/auth-api/tests/unit/api/test_task.py +++ b/auth-api/tests/unit/api/test_task.py @@ -15,13 +15,13 @@ Test-Suite to ensure that the /tasks endpoint is working as expected. 
""" +import datetime as dt import json -import mock +from http import HTTPStatus +from unittest import mock -import datetime as dt import pytest -from auth_api import status as http_status from auth_api.models import ProductCode as ProductCodeModel from auth_api.models.dataclass import TaskSearch from auth_api.schemas import utils as schema_utils @@ -29,18 +29,34 @@ from auth_api.services import Org as OrgService from auth_api.services import Task as TaskService from auth_api.utils.enums import ( - AccessType, OrgStatus, ProductSubscriptionStatus, TaskAction, TaskRelationshipStatus, TaskRelationshipType, - TaskStatus, TaskTypePrefix) + AccessType, + OrgStatus, + ProductSubscriptionStatus, + TaskAction, + TaskRelationshipStatus, + TaskRelationshipType, + TaskStatus, + TaskTypePrefix, +) +from tests.conftest import mock_token from tests.utilities.factory_scenarios import ( - TestAffidavit, TestJwtClaims, TestOrgInfo, TestOrgProductsInfo, TestUserInfo) + TestAffidavit, + TestJwtClaims, + TestOrgInfo, + TestOrgProductsInfo, + TestUserInfo, +) from tests.utilities.factory_utils import ( - factory_auth_header, factory_task_model, factory_task_service, factory_user_model, factory_user_model_with_contact, - patch_token_info) -from tests.conftest import mock_token - + factory_auth_header, + factory_task_model, + factory_task_service, + factory_user_model, + factory_user_model_with_contact, + patch_token_info, +) current_dt = dt.datetime.now() -current_date_str = current_dt.strftime('%Y-%m-%d') +current_date_str = current_dt.strftime("%Y-%m-%d") def test_fetch_tasks(client, jwt, session): # pylint:disable=unused-argument @@ -49,27 +65,30 @@ def test_fetch_tasks(client, jwt, session): # pylint:disable=unused-argument factory_task_service(user.id) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get('/api/v1/tasks', headers=headers, content_type='application/json') + rv = client.get("/api/v1/tasks", headers=headers, 
content_type="application/json") item_list = rv.json - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK def test_fetch_tasks_no_content(client, jwt, session): # pylint:disable=unused-argument """Assert that the none can be fetched.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get('/api/v1/tasks', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - - -@pytest.mark.parametrize('test_name, endpoint', [ - ('status', 'status=OPEN'), - ('relationshipStatus', 'relationshipStatus=PENDING_STAFF_REVIEW'), - ('dateSubmitted', f'startDate=2022-10-1&endDate={current_date_str}'), - ('type', 'type=New Account'), - ('name', 'name=foo'), - ('modifiedBy', 'modifiedBy=User'), -]) + rv = client.get("/api/v1/tasks", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + + +@pytest.mark.parametrize( + "test_name, endpoint", + [ + ("status", "status=OPEN"), + ("relationshipStatus", "relationshipStatus=PENDING_STAFF_REVIEW"), + ("dateSubmitted", f"startDate=2022-10-1&endDate={current_date_str}"), + ("type", "type=New Account"), + ("name", "name=foo"), + ("modifiedBy", "modifiedBy=User"), + ], +) def test_fetch_tasks_with_params(test_name, client, jwt, endpoint, session): # pylint:disable=unused-argument """Assert that the tasks can be fetched.""" user = factory_user_model() @@ -77,34 +96,35 @@ def test_fetch_tasks_with_params(test_name, client, jwt, endpoint, session): # factory_task_model(user_id=user.id, modified_by_id=user.id, date_submitted=date_submitted) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get(f'/api/v1/tasks?{endpoint}', - headers=headers, content_type='application/json') + rv = client.get(f"/api/v1/tasks?{endpoint}", headers=headers, 
content_type="application/json") item_list = rv.json - assert item_list['tasks'] - assert len(item_list['tasks']) > 0 - assert item_list['tasks'][0][test_name] - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK - - -@pytest.mark.parametrize('test_name, endpoint', [ - ('with-2-params', 'status=OPEN&relationshipStatus=PENDING_STAFF_REVIEW'), - ('with-many-params', 'status=OPEN&relationshipStatus=PENDING_STAFF_REVIEW&page=1&limit=10') -]) + assert item_list["tasks"] + assert len(item_list["tasks"]) > 0 + assert item_list["tasks"][0][test_name] + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK + + +@pytest.mark.parametrize( + "test_name, endpoint", + [ + ("with-2-params", "status=OPEN&relationshipStatus=PENDING_STAFF_REVIEW"), + ("with-many-params", "status=OPEN&relationshipStatus=PENDING_STAFF_REVIEW&page=1&limit=10"), + ], +) def test_fetch_tasks_with_many_params(test_name, client, jwt, endpoint, session): """Assert that the tasks can be fetched.""" user = factory_user_model() factory_task_service(user.id) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get(f'/api/v1/tasks?{endpoint}', - headers=headers, content_type='application/json') + rv = client.get(f"/api/v1/tasks?{endpoint}", headers=headers, content_type="application/json") item_list = rv.json - assert item_list['tasks'] - assert len(item_list['tasks']) > 0 - assert item_list['tasks'][0]['relationshipStatus'] - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK + assert item_list["tasks"] + assert len(item_list["tasks"]) > 0 + assert item_list["tasks"][0]["relationshipStatus"] + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK def test_fetch_tasks_end_of_day(client, jwt, session): @@ -120,18 +140,19 @@ def test_fetch_tasks_end_of_day(client, 
jwt, session): factory_task_model(user_id=user_2.id, modified_by_id=user_2.id, date_submitted=date_submitted_2) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get('/api/v1/tasks?startDate=2022-7-10&endDate=2022-7-10', - headers=headers, content_type='application/json') + rv = client.get( + "/api/v1/tasks?startDate=2022-7-10&endDate=2022-7-10", headers=headers, content_type="application/json" + ) item_list = rv.json - assert item_list['tasks'] - assert len(item_list['tasks']) > 0 and len(item_list['tasks']) <= 1 - assert item_list['tasks'][0]['dateSubmitted'] == '2022-07-10T15:59:59+00:00' - assert schema_utils.validate(item_list, 'paged_response')[0] - assert rv.status_code == http_status.HTTP_200_OK + assert item_list["tasks"] + assert len(item_list["tasks"]) > 0 and len(item_list["tasks"]) <= 1 + assert item_list["tasks"][0]["dateSubmitted"] == "2022-07-10T15:59:59+00:00" + assert schema_utils.validate(item_list, "paged_response")[0] + assert rv.status_code == HTTPStatus.OK -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_put_task_org(client, jwt, session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that the task can be updated.""" # 1. Create User @@ -139,10 +160,10 @@ def test_put_task_org(client, jwt, session, keycloak_mock, monkeypatch): # pyli # 3. Create affidavit # 4. Create Org # 5. 
Update the created task and the relationship - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', lambda: TestJwtClaims.public_bceid_user) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", lambda: TestJwtClaims.public_bceid_user) user_with_token = TestUserInfo.user_staff_admin - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = factory_user_model_with_contact(user_with_token) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() @@ -150,46 +171,44 @@ def test_put_task_org(client, jwt, session, keycloak_mock, monkeypatch): # pyli org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value - org_id = org_dict['id'] + assert org_dict["status_code"] == OrgStatus.PENDING_STAFF_REVIEW.value + org_id = org_dict["id"] - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 - ) + task_search = TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) tasks = TaskService.fetch_tasks(task_search) - fetched_tasks = tasks['tasks'] + fetched_tasks = tasks["tasks"] fetched_task = fetched_tasks[0] task_type_new_account = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value - assert fetched_task['type'] == task_type_new_account + assert fetched_task["type"] == task_type_new_account update_task_payload = { - 'status': TaskStatus.COMPLETED.value, - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value + "status": TaskStatus.COMPLETED.value, + "relationshipStatus": TaskRelationshipStatus.ACTIVE.value, } headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = 
client.put('/api/v1/tasks/{}'.format(fetched_task['id']), - data=json.dumps(update_task_payload), - headers=headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(fetched_task["id"]), + data=json.dumps(update_task_payload), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert rv.status_code == http_status.HTTP_200_OK - assert dictionary['status'] == TaskStatus.COMPLETED.value - assert dictionary['relationshipStatus'] == TaskRelationshipStatus.ACTIVE.value + assert rv.status_code == HTTPStatus.OK + assert dictionary["status"] == TaskStatus.COMPLETED.value + assert dictionary["relationshipStatus"] == TaskRelationshipStatus.ACTIVE.value headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/orgs/{}'.format(org_id), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert dictionary['id'] == org_id - assert rv.json.get('orgStatus') == OrgStatus.ACTIVE.value + assert dictionary["id"] == org_id + assert rv.json.get("orgStatus") == OrgStatus.ACTIVE.value -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_put_task_org_on_hold(client, jwt, session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that the task can be updated.""" # 1. Create User @@ -197,10 +216,10 @@ def test_put_task_org_on_hold(client, jwt, session, keycloak_mock, monkeypatch): # 3. Create affidavit # 4. Create Org # 5. 
Update the created task and the relationship - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', lambda: TestJwtClaims.public_bceid_user) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", lambda: TestJwtClaims.public_bceid_user) user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = factory_user_model_with_contact(user_with_token) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() @@ -208,48 +227,46 @@ def test_put_task_org_on_hold(client, jwt, session, keycloak_mock, monkeypatch): org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value - org_id = org_dict['id'] + assert org_dict["status_code"] == OrgStatus.PENDING_STAFF_REVIEW.value + org_id = org_dict["id"] - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 - ) + task_search = TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) tasks = TaskService.fetch_tasks(task_search) - fetched_tasks = tasks['tasks'] + fetched_tasks = tasks["tasks"] fetched_task = fetched_tasks[0] task_type_new_account = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value - assert fetched_task['type'] == task_type_new_account + assert fetched_task["type"] == task_type_new_account update_task_payload = { - 'status': TaskStatus.HOLD.value, - 'relationshipStatus': TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, - 'remarks': ['AFFIDAVIT SEAL MISSING'] + "status": TaskStatus.HOLD.value, + "relationshipStatus": TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + "remarks": ["AFFIDAVIT SEAL MISSING"], } headers = 
factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.put('/api/v1/tasks/{}'.format(fetched_task['id']), - data=json.dumps(update_task_payload), - headers=headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(fetched_task["id"]), + data=json.dumps(update_task_payload), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert rv.status_code == http_status.HTTP_200_OK - assert dictionary['status'] == TaskStatus.HOLD.value - assert dictionary['relationshipStatus'] == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value + assert rv.status_code == HTTPStatus.OK + assert dictionary["status"] == TaskStatus.HOLD.value + assert dictionary["relationshipStatus"] == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/orgs/{}'.format(org_id), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get("/api/v1/orgs/{}".format(org_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK dictionary = json.loads(rv.data) - assert dictionary['id'] == org_id - assert rv.json.get('orgStatus') == OrgStatus.PENDING_STAFF_REVIEW.value + assert dictionary["id"] == org_id + assert rv.json.get("orgStatus") == OrgStatus.PENDING_STAFF_REVIEW.value -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_put_task_product(client, jwt, session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that the task can be updated.""" # 1. 
Create User @@ -259,20 +276,19 @@ def test_put_task_product(client, jwt, session, keycloak_mock, monkeypatch): # # Post user, org and product subscription headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_role) user_with_token = TestUserInfo.user_staff_admin - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = factory_user_model_with_contact(user_with_token) - patch_token_info({ - 'sub': str(user_with_token['keycloak_guid']), - 'idp_userid': str(user_with_token['idp_userid']), - 'username': 'public_user', - 'realm_access': { - 'roles': [ - 'edit' - ] - } - }, monkeypatch) + patch_token_info( + { + "sub": str(user_with_token["keycloak_guid"]), + "idp_userid": str(user_with_token["idp_userid"]), + "username": "public_user", + "realm_access": {"roles": ["edit"]}, + }, + monkeypatch, + ) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() AffidavitService.create_affidavit(affidavit_info=affidavit_info) @@ -282,54 +298,58 @@ def test_put_task_product(client, jwt, session, keycloak_mock, monkeypatch): # org_dict = org.as_dict() product_which_doesnt_need_approval = TestOrgProductsInfo.org_products1 - rv_products = client.post(f"/api/v1/orgs/{org_dict.get('id')}/products", - data=json.dumps(product_which_doesnt_need_approval), - headers=headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] - - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 + rv_products = client.post( + f"/api/v1/orgs/{org_dict.get('id')}/products", + data=json.dumps(product_which_doesnt_need_approval), + headers=headers, + 
content_type="application/json", ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] + + task_search = TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) tasks = TaskService.fetch_tasks(task_search) - assert len(tasks['tasks']) == 1 + assert len(tasks["tasks"]) == 1 product_which_needs_approval = TestOrgProductsInfo.org_products_vs - rv_products = client.post(f"/api/v1/orgs/{org_dict.get('id')}/products", - data=json.dumps(product_which_needs_approval), - headers=headers, content_type='application/json') - assert rv_products.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv_products.json, 'org_product_subscriptions_response')[0] + rv_products = client.post( + f"/api/v1/orgs/{org_dict.get('id')}/products", + data=json.dumps(product_which_needs_approval), + headers=headers, + content_type="application/json", + ) + assert rv_products.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv_products.json, "org_product_subscriptions_response")[0] tasks = TaskService.fetch_tasks(task_search) - fetched_tasks = tasks['tasks'] + fetched_tasks = tasks["tasks"] fetched_task = fetched_tasks[1] - assert fetched_task['relationship_type'] == TaskRelationshipType.PRODUCT.value + assert fetched_task["relationship_type"] == TaskRelationshipType.PRODUCT.value # Assert task name product: ProductCodeModel = ProductCodeModel.find_by_code( - product_which_needs_approval['subscriptions'][0].get('productCode')) - org_name = org_dict['name'] - assert fetched_task['name'] == org_name - assert fetched_task['type'] == product.description + product_which_needs_approval["subscriptions"][0].get("productCode") + ) + org_name = org_dict["name"] + assert fetched_task["name"] == org_name + assert fetched_task["type"] == product.description # Assert the task can be updated and the product status is changed to active - update_task_payload = { - 
'relationshipStatus': ProductSubscriptionStatus.ACTIVE.value - } + update_task_payload = {"relationshipStatus": ProductSubscriptionStatus.ACTIVE.value} headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.put('/api/v1/tasks/{}'.format(fetched_task['id']), - data=json.dumps(update_task_payload), - headers=headers, content_type='application/json') + rv = client.put( + "/api/v1/tasks/{}".format(fetched_task["id"]), + data=json.dumps(update_task_payload), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert rv.status_code == http_status.HTTP_200_OK - assert dictionary['status'] == TaskStatus.COMPLETED.value - assert dictionary['relationshipStatus'] == TaskRelationshipStatus.ACTIVE.value + assert rv.status_code == HTTPStatus.OK + assert dictionary["status"] == TaskStatus.COMPLETED.value + assert dictionary["relationshipStatus"] == TaskRelationshipStatus.ACTIVE.value def test_fetch_task(client, jwt, session): # pylint:disable=unused-argument @@ -339,20 +359,31 @@ def test_fetch_task(client, jwt, session): # pylint:disable=unused-argument task_id = task._model.id headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get('/api/v1/tasks/{}'.format(task_id), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert rv.json.get('name') == task._model.name - - -@pytest.mark.parametrize('user_token, access_type, expected_task_action', [ - (TestJwtClaims.public_bceid_user, AccessType.EXTRA_PROVINCIAL.value, TaskAction.AFFIDAVIT_REVIEW.value), - (TestJwtClaims.public_bceid_user, AccessType.REGULAR_BCEID.value, TaskAction.AFFIDAVIT_REVIEW.value), - (TestJwtClaims.public_bceid_user, AccessType.GOVN.value, TaskAction.AFFIDAVIT_REVIEW.value), - (TestJwtClaims.public_user_role, AccessType.GOVN.value, TaskAction.ACCOUNT_REVIEW.value), -]) 
-@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_tasks_on_account_creation(client, jwt, session, keycloak_mock, # pylint:disable=unused-argument - monkeypatch, user_token, access_type, expected_task_action): + rv = client.get("/api/v1/tasks/{}".format(task_id), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert rv.json.get("name") == task._model.name + + +@pytest.mark.parametrize( + "user_token, access_type, expected_task_action", + [ + (TestJwtClaims.public_bceid_user, AccessType.EXTRA_PROVINCIAL.value, TaskAction.AFFIDAVIT_REVIEW.value), + (TestJwtClaims.public_bceid_user, AccessType.REGULAR_BCEID.value, TaskAction.AFFIDAVIT_REVIEW.value), + (TestJwtClaims.public_bceid_user, AccessType.GOVN.value, TaskAction.AFFIDAVIT_REVIEW.value), + (TestJwtClaims.public_user_role, AccessType.GOVN.value, TaskAction.ACCOUNT_REVIEW.value), + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_tasks_on_account_creation( + client, + jwt, + session, + keycloak_mock, # pylint:disable=unused-argument + monkeypatch, + user_token, + access_type, + expected_task_action, +): """Assert that tasks are created.""" # 1. Create User # 2. Get document signed link @@ -360,16 +391,16 @@ def test_tasks_on_account_creation(client, jwt, session, keycloak_mock, # pylin # 4. Create Org # 5. 
Assert correct task is created - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', lambda: user_token) - user = factory_user_model_with_contact(user_token, keycloak_guid=user_token['sub']) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", lambda: user_token) + user = factory_user_model_with_contact(user_token, keycloak_guid=user_token["sub"]) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() AffidavitService.create_affidavit(affidavit_info=affidavit_info) org_info = TestOrgInfo.org_with_mailing_address() - org_info['accessType'] = access_type + org_info["accessType"] = access_type OrgService.create_org(org_info, user_id=user.id) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get('/api/v1/tasks', headers=headers, content_type='application/json') - assert rv.json['tasks'][0]['action'] == expected_task_action + rv = client.get("/api/v1/tasks", headers=headers, content_type="application/json") + assert rv.json["tasks"][0]["action"] == expected_task_action diff --git a/auth-api/tests/unit/api/test_user.py b/auth-api/tests/unit/api/test_user.py index d25a5830e4..34b6140220 100644 --- a/auth-api/tests/unit/api/test_user.py +++ b/auth-api/tests/unit/api/test_user.py @@ -18,14 +18,14 @@ """ import copy import json -import pytest import time import uuid -import mock +from http import HTTPStatus +from unittest import mock +import pytest from sqlalchemy import event -from auth_api import status as http_status from auth_api.exceptions.errors import Error from auth_api.models import Affidavit as AffidavitModel from auth_api.models import Affiliation as AffiliationModel @@ -40,15 +40,31 @@ from auth_api.utils.enums import AccessType, AffidavitStatus, IdpHint, ProductCode, Status, UserStatus from auth_api.utils.roles import ADMIN, COORDINATOR, USER, Role from tests import skip_in_pod +from tests.conftest import mock_token from tests.utilities.factory_scenarios import ( - KeycloakScenario, 
TestAffidavit, TestAnonymousMembership, TestContactInfo, TestEntityInfo, TestJwtClaims, - TestOrgInfo, TestOrgTypeInfo, TestUserInfo) + KeycloakScenario, + TestAffidavit, + TestAnonymousMembership, + TestContactInfo, + TestEntityInfo, + TestJwtClaims, + TestOrgInfo, + TestOrgTypeInfo, + TestUserInfo, +) from tests.utilities.factory_utils import ( - factory_affiliation_model, factory_auth_header, factory_contact_model, factory_entity_model, - factory_invitation_anonymous, factory_membership_model, factory_org_model, factory_product_model, - factory_user_model, patch_token_info) + factory_affiliation_model, + factory_auth_header, + factory_contact_model, + factory_entity_model, + factory_invitation_anonymous, + factory_membership_model, + factory_org_model, + factory_product_model, + factory_user_model, + patch_token_info, +) from tests.utilities.sqlalchemy import clear_event_listeners -from tests.conftest import mock_token KEYCLOAK_SERVICE = KeycloakService() @@ -56,12 +72,12 @@ def test_add_user(client, jwt, session): # pylint:disable=unused-argument """Assert that a user can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'user_response')[0] + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "user_response")[0] -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_add_user_staff_org(client, jwt, session, keycloak_mock, monkeypatch): """Assert that adding and removing membership to a staff org occurs.""" # Create a user and org @@ -71,24 +87,24 @@ def 
test_add_user_staff_org(client, jwt, session, keycloak_mock, monkeypatch): clear_event_listeners(OrgModel) patch_token_info(TestJwtClaims.user_test, monkeypatch) OrgService.create_org(TestOrgInfo.staff_org, user_id=user_model.id).as_dict() - event.listen(OrgModel, 'before_update', receive_before_update, raw=True) - event.listen(OrgModel, 'before_insert', receive_before_insert) + event.listen(OrgModel, "before_update", receive_before_update, raw=True) + event.listen(OrgModel, "before_insert", receive_before_insert) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.user_test) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert rv.json.get('id') is not None + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED + assert rv.json.get("id") is not None - staff_memberships = MembershipModel.find_active_staff_org_memberships_for_user(rv.json.get('id')) + staff_memberships = MembershipModel.find_active_staff_org_memberships_for_user(rv.json.get("id")) assert len(staff_memberships) == 1 assert staff_memberships[0].status == Status.ACTIVE.value patch_token_info(TestJwtClaims.get_test_real_user(sub=user_model.keycloak_guid), monkeypatch) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.get_test_real_user(sub=user_model.keycloak_guid)) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED - staff_memberships = MembershipModel.find_active_staff_org_memberships_for_user(rv.json.get('id')) + staff_memberships = MembershipModel.find_active_staff_org_memberships_for_user(rv.json.get("id")) assert len(staff_memberships) == 0 # 0, because our row was set to INACTIVE. 
@@ -107,50 +123,54 @@ def test_delete_bcros_valdiations(client, jwt, session, keycloak_mock, monkeypat membership = [member, admin] UserService.create_user_and_add_membership(membership, org.id) owner_headers = factory_auth_header(jwt=jwt, claims=owner_claims) - member_username = IdpHint.BCROS.value + '/' + member['username'] - admin_username = IdpHint.BCROS.value + '/' + admin['username'] - admin_claims = TestJwtClaims.get_test_real_user(uuid.uuid4(), admin_username, access_ype=AccessType.ANONYMOUS.value, - roles=[Role.ANONYMOUS_USER.value]) + member_username = IdpHint.BCROS.value + "/" + member["username"] + admin_username = IdpHint.BCROS.value + "/" + admin["username"] + admin_claims = TestJwtClaims.get_test_real_user( + uuid.uuid4(), admin_username, access_ype=AccessType.ANONYMOUS.value, roles=[Role.ANONYMOUS_USER.value] + ) admin_headers = factory_auth_header(jwt=jwt, claims=admin_claims) - member_claims = TestJwtClaims.get_test_real_user(uuid.uuid4(), member_username, - access_ype=AccessType.ANONYMOUS.value, - roles=[Role.ANONYMOUS_USER.value]) + member_claims = TestJwtClaims.get_test_real_user( + uuid.uuid4(), member_username, access_ype=AccessType.ANONYMOUS.value, roles=[Role.ANONYMOUS_USER.value] + ) member_headers = factory_auth_header(jwt=jwt, claims=member_claims) # set up JWTS for member and admin patch_token_info(admin_claims, monkeypatch) - client.post('/api/v1/users', headers=admin_headers, content_type='application/json', - data=json.dumps({'isLogin': True})) + client.post( + "/api/v1/users", headers=admin_headers, content_type="application/json", data=json.dumps({"isLogin": True}) + ) patch_token_info(member_claims, monkeypatch) - client.post('/api/v1/users', headers=member_headers, content_type='application/json', - data=json.dumps({'isLogin': True})) + client.post( + "/api/v1/users", headers=member_headers, content_type="application/json", data=json.dumps({"isLogin": True}) + ) patch_token_info(owner_claims, monkeypatch) # delete only owner 
;failure - rv = client.delete(f"/api/v1/users/{admin_user['username']}", headers=owner_headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.delete( + f"/api/v1/users/{admin_user['username']}", headers=owner_headers, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED # admin trying to delete member: Failure patch_token_info(admin_claims, monkeypatch) - rv = client.delete(f'/api/v1/users/{member_username}', headers=admin_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.delete(f"/api/v1/users/{member_username}", headers=admin_headers, content_type="application/json") + assert rv.status_code == HTTPStatus.UNAUTHORIZED # member delete admin: failure patch_token_info(member_claims, monkeypatch) - rv = client.delete(f'/api/v1/users/{admin_username}', headers=member_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.delete(f"/api/v1/users/{admin_username}", headers=member_headers, content_type="application/json") + assert rv.status_code == HTTPStatus.UNAUTHORIZED # a self delete ;should work ;mimics leave team for anonymous user patch_token_info(member_claims, monkeypatch) - rv = client.delete(f'/api/v1/users/{member_username}', headers=member_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete(f"/api/v1/users/{member_username}", headers=member_headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NO_CONTENT patch_token_info(admin_claims, monkeypatch) - rv = client.delete(f'/api/v1/users/{admin_username}', headers=admin_headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete(f"/api/v1/users/{admin_username}", headers=admin_headers, content_type="application/json") + assert 
rv.status_code == HTTPStatus.NO_CONTENT # add one more admin patch_token_info(owner_claims, monkeypatch) @@ -158,9 +178,12 @@ def test_delete_bcros_valdiations(client, jwt, session, keycloak_mock, monkeypat membership = [new_owner] UserService.create_user_and_add_membership(membership, org.id) patch_token_info(owner_claims, monkeypatch) - rv = client.delete(f"/api/v1/users/{IdpHint.BCROS.value + '/' + new_owner['username']}", headers=owner_headers, - content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete( + f"/api/v1/users/{IdpHint.BCROS.value + '/' + new_owner['username']}", + headers=owner_headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NO_CONTENT def test_add_back_a_delete_bcros(client, jwt, session, keycloak_mock, monkeypatch): @@ -171,18 +194,17 @@ def test_add_back_a_delete_bcros(client, jwt, session, keycloak_mock, monkeypatc factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) owner_claims = TestJwtClaims.get_test_real_user(user.keycloak_guid, idp_userid=user.idp_userid) member = TestAnonymousMembership.generate_random_user(USER) - membership = [member, - TestAnonymousMembership.generate_random_user(COORDINATOR)] + membership = [member, TestAnonymousMembership.generate_random_user(COORDINATOR)] patch_token_info(owner_claims, monkeypatch) UserService.create_user_and_add_membership(membership, org.id) headers = factory_auth_header(jwt=jwt, claims=owner_claims) - member_user_id = IdpHint.BCROS.value + '/' + member.get('username') - rv = client.delete(f'/api/v1/users/{member_user_id}', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT - kc_user = KeycloakService.get_user_by_username(member.get('username')) + member_user_id = IdpHint.BCROS.value + "/" + member.get("username") + rv = client.delete(f"/api/v1/users/{member_user_id}", headers=headers, content_type="application/json") + assert 
rv.status_code == HTTPStatus.NO_CONTENT + kc_user = KeycloakService.get_user_by_username(member.get("username")) assert kc_user.enabled is False user_model = UserService.find_by_username(member_user_id) - assert user_model.as_dict().get('user_status') == UserStatus.INACTIVE.value + assert user_model.as_dict().get("user_status") == UserStatus.INACTIVE.value membership = MembershipModel.find_membership_by_userid(user_model.identifier) assert membership.status == Status.INACTIVE.value @@ -201,230 +223,258 @@ def test_reset_password(client, jwt, session, keycloak_mock, monkeypatch): # py patch_token_info(owner_claims, monkeypatch) UserService.create_user_and_add_membership(membership, org.id) owner_headers = factory_auth_header(jwt=jwt, claims=owner_claims) - member_username = IdpHint.BCROS.value + '/' + member['username'] - admin_username = IdpHint.BCROS.value + '/' + admin['username'] - admin_claims = TestJwtClaims.get_test_real_user(uuid.uuid4(), - admin_username, - access_ype=AccessType.ANONYMOUS.value, - roles=[Role.ANONYMOUS_USER.value]) + member_username = IdpHint.BCROS.value + "/" + member["username"] + admin_username = IdpHint.BCROS.value + "/" + admin["username"] + admin_claims = TestJwtClaims.get_test_real_user( + uuid.uuid4(), admin_username, access_ype=AccessType.ANONYMOUS.value, roles=[Role.ANONYMOUS_USER.value] + ) admin_headers = factory_auth_header(jwt=jwt, claims=admin_claims) - member_claims = TestJwtClaims.get_test_real_user(uuid.uuid4(), - member_username, - access_ype=AccessType.ANONYMOUS.value, - roles=[Role.ANONYMOUS_USER.value]) + member_claims = TestJwtClaims.get_test_real_user( + uuid.uuid4(), member_username, access_ype=AccessType.ANONYMOUS.value, roles=[Role.ANONYMOUS_USER.value] + ) member_headers = factory_auth_header(jwt=jwt, claims=member_claims) # set up JWTS for member and admin patch_token_info(admin_claims, monkeypatch) - client.post('/api/v1/users', headers=admin_headers, content_type='application/json', - 
data=json.dumps({'isLogin': True})) + client.post( + "/api/v1/users", headers=admin_headers, content_type="application/json", data=json.dumps({"isLogin": True}) + ) patch_token_info(member_claims, monkeypatch) - client.post('/api/v1/users', headers=member_headers, content_type='application/json', - data=json.dumps({'isLogin': True})) + client.post( + "/api/v1/users", headers=member_headers, content_type="application/json", data=json.dumps({"isLogin": True}) + ) # reset password of admin by owner - input_data = json.dumps({'username': admin_username, 'password': 'Mysecretcode@1234'}) + input_data = json.dumps({"username": admin_username, "password": "Mysecretcode@1234"}) patch_token_info(owner_claims, monkeypatch) - rv = client.patch(f'/api/v1/users/{admin_username}', headers=owner_headers, - data=input_data, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.patch( + f"/api/v1/users/{admin_username}", headers=owner_headers, data=input_data, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.NO_CONTENT # member cant reset password patch_token_info(member_claims, monkeypatch) - rv = client.patch(f'/api/v1/users/{admin_username}', headers=member_headers, - data=input_data, content_type='application/json') - assert rv.status_code == http_status.HTTP_403_FORBIDDEN + rv = client.patch( + f"/api/v1/users/{admin_username}", headers=member_headers, data=input_data, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.FORBIDDEN # admin cant reset password patch_token_info(admin_claims, monkeypatch) - rv = client.patch(f'/api/v1/users/{admin_username}', headers=admin_headers, - data=input_data, content_type='application/json') - assert rv.status_code == http_status.HTTP_403_FORBIDDEN + rv = client.patch( + f"/api/v1/users/{admin_username}", headers=admin_headers, data=input_data, content_type="application/json" + ) + assert rv.status_code == HTTPStatus.FORBIDDEN def 
test_add_user_admin_valid_bcros(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that an anonymous admin can be POSTed.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_admin_dir_search_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - rv = client.post('/api/v1/orgs', data=json.dumps(TestOrgInfo.org_anonymous), - headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + rv = client.post( + "/api/v1/orgs", data=json.dumps(TestOrgInfo.org_anonymous), headers=headers, content_type="application/json" + ) dictionary = json.loads(rv.data) - org_id = dictionary['id'] - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation_anonymous(org_id=org_id)), - headers=headers, content_type='application/json') + org_id = dictionary["id"] + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation_anonymous(org_id=org_id)), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert dictionary.get('token') is not None - assert rv.status_code == http_status.HTTP_201_CREATED - rv = client.post('/api/v1/users/bcros', data=json.dumps(TestUserInfo.user_anonymous_1), - headers={'invitation_token': dictionary.get('token')}, content_type='application/json') + assert dictionary.get("token") is not None + assert rv.status_code == HTTPStatus.CREATED + rv = client.post( + "/api/v1/users/bcros", + data=json.dumps(TestUserInfo.user_anonymous_1), + headers={"invitation_token": dictionary.get("token")}, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert rv.status_code == http_status.HTTP_201_CREATED - assert dictionary['users'][0].get('username') == IdpHint.BCROS.value + '/' + TestUserInfo.user_anonymous_1[ - 'username'] - assert dictionary['users'][0].get('password') is None - assert 
dictionary['users'][0].get('type') == Role.ANONYMOUS_USER.name - assert schema_utils.validate(rv.json, 'anonymous_user_response')[0] + assert rv.status_code == HTTPStatus.CREATED + assert ( + dictionary["users"][0].get("username") == IdpHint.BCROS.value + "/" + TestUserInfo.user_anonymous_1["username"] + ) + assert dictionary["users"][0].get("password") is None + assert dictionary["users"][0].get("type") == Role.ANONYMOUS_USER.name + assert schema_utils.validate(rv.json, "anonymous_user_response")[0] # different error scenarios # check expired invitation - rv = client.post('/api/v1/users/bcros', data=json.dumps(TestUserInfo.user_anonymous_1), - headers={'invitation_token': dictionary.get('token')}, content_type='application/json') + rv = client.post( + "/api/v1/users/bcros", + data=json.dumps(TestUserInfo.user_anonymous_1), + headers={"invitation_token": dictionary.get("token")}, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert dictionary['code'] == 'EXPIRED_INVITATION' - - rv = client.post('/api/v1/invitations', data=json.dumps(factory_invitation_anonymous(org_id=org_id)), - headers=headers, content_type='application/json') + assert dictionary["code"] == "EXPIRED_INVITATION" + + rv = client.post( + "/api/v1/invitations", + data=json.dumps(factory_invitation_anonymous(org_id=org_id)), + headers=headers, + content_type="application/json", + ) dictionary = json.loads(rv.data) # check duplicate user - rv = client.post('/api/v1/users/bcros', data=json.dumps(TestUserInfo.user_anonymous_1), - headers={'invitation_token': dictionary.get('token')}, content_type='application/json') + rv = client.post( + "/api/v1/users/bcros", + data=json.dumps(TestUserInfo.user_anonymous_1), + headers={"invitation_token": dictionary.get("token")}, + content_type="application/json", + ) dictionary = json.loads(rv.data) - assert dictionary['code'] == 409 - assert dictionary['message'] == 'The username is already taken' + assert dictionary["code"] == 409 + 
assert dictionary["message"] == "The username is already taken" def test_add_user_no_token_returns_401(client, session): # pylint:disable=unused-argument """Assert that POSTing a user with no token returns a 401.""" - rv = client.post('/api/v1/users', headers=None, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.post("/api/v1/users", headers=None, content_type="application/json") + assert rv.status_code == HTTPStatus.UNAUTHORIZED @skip_in_pod def test_add_user_invalid_token_returns_401(client, jwt, session): # pylint:disable=unused-argument """Assert that POSTing a user with an invalid token returns a 401.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.invalid) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_update_user(client, jwt, session): # pylint:disable=unused-argument """Assert that a POST to an existing user updates that user.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'user_response')[0] + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "user_response")[0] user = json.loads(rv.data) - assert user['firstname'] is not None + assert user["firstname"] is not None # post token with updated claims headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.updated_test) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - 
assert schema_utils.validate(rv.json, 'user_response')[0] + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "user_response")[0] user = json.loads(rv.data) - assert user['firstname'] is not None + assert user["firstname"] is not None def test_update_user_terms_of_use(client, jwt, session): # pylint:disable=unused-argument """Assert that a PATCH to an existing user updates that user.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED user = json.loads(rv.data) - assert user['firstname'] is not None + assert user["firstname"] is not None # post token with updated claims headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.updated_test) - input_data = json.dumps({'termsversion': '1', 'istermsaccepted': True}) - rv = client.patch('/api/v1/users/@me', headers=headers, - data=input_data, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'user_response')[0] + input_data = json.dumps({"termsversion": "1", "istermsaccepted": True}) + rv = client.patch("/api/v1/users/@me", headers=headers, data=input_data, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "user_response")[0] user = json.loads(rv.data) - assert user['userTerms']['termsOfUseAcceptedVersion'] == '1' + assert user["userTerms"]["termsOfUseAcceptedVersion"] == "1" # version 1 is old version ; so api should return terms of service accepted as false - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code 
== http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'user_response')[0] + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "user_response")[0] user = json.loads(rv.data) - assert user['userTerms']['isTermsOfUseAccepted'] is False + assert user["userTerms"]["isTermsOfUseAccepted"] is False def test_update_user_terms_of_use_invalid_input(client, jwt, session): # pylint:disable=unused-argument """Assert that a PATCH to an existing user updates that user.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED user = json.loads(rv.data) - assert user['firstname'] is not None + assert user["firstname"] is not None # post token with updated claims headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.updated_test) - input_data = json.dumps({'invalid': True}) - rv = client.patch('/api/v1/users/@me', headers=headers, - data=input_data, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + input_data = json.dumps({"invalid": True}) + rv = client.patch("/api/v1/users/@me", headers=headers, data=input_data, content_type="application/json") + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_update_user_terms_of_use_no_jwt(client, jwt, session): # pylint:disable=unused-argument """Assert that a PATCH to an existing user updates that user.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", 
headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED user = json.loads(rv.data) - assert user['firstname'] is not None + assert user["firstname"] is not None # post token with updated claims - input_data = json.dumps({'invalid': True}) - rv = client.patch('/api/v1/users/@me', - data=input_data, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + input_data = json.dumps({"invalid": True}) + rv = client.patch("/api/v1/users/@me", data=input_data, content_type="application/json") + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_staff_get_user(client, jwt, session): # pylint:disable=unused-argument """Assert that a staff user can GET a user by id.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED # GET the test user as a staff user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get('/api/v1/users/{}'.format(TestJwtClaims.public_user_role['preferred_username']), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'user_response')[0] + rv = client.get( + "/api/v1/users/{}".format(TestJwtClaims.public_user_role["preferred_username"]), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "user_response")[0] user = json.loads(rv.data) - assert user['firstname'] is not None + assert user["firstname"] is not None def test_staff_get_user_invalid_id_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that a staff user can GET 
a user by id.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.get('/api/v1/users/{}'.format('SOME_USER'), headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.get("/api/v1/users/{}".format("SOME_USER"), headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NOT_FOUND def test_staff_search_users(client, jwt, session): # pylint:disable=unused-argument """Assert that a staff user can GET a list of users with search parameters.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED # POST a second test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.no_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED # Search on all users as a staff user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.get('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'users_response')[0] + rv = client.get("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "users_response")[0] users = json.loads(rv.data) assert len(users) == 2 # Search on users with a search parameter - rv = 
client.get('/api/v1/users?lastname={}'.format(TestJwtClaims.no_role['lastname']), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'users_response')[0] + rv = client.get( + "/api/v1/users?lastname={}".format(TestJwtClaims.no_role["lastname"]), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "users_response")[0] users = json.loads(rv.data) assert len(users) == 1 @@ -433,25 +483,25 @@ def test_get_user(client, jwt, session): # pylint:disable=unused-argument """Assert that a user can retrieve their own profile.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - rv = client.get('/api/v1/users/@me', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'user_response')[0] + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED + rv = client.get("/api/v1/users/@me", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "user_response")[0] user = json.loads(rv.data) - assert user['firstname'] is not None + assert user["firstname"] is not None def test_get_user_returns_401(client, session): # pylint:disable=unused-argument """Assert that unauthorized access to a user profile returns a 401 error.""" - rv = client.get('/api/v1/users/@me', headers=None, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.get("/api/v1/users/@me", headers=None, content_type="application/json") + assert rv.status_code == 
HTTPStatus.UNAUTHORIZED def test_get_user_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that the endpoint returns 404 when user is not found.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.get('/api/v1/users/@me', headers=headers, content_type='application/json') + rv = client.get("/api/v1/users/@me", headers=headers, content_type="application/json") assert rv.status_code == Error.DATA_NOT_FOUND.status_code @@ -459,169 +509,218 @@ def test_add_contact(client, jwt, session): # pylint:disable=unused-argument """Assert that a contact can be added (POST) to an existing user.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") # POST a contact to test user - rv = client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'contact_response')[0] + rv = client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact1), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "contact_response")[0] contact = json.loads(rv.data) - assert contact['email'] == 'foo@bar.com' + assert contact["email"] == "foo@bar.com" def test_add_contact_valid_email_with_special_characters(client, jwt, session): # pylint:disable=unused-argument """Assert that a contact can be added (POST) to an existing user.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", 
headers=headers, content_type="application/json") # POST a contact to test user - rv = client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.email_valid), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.email_valid), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED contact = json.loads(rv.data) - assert contact['email'] == TestContactInfo.email_valid['email'] + assert contact["email"] == TestContactInfo.email_valid["email"] def test_add_contact_no_token_returns_401(client, session): # pylint:disable=unused-argument """Assert that adding a contact without providing a token returns a 401.""" - rv = client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact1), - headers=None, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact1), + headers=None, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_add_contact_invalid_format_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that adding a contact in an invalid format returns a 400.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") - rv = client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.invalid), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.invalid), + headers=headers, + content_type="application/json", + ) 
+ assert rv.status_code == HTTPStatus.BAD_REQUEST def test_add_contact_duplicate_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that adding a contact for a user who already has a contact returns a 400.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") # POST a contact to test user - rv = client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED - - rv = client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact2), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact1), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED + + rv = client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact2), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_update_contact(client, jwt, session): # pylint:disable=unused-argument, invalid-name """Assert that a contact can be updated (PUT) on an existing user.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") # POST a contact to test user - rv = client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = 
client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact1), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED # PUT a contact on the same user - rv = client.put('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact2), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'contact_response')[0] + rv = client.put( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact2), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "contact_response")[0] contact = json.loads(rv.data) - assert contact['email'] == 'bar@foo.com' + assert contact["email"] == "bar@foo.com" def test_update_contact_no_token_returns_401(client, session): # pylint:disable=unused-argument """Assert that updating a contact without providing a token returns a 401.""" - rv = client.put('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact2), - headers=None, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.put( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact2), + headers=None, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_update_contact_invalid_format_returns_400(client, jwt, session): # pylint:disable=unused-argument """Assert that adding a contact in an invalid format returns a 400.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") - rv = client.put('/api/v1/users/contacts', data=json.dumps(TestContactInfo.invalid), - headers=headers, content_type='application/json') 
- assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.put( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.invalid), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_update_contact_missing_contact_returns_404(client, jwt, session): # pylint:disable=unused-argument """Assert that updating a contact for a non-existent user returns a 404.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") # PUT a contact to test user - rv = client.put('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.put( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact1), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.NOT_FOUND def test_delete_contact(client, jwt, session): # pylint:disable=unused-argument, invalid-name """Assert that a contact can be deleted on an existing user.""" # POST a test user headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") # POST a contact to test user - rv = client.post('/api/v1/users/contacts', data=json.dumps(TestContactInfo.contact1), - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post( + "/api/v1/users/contacts", + data=json.dumps(TestContactInfo.contact1), + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.CREATED # PUT a 
contact on the same user - rv = client.delete('/api/v1/users/contacts', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.delete("/api/v1/users/contacts", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK - rv = client.get('/api/v1/users/contacts', headers=headers, content_type='application/json') + rv = client.get("/api/v1/users/contacts", headers=headers, content_type="application/json") dictionary = json.loads(rv.data) - contacts = dictionary.get('contacts') + contacts = dictionary.get("contacts") assert len(contacts) == 0 def test_delete_contact_no_token_returns_401(client, session): # pylint:disable=unused-argument, invalid-name """Assert that deleting a contact without a token returns a 401.""" - rv = client.delete('/api/v1/users/contacts', headers=None, content_type='application/json') - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.delete("/api/v1/users/contacts", headers=None, content_type="application/json") + assert rv.status_code == HTTPStatus.UNAUTHORIZED def test_delete_contact_no_contact_returns_404(client, jwt, session): # pylint:disable=unused-argument, invalid-name """Assert that deleting a contact that doesn't exist returns a 404.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") - rv = client.delete('/api/v1/users/contacts', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.delete("/api/v1/users/contacts", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NOT_FOUND def test_get_orgs_for_user(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert that retrieving a list of orgs for a 
user functions.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") # Add an org - the current user should be auto-added as an ADMIN - rv = client.post('/api/v1/orgs', headers=headers, data=json.dumps(TestOrgInfo.org1), - content_type='application/json') + rv = client.post( + "/api/v1/orgs", headers=headers, data=json.dumps(TestOrgInfo.org1), content_type="application/json" + ) - rv = client.get('/api/v1/users/orgs', headers=headers) + rv = client.get("/api/v1/users/orgs", headers=headers) - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(rv.json, 'orgs_response')[0] + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(rv.json, "orgs_response")[0] response = json.loads(rv.data) - assert response['orgs'] - assert len(response['orgs']) == 1 - assert response['orgs'][0]['name'] == TestOrgInfo.org1['name'] + assert response["orgs"] + assert len(response["orgs"]) == 1 + assert response["orgs"][0]["name"] == TestOrgInfo.org1["name"] def test_user_authorizations_returns_200(client, jwt, session): # pylint:disable=unused-argument @@ -633,50 +732,50 @@ def test_user_authorizations_returns_200(client, jwt, session): # pylint:disabl factory_affiliation_model(entity.id, org.id) claims = copy.deepcopy(TestJwtClaims.public_user_role.value) - claims['sub'] = str(user.keycloak_guid) + claims["sub"] = str(user.keycloak_guid) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get('/api/v1/users/authorizations', headers=headers, content_type='application/json') + rv = client.get("/api/v1/users/authorizations", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert rv.json.get('authorizations')[0].get('orgMembership') == 'ADMIN' + assert rv.status_code == 
HTTPStatus.OK + assert rv.json.get("authorizations")[0].get("orgMembership") == "ADMIN" # Test with invalid user - claims['sub'] = str(uuid.uuid4()) + claims["sub"] = str(uuid.uuid4()) headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get('/api/v1/users/authorizations', headers=headers, content_type='application/json') + rv = client.get("/api/v1/users/authorizations", headers=headers, content_type="application/json") - assert rv.status_code == http_status.HTTP_200_OK - assert len(rv.json.get('authorizations')) == 0 + assert rv.status_code == HTTPStatus.OK + assert len(rv.json.get("authorizations")) == 0 def test_delete_user_with_no_orgs_returns_204(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Test if the user doesn't have any teams/orgs assert status is 204.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED # post token with updated claims headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.updated_test) - rv = client.delete('/api/v1/users/@me', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete("/api/v1/users/@me", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NO_CONTENT def test_delete_inactive_user_returns_400(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Test if the user doesn't have any teams/orgs assert status is 204.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_201_CREATED + rv = 
client.post("/api/v1/users", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.CREATED # post token with updated claims headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.updated_test) - rv = client.delete('/api/v1/users/@me', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete("/api/v1/users/@me", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NO_CONTENT - rv = client.delete('/api/v1/users/@me', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.delete("/api/v1/users/@me", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.BAD_REQUEST def test_delete_unknown_user_returns_404(client, jwt, session): # pylint:disable=unused-argument @@ -684,30 +783,31 @@ def test_delete_unknown_user_returns_404(client, jwt, session): # pylint:disabl # post token with updated claims headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.updated_test) - rv = client.delete('/api/v1/users/@me', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_404_NOT_FOUND + rv = client.delete("/api/v1/users/@me", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NOT_FOUND -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_user_as_only_admin_returns_400(client, jwt, session, keycloak_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_user_as_only_admin_returns_400( + client, jwt, session, keycloak_mock, monkeypatch, 
environment +): # pylint:disable=unused-argument """Test if the user is the only owner of a team assert status is 400.""" user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model - contact_link.commit() + contact_link.save() claims = copy.deepcopy(TestJwtClaims.public_user_role.value) - claims['sub'] = str(user_model.keycloak_guid) - claims['idp_userid'] = str(user_model.idp_userid) + claims["sub"] = str(user_model.keycloak_guid) + claims["idp_userid"] = str(user_model.idp_userid) patch_token_info(claims, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) org_dictionary = org.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) @@ -716,51 +816,53 @@ def test_delete_user_as_only_admin_returns_400(client, jwt, session, keycloak_mo headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.delete('/api/v1/users/@me', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_400_BAD_REQUEST + rv = client.delete("/api/v1/users/@me", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.BAD_REQUEST -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_user_is_member_returns_204(client, jwt, session, keycloak_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_user_is_member_returns_204( + client, jwt, session, keycloak_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Test if the user is the 
member of a team assert status is 204.""" user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model - contact_link.commit() + contact_link.save() user_model2 = factory_user_model(user_info=TestUserInfo.user2) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model2 - contact_link.commit() + contact_link.save() claims = copy.deepcopy(TestJwtClaims.public_user_role.value) - claims['sub'] = str(user_model2.keycloak_guid) - claims['idp_userid'] = str(user_model2.idp_userid) + claims["sub"] = str(user_model2.keycloak_guid) + claims["idp_userid"] = str(user_model2.idp_userid) patch_token_info(claims, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) org_dictionary = org.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id, environment=environment) affiliation.save() - membership = MembershipModel(org_id=org_id, user_id=user_model2.id, membership_type_code='USER', - membership_type_status=Status.ACTIVE.value) + membership = MembershipModel( + org_id=org_id, user_id=user_model2.id, membership_type_code="USER", membership_type_status=Status.ACTIVE.value + ) membership.save() headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.delete('/api/v1/users/@me', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete("/api/v1/users/@me", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.NO_CONTENT def test_delete_otp_for_user(client, jwt, session): # pylint:disable=unused-argument @@ -787,7 +889,7 @@ def test_delete_otp_for_user(client, jwt, session): # 
pylint:disable=unused-arg request = KeycloakScenario.create_user_by_user_info(user_info=TestJwtClaims.tester_bceid_role) KEYCLOAK_SERVICE.add_user(request, return_if_exists=True) user = KEYCLOAK_SERVICE.get_user_by_username(request.user_name) - assert 'CONFIGURE_TOTP' not in json.loads(user.value()).get('requiredActions', None) + assert "CONFIGURE_TOTP" not in json.loads(user.value()).get("requiredActions", None) user_id = user.id # Create user, org and membserhip in DB user = factory_user_model(TestUserInfo.get_bceid_user_with_kc_guid(user_id)) @@ -795,29 +897,29 @@ def test_delete_otp_for_user(client, jwt, session): # pylint:disable=unused-arg # staff with manage accounts otp reset headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.delete(f'api/v1/users/{user.username}/otp', headers=headers) - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete(f"api/v1/users/{user.username}/otp", headers=headers) + assert rv.status_code == HTTPStatus.NO_CONTENT user1 = KEYCLOAK_SERVICE.get_user_by_username(request.user_name) - assert 'CONFIGURE_TOTP' in json.loads(user1.value()).get('requiredActions') + assert "CONFIGURE_TOTP" in json.loads(user1.value()).get("requiredActions") # staff with basic access cant do otp reset headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_role) - rv = client.delete(f'api/v1/users/{user.username}/otp', headers=headers) - assert rv.status_code == http_status.HTTP_401_UNAUTHORIZED + rv = client.delete(f"api/v1/users/{user.username}/otp", headers=headers) + assert rv.status_code == HTTPStatus.UNAUTHORIZED # admin can do otp reset - rv = client.delete(f'api/v1/users/{user.username}/otp', headers=admin_headers) - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete(f"api/v1/users/{user.username}/otp", headers=admin_headers) + assert rv.status_code == HTTPStatus.NO_CONTENT # coordinator can do otp reset - rv = 
client.delete(f'api/v1/users/{user.username}/otp', headers=coordinator_headers) - assert rv.status_code == http_status.HTTP_204_NO_CONTENT + rv = client.delete(f"api/v1/users/{user.username}/otp", headers=coordinator_headers) + assert rv.status_code == HTTPStatus.NO_CONTENT # user can not do otp reset - rv = client.delete(f'api/v1/users/{user.username}/otp', headers=user_headers) - assert rv.status_code == http_status.HTTP_403_FORBIDDEN + rv = client.delete(f"api/v1/users/{user.username}/otp", headers=user_headers) + assert rv.status_code == HTTPStatus.FORBIDDEN # another org admin cant do admin_user1 = factory_user_model(user_info=TestUserInfo.user_test) @@ -825,8 +927,8 @@ def test_delete_otp_for_user(client, jwt, session): # pylint:disable=unused-arg factory_membership_model(admin_user1.id, org1.id) admin_claims = TestJwtClaims.get_test_real_user(admin_user1.keycloak_guid) admin1_headers = factory_auth_header(jwt=jwt, claims=admin_claims) - rv = client.delete(f'api/v1/users/{user.username}/otp', headers=admin1_headers) - assert rv.status_code == http_status.HTTP_403_FORBIDDEN + rv = client.delete(f"api/v1/users/{user.username}/otp", headers=admin1_headers) + assert rv.status_code == HTTPStatus.FORBIDDEN def test_add_bceid_user(client, jwt, session): # pylint:disable=unused-argument @@ -837,80 +939,97 @@ def test_add_bceid_user(client, jwt, session): # pylint:disable=unused-argument user = KEYCLOAK_SERVICE.get_user_by_username(request.user_name) user_id = user.id - headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.get_test_user(user_id, source='BCEID')) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json', data=json.dumps({ - 'firstName': 'John', - 'lastName': 'Doe' - })) - assert rv.status_code == http_status.HTTP_201_CREATED - assert rv.json.get('firstname') == 'John' + headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.get_test_user(user_id, source="BCEID")) + rv = client.post( + "/api/v1/users", + 
headers=headers, + content_type="application/json", + data=json.dumps({"firstName": "John", "lastName": "Doe"}), + ) + assert rv.status_code == HTTPStatus.CREATED + assert rv.json.get("firstname") == "John" - rv = client.post('/api/v1/users', headers=headers, content_type='application/json', data=json.dumps({ - 'firstName': 'John-New', - 'lastName': 'Doe' - })) + rv = client.post( + "/api/v1/users", + headers=headers, + content_type="application/json", + data=json.dumps({"firstName": "John-New", "lastName": "Doe"}), + ) - assert rv.status_code == http_status.HTTP_201_CREATED - assert schema_utils.validate(rv.json, 'user_response')[0] - assert rv.json.get('firstname') == 'John-New' + assert rv.status_code == HTTPStatus.CREATED + assert schema_utils.validate(rv.json, "user_response")[0] + assert rv.json.get("firstname") == "John-New" def test_user_post_during_login(client, jwt, session): # pylint:disable=unused-argument """Assert that a user can be POSTed.""" # Create a user by POST, then create same user with login flag and make sure the login date is different headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_user_role) - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - login_time = rv.json.get('loginTime') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + login_time = rv.json.get("loginTime") # Call the same endpoint again and confirm logn time is same - rv = client.post('/api/v1/users', headers=headers, content_type='application/json') - assert login_time == rv.json.get('loginTime') + rv = client.post("/api/v1/users", headers=headers, content_type="application/json") + assert login_time == rv.json.get("loginTime") # Call same endpoint with login flag and assert login time is different time.sleep(1) - rv = client.post('/api/v1/users', headers=headers, data=json.dumps({'isLogin': True}), - content_type='application/json') - assert schema_utils.validate(rv.json, 
'user_response')[0] - assert login_time != rv.json.get('loginTime') + rv = client.post( + "/api/v1/users", headers=headers, data=json.dumps({"isLogin": True}), content_type="application/json" + ) + assert schema_utils.validate(rv.json, "user_response")[0] + assert login_time != rv.json.get("loginTime") def test_get_affidavit(client, jwt, session, keycloak_mock): # pylint:disable=unused-argument """Assert get affidavit.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=headers, content_type='application/json') - - document_signature = client.get('/api/v1/documents/test.jpeg/signatures', headers=headers, - content_type='application/json') - doc_key = document_signature.json.get('key') - client.post(f"/api/v1/users/{TestJwtClaims.public_user_role.get('sub')}/affidavits", - headers=headers, - data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), - content_type='application/json') + client.post("/api/v1/users", headers=headers, content_type="application/json") + + document_signature = client.get( + "/api/v1/documents/test.jpeg/signatures", headers=headers, content_type="application/json" + ) + doc_key = document_signature.json.get("key") + client.post( + f"/api/v1/users/{TestJwtClaims.public_user_role.get('sub')}/affidavits", + headers=headers, + data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), + content_type="application/json", + ) headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.get(f"/api/v1/users/{TestJwtClaims.public_user_role.get('sub')}/affidavits", - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get( + f"/api/v1/users/{TestJwtClaims.public_user_role.get('sub')}/affidavits", + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK def test_get_rejected_affidavit(client, jwt, 
session, keycloak_mock): # pylint:disable=unused-argument """Assert get affidavit.""" headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.public_bceid_user) - client.post('/api/v1/users', headers=headers, content_type='application/json') - - document_signature = client.get('/api/v1/documents/test.jpeg/signatures', headers=headers, - content_type='application/json') - doc_key = document_signature.json.get('key') - rv = client.post(f"/api/v1/users/{TestJwtClaims.public_user_role.get('sub')}/affidavits", - headers=headers, - data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), - content_type='application/json') - - affidavit: AffidavitModel = AffidavitModel.find_pending_by_user_id(rv.json.get('user')) + client.post("/api/v1/users", headers=headers, content_type="application/json") + + document_signature = client.get( + "/api/v1/documents/test.jpeg/signatures", headers=headers, content_type="application/json" + ) + doc_key = document_signature.json.get("key") + rv = client.post( + f"/api/v1/users/{TestJwtClaims.public_user_role.get('sub')}/affidavits", + headers=headers, + data=json.dumps(TestAffidavit.get_test_affidavit_with_contact(doc_id=doc_key)), + content_type="application/json", + ) + + affidavit: AffidavitModel = AffidavitModel.find_pending_by_user_id(rv.json.get("user")) affidavit.status_code = AffidavitStatus.REJECTED.value affidavit.save() headers = factory_auth_header(jwt=jwt, claims=TestJwtClaims.staff_manage_accounts_role) - rv = client.get(f"/api/v1/users/{TestJwtClaims.public_user_role.get('sub')}/affidavits?status=REJECTED", - headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK + rv = client.get( + f"/api/v1/users/{TestJwtClaims.public_user_role.get('sub')}/affidavits?status=REJECTED", + headers=headers, + content_type="application/json", + ) + assert rv.status_code == HTTPStatus.OK diff --git a/auth-api/tests/unit/api/test_user_settings.py 
b/auth-api/tests/unit/api/test_user_settings.py index 014734e5b7..df43716909 100644 --- a/auth-api/tests/unit/api/test_user_settings.py +++ b/auth-api/tests/unit/api/test_user_settings.py @@ -17,19 +17,23 @@ Test-Suite to ensure that the /users endpoint is working as expected. """ import copy -import mock +from http import HTTPStatus +from unittest import mock -from auth_api import status as http_status from auth_api.models import ContactLink as ContactLinkModel from auth_api.schemas import utils as schema_utils from auth_api.services import Org as OrgService +from tests.conftest import mock_token from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo, TestUserInfo from tests.utilities.factory_utils import ( - factory_auth_header, factory_contact_model, factory_user_model, patch_token_info) -from tests.conftest import mock_token + factory_auth_header, + factory_contact_model, + factory_user_model, + patch_token_info, +) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_get_user_settings(client, jwt, session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that get works and adhere to schema.""" user_model = factory_user_model(user_info=TestUserInfo.user_test) @@ -37,38 +41,38 @@ def test_get_user_settings(client, jwt, session, keycloak_mock, monkeypatch): # contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model - contact_link.commit() + contact_link.save() kc_id = user_model.keycloak_guid claims = copy.deepcopy(TestJwtClaims.updated_test.value) - claims['sub'] = str(kc_id) - claims['idp_userid'] = str(user_model.idp_userid) + claims["sub"] = str(kc_id) + claims["idp_userid"] = str(user_model.idp_userid) patch_token_info(claims, monkeypatch) OrgService.create_org(TestOrgInfo.org_branch_name, user_id=user_model.id) 
# post token with updated claims headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/users/{kc_id}/settings', headers=headers, content_type='application/json') + rv = client.get(f"/api/v1/users/{kc_id}/settings", headers=headers, content_type="application/json") item_list = rv.json - account = next(obj for obj in item_list if obj['type'] == 'ACCOUNT') - assert account['accountType'] == 'BASIC' - assert account['additionalLabel'] == TestOrgInfo.org_branch_name.get('branchName') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(item_list, 'user_settings_response')[0] - assert account['productSettings'] == f'/account/{account["id"]}/restricted-product' + account = next(obj for obj in item_list if obj["type"] == "ACCOUNT") + assert account["accountType"] == "BASIC" + assert account["additionalLabel"] == TestOrgInfo.org_branch_name.get("branchName") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(item_list, "user_settings_response")[0] + assert account["productSettings"] == f'/account/{account["id"]}/restricted-product' - kc_id_no_user = TestUserInfo.user1.get('keycloak_guid') + kc_id_no_user = TestUserInfo.user1.get("keycloak_guid") claims = copy.deepcopy(TestJwtClaims.updated_test.value) - claims['sub'] = str(kc_id_no_user) + claims["sub"] = str(kc_id_no_user) patch_token_info(claims, monkeypatch) # post token with updated claims headers = factory_auth_header(jwt=jwt, claims=claims) - rv = client.get(f'/api/v1/users/{kc_id_no_user}/settings', headers=headers, content_type='application/json') - assert rv.status_code == http_status.HTTP_200_OK - assert schema_utils.validate(item_list, 'user_settings_response')[0] + rv = client.get(f"/api/v1/users/{kc_id_no_user}/settings", headers=headers, content_type="application/json") + assert rv.status_code == HTTPStatus.OK + assert schema_utils.validate(item_list, "user_settings_response")[0] item_list = rv.json - account = next((obj for obj in 
item_list if obj['type'] == 'ACCOUNT'), None) + account = next((obj for obj in item_list if obj["type"] == "ACCOUNT"), None) assert account is None - user_profile = next(obj for obj in item_list if obj['type'] == 'USER_PROFILE') - assert '/userprofile' in user_profile.get('urlpath') + user_profile = next(obj for obj in item_list if obj["type"] == "USER_PROFILE") + assert "/userprofile" in user_profile.get("urlpath") diff --git a/auth-api/tests/unit/conf/test_configuration.py b/auth-api/tests/unit/conf/test_configuration.py index ac3ddeca4d..0d9906160d 100644 --- a/auth-api/tests/unit/conf/test_configuration.py +++ b/auth-api/tests/unit/conf/test_configuration.py @@ -21,25 +21,24 @@ import auth_api.config as config - # testdata pattern is ({str: environment}, {expected return value}) TEST_ENVIRONMENT_DATA = [ - ('valid', 'development', config.DevConfig), - ('valid', 'testing', config.TestConfig), - ('valid', 'default', config.ProdConfig), - ('valid', 'staging', config.ProdConfig), - ('valid', 'production', config.ProdConfig), - ('error', None, KeyError) + ("valid", "development", config.DevConfig), + ("valid", "testing", config.TestConfig), + ("valid", "default", config.ProdConfig), + ("valid", "staging", config.ProdConfig), + ("valid", "production", config.ProdConfig), + ("error", None, KeyError), ] -@pytest.mark.parametrize('test_type,environment,expected', TEST_ENVIRONMENT_DATA) +@pytest.mark.parametrize("test_type,environment,expected", TEST_ENVIRONMENT_DATA) def test_get_named_config(test_type, environment, expected): """Assert that the named configurations can be loaded. Or that a KeyError is returned for missing config types. 
""" - if test_type == 'valid': + if test_type == "valid": assert isinstance(config.get_named_config(environment), expected) else: with pytest.raises(KeyError): @@ -52,7 +51,7 @@ def test_prod_config_secret_key(monkeypatch): # pylint: disable=missing-docstri The object either uses the SECRET_KEY from the environment, or creates the SECRET_KEY on the fly. """ - key = 'SECRET_KEY' + key = "SECRET_KEY" # Assert that secret key will default to some value # even if missed in the environment setup @@ -61,6 +60,6 @@ def test_prod_config_secret_key(monkeypatch): # pylint: disable=missing-docstri assert config.ProdConfig().SECRET_KEY is not None # Assert that the secret_key is set to the assigned environment value - monkeypatch.setenv(key, 'SECRET_KEY') + monkeypatch.setenv(key, "SECRET_KEY") reload(config) - assert config.ProdConfig().SECRET_KEY == 'SECRET_KEY' + assert config.ProdConfig().SECRET_KEY == "SECRET_KEY" diff --git a/auth-api/tests/unit/models/test_affiliation.py b/auth-api/tests/unit/models/test_affiliation.py index 3a66ae35bb..7e5dc3b27c 100644 --- a/auth-api/tests/unit/models/test_affiliation.py +++ b/auth-api/tests/unit/models/test_affiliation.py @@ -27,21 +27,22 @@ def factory_entity_model(): """Produce a templated entity model.""" - entity = EntityModel(business_identifier='CP1234567', business_number='791861073BC0001', name='Foobar, Inc.', - corp_type_code='CP') + entity = EntityModel( + business_identifier="CP1234567", business_number="791861073BC0001", name="Foobar, Inc.", corp_type_code="CP" + ) entity.save() return entity def factory_org_model(name): """Produce a templated org model.""" - org_type = OrgTypeModel(code='TEST', description='Test') + org_type = OrgTypeModel(code="TEST", description="Test") org_type.save() - org_status = OrgStatusModel(code='TEST', description='Test') + org_status = OrgStatusModel(code="TEST", description="Test") org_status.save() - preferred_payment = PaymentTypeModel(code='TEST', description='Test') + preferred_payment = 
PaymentTypeModel(code="TEST", description="Test") preferred_payment.save() org = OrgModel(name=name) @@ -63,7 +64,7 @@ def factory_affiliation_model(entity_id, org_id, environment=None): def test_affiliation(session): # pylint:disable=unused-argument """Assert that a Affiliation can be stored in the service.""" entity = factory_entity_model() - org = factory_org_model(name='My Test Org') + org = factory_org_model(name="My Test Org") affiliation = factory_affiliation_model(entity.id, org.id) assert entity.id is not None @@ -75,44 +76,45 @@ def test_affiliation(session): # pylint:disable=unused-argument def test_find_affiliation_by_ids(session): # pylint:disable=unused-argument """Assert that a affiliation can be retrieved via the org id and affiliation id.""" entity = factory_entity_model() - org = factory_org_model(name='My Test Org') + org = factory_org_model(name="My Test Org") affiliation = factory_affiliation_model(entity.id, org.id) result_affiliation = affiliation.find_affiliation_by_ids(org_id=org.id, affiliation_id=affiliation.id) assert result_affiliation is not None -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_find_affiliations_by_org_id(session, environment): # pylint:disable=unused-argument """Assert that a affiliation can be retrieved via affiliation id.""" entity = factory_entity_model() - org = factory_org_model(name='My Test Org') + org = factory_org_model(name="My Test Org") affiliation = factory_affiliation_model(entity.id, org.id, environment) result_affiliation = affiliation.find_affiliations_by_org_id(org_id=org.id, environment=environment) assert result_affiliation is not None -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_find_affiliation_by_org_and_entity_ids(session, environment): # pylint:disable=unused-argument """Assert that affiliations can be retrieved via the org id.""" entity = 
factory_entity_model() - org = factory_org_model(name='My Test Org') + org = factory_org_model(name="My Test Org") affiliation = factory_affiliation_model(entity.id, org.id, environment) - result_affiliations = affiliation.find_affiliation_by_org_and_entity_ids(org_id=org.id, entity_id=entity.id, - environment=environment) + result_affiliations = affiliation.find_affiliation_by_org_and_entity_ids( + org_id=org.id, entity_id=entity.id, environment=environment + ) assert result_affiliations is not None -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_find_affiliation_by_org_id_and_business_identifier(session, environment): # pylint:disable=unused-argument """Assert that affiliations can be retrieved via the org id and business identifier.""" entity = factory_entity_model() - org = factory_org_model(name='My Test Org') + org = factory_org_model(name="My Test Org") affiliation = factory_affiliation_model(entity.id, org.id, environment) - result_affiliations = affiliation.find_affiliation_by_org_id_and_business_identifier(org.id, - entity.business_identifier, - environment) + result_affiliations = affiliation.find_affiliation_by_org_id_and_business_identifier( + org.id, entity.business_identifier, environment + ) assert result_affiliations is not None diff --git a/auth-api/tests/unit/models/test_affiliation_invitation.py b/auth-api/tests/unit/models/test_affiliation_invitation.py index d2cc392cc0..bb7facef44 100644 --- a/auth-api/tests/unit/models/test_affiliation_invitation.py +++ b/auth-api/tests/unit/models/test_affiliation_invitation.py @@ -15,10 +15,11 @@ Test suite to ensure that the model routines are working as expected. 
""" -from _datetime import datetime, timedelta from typing import List from uuid import uuid4 +from _datetime import datetime, timedelta + from auth_api.config import get_named_config from auth_api.models import AffiliationInvitation as AffiliationInvitationModel from auth_api.models import Entity as EntityModel @@ -32,23 +33,23 @@ def _get_random_affiliation_invitation_model( - # mandatory params - user: User, - from_org_id: int, - to_org_id: int, - entity_id: int, - # optional params below - affiliation_identifier: int = 1, - affiliation_type='REQUEST', - invitation_token='ABCD', - sent_date=datetime.now(), - recipient_email=None, - invitation_status_code=InvitationStatus.PENDING.value, - approver_id=None, - additional_message=None + # mandatory params + user: User, + from_org_id: int, + to_org_id: int, + entity_id: int, + # optional params below + affiliation_identifier: int = 1, + affiliation_type="REQUEST", + invitation_token="ABCD", + sent_date=datetime.now(), + recipient_email=None, + invitation_status_code=InvitationStatus.PENDING.value, + approver_id=None, + additional_message=None, ): if recipient_email is None: - recipient_email = str(uuid4()) + '@test.com' + recipient_email = str(uuid4()) + "@test.com" affiliation_invitation_model = AffiliationInvitationModel() affiliation_invitation_model.recipient_email = recipient_email @@ -67,7 +68,7 @@ def _get_random_affiliation_invitation_model( def _create_org(new_org_id, org_type: OrgTypeModel, org_status: OrgStatusModel, preferred_payment: PaymentTypeModel): random_org = OrgModel() - random_org.name = f'Test Org #${new_org_id}' + random_org.name = f"Test Org #${new_org_id}" random_org.org_type = org_type random_org.org_status = org_status random_org.preferred_payment = preferred_payment @@ -75,46 +76,50 @@ def _create_org(new_org_id, org_type: OrgTypeModel, org_status: OrgStatusModel, return random_org -def factory_affiliation_invitation_model(session, status, sent_date=datetime.now(), - invitation_type: 
AffiliationInvitationType = None): +def factory_affiliation_invitation_model( + session, status, sent_date=datetime.now(), invitation_type: AffiliationInvitationType = None +): """Produce a templated affiliation_invitation model.""" - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") user.save() - org_type = OrgTypeModel(code='TEST', description='Test') + org_type = OrgTypeModel(code="TEST", description="Test") org_type.save() - org_status = OrgStatusModel(code='TEST', description='Test') + org_status = OrgStatusModel(code="TEST", description="Test") org_status.save() - preferred_payment = PaymentTypeModel(code='TEST', description='Test') + preferred_payment = PaymentTypeModel(code="TEST", description="Test") preferred_payment.save() from_org = OrgModel() - from_org.name = 'Test From Org' + from_org.name = "Test From Org" from_org.org_type = org_type from_org.org_status = org_status from_org.preferred_payment = preferred_payment from_org.save() to_org = OrgModel() - to_org.name = 'Test To Org' + to_org.name = "Test To Org" to_org.org_type = org_type to_org.org_status = org_status to_org.preferred_payment = preferred_payment to_org.save() - entity = EntityModel(business_identifier='CP1234567', business_number='791861073BC0001', name='Interesting, Inc.', - corp_type_code='CP') + entity = EntityModel( + business_identifier="CP1234567", + business_number="791861073BC0001", + name="Interesting, Inc.", + corp_type_code="CP", + ) entity.save() affiliation_invitation = AffiliationInvitationModel() - affiliation_invitation.recipient_email = 'abc@test.com' + affiliation_invitation.recipient_email = "abc@test.com" affiliation_invitation.sender = user affiliation_invitation.sent_date = sent_date affiliation_invitation.invitation_status_code = status - affiliation_invitation.token = 'ABCD' + affiliation_invitation.token = "ABCD" 
affiliation_invitation.from_org_id = from_org.id affiliation_invitation.to_org_id = to_org.id affiliation_invitation.entity_id = entity.id @@ -147,8 +152,9 @@ def test_find_invitations_by_sender(session): invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value) invitation.save() - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(sender_id=invitation.sender_id)) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(sender_id=invitation.sender_id) + ) assert len(retrieved_invitation) > 0 assert retrieved_invitation[0].recipient_email == invitation.recipient_email assert retrieved_invitation[0].token == invitation.token @@ -168,8 +174,9 @@ def test_find_invitations_from_org(session): invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value) invitation.save() - found_invitations = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(from_org_id=invitation.from_org_id)) + found_invitations = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(from_org_id=invitation.from_org_id) + ) assert found_invitations assert len(found_invitations) == 1 assert found_invitations[0].from_org_id == invitation.from_org_id @@ -181,8 +188,9 @@ def test_find_invitations_to_org(session): # pylint:disable=unused-argument invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value) invitation.save() - found_invitations = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(to_org_id=invitation.to_org_id)) + found_invitations = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(to_org_id=invitation.to_org_id) + ) assert found_invitations assert len(found_invitations) == 1 assert found_invitations[0].to_org_id == invitation.to_org_id @@ -206,9 +214,9 @@ def test_find_pending_invitations_by_sender(session): # 
pylint:disable=unused-a invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value) invitation.save() - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(sender_id=invitation.sender_id, - status_codes=[InvitationStatus.PENDING.value])) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=[InvitationStatus.PENDING.value]) + ) assert len(retrieved_invitation) == 1 assert retrieved_invitation[0].recipient_email == invitation.recipient_email assert invitation.invitation_status_code == InvitationStatus.PENDING.value @@ -219,8 +227,9 @@ def test_find_pending_invitations_by_from_org(session): # pylint:disable=unused invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value) invitation.save() - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(from_org_id=invitation.from_org_id)) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(from_org_id=invitation.from_org_id) + ) assert len(retrieved_invitation) == 1 assert retrieved_invitation[0].recipient_email == invitation.recipient_email assert invitation.invitation_status_code == InvitationStatus.PENDING.value @@ -231,8 +240,9 @@ def test_find_pending_invitations_by_to_org(session): # pylint:disable=unused-a invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value) invitation.save() - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(to_org_id=invitation.to_org_id)) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(to_org_id=invitation.to_org_id) + ) assert len(retrieved_invitation) == 1 assert retrieved_invitation[0].recipient_email == invitation.recipient_email assert 
invitation.invitation_status_code == InvitationStatus.PENDING.value @@ -243,13 +253,14 @@ def test_invitations_by_status(session): invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value) invitation.save() - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(sender_id=invitation.sender_id, - status_codes=[InvitationStatus.PENDING.value])) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=[InvitationStatus.PENDING.value]) + ) assert len(retrieved_invitation) == 1 - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=['INVALID'])) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=["INVALID"]) + ) assert len(retrieved_invitation) == 0 @@ -258,13 +269,14 @@ def test_invitations_by_expired_status(session): invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.EXPIRED.value) invitation.save() - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(sender_id=invitation.sender_id, - status_codes=[InvitationStatus.EXPIRED.value])) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=[InvitationStatus.EXPIRED.value]) + ) assert len(retrieved_invitation) == 1 - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=['INVALID'])) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=["INVALID"]) + ) assert len(retrieved_invitation) == 0 @@ -273,19 +285,22 @@ def test_invitations_by_invalid_status(session): 
invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value) invitation.save() - retrieved_invitation = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=['INVALID'])) + retrieved_invitation = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(sender_id=invitation.sender_id, status_codes=["INVALID"]) + ) assert len(retrieved_invitation) == 0 def test_find_invitations_by_org_entity_ids(session): """Assert that an Affiliation Invitation can be retrieved by the org and entity ids.""" - invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value, - invitation_type=AffiliationInvitationType.REQUEST.value) + invitation = factory_affiliation_invitation_model( + session=session, status=InvitationStatus.PENDING.value, invitation_type=AffiliationInvitationType.REQUEST.value + ) invitation.save() - retrieved_invitation = AffiliationInvitationModel.find_invitations_by_org_entity_ids(invitation.from_org_id, - invitation.entity_id) + retrieved_invitation = AffiliationInvitationModel.find_invitations_by_org_entity_ids( + invitation.from_org_id, invitation.entity_id + ) assert len(retrieved_invitation) == 1 assert retrieved_invitation[0].recipient_email == invitation.recipient_email assert invitation.invitation_status_code == InvitationStatus.PENDING.value @@ -293,42 +308,45 @@ def test_find_invitations_by_org_entity_ids(session): def test_create_from_dict(session): """Assert that an Entity can be created from schema.""" - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") user.save() - org_type = OrgTypeModel(code='TEST', description='Test') + org_type = OrgTypeModel(code="TEST", description="Test") org_type.save() - org_status = OrgStatusModel(code='TEST', description='Test') + 
org_status = OrgStatusModel(code="TEST", description="Test") org_status.save() - preferred_payment = PaymentTypeModel(code='TEST', description='Test') + preferred_payment = PaymentTypeModel(code="TEST", description="Test") preferred_payment.save() from_org = OrgModel() - from_org.name = 'Test From Org' + from_org.name = "Test From Org" from_org.org_type = org_type from_org.org_status = org_status from_org.preferred_payment = preferred_payment from_org.save() to_org = OrgModel() - to_org.name = 'Test To Org' + to_org.name = "Test To Org" to_org.org_type = org_type to_org.org_status = org_status to_org.preferred_payment = preferred_payment to_org.save() - entity = EntityModel(business_identifier='CP1234567', business_number='791861073BC0001', name='Interesting, Inc.', - corp_type_code='CP') + entity = EntityModel( + business_identifier="CP1234567", + business_number="791861073BC0001", + name="Interesting, Inc.", + corp_type_code="CP", + ) entity.save() invitation_info = { - 'recipientEmail': 'abc.test@gmail.com', - 'fromOrgId': from_org.id, - 'toOrgId': to_org.id, - 'entityId': entity.id + "recipientEmail": "abc.test@gmail.com", + "fromOrgId": from_org.id, + "toOrgId": to_org.id, + "entityId": entity.id, } result_invitation = AffiliationInvitationModel.create_from_dict(invitation_info, user.id) @@ -338,8 +356,7 @@ def test_create_from_dict(session): def test_create_from_dict_no_schema(session): # pylint:disable=unused-argument """Assert that an affiliation invitation can not be created without schema.""" - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") user.save() result_invitation = AffiliationInvitationModel.create_from_dict(None, user.id) @@ -350,8 +367,9 @@ def test_create_from_dict_no_schema(session): # pylint:disable=unused-argument def test_invitations_status_expiry(session): """Assert can set the status from PENDING to 
EXPIRED.""" sent_date = datetime.now() - timedelta(minutes=int(get_named_config().AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS)) - invitation = factory_affiliation_invitation_model(session=session, status=InvitationStatus.PENDING.value, - sent_date=sent_date) + invitation = factory_affiliation_invitation_model( + session=session, status=InvitationStatus.PENDING.value, sent_date=sent_date + ) invitation.save() result: str = invitation.status @@ -362,10 +380,12 @@ def test_invitations_status_expiry(session): def test_invitations_status_for_request_does_not_expire(session): """Assert status stays PENDING for invitation of type REQUEST.""" sent_date = datetime.now() - timedelta(minutes=int(get_named_config().AFFILIATION_TOKEN_EXPIRY_PERIOD_MINS)) - invitation = factory_affiliation_invitation_model(session=session, - status=InvitationStatus.PENDING.value, - sent_date=sent_date, - invitation_type=AffiliationInvitationType.REQUEST.value) + invitation = factory_affiliation_invitation_model( + session=session, + status=InvitationStatus.PENDING.value, + sent_date=sent_date, + invitation_type=AffiliationInvitationType.REQUEST.value, + ) session.add(invitation) session.commit() @@ -384,39 +404,46 @@ def test_update_invitation_as_failed(session): def _setup_multiple_orgs_and_invites(session, create_org_count=5, create_affiliation_invitation_count=5): - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") session.add(user) session.commit() - org_type = OrgTypeModel(code='TEST', description='Test') + org_type = OrgTypeModel(code="TEST", description="Test") org_type.save() - org_status = OrgStatusModel(code='TEST', description='Test') + org_status = OrgStatusModel(code="TEST", description="Test") org_status.save() - preferred_payment = PaymentTypeModel(code='TEST', description='Test') + preferred_payment = PaymentTypeModel(code="TEST", description="Test") 
preferred_payment.save() for i in range(1, create_org_count + 1): - new_org = _create_org(new_org_id=i, org_type=org_type, org_status=org_status, - preferred_payment=preferred_payment) + new_org = _create_org( + new_org_id=i, org_type=org_type, org_status=org_status, preferred_payment=preferred_payment + ) session.add(new_org) session.commit() - entity = EntityModel(business_identifier='CP1234567', business_number='791861073BC0001', name='Interesting, Inc.', - corp_type_code='CP', id=1) + entity = EntityModel( + business_identifier="CP1234567", + business_number="791861073BC0001", + name="Interesting, Inc.", + corp_type_code="CP", + id=1, + ) entity.save() for i in range(1, create_affiliation_invitation_count + 1): if i == 1: - new_ai = _get_random_affiliation_invitation_model(user=user, to_org_id=2, from_org_id=1, - entity_id=entity.id) + new_ai = _get_random_affiliation_invitation_model( + user=user, to_org_id=2, from_org_id=1, entity_id=entity.id + ) else: - new_ai = _get_random_affiliation_invitation_model(user=user, to_org_id=1, from_org_id=i, - entity_id=entity.id) + new_ai = _get_random_affiliation_invitation_model( + user=user, to_org_id=1, from_org_id=i, entity_id=entity.id + ) session.add(new_ai) session.commit() @@ -434,6 +461,7 @@ def test_find_all_sent_to_org_affiliated_with_entity(session): """Assert that finding affiliations sent to org and requested for specific entity return correct count.""" affiliation_invitation_count = 5 _setup_multiple_orgs_and_invites(session, create_affiliation_invitation_count=affiliation_invitation_count) - affiliation_invitations: List = AffiliationInvitationModel \ - .filter_by(AffiliationInvitationSearch(to_org_id='1', entity_id='1')) + affiliation_invitations: List = AffiliationInvitationModel.filter_by( + AffiliationInvitationSearch(to_org_id="1", entity_id="1") + ) assert len(affiliation_invitations) == affiliation_invitation_count - 1 diff --git a/auth-api/tests/unit/models/test_contact.py 
b/auth-api/tests/unit/models/test_contact.py index 8bb2d8ab62..531d07854c 100644 --- a/auth-api/tests/unit/models/test_contact.py +++ b/auth-api/tests/unit/models/test_contact.py @@ -22,16 +22,16 @@ def test_contact(session): """Assert that a Contact can be stored in the database.""" contact = ContactModel( - street='123 Roundabout Lane', - street_additional='Unit 1', - city='Victoria', - region='British Columbia', - country='CA', - postal_code='V1A 1A1', - delivery_instructions='Ring buzzer 123', - phone='111-222-3333', - phone_extension='123', - email='abc123@mail.com' + street="123 Roundabout Lane", + street_additional="Unit 1", + city="Victoria", + region="British Columbia", + country="CA", + postal_code="V1A 1A1", + delivery_instructions="Ring buzzer 123", + phone="111-222-3333", + phone_extension="123", + email="abc123@mail.com", ) session.add(contact) diff --git a/auth-api/tests/unit/models/test_documents.py b/auth-api/tests/unit/models/test_documents.py index 65db7f45c3..5b19ec7741 100644 --- a/auth-api/tests/unit/models/test_documents.py +++ b/auth-api/tests/unit/models/test_documents.py @@ -26,7 +26,7 @@ def test_documents_with_insert(session): Start with a blank document. """ - doc_latest = Documents.fetch_latest_document_by_type('termsofuse') + doc_latest = Documents.fetch_latest_document_by_type("termsofuse") assert doc_latest.version_id == get_tos_latest_version() @@ -35,11 +35,11 @@ def test_documents_with_insert_some_type(session): Start with a blank document. 
""" - html_content = '' + html_content = "" # putting higher numbers so that version number doesnt collide with existing in db - doc = Documents(version_id=20, type='sometype', content=html_content, content_type='text/html') + doc = Documents(version_id=20, type="sometype", content=html_content, content_type="text/html") session.add(doc) session.commit() - doc_latest = Documents.fetch_latest_document_by_type('sometype') + doc_latest = Documents.fetch_latest_document_by_type("sometype") assert doc_latest.content == html_content diff --git a/auth-api/tests/unit/models/test_entity.py b/auth-api/tests/unit/models/test_entity.py index 07ddef64b3..b2b64e848f 100644 --- a/auth-api/tests/unit/models/test_entity.py +++ b/auth-api/tests/unit/models/test_entity.py @@ -21,43 +21,45 @@ def test_entity(session): """Assert that an Entity can be stored in the service.""" - entity = EntityModel(business_identifier='CP1234567', business_number='791861073BC0001', name='Foobar, Inc.', - corp_type_code='CP') + entity = EntityModel( + business_identifier="CP1234567", business_number="791861073BC0001", name="Foobar, Inc.", corp_type_code="CP" + ) session.add(entity) session.commit() assert entity.id is not None - assert entity.corp_type_code == 'CP' + assert entity.corp_type_code == "CP" def test_entity_find_by_business_id(session): """Assert that an Entity can be retrieved via business identifier.""" - entity = EntityModel(business_identifier='CP1234567', business_number='791861073BC0001', name='Foobar, Inc.', - corp_type_code='CP') + entity = EntityModel( + business_identifier="CP1234567", business_number="791861073BC0001", name="Foobar, Inc.", corp_type_code="CP" + ) session.add(entity) session.commit() - business_id = 'CP1234567' + business_id = "CP1234567" result_entity = EntityModel.find_by_business_identifier(business_identifier=business_id) assert result_entity.id is not None - assert result_entity.corp_type_code == 'CP' + assert result_entity.corp_type_code == "CP" def 
test_create_from_dict(session): # pylint:disable=unused-argument """Assert that an Entity can be created from schema.""" updated_entity_info = { - 'businessIdentifier': 'CP1234567', - 'businessNumber': '791861073BC0001', - 'passCode': '9898989', - 'name': 'Barfoo, Inc.', - 'corp_type_code': 'CP' + "businessIdentifier": "CP1234567", + "businessNumber": "791861073BC0001", + "passCode": "9898989", + "name": "Barfoo, Inc.", + "corp_type_code": "CP", } result_entity = EntityModel.create_from_dict(updated_entity_info) assert result_entity.id is not None - assert result_entity.corp_type_code == 'CP' + assert result_entity.corp_type_code == "CP" def test_create_from_dict_no_schema(session): # pylint:disable=unused-argument @@ -68,15 +70,14 @@ def test_create_from_dict_no_schema(session): # pylint:disable=unused-argument def test_bootstrap_entity_with_sub_type(session): """Assert that an Entity can be retrieved via business identifier.""" - entity = EntityModel(business_identifier='tmp000123', name='Test', - corp_type_code='TMP', corp_sub_type_code='ULC') + entity = EntityModel(business_identifier="tmp000123", name="Test", corp_type_code="TMP", corp_sub_type_code="ULC") session.add(entity) session.commit() - business_id = 'tmp000123' + business_id = "tmp000123" result_entity = EntityModel.find_by_business_identifier(business_identifier=business_id) assert result_entity.id is not None - assert result_entity.corp_type_code == 'TMP' - assert result_entity.corp_sub_type_code == 'ULC' + assert result_entity.corp_type_code == "TMP" + assert result_entity.corp_sub_type_code == "ULC" diff --git a/auth-api/tests/unit/models/test_invitation.py b/auth-api/tests/unit/models/test_invitation.py index feb9aed3c0..6ffe7b2265 100644 --- a/auth-api/tests/unit/models/test_invitation.py +++ b/auth-api/tests/unit/models/test_invitation.py @@ -29,41 +29,40 @@ def factory_invitation_model(session, status, sent_date=datetime.now()): """Produce a templated invitation model.""" - user = 
User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") session.add(user) session.commit() - org_type = OrgTypeModel(code='TEST', description='Test') + org_type = OrgTypeModel(code="TEST", description="Test") session.add(org_type) session.commit() - org_status = OrgStatusModel(code='TEST', description='Test') + org_status = OrgStatusModel(code="TEST", description="Test") session.add(org_status) session.commit() - preferred_payment = PaymentTypeModel(code='TEST', description='Test') + preferred_payment = PaymentTypeModel(code="TEST", description="Test") session.add(preferred_payment) session.commit() org = OrgModel() - org.name = 'Test Org' + org.name = "Test Org" org.org_type = org_type org.org_status = org_status org.preferred_payment = preferred_payment org.save() invitation = InvitationModel() - invitation.recipient_email = 'abc@test.com' + invitation.recipient_email = "abc@test.com" invitation.sender = user invitation.sent_date = sent_date invitation.invitation_status_code = status - invitation.token = 'ABCD' + invitation.token = "ABCD" invitation_membership = InvitationMembershipModel() invitation_membership.org_id = org.id - invitation_membership.membership_type_code = 'USER' + invitation_membership.membership_type_code = "USER" invitation.membership.append(invitation_membership) invitation.save() @@ -72,7 +71,7 @@ def factory_invitation_model(session, status, sent_date=datetime.now()): def test_create_invitation(session): """Assert that an Invitation can be stored in the service.""" - invitation = factory_invitation_model(session=session, status='PENDING') + invitation = factory_invitation_model(session=session, status="PENDING") session.add(invitation) session.commit() assert invitation.id is not None @@ -80,7 +79,7 @@ def test_create_invitation(session): def test_find_invitation_by_id(session): # pylint:disable=unused-argument """Assert that 
an Invitation can retrieved by its id.""" - invitation = factory_invitation_model(session=session, status='PENDING') + invitation = factory_invitation_model(session=session, status="PENDING") session.add(invitation) session.commit() @@ -91,7 +90,7 @@ def test_find_invitation_by_id(session): # pylint:disable=unused-argument def test_find_invitations_by_user(session): # pylint:disable=unused-argument """Assert that an Invitation can retrieved by the user id.""" - invitation = factory_invitation_model(session=session, status='PENDING') + invitation = factory_invitation_model(session=session, status="PENDING") session.add(invitation) session.commit() @@ -103,17 +102,17 @@ def test_find_invitations_by_user(session): # pylint:disable=unused-argument def test_update_invitation_as_retried(session): # pylint:disable=unused-argument """Assert that an Invitation can be updated.""" - invitation = factory_invitation_model(session=session, status='FAILED') + invitation = factory_invitation_model(session=session, status="FAILED") session.add(invitation) session.commit() invitation.update_invitation_as_retried() assert invitation - assert invitation.invitation_status_code == 'PENDING' + assert invitation.invitation_status_code == "PENDING" def test_find_invitations_by_org(session): # pylint:disable=unused-argument """Assert that Invitations for a specified org can be retrieved.""" - invitation = factory_invitation_model(session=session, status='PENDING') + invitation = factory_invitation_model(session=session, status="PENDING") session.add(invitation) session.commit() @@ -121,12 +120,12 @@ def test_find_invitations_by_org(session): # pylint:disable=unused-argument assert found_invitations assert len(found_invitations) == 1 assert found_invitations[0].membership[0].org_id == invitation.membership[0].org_id - assert invitation.invitation_status_code == 'PENDING' + assert invitation.invitation_status_code == "PENDING" def test_find_pending_invitations_by_user(session): # 
pylint:disable=unused-argument """Assert that an Invitation can retrieved by the user id.""" - invitation = factory_invitation_model(session=session, status='PENDING') + invitation = factory_invitation_model(session=session, status="PENDING") session.add(invitation) session.commit() @@ -137,7 +136,7 @@ def test_find_pending_invitations_by_user(session): # pylint:disable=unused-arg def test_find_pending_invitations_by_org(session): # pylint:disable=unused-argument """Assert that an Invitation can retrieved by the org id.""" - invitation = factory_invitation_model(session=session, status='PENDING') + invitation = factory_invitation_model(session=session, status="PENDING") session.add(invitation) session.commit() @@ -148,64 +147,57 @@ def test_find_pending_invitations_by_org(session): # pylint:disable=unused-argu def test_invitations_by_status(session): # pylint:disable=unused-argument """Assert that an Invitation can retrieved by the user id.""" - invitation = factory_invitation_model(session=session, status='PENDING') + invitation = factory_invitation_model(session=session, status="PENDING") session.add(invitation) session.commit() - retrieved_invitation = InvitationModel.find_invitations_by_status(invitation.sender_id, 'FAILED') + retrieved_invitation = InvitationModel.find_invitations_by_status(invitation.sender_id, "FAILED") assert len(retrieved_invitation) == 0 def test_create_from_dict(session): # pylint:disable=unused-argument """Assert that an Entity can be created from schema.""" - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") session.add(user) session.commit() - org_type = OrgTypeModel(code='TEST', description='Test') + org_type = OrgTypeModel(code="TEST", description="Test") session.add(org_type) session.commit() - org_status = OrgStatusModel(code='TEST', description='Test') + org_status = OrgStatusModel(code="TEST", 
description="Test") session.add(org_status) session.commit() - preferred_payment = PaymentTypeModel(code='TEST', description='Test') + preferred_payment = PaymentTypeModel(code="TEST", description="Test") session.add(preferred_payment) session.commit() org = OrgModel() - org.name = 'Test Org' + org.name = "Test Org" org.org_type = org_type org.org_status = org_status org.preferred_payment = preferred_payment org.save() invitation_info = { - 'recipientEmail': 'abc.test@gmail.com', - 'membership': [ - { - 'membershipType': 'USER', - 'orgId': org.id - } - ] + "recipientEmail": "abc.test@gmail.com", + "membership": [{"membershipType": "USER", "orgId": org.id}], } - result_invitation = InvitationModel.create_from_dict(invitation_info, user.id, 'STANDARD') + result_invitation = InvitationModel.create_from_dict(invitation_info, user.id, "STANDARD") assert result_invitation.id is not None def test_create_from_dict_no_schema(session): # pylint:disable=unused-argument """Assert that an Entity can not be created without schema.""" - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") session.add(user) session.commit() - result_invitation = InvitationModel.create_from_dict(None, user.id, 'STANDARD') + result_invitation = InvitationModel.create_from_dict(None, user.id, "STANDARD") assert result_invitation is None @@ -213,12 +205,10 @@ def test_create_from_dict_no_schema(session): # pylint:disable=unused-argument def test_invitations_status_expiry(session): # pylint:disable=unused-argument """Assert can set the status from PENDING to EXPIRED.""" sent_date = datetime.now() - timedelta(days=int(get_named_config().TOKEN_EXPIRY_PERIOD) + 1) - invitation = factory_invitation_model(session=session, - status='PENDING', - sent_date=sent_date) + invitation = factory_invitation_model(session=session, status="PENDING", sent_date=sent_date) session.add(invitation) 
session.commit() result: str = invitation.status - assert result == 'EXPIRED' + assert result == "EXPIRED" diff --git a/auth-api/tests/unit/models/test_membership.py b/auth-api/tests/unit/models/test_membership.py index 81cb4586c9..5dcb9efad9 100644 --- a/auth-api/tests/unit/models/test_membership.py +++ b/auth-api/tests/unit/models/test_membership.py @@ -28,18 +28,18 @@ def factory_membersip_model(session): """Produce a templated org model.""" user = factory_user_model() - org_type = OrgTypeModel(code='TEST', description='Test') + org_type = OrgTypeModel(code="TEST", description="Test") session.add(org_type) session.commit() - org_status = OrgStatusModel(code='TEST', description='Test') + org_status = OrgStatusModel(code="TEST", description="Test") session.add(org_status) session.commit() - preferred_payment = PaymentTypeModel(code='TEST', description='Test') + preferred_payment = PaymentTypeModel(code="TEST", description="Test") session.add(preferred_payment) session.commit() - org = OrgModel(name='Test Org') + org = OrgModel(name="Test Org") org.org_type = org_type org.org_status = OrgStatusModel.get_default_status() org.preferred_payment = preferred_payment diff --git a/auth-api/tests/unit/models/test_org.py b/auth-api/tests/unit/models/test_org.py index 687fa5edbe..202139ab0d 100644 --- a/auth-api/tests/unit/models/test_org.py +++ b/auth-api/tests/unit/models/test_org.py @@ -26,22 +26,22 @@ def factory_org_model(name, session): """Produce a templated org model.""" - org_type = OrgTypeModel(code='TEST', description='Test') + org_type = OrgTypeModel(code="TEST", description="Test") session.add(org_type) session.commit() - org_status = OrgStatusModel(code='TEST', description='Test') + org_status = OrgStatusModel(code="TEST", description="Test") session.add(org_status) session.commit() - preferred_payment = PaymentTypeModel(code='TEST', description='Test') + preferred_payment = PaymentTypeModel(code="TEST", description="Test") session.add(preferred_payment) 
session.commit() org = OrgModel(name=name) org.org_type = org_type org.org_status = OrgStatusModel.get_default_status() org.preferred_payment = preferred_payment - org.branch_name = '' + org.branch_name = "" org.save() return org @@ -49,7 +49,7 @@ def factory_org_model(name, session): def test_org(session): """Assert that an Org can be stored in the service.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() assert org.id is not None @@ -57,19 +57,17 @@ def test_org(session): def test_org_create_from_dictionary(session): # pylint:disable=unused-argument """Assert that an Org can be created from a dictionary.""" - org_info = { - 'name': 'My Test Org' - } + org_info = {"name": "My Test Org"} org_model = OrgModel.create_from_dict(org_info).save() assert org_model assert org_model.id - assert org_model.name == org_info['name'] + assert org_model.name == org_info["name"] def test_org_find_by_id(session): # pylint:disable=unused-argument """Assert that an Org can retrieved by its id.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() @@ -80,7 +78,7 @@ def test_org_find_by_id(session): # pylint:disable=unused-argument def test_org_find_by_uuid(session): """Assert that an Org can be retrieved by its uuid.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() @@ -92,7 +90,7 @@ def test_org_find_by_uuid(session): def test_org_find_by_name(session): # pylint:disable=unused-argument """Assert that an Org can retrieved by its name.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() @@ -105,7 +103,7 @@ def test_org_find_by_name(session): # 
pylint:disable=unused-argument def test_org_find_by_name_inactive(session): # pylint:disable=unused-argument """Assert that an inactive Org can not be retrieved by its name.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() @@ -117,7 +115,7 @@ def test_org_find_by_name_inactive(session): # pylint:disable=unused-argument def test_find_similar_org_by_name(session): # pylint:disable=unused-argument """Assert that an Org can retrieved by its name.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() @@ -125,13 +123,13 @@ def test_find_similar_org_by_name(session): # pylint:disable=unused-argument assert found_org assert found_org.name == org.name - found_org = OrgModel.find_similar_org_by_name('Test Or') + found_org = OrgModel.find_similar_org_by_name("Test Or") assert not found_org def test_find_similar_org_by_name_inactive(session): # pylint:disable=unused-argument """Assert that an inactive Org can not be retrieved by its name.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() @@ -143,22 +141,20 @@ def test_find_similar_org_by_name_inactive(session): # pylint:disable=unused-ar def test_update_org_from_dict(session): # pylint:disable=unused-argument """Assert that an Org can be updated from a dictionary.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() - update_dictionary = { - 'name': 'My Updated Test Org' - } + update_dictionary = {"name": "My Updated Test Org"} org.update_org_from_dict(update_dictionary) assert org - assert org.name == update_dictionary['name'] + assert org.name == update_dictionary["name"] def 
test_count_org_from_dict(session): # pylint:disable=unused-argument """Assert that an Org can be updated from a dictionary.""" user = factory_user_model() - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) org.created_by_id = user.id session.add(org) session.commit() @@ -167,9 +163,7 @@ def test_count_org_from_dict(session): # pylint:disable=unused-argument def test_create_from_dict(session): # pylint:disable=unused-argument """Assert that an Org can be created from schema.""" - org_info = { - 'name': 'My Test Org' - } + org_info = {"name": "My Test Org"} result_org = OrgModel.create_from_dict(org_info).save() @@ -185,7 +179,7 @@ def test_create_from_dict_no_schema(session): # pylint:disable=unused-argument def test_delete(session): # pylint:disable=unused-argument """Assert that an Org can be updated from a dictionary.""" - org = factory_org_model(name='My Test Org', session=session) + org = factory_org_model(name="My Test Org", session=session) session.add(org) session.commit() assert org.status_code == OrgStatusEnum.ACTIVE.value diff --git a/auth-api/tests/unit/models/test_task.py b/auth-api/tests/unit/models/test_task.py index a0bde6d1f3..23a9e3750f 100644 --- a/auth-api/tests/unit/models/test_task.py +++ b/auth-api/tests/unit/models/test_task.py @@ -28,28 +28,41 @@ def test_task_model(session): """Assert that a task can be stored in the service.""" user = factory_user_model() task_type = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value - task = TaskModel(name='TEST', date_submitted=datetime.now(), relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=task_type, status=TaskStatus.OPEN.value, - related_to=user.id) + task = TaskModel( + name="TEST", + date_submitted=datetime.now(), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=task_type, + status=TaskStatus.OPEN.value, + related_to=user.id, + ) session.add(task) session.commit() assert 
task.id is not None - assert task.name == 'TEST' + assert task.name == "TEST" def test_task_model_with_due_date(session): """Assert that a task can be stored in the service.""" user = factory_user_model() task_type = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value - task = TaskModel(name='TEST', date_submitted=datetime.now(), relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=task_type, due_date=datetime.now(), - status=TaskStatus.OPEN.value, related_to=user.id) + task = TaskModel( + name="TEST", + date_submitted=datetime.now(), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=task_type, + due_date=datetime.now(), + status=TaskStatus.OPEN.value, + related_to=user.id, + ) session.add(task) session.commit() assert task.id is not None - assert task.name == 'TEST' + assert task.name == "TEST" assert task.due_date is not None @@ -57,11 +70,17 @@ def test_fetch_tasks(session): # pylint:disable=unused-argument """Assert that we can fetch all tasks.""" user = factory_user_model() task_type = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value - task = TaskModel(name='TEST', date_submitted=datetime.now(), relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=task_type, due_date=datetime.now(), - status=TaskStatus.OPEN.value, related_to=user.id, - relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value - ) + task = TaskModel( + name="TEST", + date_submitted=datetime.now(), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=task_type, + due_date=datetime.now(), + status=TaskStatus.OPEN.value, + related_to=user.id, + relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + ) session.add(task) session.commit() @@ -70,7 +89,7 @@ def test_fetch_tasks(session): # pylint:disable=unused-argument type=task_type, status=[TaskStatus.OPEN.value], page=1, - limit=10 + limit=10, ) found_tasks, count = TaskModel.fetch_tasks(task_search) @@ -85,9 +104,16 @@ 
def test_find_task_by_id(session): # pylint:disable=unused-argument """Assert that we can fetch all tasks.""" user = factory_user_model() task_type = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value - task = TaskModel(name='TEST', date_submitted=datetime.now(), relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=task_type, due_date=datetime.now(), - status=TaskStatus.OPEN.value, related_to=user.id) + task = TaskModel( + name="TEST", + date_submitted=datetime.now(), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=task_type, + due_date=datetime.now(), + status=TaskStatus.OPEN.value, + related_to=user.id, + ) session.add(task) session.commit() found_task = TaskModel.find_by_task_id(task.id) @@ -106,7 +132,7 @@ def test_fetch_tasks_pagination(session): # pylint:disable=unused-argument type=task_type, status=[TaskStatus.OPEN.value], page=3, - limit=2 + limit=2, ) found_tasks, count = TaskModel.fetch_tasks(task_search) @@ -118,33 +144,48 @@ def test_task_model_account_id(session): """Assert that a task can be stored along with account id column.""" user = factory_user_model() task_type = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value - task = TaskModel(name='TEST', date_submitted=datetime.now(), relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=task_type, status=TaskStatus.OPEN.value, - account_id=10, related_to=user.id) + task = TaskModel( + name="TEST", + date_submitted=datetime.now(), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=task_type, + status=TaskStatus.OPEN.value, + account_id=10, + related_to=user.id, + ) session.add(task) session.commit() assert task.id is not None - assert task.name == 'TEST' + assert task.name == "TEST" assert task.account_id == 10 def test_fetch_pending_tasks_descending(session): # pylint:disable=unused-argument """Assert that we can fetch all tasks.""" user = factory_user_model() - task = TaskModel(name='TEST 1', 
date_submitted=datetime.now(), - relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value, - status=TaskStatus.OPEN.value, - related_to=user.id, - relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value) + task = TaskModel( + name="TEST 1", + date_submitted=datetime.now(), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value, + status=TaskStatus.OPEN.value, + related_to=user.id, + relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + ) task.save() - task = TaskModel(name='TEST 2', date_submitted=datetime(2021, 5, 25), - relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value, - status=TaskStatus.OPEN.value, - related_to=user.id, - relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value) + task = TaskModel( + name="TEST 2", + date_submitted=datetime(2021, 5, 25), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value, + status=TaskStatus.OPEN.value, + related_to=user.id, + relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + ) task.save() task_type = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value @@ -153,31 +194,36 @@ def test_fetch_pending_tasks_descending(session): # pylint:disable=unused-argum type=task_type, status=[TaskStatus.OPEN.value], page=1, - limit=2 + limit=2, ) found_tasks, count = TaskModel.fetch_tasks(task_search) assert found_tasks - assert found_tasks[0].name == 'TEST 2' - assert found_tasks[1].name == 'TEST 1' + assert found_tasks[0].name == "TEST 2" + assert found_tasks[1].name == "TEST 1" assert count == 2 def test_finding_task_by_relationship_id(session): # pylint:disable=unused-argument """Assert that we can fetch all tasks.""" user = factory_user_model() - task = TaskModel(name='TEST 1', 
date_submitted=datetime.now(), - relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value, - status=TaskStatus.OPEN.value, - related_to=user.id, - relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value) + task = TaskModel( + name="TEST 1", + date_submitted=datetime.now(), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value, + status=TaskStatus.OPEN.value, + related_to=user.id, + relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + ) task.save() found_task = TaskModel.find_by_task_relationship_id( - task_relationship_type=TaskRelationshipType.ORG.value, relationship_id=10) + task_relationship_type=TaskRelationshipType.ORG.value, relationship_id=10 + ) assert found_task - assert found_task.name == 'TEST 1' + assert found_task.name == "TEST 1" assert found_task.relationship_id == 10 assert found_task.status == TaskStatus.OPEN.value @@ -185,17 +231,21 @@ def test_finding_task_by_relationship_id(session): # pylint:disable=unused-argu def test_find_by_task_for_user(session): # pylint:disable=unused-argument """Assert that we can fetch all tasks.""" user = factory_user_model() - task = TaskModel(name='TEST 1', date_submitted=datetime.now(), - relationship_type=TaskRelationshipType.USER.value, - relationship_id=user.id, type=TaskTypePrefix.BCEID_ADMIN.value, - status=TaskStatus.OPEN.value, - related_to=user.id, - account_id=10, - relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value) + task = TaskModel( + name="TEST 1", + date_submitted=datetime.now(), + relationship_type=TaskRelationshipType.USER.value, + relationship_id=user.id, + type=TaskTypePrefix.BCEID_ADMIN.value, + status=TaskStatus.OPEN.value, + related_to=user.id, + account_id=10, + relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + ) task.save() found_task = TaskModel.find_by_user_and_status(org_id=10, 
status=TaskStatus.OPEN.value) assert found_task - assert found_task.name == 'TEST 1' + assert found_task.name == "TEST 1" assert found_task.relationship_id == user.id assert found_task.status == TaskStatus.OPEN.value diff --git a/auth-api/tests/unit/models/test_user.py b/auth-api/tests/unit/models/test_user.py index 3056361ce7..498835b4d8 100644 --- a/auth-api/tests/unit/models/test_user.py +++ b/auth-api/tests/unit/models/test_user.py @@ -26,8 +26,7 @@ def test_user(session): Start with a blank database. """ - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") session.add(user) session.commit() @@ -40,21 +39,14 @@ def test_user_find_by_jwt_token(session, monkeypatch): Start with a blank database. """ - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") session.add(user) session.commit() token = { - 'preferred_username': 'CP1234567', - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04', - 'realm_access': { - 'roles': [ - 'edit', - 'uma_authorization', - 'basic' - ] - } + "preferred_username": "CP1234567", + "sub": "1b20db59-19a0-4727-affe-c6f64309fd04", + "realm_access": {"roles": ["edit", "uma_authorization", "basic"]}, } patch_token_info(token, monkeypatch) u = User.find_by_jwt_token() @@ -65,153 +57,111 @@ def test_user_find_by_jwt_token(session, monkeypatch): def test_create_from_jwt_token(session, monkeypatch): # pylint: disable=unused-argument """Assert User is created from the JWT fields.""" token = { - 'preferred_username': 'CP1234567', - 'realm_access': { - 'roles': [ - 'edit', - 'uma_authorization', - 'basic' - ] - }, - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04' + "preferred_username": "CP1234567", + "realm_access": {"roles": ["edit", "uma_authorization", "basic"]}, + "sub": 
"1b20db59-19a0-4727-affe-c6f64309fd04", } patch_token_info(token, monkeypatch) - u = User.create_from_jwt_token('fname', 'lname') + u = User.create_from_jwt_token("fname", "lname") assert u.id is not None def test_create_from_jwt_token_no_token(session, monkeypatch): # pylint: disable=unused-argument """Assert User is not created from an empty token.""" patch_token_info(None, monkeypatch) - u = User.create_from_jwt_token('fname', 'lname') + u = User.create_from_jwt_token("fname", "lname") assert u is None def test_update_from_jwt_token(session, monkeypatch): # pylint: disable=unused-argument """Assert User is updated from a JWT and an existing User model.""" token = { - 'preferred_username': 'CP1234567', - 'firstname': 'Bobby', - 'lasname': 'Joe', - 'realm_access': { - 'roles': [ - 'edit', - 'uma_authorization', - 'basic' - ] - }, - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04' + "preferred_username": "CP1234567", + "firstname": "Bobby", + "lasname": "Joe", + "realm_access": {"roles": ["edit", "uma_authorization", "basic"]}, + "sub": "1b20db59-19a0-4727-affe-c6f64309fd04", } patch_token_info(token, monkeypatch) - user = User.create_from_jwt_token('Bobby', 'Joe') + user = User.create_from_jwt_token("Bobby", "Joe") updated_token = { - 'preferred_username': 'CP1234567', - 'firstname': 'Bob', - 'lastname': 'Joe', - 'realm_access': { - 'roles': [ - 'edit', - 'uma_authorization', - 'basic' - ] - }, - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04' + "preferred_username": "CP1234567", + "firstname": "Bob", + "lastname": "Joe", + "realm_access": {"roles": ["edit", "uma_authorization", "basic"]}, + "sub": "1b20db59-19a0-4727-affe-c6f64309fd04", } patch_token_info(updated_token, monkeypatch) - user = User.update_from_jwt_token(user, 'Bob', 'Joe') + user = User.update_from_jwt_token(user, "Bob", "Joe") - assert user.firstname == 'Bob' + assert user.firstname == "Bob" def test_update_terms_of_user_success(session, monkeypatch): # pylint:disable=unused-argument """Assert User is 
updated from a JWT with new terms of use.""" token = { - 'preferred_username': 'CP1234567', - 'firstname': 'Bobby', - 'lasname': 'Joe', - 'realm_access': { - 'roles': [ - 'edit', - 'uma_authorization', - 'basic' - ] - }, - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04' + "preferred_username": "CP1234567", + "firstname": "Bobby", + "lasname": "Joe", + "realm_access": {"roles": ["edit", "uma_authorization", "basic"]}, + "sub": "1b20db59-19a0-4727-affe-c6f64309fd04", } patch_token_info(token, monkeypatch) - user = User.create_from_jwt_token('Bobby', 'Joe') + user = User.create_from_jwt_token("Bobby", "Joe") assert user.is_terms_of_use_accepted is False assert user.terms_of_use_accepted_version is None - user = User.update_terms_of_use(True, '1') + user = User.update_terms_of_use(True, "1") assert user.is_terms_of_use_accepted is True - assert user.terms_of_use_accepted_version == '1' + assert user.terms_of_use_accepted_version == "1" def test_update_terms_of_user_success_with_integer(session, monkeypatch): # pylint:disable=unused-argument """Assert User is updated from a JWT with new terms of use.""" token = { - 'preferred_username': 'CP1234567', - 'firstname': 'Bobby', - 'lasname': 'Joe', - 'realm_access': { - 'roles': [ - 'edit', - 'uma_authorization', - 'basic' - ] - }, - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04' + "preferred_username": "CP1234567", + "firstname": "Bobby", + "lasname": "Joe", + "realm_access": {"roles": ["edit", "uma_authorization", "basic"]}, + "sub": "1b20db59-19a0-4727-affe-c6f64309fd04", } patch_token_info(token, monkeypatch) - user = User.create_from_jwt_token('Bobby', 'Joe') + user = User.create_from_jwt_token("Bobby", "Joe") assert user.is_terms_of_use_accepted is False assert user.terms_of_use_accepted_version is None user = User.update_terms_of_use(True, 1) assert user.is_terms_of_use_accepted is True - assert user.terms_of_use_accepted_version == '1' + assert user.terms_of_use_accepted_version == "1" def 
test_update_from_jwt_token_no_token(session, monkeypatch): # pylint:disable=unused-argument """Assert that a user is not updateable without a token (should return None).""" token = { - 'preferred_username': 'CP1234567', - 'firstname': 'Bobby', - 'lasname': 'Joe', - 'realm_access': { - 'roles': [ - 'edit', - 'uma_authorization', - 'basic' - ] - }, - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04' + "preferred_username": "CP1234567", + "firstname": "Bobby", + "lasname": "Joe", + "realm_access": {"roles": ["edit", "uma_authorization", "basic"]}, + "sub": "1b20db59-19a0-4727-affe-c6f64309fd04", } patch_token_info(token, monkeypatch) - existing_user = User.create_from_jwt_token('Bobby', 'Joe') + existing_user = User.create_from_jwt_token("Bobby", "Joe") token = None patch_token_info(token, monkeypatch) - user = User.update_from_jwt_token(existing_user, 'Bobby', 'Joe') + user = User.update_from_jwt_token(existing_user, "Bobby", "Joe") assert user is None def test_update_from_jwt_token_no_user(session, monkeypatch): # pylint:disable=unused-argument """Assert that a user is not updateable without a user (should return None).""" token = { - 'preferred_username': 'CP1234567', - 'firstname': 'Bobby', - 'lasname': 'Joe', - 'realm_access': { - 'roles': [ - 'edit', - 'uma_authorization', - 'basic' - ] - }, - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04' + "preferred_username": "CP1234567", + "firstname": "Bobby", + "lasname": "Joe", + "realm_access": {"roles": ["edit", "uma_authorization", "basic"]}, + "sub": "1b20db59-19a0-4727-affe-c6f64309fd04", } patch_token_info(token, monkeypatch) user = User.update_from_jwt_token(None, None, None) @@ -220,20 +170,18 @@ def test_update_from_jwt_token_no_user(session, monkeypatch): # pylint:disable= def test_find_by_username(session): """Assert User can be found by the most current username.""" - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", 
keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") session.add(user) session.commit() - u = User.find_by_username('CP1234567') + u = User.find_by_username("CP1234567") assert u.id is not None def test_user_save(session): # pylint: disable=unused-argument """Assert User record is saved.""" - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") user.save() assert user.id is not None @@ -241,8 +189,7 @@ def test_user_save(session): # pylint: disable=unused-argument def test_user_delete(session): # pylint: disable=unused-argument """Assert the User record is deleted.""" - user = User(username='CP1234567', - keycloak_guid='1b20db59-19a0-4727-affe-c6f64309fd04') + user = User(username="CP1234567", keycloak_guid="1b20db59-19a0-4727-affe-c6f64309fd04") user.save() user.delete() diff --git a/auth-api/tests/unit/models/test_user_status_code.py b/auth-api/tests/unit/models/test_user_status_code.py index b4da95d42c..de8fa6c54f 100644 --- a/auth-api/tests/unit/models/test_user_status_code.py +++ b/auth-api/tests/unit/models/test_user_status_code.py @@ -21,13 +21,9 @@ def test_status_code(session): """Assert that a User Status Code can be stored in the database.""" - status_code = UserStatusCodeModel( - id=100, - name='TEST', - description='TEST CODE' - ) + status_code = UserStatusCodeModel(id=100, name="TEST", description="TEST CODE") status_code = status_code.save() assert status_code.id is not None - status_code = UserStatusCodeModel.get_user_status_by_name('TEST') + status_code = UserStatusCodeModel.get_user_status_by_name("TEST") assert status_code.id == 100 assert UserStatusCodeModel.get_default_type() == 1 diff --git a/auth-api/tests/unit/models/views/test_authorization.py b/auth-api/tests/unit/models/views/test_authorization.py index e778b4458f..bb3b74fa0b 100644 --- a/auth-api/tests/unit/models/views/test_authorization.py +++ 
b/auth-api/tests/unit/models/views/test_authorization.py @@ -20,8 +20,13 @@ from auth_api.models.views.authorization import Authorization from auth_api.utils.enums import ProductCode from tests.utilities.factory_utils import ( - factory_affiliation_model, factory_entity_model, factory_membership_model, factory_org_model, factory_product_model, - factory_user_model) + factory_affiliation_model, + factory_entity_model, + factory_membership_model, + factory_org_model, + factory_product_model, + factory_user_model, +) def test_find_user_authorization_by_business_number(session): # pylint:disable=unused-argument @@ -31,8 +36,9 @@ def test_find_user_authorization_by_business_number(session): # pylint:disable= membership = factory_membership_model(user.id, org.id) entity = factory_entity_model() factory_affiliation_model(entity.id, org.id) - authorization = Authorization.find_user_authorization_by_business_number(entity.business_identifier, - str(user.keycloak_guid)) + authorization = Authorization.find_user_authorization_by_business_number( + entity.business_identifier, str(user.keycloak_guid) + ) assert authorization is not None assert authorization.org_membership == membership.membership_type_code @@ -45,8 +51,7 @@ def test_find_user_authorization_by_org_id(session): # pylint:disable=unused-ar membership = factory_membership_model(user.id, org.id) entity = factory_entity_model() factory_affiliation_model(entity.id, org.id) - authorization = Authorization.find_user_authorization_by_org_id(str(user.keycloak_guid), - org.id) + authorization = Authorization.find_user_authorization_by_org_id(str(user.keycloak_guid), org.id) assert authorization is not None assert authorization.org_membership == membership.membership_type_code @@ -59,12 +64,13 @@ def test_find_invalid_user_authorization_by_business_number(session): # pylint: factory_membership_model(user.id, org.id) entity = factory_entity_model() factory_affiliation_model(entity.id, org.id) - authorization = 
Authorization.find_user_authorization_by_business_number(entity.business_identifier, - str(uuid.uuid4())) + authorization = Authorization.find_user_authorization_by_business_number( + entity.business_identifier, str(uuid.uuid4()) + ) assert authorization is None # Test with invalid business identifier - authorization = Authorization.find_user_authorization_by_business_number('', str(uuid.uuid4())) + authorization = Authorization.find_user_authorization_by_business_number("", str(uuid.uuid4())) assert authorization is None @@ -100,8 +106,9 @@ def test_find_user_authorization_by_business_number_product(session): # pylint: factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) entity = factory_entity_model() factory_affiliation_model(entity.id, org.id) - authorization = Authorization.find_user_authorization_by_business_number_and_product(entity.business_identifier, - ProductCode.DIR_SEARCH.value) + authorization = Authorization.find_user_authorization_by_business_number_and_product( + entity.business_identifier, ProductCode.DIR_SEARCH.value + ) assert authorization is not None assert authorization.product_code == ProductCode.DIR_SEARCH.value diff --git a/auth-api/tests/unit/services/test_affidavit.py b/auth-api/tests/unit/services/test_affidavit.py index 1916107425..82b74f8a69 100644 --- a/auth-api/tests/unit/services/test_affidavit.py +++ b/auth-api/tests/unit/services/test_affidavit.py @@ -15,16 +15,16 @@ Test suite to ensure that the affidavit service routines are working as expected. 
""" -import mock -from auth_api.services import Affidavit as AffidavitService +from unittest import mock + from auth_api.models import Task as TaskModel +from auth_api.services import Affidavit as AffidavitService from auth_api.services import Org as OrgService from auth_api.services import Task as TaskService -from auth_api.utils.enums import (AffidavitStatus, LoginSource, OrgStatus, TaskStatus, TaskAction, - TaskRelationshipStatus) +from auth_api.utils.enums import AffidavitStatus, LoginSource, OrgStatus, TaskAction, TaskRelationshipStatus, TaskStatus +from tests.conftest import mock_token from tests.utilities.factory_scenarios import TestAffidavit, TestJwtClaims, TestOrgInfo, TestUserInfo # noqa: I005 from tests.utilities.factory_utils import factory_user_model, factory_user_model_with_contact, patch_token_info -from tests.conftest import mock_token def test_create_affidavit(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument @@ -36,7 +36,7 @@ def test_create_affidavit(session, keycloak_mock, monkeypatch): # pylint:disabl affidavit = AffidavitService.create_affidavit(affidavit_info=affidavit_info) assert affidavit - assert affidavit.as_dict().get('status', None) == AffidavitStatus.PENDING.value + assert affidavit.as_dict().get("status_code", None) == AffidavitStatus.PENDING.value def test_create_affidavit_duplicate(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument @@ -48,57 +48,59 @@ def test_create_affidavit_duplicate(session, keycloak_mock, monkeypatch): # pyl affidavit_info = TestAffidavit.get_test_affidavit_with_contact() affidavit = AffidavitService.create_affidavit(affidavit_info=affidavit_info) - assert affidavit.as_dict().get('status', None) == AffidavitStatus.PENDING.value + assert affidavit.as_dict().get("status_code", None) == AffidavitStatus.PENDING.value new_affidavit_info = TestAffidavit.get_test_affidavit_with_contact() affidavit2 = AffidavitService.create_affidavit(affidavit_info=new_affidavit_info) 
new_affidavit_info_2 = TestAffidavit.get_test_affidavit_with_contact() affidavit3 = AffidavitService.create_affidavit(affidavit_info=new_affidavit_info_2) - assert affidavit.as_dict().get('status', None) == AffidavitStatus.INACTIVE.value - assert affidavit2.as_dict().get('status', None) == AffidavitStatus.INACTIVE.value - assert affidavit3.as_dict().get('status', None) == AffidavitStatus.PENDING.value + assert affidavit.as_dict().get("status_code", None) == AffidavitStatus.INACTIVE.value + assert affidavit2.as_dict().get("status_code", None) == AffidavitStatus.INACTIVE.value + assert affidavit3.as_dict().get("status_code", None) == AffidavitStatus.PENDING.value -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_approve_org(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Affidavit can be approved.""" user = factory_user_model_with_contact(user_info=TestUserInfo.user_bceid_tester) token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) patch_token_info(token_info, monkeypatch) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() AffidavitService.create_affidavit(affidavit_info=affidavit_info) org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value - task_model = TaskModel.find_by_task_for_account(org_dict['id'], status=TaskStatus.OPEN.value) - assert task_model.relationship_id == org_dict['id'] + assert org_dict["status_code"] == OrgStatus.PENDING_STAFF_REVIEW.value + task_model = TaskModel.find_by_task_for_account(org_dict["id"], status=TaskStatus.OPEN.value) + 
assert task_model.relationship_id == org_dict["id"] assert task_model.action == TaskAction.AFFIDAVIT_REVIEW.value task_info = { - 'status': TaskStatus.OPEN.value, - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value, - 'remarks': ['Test Remark'] + "status": TaskStatus.OPEN.value, + "relationshipStatus": TaskRelationshipStatus.ACTIVE.value, + "remarks": ["Test Remark"], } task = TaskService.update_task(TaskService(task_model), task_info) task_dict = task.as_dict() - affidavit = AffidavitService.find_affidavit_by_org_id(task_dict['relationship_id']) - assert affidavit['status'] == AffidavitStatus.APPROVED.value + affidavit = AffidavitService.find_affidavit_by_org_id(task_dict["relationship_id"]) + assert affidavit["status_code"] == AffidavitStatus.APPROVED.value -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_task_creation(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that affidavit reupload creates new task.""" user = factory_user_model_with_contact() token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) patch_token_info(token_info, monkeypatch) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() AffidavitService.create_affidavit(affidavit_info=affidavit_info) org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) - org_id = org.as_dict().get('id') + org_id = org.as_dict().get("id") task_model: TaskModel = TaskModel.find_by_task_for_account(org_id, TaskStatus.OPEN.value) - assert task_model is not None, 'New Open should be generated' + assert task_model is not None, "New Open should be generated" task_model.status = TaskStatus.HOLD.value # set current 
task to hold.Its a staff action new_affidavit_info = TestAffidavit.get_test_affidavit_with_contact() AffidavitService.create_affidavit(affidavit_info=new_affidavit_info) @@ -106,12 +108,13 @@ def test_task_creation(session, keycloak_mock, monkeypatch): # pylint:disable=u assert TaskModel.find_by_task_for_account(org_id, TaskStatus.OPEN.value) is not None -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_reject_org(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Affidavit can be rejected.""" user = factory_user_model_with_contact(user_info=TestUserInfo.user_bceid_tester) token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) patch_token_info(token_info, monkeypatch) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() @@ -120,21 +123,21 @@ def test_reject_org(session, keycloak_mock, monkeypatch): # pylint:disable=unus affidavit_info = TestAffidavit.get_test_affidavit_with_contact() affidavit = AffidavitService.create_affidavit(affidavit_info=affidavit_info) - assert affidavit1.as_dict().get('status', None) == AffidavitStatus.INACTIVE.value - assert affidavit.as_dict().get('status', None) == AffidavitStatus.PENDING.value + assert affidavit1.as_dict().get("status_code", None) == AffidavitStatus.INACTIVE.value + assert affidavit.as_dict().get("status_code", None) == AffidavitStatus.PENDING.value org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value - task_model = TaskModel.find_by_task_for_account(org_dict['id'], status=TaskStatus.OPEN.value) - assert 
task_model.relationship_id == org_dict['id'] + assert org_dict["status_code"] == OrgStatus.PENDING_STAFF_REVIEW.value + task_model = TaskModel.find_by_task_for_account(org_dict["id"], status=TaskStatus.OPEN.value) + assert task_model.relationship_id == org_dict["id"] assert task_model.action == TaskAction.AFFIDAVIT_REVIEW.value task_info = { - 'status': TaskStatus.OPEN.value, - 'relationshipStatus': TaskRelationshipStatus.REJECTED.value, - 'remarks': ['Test Remark'] + "status": TaskStatus.OPEN.value, + "relationshipStatus": TaskRelationshipStatus.REJECTED.value, + "remarks": ["Test Remark"], } task = TaskService.update_task(TaskService(task_model), task_info) task_dict = task.as_dict() - affidavit = AffidavitService.find_affidavit_by_org_id(task_dict['relationship_id']) - assert affidavit['status'] == AffidavitStatus.REJECTED.value + affidavit = AffidavitService.find_affidavit_by_org_id(task_dict["relationship_id"]) + assert affidavit["status_code"] == AffidavitStatus.REJECTED.value diff --git a/auth-api/tests/unit/services/test_affiliation.py b/auth-api/tests/unit/services/test_affiliation.py index e95e5e3bd6..0102b5535d 100644 --- a/auth-api/tests/unit/services/test_affiliation.py +++ b/auth-api/tests/unit/services/test_affiliation.py @@ -15,82 +15,89 @@ Test suite to ensure that the Affiliation service routines are working as expected. 
""" +from unittest import mock from unittest.mock import ANY, patch -import mock import pytest from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error +from auth_api.models.affiliation import Affiliation as AffiliationModel from auth_api.models.dataclass import Activity from auth_api.models.dataclass import Affiliation as AffiliationData from auth_api.models.dataclass import DeleteAffiliationRequest -from auth_api.models.affiliation import Affiliation as AffiliationModel from auth_api.models.org import Org as OrgModel from auth_api.services import ActivityLogPublisher from auth_api.services import Affiliation as AffiliationService from auth_api.utils.enums import ActivityAction, OrgType +from tests.conftest import mock_token from tests.utilities.factory_scenarios import TestEntityInfo, TestJwtClaims, TestOrgInfo, TestOrgTypeInfo, TestUserInfo from tests.utilities.factory_utils import ( - convert_org_to_staff_org, factory_entity_service, factory_membership_model, factory_org_service, - factory_user_model_with_contact, patch_get_firms_parties, patch_token_info) -from tests.conftest import mock_token + convert_org_to_staff_org, + factory_entity_service, + factory_membership_model, + factory_org_service, + factory_user_model_with_contact, + patch_get_firms_parties, + patch_token_info, +) -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_create_affiliation(session, auth_mock, monkeypatch, environment): # pylint:disable=unused-argument """Assert that an Affiliation can be created.""" entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = 
org_dictionary["id"] - affiliation = AffiliationService.create_affiliation(org_id, business_identifier, environment, - TestEntityInfo.entity_lear_mock['passCode']) + affiliation = AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) assert affiliation assert affiliation.entity.identifier == entity_service.identifier - assert affiliation.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation.as_dict()["organization"]["id"] == org_dictionary["id"] -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_create_affiliation_no_org(session, auth_mock, environment): # pylint:disable=unused-argument """Assert that an Affiliation can not be created without org.""" entity_service = factory_entity_service() entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] with pytest.raises(BusinessException) as exception: AffiliationService.create_affiliation(None, business_identifier, environment) assert exception.value.code == Error.DATA_NOT_FOUND.name -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_create_affiliation_no_entity(session, auth_mock, environment): # pylint:disable=unused-argument """Assert that an Affiliation can not be created without entity.""" org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] with pytest.raises(BusinessException) as exception: AffiliationService.create_affiliation(org_id, None, environment) assert exception.value.code == Error.DATA_NOT_FOUND.name -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_create_affiliation_implicit(session, 
auth_mock, environment): # pylint:disable=unused-argument """Assert that an Affiliation can not be created when org is BASIC.""" entity_service1 = factory_entity_service() entity_dictionary1 = entity_service1.as_dict() - business_identifier1 = entity_dictionary1['business_identifier'] + business_identifier1 = entity_dictionary1["business_identifier"] org_service = factory_org_service(org_type_info=TestOrgTypeInfo.implicit) org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - pass_code = '111111111' + pass_code = "111111111" with pytest.raises(BusinessException) as exception: AffiliationService.create_affiliation(org_id, business_identifier1, environment, pass_code) @@ -101,38 +108,37 @@ def test_create_affiliation_implicit(session, auth_mock, environment): # pylint assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_create_affiliation_with_passcode(session, auth_mock, environment): # pylint:disable=unused-argument """Assert that an Affiliation can be created.""" entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - affiliation = AffiliationService.create_affiliation(org_id, - business_identifier, - environment, - TestEntityInfo.entity_lear_mock['passCode']) + affiliation = AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) assert affiliation assert affiliation.entity.identifier == entity_service.identifier - assert affiliation.as_dict()['organization']['id'] 
== org_dictionary['id'] + assert affiliation.as_dict()["organization"]["id"] == org_dictionary["id"] -@pytest.mark.parametrize('environment', ['test', None]) -def test_create_affiliation_with_passcode_no_passcode_input(session, - auth_mock, - environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +def test_create_affiliation_with_passcode_no_passcode_input( + session, auth_mock, environment +): # pylint:disable=unused-argument """Assert that an Affiliation can not be created with a passcode entity and no passcode input parameter.""" entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_passcode) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] with pytest.raises(BusinessException) as exception: AffiliationService.create_affiliation(org_id, business_identifier, environment) @@ -140,22 +146,22 @@ def test_create_affiliation_with_passcode_no_passcode_input(session, assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_create_affiliation_exists(session, auth_mock, environment): # pylint:disable=unused-argument """Assert that multiple affiliation is allowed.""" entity_service1 = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary1 = entity_service1.as_dict() - business_identifier1 = entity_dictionary1['business_identifier'] + business_identifier1 = entity_dictionary1["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] org_service_2 = 
factory_org_service(org_info=TestOrgInfo.org2) org_dictionary_2 = org_service_2.as_dict() - org_id_2 = org_dictionary_2['id'] + org_id_2 = org_dictionary_2["id"] - pass_code = TestEntityInfo.entity_lear_mock['passCode'] + pass_code = TestEntityInfo.entity_lear_mock["passCode"] # create first row in affiliation table AffiliationService.create_affiliation(org_id, business_identifier1, environment, pass_code) @@ -165,31 +171,35 @@ def test_create_affiliation_exists(session, auth_mock, environment): # pylint:d assert affiliation -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@pytest.mark.parametrize('environment', ['test', None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@pytest.mark.parametrize("environment", ["test", None]) def test_create_affiliation_firms(session, auth_mock, monkeypatch, environment): # pylint:disable=unused-argument """Assert that an Affiliation can be created.""" patch_get_firms_parties(monkeypatch) entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock3) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - affiliation = AffiliationService.create_affiliation(org_id, business_identifier, environment, - TestEntityInfo.entity_lear_mock3['passCode']) + affiliation = AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock3["passCode"] + ) assert affiliation assert affiliation.entity.identifier == entity_service.identifier - assert affiliation.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation.as_dict()["organization"]["id"] == org_dictionary["id"] 
-@pytest.mark.parametrize('test_name, org_type, should_succeed, environment', [ - ('sbc_staff_create_affiliation_no_passcode_success', OrgType.SBC_STAFF.value, True, None), - ('staff_create_affiliation_no_passcode_success', OrgType.STAFF.value, True, 'Test'), - ('premium_create_affiliation_failure', OrgType.PREMIUM.value, False, None) -]) +@pytest.mark.parametrize( + "test_name, org_type, should_succeed, environment", + [ + ("sbc_staff_create_affiliation_no_passcode_success", OrgType.SBC_STAFF.value, True, None), + ("staff_create_affiliation_no_passcode_success", OrgType.STAFF.value, True, "Test"), + ("premium_create_affiliation_failure", OrgType.PREMIUM.value, False, None), + ], +) def test_create_affiliation_staff_sbc_staff( session, auth_mock, monkeypatch, test_name: str, org_type: OrgType, should_succeed: bool, environment: str ): # pylint:disable=unused-argument @@ -197,11 +207,11 @@ def test_create_affiliation_staff_sbc_staff( db_user = factory_user_model_with_contact(user_info=TestUserInfo.user_test) entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock3) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] factory_membership_model(db_user.id, org_id) @@ -210,99 +220,95 @@ def test_create_affiliation_staff_sbc_staff( convert_org_to_staff_org(org_id, org_type) # Requires staff role. 
- user = TestJwtClaims.staff_role if org_type == OrgType.STAFF.value \ - else TestJwtClaims.public_bceid_account_holder_user - user['idp_userid'] = db_user.idp_userid - user['sub'] = db_user.keycloak_guid + user = ( + TestJwtClaims.staff_role if org_type == OrgType.STAFF.value else TestJwtClaims.public_bceid_account_holder_user + ) + user["idp_userid"] = db_user.idp_userid + user["sub"] = db_user.keycloak_guid patch_token_info(user, monkeypatch) if should_succeed: affiliation = AffiliationService.create_affiliation(org_id, business_identifier, environment) assert affiliation assert affiliation.entity.identifier == entity_service.identifier - assert affiliation.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation.as_dict()["organization"]["id"] == org_dictionary["id"] else: with pytest.raises(BusinessException): affiliation = AffiliationService.create_affiliation(org_id, business_identifier, environment) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@pytest.mark.parametrize('environment', ['test', None]) -def test_create_affiliation_firms_party_with_additional_space(session, auth_mock, - monkeypatch, environment): +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@pytest.mark.parametrize("environment", ["test", None]) +def test_create_affiliation_firms_party_with_additional_space(session, auth_mock, monkeypatch, environment): """Assert that an Affiliation can be created.""" patch_get_firms_parties(monkeypatch) entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock3) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] # When party name has 
additional space - pass_code = TestEntityInfo.entity_lear_mock3['passCode'].replace(' ', ' ') - affiliation = AffiliationService.create_affiliation(org_id, business_identifier, environment, - pass_code) + pass_code = TestEntityInfo.entity_lear_mock3["passCode"].replace(" ", " ") + affiliation = AffiliationService.create_affiliation(org_id, business_identifier, environment, pass_code) assert affiliation assert affiliation.entity.identifier == entity_service.identifier - assert affiliation.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation.as_dict()["organization"]["id"] == org_dictionary["id"] -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@pytest.mark.parametrize('environment', ['test', None]) -def test_create_affiliation_firms_party_not_valid(session, auth_mock, monkeypatch, - environment): +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@pytest.mark.parametrize("environment", ["test", None]) +def test_create_affiliation_firms_party_not_valid(session, auth_mock, monkeypatch, environment): """Assert that an Affiliation can be created.""" patch_get_firms_parties(monkeypatch) entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock3) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] with pytest.raises(BusinessException) as exception: - AffiliationService.create_affiliation(org_id, business_identifier, environment, 'test user') + AffiliationService.create_affiliation(org_id, business_identifier, environment, "test user") assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name -@pytest.mark.parametrize('environment', ['test', 
None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_find_affiliated_entities_by_org_id(session, auth_mock, environment): # pylint:disable=unused-argument """Assert that an Affiliation can be created.""" entity_service1 = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary1 = entity_service1.as_dict() - business_identifier1 = entity_dictionary1['business_identifier'] + business_identifier1 = entity_dictionary1["business_identifier"] entity_service2 = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock2) entity_dictionary2 = entity_service2.as_dict() - business_identifier2 = entity_dictionary2['business_identifier'] + business_identifier2 = entity_dictionary2["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] # create first row in affiliation table - AffiliationService.create_affiliation(org_id, - business_identifier1, - environment, - TestEntityInfo.entity_lear_mock['passCode']) + AffiliationService.create_affiliation( + org_id, business_identifier1, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) # create second row in affiliation table - AffiliationService.create_affiliation(org_id, - business_identifier2, - environment, - TestEntityInfo.entity_lear_mock2['passCode']) + AffiliationService.create_affiliation( + org_id, business_identifier2, environment, TestEntityInfo.entity_lear_mock2["passCode"] + ) affiliated_entities = AffiliationService.find_visible_affiliations_by_org_id(org_id, environment) assert affiliated_entities assert len(affiliated_entities) == 2 - assert affiliated_entities[0]['business_identifier'] == entity_dictionary2['business_identifier'] + assert affiliated_entities[0]["business_identifier"] == entity_dictionary2["business_identifier"] -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def 
test_find_affiliated_entities_by_org_id_no_org(session, auth_mock, environment): # pylint:disable=unused-argument """Assert that an Affiliation can not be find without org id or org id not exists.""" with pytest.raises(BusinessException) as exception: @@ -314,65 +320,66 @@ def test_find_affiliated_entities_by_org_id_no_org(session, auth_mock, environme assert exception.value.code == Error.DATA_NOT_FOUND.name -@pytest.mark.parametrize('environment', ['test', None]) -def test_find_affiliated_entities_by_org_id_no_affiliation(session, auth_mock, - environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +def test_find_affiliated_entities_by_org_id_no_affiliation( + session, auth_mock, environment +): # pylint:disable=unused-argument """Assert that an Affiliation can not be find without affiliation.""" org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - with patch.object(AffiliationModel, 'find_affiliations_by_org_id', return_value=[]): + with patch.object(AffiliationModel, "find_affiliations_by_org_id", return_value=[]): affiliations = AffiliationService.find_visible_affiliations_by_org_id(org_id, environment) assert not affiliations -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_delete_affiliation(session, auth_mock, monkeypatch, environment): # pylint:disable=unused-argument """Assert that an affiliation can be deleted.""" entity_service = factory_entity_service(TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] patch_token_info(TestJwtClaims.user_test, monkeypatch) - 
with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - affiliation = AffiliationService.create_affiliation(org_id, - business_identifier, - environment, - TestEntityInfo.entity_lear_mock['passCode']) - mock_alp.assert_called_with(Activity(action=ActivityAction.CREATE_AFFILIATION.value, - org_id=ANY, name=ANY, id=ANY)) - - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + affiliation = AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) + mock_alp.assert_called_with( + Activity(action=ActivityAction.CREATE_AFFILIATION.value, org_id=ANY, name=ANY, id=ANY) + ) + + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: delete_affiliation_request = DeleteAffiliationRequest(org_id=org_id, business_identifier=business_identifier) AffiliationService.delete_affiliation(delete_affiliation_request, environment=environment) - mock_alp.assert_called_with(Activity(action=ActivityAction.REMOVE_AFFILIATION.value, - org_id=ANY, name=ANY, id=ANY)) + mock_alp.assert_called_with( + Activity(action=ActivityAction.REMOVE_AFFILIATION.value, org_id=ANY, name=ANY, id=ANY) + ) found_affiliation = AffiliationModel.query.filter_by(id=affiliation.identifier).first() assert found_affiliation is None -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_delete_affiliation_no_org(session, auth_mock, monkeypatch, environment): # pylint:disable=unused-argument """Assert that an affiliation can not be deleted without org.""" entity_service = factory_entity_service(TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = 
entity_dictionary["business_identifier"] patch_token_info(TestJwtClaims.user_test, monkeypatch) org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - AffiliationService.create_affiliation(org_id, - business_identifier, - environment, - TestEntityInfo.entity_lear_mock['passCode']) + AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) with pytest.raises(BusinessException) as exception: delete_affiliation_request = DeleteAffiliationRequest(org_id=None, business_identifier=business_identifier) @@ -381,23 +388,22 @@ def test_delete_affiliation_no_org(session, auth_mock, monkeypatch, environment) assert exception.value.code == Error.DATA_NOT_FOUND.name -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_delete_affiliation_no_entity(session, auth_mock, monkeypatch, environment): # pylint:disable=unused-argument """Assert that an affiliation can not be deleted without entity.""" entity_service = factory_entity_service(TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] patch_token_info(TestJwtClaims.user_test, monkeypatch) org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - AffiliationService.create_affiliation(org_id, - business_identifier, - environment, - TestEntityInfo.entity_lear_mock['passCode']) + AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) with pytest.raises(BusinessException) as exception: delete_affiliation_request = DeleteAffiliationRequest(org_id=org_id, business_identifier=None) @@ -406,24 +412,24 @@ 
def test_delete_affiliation_no_entity(session, auth_mock, monkeypatch, environme assert exception.value.code == Error.DATA_NOT_FOUND.name -@pytest.mark.parametrize('environment', ['test', None]) -def test_delete_affiliation_no_affiliation(session, auth_mock, monkeypatch, - environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +def test_delete_affiliation_no_affiliation( + session, auth_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Assert that an affiliation can not be deleted without affiliation.""" entity_service = factory_entity_service(TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] patch_token_info(TestJwtClaims.user_test, monkeypatch) org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - AffiliationService.create_affiliation(org_id, - business_identifier, - environment, - TestEntityInfo.entity_lear_mock['passCode']) + AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) delete_affiliation_request = DeleteAffiliationRequest(org_id=org_id, business_identifier=business_identifier) AffiliationService.delete_affiliation(delete_affiliation_request, environment=environment) @@ -434,23 +440,22 @@ def test_delete_affiliation_no_affiliation(session, auth_mock, monkeypatch, assert exception.value.code == Error.DATA_NOT_FOUND.name -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_delete_affiliation_implicit(session, auth_mock, monkeypatch, environment): # pylint:disable=unused-argument """Assert that an affiliation can be deleted.""" entity_service = factory_entity_service(TestEntityInfo.entity_lear_mock) 
entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service(org_type_info=TestOrgTypeInfo.implicit) org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] patch_token_info(TestJwtClaims.user_test, monkeypatch) - affiliation = AffiliationService.create_affiliation(org_id, - business_identifier, - environment, - TestEntityInfo.entity_lear_mock['passCode']) + affiliation = AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) delete_affiliation_request = DeleteAffiliationRequest(org_id=org_id, business_identifier=business_identifier) AffiliationService.delete_affiliation(delete_affiliation_request, environment=environment) @@ -458,26 +463,27 @@ def test_delete_affiliation_implicit(session, auth_mock, monkeypatch, environmen assert found_affiliation is None -@pytest.mark.parametrize('environment', ['test', None]) -def test_delete_affiliation_reset_passcode(session, auth_mock, monkeypatch, - environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +def test_delete_affiliation_reset_passcode( + session, auth_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Assert that an affiliation can be deleted.""" entity_service = factory_entity_service(TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] patch_token_info(TestJwtClaims.public_account_holder_user, monkeypatch) org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] - - affiliation = AffiliationService.create_affiliation(org_id, - business_identifier, - environment, - 
TestEntityInfo.entity_lear_mock['passCode']) - delete_affiliation_request = DeleteAffiliationRequest(org_id=org_id, business_identifier=business_identifier, - email_addresses=None, reset_passcode=True) + org_id = org_dictionary["id"] + + affiliation = AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) + delete_affiliation_request = DeleteAffiliationRequest( + org_id=org_id, business_identifier=business_identifier, email_addresses=None, reset_passcode=True + ) AffiliationService.delete_affiliation(delete_affiliation_request, environment=environment) found_affiliation = AffiliationModel.query.filter_by(id=affiliation.identifier).first() @@ -489,12 +495,13 @@ def test_create_new_business(session, auth_mock, nr_mock): # pylint:disable=unu org_service = factory_org_service() org_dictionary = org_service.as_dict() - affiliation_data = AffiliationData(org_id=org_dictionary['id'], business_identifier='NR 1234567', - email='test@test.com', phone='1112223333') + affiliation_data = AffiliationData( + org_id=org_dictionary["id"], business_identifier="NR 1234567", email="test@test.com", phone="1112223333" + ) affiliation = AffiliationService.create_new_business_affiliation(affiliation_data) assert affiliation - assert affiliation.as_dict()['business']['business_identifier'] == affiliation_data.business_identifier + assert affiliation.as_dict()["business"]["business_identifier"] == affiliation_data.business_identifier def test_create_new_business_email_case(session, auth_mock, nr_mock): # pylint:disable=unused-argument @@ -502,12 +509,13 @@ def test_create_new_business_email_case(session, auth_mock, nr_mock): # pylint: org_service = factory_org_service() org_dictionary = org_service.as_dict() - affiliation_data = AffiliationData(org_id=org_dictionary['id'], business_identifier='NR 1234567', - email='TEST@TEST.COM', phone='1112223333') + affiliation_data = AffiliationData( + 
org_id=org_dictionary["id"], business_identifier="NR 1234567", email="TEST@TEST.COM", phone="1112223333" + ) affiliation = AffiliationService.create_new_business_affiliation(affiliation_data) assert affiliation - assert affiliation.as_dict()['business']['business_identifier'] == affiliation_data.business_identifier + assert affiliation.as_dict()["business"]["business_identifier"] == affiliation_data.business_identifier def test_create_new_business_invalid_contact(session, auth_mock, nr_mock): # pylint:disable=unused-argument @@ -515,17 +523,17 @@ def test_create_new_business_invalid_contact(session, auth_mock, nr_mock): # py org_service = factory_org_service() org_dictionary = org_service.as_dict() - affiliation_data = AffiliationData(org_id=org_dictionary['id'], business_identifier='NR 1234567') + affiliation_data = AffiliationData(org_id=org_dictionary["id"], business_identifier="NR 1234567") with pytest.raises(BusinessException) as exception: - affiliation_data.phone = '0000000000' + affiliation_data.phone = "0000000000" AffiliationService.create_new_business_affiliation(affiliation_data) assert exception.value.code == Error.NR_INVALID_CONTACT.name with pytest.raises(BusinessException) as exception: affiliation_data.phone = None - affiliation_data.email = 'aaa@aaa.com' + affiliation_data.email = "aaa@aaa.com" AffiliationService.create_new_business_affiliation(affiliation_data) assert exception.value.code == Error.NR_INVALID_CONTACT.name @@ -542,32 +550,31 @@ def test_find_affiliations_for_new_business(session, auth_mock, nr_mock, monkeyp entity_service1 = factory_entity_service(entity_info=TestEntityInfo.name_request) entity_dictionary1 = entity_service1.as_dict() - business_identifier1 = entity_dictionary1['business_identifier'] - name1 = entity_dictionary1['name'] + business_identifier1 = entity_dictionary1["business_identifier"] + name1 = entity_dictionary1["name"] entity_service2 = factory_entity_service(entity_info=TestEntityInfo.temp_business) 
entity_dictionary2 = entity_service2.as_dict() - business_identifier2 = entity_dictionary2['business_identifier'] + business_identifier2 = entity_dictionary2["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] # create NR affiliation - affiliation_data = AffiliationData(org_id=org_id, business_identifier=business_identifier1, phone='1112223333') + affiliation_data = AffiliationData(org_id=org_id, business_identifier=business_identifier1, phone="1112223333") AffiliationService.create_new_business_affiliation(affiliation_data) # create second row in affiliation table - AffiliationService.create_affiliation(org_id, - business_identifier2) + AffiliationService.create_affiliation(org_id, business_identifier2) affiliated_entities = AffiliationService.find_visible_affiliations_by_org_id(org_id) assert affiliated_entities assert len(affiliated_entities) == 1 - assert affiliated_entities[0]['business_identifier'] == business_identifier2 - assert affiliated_entities[0]['nr_number'] == business_identifier1 - assert affiliated_entities[0]['name'] == name1 + assert affiliated_entities[0]["business_identifier"] == business_identifier2 + assert affiliated_entities[0]["nr_number"] == business_identifier1 + assert affiliated_entities[0]["name"] == name1 delete_affiliation_request = DeleteAffiliationRequest(org_id=org_id, business_identifier=business_identifier2) AffiliationService.delete_affiliation(delete_affiliation_request) @@ -576,11 +583,12 @@ def test_find_affiliations_for_new_business(session, auth_mock, nr_mock, monkeyp assert affiliated_entities assert len(affiliated_entities) == 1 - assert affiliated_entities[0]['business_identifier'] == business_identifier1 + assert affiliated_entities[0]["business_identifier"] == business_identifier1 -def test_find_affiliations_for_new_business_incorporation_complete(session, auth_mock, - nr_mock): # 
pylint:disable=unused-argument +def test_find_affiliations_for_new_business_incorporation_complete( + session, auth_mock, nr_mock +): # pylint:disable=unused-argument """Assert that an Affiliation can be created.""" # Create 2 entities - 1 with type NR and another one TMP # Affiliate to an org @@ -591,30 +599,29 @@ def test_find_affiliations_for_new_business_incorporation_complete(session, auth nr_entity = factory_entity_service(entity_info=TestEntityInfo.name_request) entity_dictionary1 = nr_entity.as_dict() - nr_business_identifier = entity_dictionary1['business_identifier'] + nr_business_identifier = entity_dictionary1["business_identifier"] tmp_entity = factory_entity_service(entity_info=TestEntityInfo.temp_business) entity_dictionary2 = tmp_entity.as_dict() - tmp_business_identifier = entity_dictionary2['business_identifier'] + tmp_business_identifier = entity_dictionary2["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - affiliation_data = AffiliationData(org_id=org_id, business_identifier=nr_business_identifier, phone='1112223333') + affiliation_data = AffiliationData(org_id=org_id, business_identifier=nr_business_identifier, phone="1112223333") # create NR affiliation AffiliationService.create_new_business_affiliation(affiliation_data) # create second row in affiliation table - AffiliationService.create_affiliation(org_id, - tmp_business_identifier) + AffiliationService.create_affiliation(org_id, tmp_business_identifier) affiliated_entities = AffiliationService.find_visible_affiliations_by_org_id(org_id) assert affiliated_entities assert len(affiliated_entities) == 1 - assert affiliated_entities[0]['business_identifier'] == tmp_business_identifier + assert affiliated_entities[0]["business_identifier"] == tmp_business_identifier # Delete the NR And TEMP IA affiliation and entities delete_affiliation_request = 
DeleteAffiliationRequest(org_id=org_id, business_identifier=tmp_business_identifier) @@ -627,157 +634,169 @@ def test_find_affiliations_for_new_business_incorporation_complete(session, auth # Create entities for a TEMP with name as BC... number and incorporated entity tmp_inc_entity = factory_entity_service(entity_info=TestEntityInfo.temp_business_incoporated) entity_dictionary1 = tmp_inc_entity.as_dict() - tmp_business_incorporated_identifier = entity_dictionary1['business_identifier'] + tmp_business_incorporated_identifier = entity_dictionary1["business_identifier"] AffiliationService.create_affiliation(org_id, business_identifier=tmp_business_incorporated_identifier) inc_entity = factory_entity_service(entity_info=TestEntityInfo.business_incoporated) entity_dictionary1 = inc_entity.as_dict() - business_incorporated_identifier = entity_dictionary1['business_identifier'] + business_incorporated_identifier = entity_dictionary1["business_identifier"] AffiliationService.create_affiliation(org_id, business_identifier=business_incorporated_identifier) affiliated_entities = AffiliationService.find_visible_affiliations_by_org_id(org_id) assert affiliated_entities assert len(affiliated_entities) == 1 - assert affiliated_entities[0]['business_identifier'] == business_incorporated_identifier + assert affiliated_entities[0]["business_identifier"] == business_incorporated_identifier -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_fix_stale_affiliations(session, auth_mock, nr_mock, system_user_mock, environment): """Assert that an affilation doesn't go stale when transitioning from NR to a business.""" nr = factory_entity_service(entity_info=TestEntityInfo.name_request).as_dict() - org_id = factory_org_service().as_dict()['id'] - affiliation1 = AffiliationService.create_affiliation(org_id, business_identifier=nr['business_identifier'], - environment=environment) + org_id = 
factory_org_service().as_dict()["id"] + affiliation1 = AffiliationService.create_affiliation( + org_id, business_identifier=nr["business_identifier"], environment=environment + ) # Create a new entity with the finalized business identifier. (filer usually does this on registration) business = factory_entity_service(entity_info=TestEntityInfo.entity2).as_dict() - affiliation2 = AffiliationService.create_affiliation(org_id, business_identifier=business['business_identifier'], - environment=environment) + affiliation2 = AffiliationService.create_affiliation( + org_id, business_identifier=business["business_identifier"], environment=environment + ) assert affiliation1.entity.identifier != affiliation2.entity.identifier # Run fix stale affiliations to point the affiliations at the new entity. - AffiliationService.fix_stale_affiliations(org_id=None, entity_details={ - 'identifier': business['business_identifier'], - 'nrNumber': nr['business_identifier'], - 'bootstrapIdentifier': 'gdsf34324' - }, environment=environment) + AffiliationService.fix_stale_affiliations( + org_id=None, + entity_details={ + "identifier": business["business_identifier"], + "nrNumber": nr["business_identifier"], + "bootstrapIdentifier": "gdsf34324", + }, + environment=environment, + ) assert affiliation1.entity.identifier == affiliation2.entity.identifier -@pytest.mark.parametrize('environment', ['test', None]) +@pytest.mark.parametrize("environment", ["test", None]) def test_find_affiliation(session, auth_mock, environment): # pylint:disable=unused-argument """Assert that an Affiliation can be retrieve by org id and business identifier.""" entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = 
org_dictionary['id'] + org_id = org_dictionary["id"] # create first row in affiliation table - AffiliationService.create_affiliation(org_id, - business_identifier, - environment, - TestEntityInfo.entity_lear_mock['passCode']) + AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) affiliation = AffiliationService.find_affiliation(org_id, business_identifier, environment) assert affiliation - assert affiliation['business']['business_identifier'] == business_identifier - assert affiliation['organization']['id'] == org_dictionary['id'] + assert affiliation["business"]["business_identifier"] == business_identifier + assert affiliation["organization"]["id"] == org_dictionary["id"] def test_create_affiliation_multiple_environments(session, auth_mock, monkeypatch): # pylint:disable=unused-argument """Assert that same Affiliation can be created in different environments.""" entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - affiliation_dev = AffiliationService.create_affiliation(org_id, business_identifier, 'dev', - TestEntityInfo.entity_lear_mock['passCode']) + affiliation_dev = AffiliationService.create_affiliation( + org_id, business_identifier, "dev", TestEntityInfo.entity_lear_mock["passCode"] + ) assert affiliation_dev assert affiliation_dev.entity.identifier == entity_service.identifier - assert affiliation_dev.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation_dev.as_dict()["organization"]["id"] == org_dictionary["id"] - affiliation_test = AffiliationService.create_affiliation(org_id, business_identifier, 'test', - 
TestEntityInfo.entity_lear_mock['passCode']) + affiliation_test = AffiliationService.create_affiliation( + org_id, business_identifier, "test", TestEntityInfo.entity_lear_mock["passCode"] + ) assert affiliation_test assert affiliation_test.entity.identifier == entity_service.identifier - assert affiliation_test.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation_test.as_dict()["organization"]["id"] == org_dictionary["id"] - affiliation_dev = AffiliationService.find_affiliation(org_id, business_identifier, 'dev') + affiliation_dev = AffiliationService.find_affiliation(org_id, business_identifier, "dev") assert affiliation_dev - assert affiliation_dev['business']['business_identifier'] == business_identifier - assert affiliation_dev['organization']['id'] == org_dictionary['id'] + assert affiliation_dev["business"]["business_identifier"] == business_identifier + assert affiliation_dev["organization"]["id"] == org_dictionary["id"] - affiliation_test = AffiliationService.find_affiliation(org_id, business_identifier, 'test') + affiliation_test = AffiliationService.find_affiliation(org_id, business_identifier, "test") assert affiliation_test - assert affiliation_test['business']['business_identifier'] == business_identifier - assert affiliation_test['organization']['id'] == org_dictionary['id'] + assert affiliation_test["business"]["business_identifier"] == business_identifier + assert affiliation_test["organization"]["id"] == org_dictionary["id"] -@pytest.mark.parametrize('environment', ['test', None]) -def test_find_affiliation_multiple_environments(session, auth_mock, monkeypatch, - environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +def test_find_affiliation_multiple_environments( + session, auth_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Assert that the affiliation record for the specified environment is returned.""" entity_service = 
factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - affiliation_dev = AffiliationService.create_affiliation(org_id, business_identifier, environment, - TestEntityInfo.entity_lear_mock['passCode']) + affiliation_dev = AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) assert affiliation_dev assert affiliation_dev.entity.identifier == entity_service.identifier - assert affiliation_dev.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation_dev.as_dict()["organization"]["id"] == org_dictionary["id"] affiliation_dev = AffiliationService.find_affiliation(org_id, business_identifier, environment) assert affiliation_dev - assert affiliation_dev['business']['business_identifier'] == business_identifier - assert affiliation_dev['organization']['id'] == org_dictionary['id'] + assert affiliation_dev["business"]["business_identifier"] == business_identifier + assert affiliation_dev["organization"]["id"] == org_dictionary["id"] with pytest.raises(BusinessException) as exception: - AffiliationService.find_affiliation(org_id, business_identifier, 'sandbox') + AffiliationService.find_affiliation(org_id, business_identifier, "sandbox") assert exception.value.code == Error.DATA_NOT_FOUND.name -@pytest.mark.parametrize('environment', ['test']) -def test_delete_affiliation_multiple_environments(session, auth_mock, monkeypatch, - environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test"]) +def test_delete_affiliation_multiple_environments( + session, auth_mock, monkeypatch, environment +): # pylint:disable=unused-argument 
"""Verify that the affiliation record for the specified environment gets deleted.""" entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] + business_identifier = entity_dictionary["business_identifier"] org_service = factory_org_service() org_dictionary = org_service.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] - affiliation_dev = AffiliationService.create_affiliation(org_id, business_identifier, None, - TestEntityInfo.entity_lear_mock['passCode']) + affiliation_dev = AffiliationService.create_affiliation( + org_id, business_identifier, None, TestEntityInfo.entity_lear_mock["passCode"] + ) assert affiliation_dev assert affiliation_dev.entity.identifier == entity_service.identifier - assert affiliation_dev.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation_dev.as_dict()["organization"]["id"] == org_dictionary["id"] - affiliation_test = AffiliationService.create_affiliation(org_id, business_identifier, environment, - TestEntityInfo.entity_lear_mock['passCode']) + affiliation_test = AffiliationService.create_affiliation( + org_id, business_identifier, environment, TestEntityInfo.entity_lear_mock["passCode"] + ) assert affiliation_test assert affiliation_test.entity.identifier == entity_service.identifier - assert affiliation_test.as_dict()['organization']['id'] == org_dictionary['id'] + assert affiliation_test.as_dict()["organization"]["id"] == org_dictionary["id"] - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None): + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None): delete_affiliation_request = DeleteAffiliationRequest(org_id=org_id, business_identifier=business_identifier) AffiliationService.delete_affiliation(delete_affiliation_request, environment=environment) @@ -787,5 +806,5 @@ def 
test_delete_affiliation_multiple_environments(session, auth_mock, monkeypatc affiliation_dev = AffiliationService.find_affiliation(org_id, business_identifier) assert affiliation_dev - assert affiliation_dev['business']['business_identifier'] == business_identifier - assert affiliation_dev['organization']['id'] == org_dictionary['id'] + assert affiliation_dev["business"]["business_identifier"] == business_identifier + assert affiliation_dev["organization"]["id"] == org_dictionary["id"] diff --git a/auth-api/tests/unit/services/test_affiliation_invitation.py b/auth-api/tests/unit/services/test_affiliation_invitation.py index b6f043f205..564b4a9428 100644 --- a/auth-api/tests/unit/services/test_affiliation_invitation.py +++ b/auth-api/tests/unit/services/test_affiliation_invitation.py @@ -16,9 +16,9 @@ Test suite to ensure that the Affiliation Invitation service routines are working as expected. """ from datetime import datetime, timedelta +from unittest import mock from unittest.mock import patch -import mock import pytest from freezegun import freeze_time from sbc_common_components.utils.enums import QueueMessageTypes @@ -39,22 +39,28 @@ from auth_api.services import User from auth_api.utils import roles from auth_api.utils.enums import InvitationStatus +from tests.conftest import mock_token from tests.utilities.factory_scenarios import TestContactInfo, TestEntityInfo, TestJwtClaims, TestOrgInfo, TestUserInfo from tests.utilities.factory_utils import ( - factory_affiliation_invitation, factory_entity_model, factory_membership_model, factory_user_model, - patch_token_info) -from tests.conftest import mock_token + factory_affiliation_invitation, + factory_entity_model, + factory_membership_model, + factory_user_model, + patch_token_info, +) def create_test_entity(): """Create test entity data.""" - entity = EntityService.save_entity({ - 'businessIdentifier': TestEntityInfo.entity_passcode['businessIdentifier'], - 'businessNumber': 
TestEntityInfo.entity_passcode['businessNumber'], - 'passCode': TestEntityInfo.entity_passcode['passCode'], - 'name': TestEntityInfo.entity_passcode['name'], - 'corpTypeCode': TestEntityInfo.entity_passcode['corpTypeCode'] - }) + entity = EntityService.save_entity( + { + "businessIdentifier": TestEntityInfo.entity_passcode["businessIdentifier"], + "businessNumber": TestEntityInfo.entity_passcode["businessNumber"], + "passCode": TestEntityInfo.entity_passcode["passCode"], + "name": TestEntityInfo.entity_passcode["name"], + "corpTypeCode": TestEntityInfo.entity_passcode["corpTypeCode"], + } + ) entity.add_contact(TestContactInfo.contact1) return entity @@ -72,82 +78,91 @@ def setup_org_and_entity(user): return from_org_dictionary, to_org_dictionary, entity_dictionary -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_as_dict(session, auth_mock, keycloak_mock, business_mock, monkeypatch, - environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_as_dict( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Assert that the Affiliation Invitation is exported correctly as a dictionary.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): user = factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = 
factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) - - affiliation_invitation = AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, - User(user), '', environment) + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + business_identifier=entity_dictionary["business_identifier"], + ) + + affiliation_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "", environment + ) affiliation_invitation_dictionary = affiliation_invitation.as_dict() - assert affiliation_invitation_dictionary['recipient_email'] == affiliation_invitation_info['recipientEmail'] + assert affiliation_invitation_dictionary["recipient_email"] == affiliation_invitation_info["recipientEmail"] @pytest.mark.parametrize( - 'create_org_with, environment', [ - ('id', None), - ('id', 'test'), - ('uuid', None), - ('uuid', 'test'), - ] + "create_org_with, environment", + [ + ("id", None), + ("id", "test"), + ("uuid", None), + ("uuid", "test"), + ], ) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_affiliation_invitation(session, auth_mock, keycloak_mock, business_mock, - monkeypatch, create_org_with, environment): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_affiliation_invitation( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, create_org_with, environment +): # pylint:disable=unused-argument """Assert that an Affiliation Invitation can be created.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): user = 
factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'] if create_org_with == 'id' else None, - to_org_uuid=to_org_dictionary['uuid'] if create_org_with == 'uuid' else None, - business_identifier=entity_dictionary['business_identifier']) - - affiliation_invitation = AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, - User(user), '', environment) + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"] if create_org_with == "id" else None, + to_org_uuid=to_org_dictionary["uuid"] if create_org_with == "uuid" else None, + business_identifier=entity_dictionary["business_identifier"], + ) + + affiliation_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "", environment + ) invitation_dictionary = affiliation_invitation.as_dict() - assert invitation_dictionary['recipient_email'] == affiliation_invitation_info['recipientEmail'] - assert invitation_dictionary['id'] + assert invitation_dictionary["recipient_email"] == affiliation_invitation_info["recipientEmail"] + assert invitation_dictionary["id"] -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_find_affiliation_invitation_by_id(session, auth_mock, keycloak_mock, business_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", 
mock_token) +def test_find_affiliation_invitation_by_id( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Find an existing affiliation invitation with the provided id.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + business_identifier=entity_dictionary["business_identifier"], + ) - new_invitation = AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, - User(user), '', - environment).as_dict() - invitation = AffiliationInvitationService.find_affiliation_invitation_by_id(new_invitation['id']).as_dict() + new_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "", environment + ).as_dict() + invitation = AffiliationInvitationService.find_affiliation_invitation_by_id(new_invitation["id"]).as_dict() assert invitation - assert invitation['recipient_email'] == affiliation_invitation_info['recipientEmail'] + assert invitation["recipient_email"] == affiliation_invitation_info["recipientEmail"] def test_find_invitation_by_id_exception(session, auth_mock): # pylint:disable=unused-argument @@ -156,56 +171,61 @@ def test_find_invitation_by_id_exception(session, 
auth_mock): # pylint:disable= assert affiliation_invitation is None -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_affiliation_invitation(session, auth_mock, keycloak_mock, business_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_affiliation_invitation( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Delete the specified affiliation invitation.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) - - new_invitation = AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, - User(user), '', - environment).as_dict() - AffiliationInvitationService.delete_affiliation_invitation(new_invitation['id']) - invitation = AffiliationInvitationService.find_affiliation_invitation_by_id(new_invitation['id']) + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + business_identifier=entity_dictionary["business_identifier"], + ) + + new_invitation = 
AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "", environment + ).as_dict() + AffiliationInvitationService.delete_affiliation_invitation(new_invitation["id"]) + invitation = AffiliationInvitationService.find_affiliation_invitation_by_id(new_invitation["id"]) assert invitation is None -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_accepted_affiliation_invitation(session, auth_mock, keycloak_mock, business_mock, - monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_accepted_affiliation_invitation( + session, auth_mock, keycloak_mock, business_mock, monkeypatch +): # pylint:disable=unused-argument """Delete the specified accepted affiliation invitation.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) - - new_invitation = AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, - User(user), '').as_dict() - - invitation = AffiliationInvitationService.accept_affiliation_invitation(new_invitation['id'], - User(user), - '').as_dict() + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + 
business_identifier=entity_dictionary["business_identifier"], + ) + + new_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "" + ).as_dict() + + invitation = AffiliationInvitationService.accept_affiliation_invitation( + new_invitation["id"], User(user), "" + ).as_dict() assert invitation - assert invitation['status'] == InvitationStatus.ACCEPTED.value + assert invitation["status"] == InvitationStatus.ACCEPTED.value - AffiliationInvitationService.delete_affiliation_invitation(new_invitation['id']) - deleted_invitation = AffiliationInvitationService.find_affiliation_invitation_by_id(new_invitation['id']) + AffiliationInvitationService.delete_affiliation_invitation(new_invitation["id"]) + deleted_invitation = AffiliationInvitationService.find_affiliation_invitation_by_id(new_invitation["id"]) assert deleted_invitation - assert deleted_invitation.as_dict().get('is_deleted') + assert deleted_invitation.as_dict().get("is_deleted") def test_delete_affiliation_invitation_exception(session, auth_mock): # pylint:disable=unused-argument @@ -216,90 +236,100 @@ def test_delete_affiliation_invitation_exception(session, auth_mock): # pylint: assert exception.value.code == Error.DATA_NOT_FOUND.name -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_update_affiliation_invitation(session, auth_mock, keycloak_mock, business_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_update_affiliation_invitation( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Update the specified affiliation invitation with new data.""" - with 
patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) - - new_invitation = AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, - User(user), '', environment) - updated_invitation = new_invitation.update_affiliation_invitation(User(user), '', {}).as_dict() - assert updated_invitation['status'] == 'PENDING' - - -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_update_invitation_verify_different_tokens(session, auth_mock, keycloak_mock, - business_mock, monkeypatch, - environment): # pylint:disable=unused-argument + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + business_identifier=entity_dictionary["business_identifier"], + ) + + new_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "", environment + ) + updated_invitation = new_invitation.update_affiliation_invitation(User(user), "", {}).as_dict() + assert updated_invitation["status"] == "PENDING" + + +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def 
test_update_invitation_verify_different_tokens( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Update the specified affiliation invitation to check for token difference.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) - - new_invitation = AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, - User(user), '', environment) - old_token = new_invitation.as_dict().get('token') + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + business_identifier=entity_dictionary["business_identifier"], + ) + + new_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "", environment + ) + old_token = new_invitation.as_dict().get("token") with freeze_time( - lambda: datetime.now() + timedelta(seconds=1)): # to give time difference..or else token will be same.. - updated_invitation = new_invitation.update_affiliation_invitation(User(user), '', {}).as_dict() - new_token = updated_invitation.get('token') + lambda: datetime.now() + timedelta(seconds=1) + ): # to give time difference..or else token will be same.. 
+ updated_invitation = new_invitation.update_affiliation_invitation(User(user), "", {}).as_dict() + new_token = updated_invitation.get("token") assert old_token != new_token - assert updated_invitation['status'] == 'PENDING' + assert updated_invitation["status"] == "PENDING" def test_generate_confirmation_token(session): # pylint:disable=unused-argument """Generate the affiliation invitation token.""" - confirmation_token = AffiliationInvitationService.generate_confirmation_token(1, 2, 3, 'CP1234567') + confirmation_token = AffiliationInvitationService.generate_confirmation_token(1, 2, 3, "CP1234567") assert confirmation_token is not None -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_validate_token_accepted(session, auth_mock, keycloak_mock, business_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_validate_token_accepted( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Validate invalid invitation token.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) user_invitee = factory_user_model(TestUserInfo.user1) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - 
to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) - - new_invitation = AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, - User(user_invitee), '', - environment).as_dict() - token = AffiliationInvitationService\ - .generate_confirmation_token(new_invitation['id'], - new_invitation['from_org']['id'], - new_invitation['to_org']['id'], - entity_dictionary['business_identifier']) - - AffiliationInvitationService.accept_affiliation_invitation(new_invitation['id'], User(user_invitee), '', - environment) + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + business_identifier=entity_dictionary["business_identifier"], + ) + + new_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user_invitee), "", environment + ).as_dict() + token = AffiliationInvitationService.generate_confirmation_token( + new_invitation["id"], + new_invitation["from_org"]["id"], + new_invitation["to_org"]["id"], + entity_dictionary["business_identifier"], + ) + + AffiliationInvitationService.accept_affiliation_invitation( + new_invitation["id"], User(user_invitee), "", environment + ) with pytest.raises(BusinessException) as exception: - AffiliationInvitationService.validate_token(token, new_invitation['id']) + AffiliationInvitationService.validate_token(token, new_invitation["id"]) assert exception.value.code == Error.ACTIONED_AFFILIATION_INVITATION.name @@ -312,171 +342,180 @@ def test_validate_token_exception(session): # pylint:disable=unused-argument assert exception.value.code == Error.EXPIRED_AFFILIATION_INVITATION.name -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_accept_affiliation_invitation(session, auth_mock, keycloak_mock, business_mock, - monkeypatch, environment): # 
pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_accept_affiliation_invitation( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Accept the affiliation invitation and add the affiliation from the invitation.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): - with patch.object(auth, 'check_auth', return_value=True): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): + with patch.object(auth, "check_auth", return_value=True): user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = factory_user_model(user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + business_identifier=entity_dictionary["business_identifier"], + ) user_with_token_invitee = TestUserInfo.user1 - user_with_token_invitee['keycloak_guid'] = TestJwtClaims.edit_role_2['sub'] + user_with_token_invitee["keycloak_guid"] = TestJwtClaims.edit_role_2["sub"] user_invitee = 
factory_user_model(user_with_token_invitee) - new_invitation = AffiliationInvitationService \ - .create_affiliation_invitation(affiliation_invitation_info, - User(user_invitee), '', environment).as_dict() + new_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user_invitee), "", environment + ).as_dict() - invitation = AffiliationInvitationService.accept_affiliation_invitation(new_invitation['id'], - User(user_invitee), - '', environment).as_dict() + invitation = AffiliationInvitationService.accept_affiliation_invitation( + new_invitation["id"], User(user_invitee), "", environment + ).as_dict() patch_token_info(TestJwtClaims.public_user_role, monkeypatch) - affiliation = AffiliationService.find_affiliation(new_invitation['from_org']['id'], - entity_dictionary['business_identifier'], environment) + affiliation = AffiliationService.find_affiliation( + new_invitation["from_org"]["id"], entity_dictionary["business_identifier"], environment + ) assert affiliation assert invitation - assert affiliation['id'] == invitation['affiliation_id'] + assert affiliation["id"] == invitation["affiliation_id"] -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_accept_invitation_exceptions(session, auth_mock, keycloak_mock, business_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_accept_invitation_exceptions( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Accept the affiliation invitation exceptions.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): - with patch.object(auth, 'check_auth', 
return_value=True): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): + with patch.object(auth, "check_auth", return_value=True): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_dictionary['id'], - to_org_id=to_org_dictionary['id'], - business_identifier=entity_dictionary['business_identifier']) + from_org_id=from_org_dictionary["id"], + to_org_id=to_org_dictionary["id"], + business_identifier=entity_dictionary["business_identifier"], + ) user_invitee = factory_user_model(TestUserInfo.user1) # Accepting a non-existent invitation should raise not found exception with pytest.raises(BusinessException) as exception: - AffiliationInvitationService.accept_affiliation_invitation(None, User(user_invitee), '', environment) + AffiliationInvitationService.accept_affiliation_invitation(None, User(user_invitee), "", environment) assert exception.value.code == Error.DATA_NOT_FOUND.name # Accepting an invitation multiple times should raise actioned invitation exception - new_invitation = AffiliationInvitationService \ - .create_affiliation_invitation(affiliation_invitation_info, - User(user_invitee), '', environment).as_dict() + new_invitation = AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user_invitee), "", environment + ).as_dict() - AffiliationInvitationService.accept_affiliation_invitation(new_invitation['id'], User(user_invitee), '', - environment) + AffiliationInvitationService.accept_affiliation_invitation( + new_invitation["id"], User(user_invitee), "", environment + ) with pytest.raises(BusinessException) as exception: 
- AffiliationInvitationService.accept_affiliation_invitation(new_invitation['id'], - User(user_invitee), '', - environment) + AffiliationInvitationService.accept_affiliation_invitation( + new_invitation["id"], User(user_invitee), "", environment + ) assert exception.value.code == Error.ACTIONED_AFFILIATION_INVITATION.name # Accepting an expired invitation should raise an expired invitation exception with pytest.raises(BusinessException) as exception: - expired_invitation: AffiliationInvitationModel = AffiliationInvitationModel \ - .find_invitation_by_id(new_invitation['id']) - expired_invitation.invitation_status = InvitationStatusModel.get_status_by_code('EXPIRED') + expired_invitation: AffiliationInvitationModel = AffiliationInvitationModel.find_invitation_by_id( + new_invitation["id"] + ) + expired_invitation.invitation_status = InvitationStatusModel.get_status_by_code("EXPIRED") expired_invitation.save() - AffiliationInvitationService.accept_affiliation_invitation(expired_invitation.id, - User(user_invitee), - '', environment) + AffiliationInvitationService.accept_affiliation_invitation( + expired_invitation.id, User(user_invitee), "", environment + ) assert exception.value.code == Error.EXPIRED_AFFILIATION_INVITATION.name -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_get_invitations_by_from_org_id(session, auth_mock, keycloak_mock, business_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_get_invitations_by_from_org_id( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Find an existing invitation with the provided from org id.""" - with patch.object(AffiliationInvitationService, 
'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): patch_token_info(TestJwtClaims.public_user_role, monkeypatch) user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) - from_org_id = from_org_dictionary['id'] - to_org_id = to_org_dictionary['id'] + from_org_id = from_org_dictionary["id"] + to_org_id = to_org_dictionary["id"] affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=entity_dictionary['business_identifier']) + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=entity_dictionary["business_identifier"] + ) - AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, User(user), '', - environment) + AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "", environment + ) - invitations: list = AffiliationInvitationService \ - .search_invitations(AffiliationInvitationSearch(from_org_id=from_org_id, - status_codes=['PENDING'])) + invitations: list = AffiliationInvitationService.search_invitations( + AffiliationInvitationSearch(from_org_id=from_org_id, status_codes=["PENDING"]) + ) assert invitations assert len(invitations) == 1 -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_get_invitations_by_to_org_id(session, auth_mock, 
keycloak_mock, business_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_get_invitations_by_to_org_id( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Find an existing invitation with the provided to org id.""" - with patch.object(AffiliationInvitationService, 'send_affiliation_invitation', return_value=None): + with patch.object(AffiliationInvitationService, "send_affiliation_invitation", return_value=None): patch_token_info(TestJwtClaims.public_user_role, monkeypatch) user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) from_org_dictionary, to_org_dictionary, entity_dictionary = setup_org_and_entity(user) - from_org_id = from_org_dictionary['id'] - to_org_id = to_org_dictionary['id'] + from_org_id = from_org_dictionary["id"] + to_org_id = to_org_dictionary["id"] affiliation_invitation_info = factory_affiliation_invitation( - from_org_id=from_org_id, - to_org_id=to_org_id, - business_identifier=entity_dictionary['business_identifier']) + from_org_id=from_org_id, to_org_id=to_org_id, business_identifier=entity_dictionary["business_identifier"] + ) - AffiliationInvitationService.create_affiliation_invitation(affiliation_invitation_info, User(user), '', - environment) + AffiliationInvitationService.create_affiliation_invitation( + affiliation_invitation_info, User(user), "", environment + ) - invitations: list = AffiliationInvitationService \ - 
.search_invitations( - search_filter=AffiliationInvitationSearch(to_org_id=to_org_id, status_codes=['PENDING']) - ) + invitations: list = AffiliationInvitationService.search_invitations( + search_filter=AffiliationInvitationSearch(to_org_id=to_org_id, status_codes=["PENDING"]) + ) assert invitations assert len(invitations) == 1 -def _setup_affiliation_invitation_data(affiliation_invitation_type='EMAIL', - affiliation_invitation_status_code=InvitationStatus.PENDING.value): +def _setup_affiliation_invitation_data( + affiliation_invitation_type="EMAIL", affiliation_invitation_status_code=InvitationStatus.PENDING.value +): from_org = OrgModel() from_org.id = 1 - from_org.name = 'From the moon inc.' - from_org.branch_name = 'Luna division' + from_org.name = "From the moon inc." + from_org.branch_name = "Luna division" to_org = OrgModel() to_org.id = 2 - to_org.name = 'To the stars inc.' + to_org.name = "To the stars inc." affiliation_invitation = AffiliationInvitationModel() affiliation_invitation.from_org = from_org affiliation_invitation.to_org = to_org - affiliation_invitation.recipient_email = 'abc@test.com' + affiliation_invitation.recipient_email = "abc@test.com" affiliation_invitation.from_org_id = from_org.id affiliation_invitation.to_org_id = to_org.id affiliation_invitation.invitation_status_code = affiliation_invitation_status_code @@ -485,77 +524,84 @@ def _setup_affiliation_invitation_data(affiliation_invitation_type='EMAIL', return affiliation_invitation -@patch.object(auth_api.services.affiliation_invitation, 'publish_to_mailer') -def test_send_affiliation_invitation_magic_link(publish_to_mailer_mock, - session, auth_mock, keycloak_mock, business_mock, monkeypatch): +@patch.object(auth_api.services.affiliation_invitation, "publish_to_mailer") +def test_send_affiliation_invitation_magic_link( + publish_to_mailer_mock, session, auth_mock, keycloak_mock, business_mock, monkeypatch +): """Verify Magic link data for email is correctly generated.""" 
affiliation_invitation = _setup_affiliation_invitation_data() - business_name = 'Busy Inc.' - affiliation_invitation.token = 'ABCD' + business_name = "Busy Inc." + affiliation_invitation.token = "ABCD" AffiliationInvitationService.send_affiliation_invitation( affiliation_invitation=affiliation_invitation, business_name=business_name, - email_addresses=affiliation_invitation.recipient_email + email_addresses=affiliation_invitation.recipient_email, ) expected_data = { - 'accountId': affiliation_invitation.from_org.id, - 'businessName': business_name, - 'emailAddresses': affiliation_invitation.recipient_email, - 'orgName': affiliation_invitation.from_org.name, - 'contextUrl': 'None/RnJvbSB0aGUgbW9vbiBpbmMu/affiliationInvitation/acceptToken/ABCD' + "accountId": affiliation_invitation.from_org.id, + "businessName": business_name, + "emailAddresses": affiliation_invitation.recipient_email, + "orgName": affiliation_invitation.from_org.name, + "contextUrl": "None/RnJvbSB0aGUgbW9vbiBpbmMu/affiliationInvitation/acceptToken/ABCD", } - publish_to_mailer_mock.assert_called_with(notification_type=QueueMessageTypes.AFFILIATION_INVITATION.value, - data=expected_data) + publish_to_mailer_mock.assert_called_with( + notification_type=QueueMessageTypes.AFFILIATION_INVITATION.value, data=expected_data + ) -@patch.object(auth_api.services.affiliation_invitation, 'publish_to_mailer') -def test_send_affiliation_invitation_request_sent(publish_to_mailer_mock, - session, auth_mock, keycloak_mock, business_mock, monkeypatch): +@patch.object(auth_api.services.affiliation_invitation, "publish_to_mailer") +def test_send_affiliation_invitation_request_sent( + publish_to_mailer_mock, session, auth_mock, keycloak_mock, business_mock, monkeypatch +): """Verify REQUEST ACCESS - on create request - data for email is correctly generated.""" - additional_message = 'Ad Astra.' 
- affiliation_invitation = _setup_affiliation_invitation_data(affiliation_invitation_type='REQUEST') - business_name = 'Troll incorporated' + additional_message = "Ad Astra." + affiliation_invitation = _setup_affiliation_invitation_data(affiliation_invitation_type="REQUEST") + business_name = "Troll incorporated" affiliation_invitation.additional_message = additional_message AffiliationInvitationService.send_affiliation_invitation( affiliation_invitation=affiliation_invitation, business_name=business_name, - email_addresses=affiliation_invitation.recipient_email + email_addresses=affiliation_invitation.recipient_email, ) expected_data = { - 'accountId': affiliation_invitation.from_org.id, - 'businessName': business_name, - 'emailAddresses': affiliation_invitation.recipient_email, - 'orgName': affiliation_invitation.from_org.name, - 'fromOrgName': affiliation_invitation.from_org.name, - 'fromOrgBranchName': affiliation_invitation.from_org.branch_name, - 'toOrgName': affiliation_invitation.to_org.name, - 'toOrgBranchName': affiliation_invitation.to_org.branch_name, - 'additionalMessage': additional_message + "accountId": affiliation_invitation.from_org.id, + "businessName": business_name, + "emailAddresses": affiliation_invitation.recipient_email, + "orgName": affiliation_invitation.from_org.name, + "fromOrgName": affiliation_invitation.from_org.name, + "fromOrgBranchName": affiliation_invitation.from_org.branch_name, + "toOrgName": affiliation_invitation.to_org.name, + "toOrgBranchName": affiliation_invitation.to_org.branch_name, + "additionalMessage": additional_message, } notification_type = QueueMessageTypes.AFFILIATION_INVITATION_REQUEST.value - publish_to_mailer_mock.assert_called_with(notification_type=notification_type, - data=expected_data) + publish_to_mailer_mock.assert_called_with(notification_type=notification_type, data=expected_data) -@patch.object(auth_api.services.affiliation_invitation, 'publish_to_mailer') -def 
test_send_affiliation_invitation_request_authorized(publish_to_mailer_mock, - session, auth_mock, keycloak_mock, business_mock, monkeypatch): +@patch.object(auth_api.services.affiliation_invitation, "publish_to_mailer") +def test_send_affiliation_invitation_request_authorized( + publish_to_mailer_mock, session, auth_mock, keycloak_mock, business_mock, monkeypatch +): """Verify REQUEST ACCESS - on authorize request - data for email is correctly generated.""" - monkeypatch.setattr('auth_api.services.affiliation_invitation.RestService.get_service_account_token', - lambda config_id, config_secret: 'TestToken') + monkeypatch.setattr( + "auth_api.services.affiliation_invitation.RestService.get_service_account_token", + lambda config_id, config_secret: "TestToken", + ) - affiliation_invitation = \ - _setup_affiliation_invitation_data(affiliation_invitation_type='REQUEST', - affiliation_invitation_status_code=InvitationStatus.ACCEPTED.value) - business_name = 'BarFoo, Inc.' # will get it from business mock 'get_business' method - expected_email = 'expected@email.com' - monkeypatch.setattr('auth_api.services.affiliation_invitation.UserService.get_admin_emails_for_org', - lambda org_id: expected_email if org_id == affiliation_invitation.from_org_id else None) + affiliation_invitation = _setup_affiliation_invitation_data( + affiliation_invitation_type="REQUEST", affiliation_invitation_status_code=InvitationStatus.ACCEPTED.value + ) + business_name = "BarFoo, Inc." 
# will get it from business mock 'get_business' method + expected_email = "expected@email.com" + monkeypatch.setattr( + "auth_api.services.affiliation_invitation.UserService.get_admin_emails_for_org", + lambda org_id: expected_email if org_id == affiliation_invitation.from_org_id else None, + ) # simulate subquery for entity entity = EntityModel() @@ -563,43 +609,46 @@ def test_send_affiliation_invitation_request_authorized(publish_to_mailer_mock, affiliation_invitation.entity = entity AffiliationInvitationService.send_affiliation_invitation_authorization_email( - affiliation_invitation=affiliation_invitation, - is_authorized=True + affiliation_invitation=affiliation_invitation, is_authorized=True ) expected_data = { - 'accountId': affiliation_invitation.from_org.id, - 'businessName': business_name, - 'emailAddresses': expected_email, - 'orgName': affiliation_invitation.from_org.name, - 'fromOrgName': affiliation_invitation.from_org.name, - 'fromOrgBranchName': affiliation_invitation.from_org.branch_name, - 'toOrgName': affiliation_invitation.to_org.name, - 'toOrgBranchName': affiliation_invitation.to_org.branch_name, - 'isAuthorized': True + "accountId": affiliation_invitation.from_org.id, + "businessName": business_name, + "emailAddresses": expected_email, + "orgName": affiliation_invitation.from_org.name, + "fromOrgName": affiliation_invitation.from_org.name, + "fromOrgBranchName": affiliation_invitation.from_org.branch_name, + "toOrgName": affiliation_invitation.to_org.name, + "toOrgBranchName": affiliation_invitation.to_org.branch_name, + "isAuthorized": True, } notification_type = QueueMessageTypes.AFFILIATION_INVITATION_REQUEST_AUTHORIZATION.value - publish_to_mailer_mock.assert_called_with(notification_type=notification_type, - data=expected_data) + publish_to_mailer_mock.assert_called_with(notification_type=notification_type, data=expected_data) -@patch.object(auth_api.services.affiliation_invitation, 'publish_to_mailer') -def 
test_send_affiliation_invitation_request_refused(publish_to_mailer_mock, - session, auth_mock, keycloak_mock, business_mock, monkeypatch): +@patch.object(auth_api.services.affiliation_invitation, "publish_to_mailer") +def test_send_affiliation_invitation_request_refused( + publish_to_mailer_mock, session, auth_mock, keycloak_mock, business_mock, monkeypatch +): """Verify REQUEST ACCESS - on refuse request - data for email is correctly generated.""" - monkeypatch.setattr('auth_api.services.affiliation_invitation.RestService.get_service_account_token', - lambda config_id, config_secret: 'TestToken') + monkeypatch.setattr( + "auth_api.services.affiliation_invitation.RestService.get_service_account_token", + lambda config_id, config_secret: "TestToken", + ) - affiliation_invitation = \ - _setup_affiliation_invitation_data(affiliation_invitation_type='REQUEST', - affiliation_invitation_status_code=InvitationStatus.FAILED.value) + affiliation_invitation = _setup_affiliation_invitation_data( + affiliation_invitation_type="REQUEST", affiliation_invitation_status_code=InvitationStatus.FAILED.value + ) - expected_email = 'expected@email.com' - monkeypatch.setattr('auth_api.services.affiliation_invitation.UserService.get_admin_emails_for_org', - lambda org_id: expected_email if org_id == affiliation_invitation.from_org_id else None) + expected_email = "expected@email.com" + monkeypatch.setattr( + "auth_api.services.affiliation_invitation.UserService.get_admin_emails_for_org", + lambda org_id: expected_email if org_id == affiliation_invitation.from_org_id else None, + ) - business_name = 'BarFoo, Inc.' # will get it from business mock 'get_business' method + business_name = "BarFoo, Inc." 
# will get it from business mock 'get_business' method # simulate subquery for entity entity = EntityModel() @@ -607,39 +656,41 @@ def test_send_affiliation_invitation_request_refused(publish_to_mailer_mock, affiliation_invitation.entity = entity AffiliationInvitationService.send_affiliation_invitation_authorization_email( - affiliation_invitation=affiliation_invitation, - is_authorized=False + affiliation_invitation=affiliation_invitation, is_authorized=False ) expected_data = { - 'accountId': affiliation_invitation.from_org.id, - 'businessName': business_name, - 'emailAddresses': expected_email, - 'orgName': affiliation_invitation.from_org.name, - 'fromOrgName': affiliation_invitation.from_org.name, - 'fromOrgBranchName': affiliation_invitation.from_org.branch_name, - 'toOrgName': affiliation_invitation.to_org.name, - 'toOrgBranchName': affiliation_invitation.to_org.branch_name, - 'isAuthorized': False + "accountId": affiliation_invitation.from_org.id, + "businessName": business_name, + "emailAddresses": expected_email, + "orgName": affiliation_invitation.from_org.name, + "fromOrgName": affiliation_invitation.from_org.name, + "fromOrgBranchName": affiliation_invitation.from_org.branch_name, + "toOrgName": affiliation_invitation.to_org.name, + "toOrgBranchName": affiliation_invitation.to_org.branch_name, + "isAuthorized": False, } notification_type = QueueMessageTypes.AFFILIATION_INVITATION_REQUEST_AUTHORIZATION.value - publish_to_mailer_mock.assert_called_with(notification_type=notification_type, - data=expected_data) - - -@pytest.mark.parametrize('test_name,member_type,expect_request_invites', [ - ('test user is org admin', roles.ADMIN, True), - ('test user is org coordinator', roles.COORDINATOR, True), - ('test user is org user', roles.USER, False), -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_get_all_invitations_with_details_related_to_org(session, auth_mock, keycloak_mock, 
business_mock, monkeypatch, - test_name, member_type, expect_request_invites): + publish_to_mailer_mock.assert_called_with(notification_type=notification_type, data=expected_data) + + +@pytest.mark.parametrize( + "test_name,member_type,expect_request_invites", + [ + ("test user is org admin", roles.ADMIN, True), + ("test user is org coordinator", roles.COORDINATOR, True), + ("test user is org user", roles.USER, False), + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_get_all_invitations_with_details_related_to_org( + session, auth_mock, keycloak_mock, business_mock, monkeypatch, test_name, member_type, expect_request_invites +): """Verify REQUEST affiliation invitations are returned only when user is org ADMIN/COORDINATOR.""" # setup an org user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org1 = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org1 org2 = OrgService.create_org(TestOrgInfo.org3, user_id=user.id) @@ -650,29 +701,32 @@ def test_get_all_invitations_with_details_related_to_org(session, auth_mock, key affiliation_invitation_model1 = AffiliationInvitationModel.create_from_dict( invitation_info={ - 'fromOrgId': org1._model.id, - 'toOrgId': org2._model.id, - 'entityId': entity.id, - 'type': 'REQUEST' + "fromOrgId": org1._model.id, + "toOrgId": org2._model.id, + "entityId": entity.id, + "type": "REQUEST", }, - user_id=user.id) + user_id=user.id, + ) affiliation_invitation_model1.save() affiliation_invitation_model2 = AffiliationInvitationModel.create_from_dict( invitation_info={ - 'fromOrgId': org1._model.id, - 'toOrgId': org3._model.id, - 'entityId': entity.id, - 'type': 'REQUEST' + "fromOrgId": org1._model.id, + "toOrgId": org3._model.id, + "entityId": entity.id, + "type": "REQUEST", 
}, - user_id=user.id) + user_id=user.id, + ) affiliation_invitation_model2.save() factory_membership_model(user.id, org1._model.id, member_type=member_type) search_filter = AffiliationInvitationSearch() - result = AffiliationInvitationService.get_all_invitations_with_details_related_to_org(org_id=org1._model.id, - search_filter=search_filter) + result = AffiliationInvitationService.get_all_invitations_with_details_related_to_org( + org_id=org1._model.id, search_filter=search_filter + ) if expect_request_invites: assert len(result) == 2 diff --git a/auth-api/tests/unit/services/test_authorization.py b/auth-api/tests/unit/services/test_authorization.py index 5d1e6f57fe..b656af2a5e 100644 --- a/auth-api/tests/unit/services/test_authorization.py +++ b/auth-api/tests/unit/services/test_authorization.py @@ -27,8 +27,16 @@ from auth_api.utils.roles import ADMIN, STAFF, USER from tests.utilities.factory_scenarios import TestEntityInfo, TestJwtClaims, TestUserInfo from tests.utilities.factory_utils import ( - TestOrgInfo, TestOrgTypeInfo, factory_affiliation_model, factory_entity_model, factory_membership_model, - factory_org_model, factory_product_model, factory_user_model, patch_token_info) + TestOrgInfo, + TestOrgTypeInfo, + factory_affiliation_model, + factory_entity_model, + factory_membership_model, + factory_org_model, + factory_product_model, + factory_user_model, + patch_token_info, +) def test_get_user_authorizations_for_entity(session, monkeypatch): # pylint:disable=unused-argument @@ -38,52 +46,46 @@ def test_get_user_authorizations_for_entity(session, monkeypatch): # pylint:dis membership = factory_membership_model(user.id, org.id) entity = factory_entity_model() factory_affiliation_model(entity.id, org.id) - patch_token_info({ - 'sub': str(user.keycloak_guid), - 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + patch_token_info({"sub": str(user.keycloak_guid), "realm_access": {"roles": ["basic"]}}, monkeypatch) authorization = 
Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert authorization is not None - assert authorization.get('orgMembership', None) == membership.membership_type_code + assert authorization.get("orgMembership", None) == membership.membership_type_code # Test with invalid user - patch_token_info({'sub': str(uuid.uuid4()), 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + patch_token_info({"sub": str(uuid.uuid4()), "realm_access": {"roles": ["basic"]}}, monkeypatch) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert authorization is not None - assert authorization.get('orgMembership', None) is None + assert authorization.get("orgMembership", None) is None # Test for passcode users with invalid username - patch_token_info({'loginSource': 'PASSCODE', 'username': 'INVALID', 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + patch_token_info( + {"loginSource": "PASSCODE", "username": "INVALID", "realm_access": {"roles": ["basic"]}}, monkeypatch + ) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert authorization is not None - assert authorization.get('orgMembership', None) is None + assert authorization.get("orgMembership", None) is None # Test for staff users - patch_token_info( - {'loginSource': '', 'realm_access': {'roles': ['staff']}}, - monkeypatch) + patch_token_info({"loginSource": "", "realm_access": {"roles": ["staff"]}}, monkeypatch) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert authorization is not None - assert authorization.get('orgMembership', None) is None + assert authorization.get("orgMembership", None) is None # test with api_gw source user - patch_token_info({ - 'Account-Id': org.id, - 'loginSource': 'API_GW', - 'sub': str(user.keycloak_guid), - 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + patch_token_info( + { + "Account-Id": org.id, + 
"loginSource": "API_GW", + "sub": str(user.keycloak_guid), + "realm_access": {"roles": ["basic"]}, + }, + monkeypatch, + ) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert authorization is not None - assert authorization.get('orgMembership', None) == membership.membership_type_code + assert authorization.get("orgMembership", None) == membership.membership_type_code def test_get_user_authorizations_for_org(session, monkeypatch): # pylint:disable=unused-argument @@ -92,35 +94,23 @@ def test_get_user_authorizations_for_org(session, monkeypatch): # pylint:disabl org = factory_org_model() membership = factory_membership_model(user.id, org.id) - patch_token_info({ - 'sub': str(user.keycloak_guid), - 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + patch_token_info({"sub": str(user.keycloak_guid), "realm_access": {"roles": ["basic"]}}, monkeypatch) authorization = Authorization.get_account_authorizations_for_org(org.id, ProductCode.BUSINESS.value) assert authorization is not None - assert authorization.get('orgMembership', None) == membership.membership_type_code - assert authorization.get('roles') is not None - - patch_token_info({ - 'sub': str(user.keycloak_guid), - 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + assert authorization.get("orgMembership", None) == membership.membership_type_code + assert authorization.get("roles") is not None + + patch_token_info({"sub": str(user.keycloak_guid), "realm_access": {"roles": ["basic"]}}, monkeypatch) authorization = Authorization.get_account_authorizations_for_org(org.id, ProductCode.NAMES_REQUEST.value) assert authorization is not None - assert authorization.get('orgMembership', None) == membership.membership_type_code - assert authorization.get('roles') is not None - - patch_token_info({ - 'sub': str(user.keycloak_guid), - 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + assert authorization.get("orgMembership", None) == 
membership.membership_type_code + assert authorization.get("roles") is not None + + patch_token_info({"sub": str(user.keycloak_guid), "realm_access": {"roles": ["basic"]}}, monkeypatch) authorization = Authorization.get_account_authorizations_for_org(org.id, ProductCode.VS.value) assert authorization is not None - assert authorization.get('orgMembership') is None - assert len(authorization.get('roles')) == 0 + assert authorization.get("orgMembership") is None + assert len(authorization.get("roles")) == 0 def test_get_user_authorizations_for_entity_service_account(session, monkeypatch): @@ -134,24 +124,26 @@ def test_get_user_authorizations_for_entity_service_account(session, monkeypatch # Test for service accounts with correct product code patch_token_info( - {'loginSource': '', 'realm_access': {'roles': ['system']}, 'product_code': ProductCode.BUSINESS.value}, - monkeypatch) + {"loginSource": "", "realm_access": {"roles": ["system"]}, "product_code": ProductCode.BUSINESS.value}, + monkeypatch, + ) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert bool(authorization) is True - assert authorization.get('orgMembership', None) == 'ADMIN' + assert authorization.get("orgMembership", None) == "ADMIN" # Test for service accounts with wrong product code - patch_token_info({'loginSource': '', 'realm_access': {'roles': ['system']}, 'product_code': 'INVALIDCP'}, - monkeypatch) + patch_token_info( + {"loginSource": "", "realm_access": {"roles": ["system"]}, "product_code": "INVALIDCP"}, monkeypatch + ) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert bool(authorization) is False - assert authorization.get('orgMembership', None) is None + assert authorization.get("orgMembership", None) is None # Test for service accounts with no product code - patch_token_info({'loginSource': '', 'realm_access': {'roles': ['system']}}, monkeypatch) + patch_token_info({"loginSource": "", 
"realm_access": {"roles": ["system"]}}, monkeypatch) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert bool(authorization) is False - assert authorization.get('orgMembership', None) is None + assert authorization.get("orgMembership", None) is None def test_get_user_authorizations(session): # pylint:disable=unused-argument @@ -164,12 +156,12 @@ def test_get_user_authorizations(session): # pylint:disable=unused-argument authorization = Authorization.get_user_authorizations(str(user.keycloak_guid)) assert authorization is not None - assert authorization['authorizations'][0].get('orgMembership', None) == membership.membership_type_code + assert authorization["authorizations"][0].get("orgMembership", None) == membership.membership_type_code # Test with invalid user authorization = Authorization.get_user_authorizations(str(uuid.uuid4())) assert authorization is not None - assert len(authorization['authorizations']) == 0 + assert len(authorization["authorizations"]) == 0 def test_check_auth(session, monkeypatch): # pylint:disable=unused-argument @@ -182,98 +174,120 @@ def test_check_auth(session, monkeypatch): # pylint:disable=unused-argument factory_affiliation_model(entity.id, org.id) # Test if staff admin can access to STAFF only method - patch_token_info({'realm_access': {'roles': ['staff', 'create_accounts']}, 'sub': str(user.keycloak_guid)}, - monkeypatch) + patch_token_info( + {"realm_access": {"roles": ["staff", "create_accounts"]}, "sub": str(user.keycloak_guid)}, monkeypatch + ) check_auth(one_of_roles=[STAFF]) # Test for staff admin role to only STAFF - patch_token_info({'realm_access': {'roles': ['staff', 'create_accounts']}, 'sub': str(user.keycloak_guid)}, - monkeypatch) + patch_token_info( + {"realm_access": {"roles": ["staff", "create_accounts"]}, "sub": str(user.keycloak_guid)}, monkeypatch + ) check_auth(equals_role=STAFF) # Test for staff role - patch_token_info({'realm_access': {'roles': ['staff']}, 
'sub': str(user.keycloak_guid), - 'product_code': ProductCode.BUSINESS.value}, monkeypatch) + patch_token_info( + { + "realm_access": {"roles": ["staff"]}, + "sub": str(user.keycloak_guid), + "product_code": ProductCode.BUSINESS.value, + }, + monkeypatch, + ) check_auth(one_of_roles=[STAFF]) # Test for owner role - patch_token_info({'realm_access': {'roles': ['public']}, 'sub': str(user.keycloak_guid), - 'product_code': ProductCode.BUSINESS.value}, monkeypatch) + patch_token_info( + { + "realm_access": {"roles": ["public"]}, + "sub": str(user.keycloak_guid), + "product_code": ProductCode.BUSINESS.value, + }, + monkeypatch, + ) check_auth(one_of_roles=[ADMIN], business_identifier=entity.business_identifier) # Test for owner role with org id - patch_token_info({'realm_access': {'roles': ['public']}, 'sub': str(user.keycloak_guid), - 'product_code': ProductCode.BUSINESS.value}, monkeypatch) + patch_token_info( + { + "realm_access": {"roles": ["public"]}, + "sub": str(user.keycloak_guid), + "product_code": ProductCode.BUSINESS.value, + }, + monkeypatch, + ) check_auth(one_of_roles=[ADMIN], org_id=org.id) # Test for exception, check for auth if resource is available for STAFF users with pytest.raises(HTTPException) as excinfo: - patch_token_info({'realm_access': {'roles': ['public']}, 'sub': str(user.keycloak_guid)}, monkeypatch) + patch_token_info({"realm_access": {"roles": ["public"]}, "sub": str(user.keycloak_guid)}, monkeypatch) check_auth(one_of_roles=[STAFF], business_identifier=entity.business_identifier) assert excinfo.exception.code == 403 # Test auth where STAFF role is in disabled role list with pytest.raises(HTTPException) as excinfo: - patch_token_info({'realm_access': {'roles': ['staff']}, 'sub': str(user.keycloak_guid)}, monkeypatch) + patch_token_info({"realm_access": {"roles": ["staff"]}, "sub": str(user.keycloak_guid)}, monkeypatch) check_auth(disabled_roles=[STAFF], business_identifier=entity.business_identifier) assert excinfo.exception.code == 403 # 
Test auth where STAFF role is exact match with pytest.raises(HTTPException) as excinfo: - patch_token_info({'realm_access': {'roles': ['public']}, 'sub': str(user.keycloak_guid)}, monkeypatch) + patch_token_info({"realm_access": {"roles": ["public"]}, "sub": str(user.keycloak_guid)}, monkeypatch) check_auth(equals_role=USER, business_identifier=entity.business_identifier) assert excinfo.exception.code == 403 # Test auth where STAFF role is exact match with pytest.raises(HTTPException) as excinfo: - patch_token_info({'realm_access': {'roles': ['public']}, 'sub': str(user.keycloak_guid)}, monkeypatch) + patch_token_info({"realm_access": {"roles": ["public"]}, "sub": str(user.keycloak_guid)}, monkeypatch) check_auth(equals_role=USER, org_id=org.id) assert excinfo.exception.code == 403 # Test auth where STAFF role is exact match with pytest.raises(HTTPException) as excinfo: - patch_token_info({'realm_access': {'roles': ['staff', 'create_accounts']}, 'sub': str(user.keycloak_guid)}, - monkeypatch) + patch_token_info( + {"realm_access": {"roles": ["staff", "create_accounts"]}, "sub": str(user.keycloak_guid)}, monkeypatch + ) check_auth(equals_role=USER, org_id=org.id) assert excinfo.exception.code == 403 @pytest.mark.parametrize( - 'test_desc,test_expect,additional_kwargs,add_org_id', + "test_desc,test_expect,additional_kwargs,add_org_id", [ - ('Test 403 when no role checks provided in kwargs.', pytest.raises(Forbidden), {}, False), - ('Test 403 when STAFF in disabled_roles.', pytest.raises(Forbidden), {'disabled_roles': {'STAFF'}}, False), - ('Test OK when STAFF not in disabled_roles.', does_not_raise(), {'disabled_roles': {None}}, False), - ('Test OK when STAFF in one_of_roles.', does_not_raise(), {'one_of_roles': {'STAFF'}}, False), - ('Test OK when STAFF IS equals_role.', does_not_raise(), {'equals_role': 'STAFF'}, False), + ("Test 403 when no role checks provided in kwargs.", pytest.raises(Forbidden), {}, False), + ("Test 403 when STAFF in disabled_roles.", 
pytest.raises(Forbidden), {"disabled_roles": {"STAFF"}}, False), + ("Test OK when STAFF not in disabled_roles.", does_not_raise(), {"disabled_roles": {None}}, False), + ("Test OK when STAFF in one_of_roles.", does_not_raise(), {"one_of_roles": {"STAFF"}}, False), + ("Test OK when STAFF IS equals_role.", does_not_raise(), {"equals_role": "STAFF"}, False), ( - 'Test UnboundLocalError when system_required set to true -- auth variable not set.', + "Test UnboundLocalError when system_required set to true -- auth variable not set.", pytest.raises(UnboundLocalError), - {'equals_role': 'STAFF', 'system_required': True}, - False + {"equals_role": "STAFF", "system_required": True}, + False, ), ( - 'Test 403 when system_required set to true and correct OrgId provided, but not correct membership type.', + "Test 403 when system_required set to true and correct OrgId provided, but not correct membership type.", pytest.raises(Forbidden), - {'equals_role': 'STAFF', 'system_required': True}, - True + {"equals_role": "STAFF", "system_required": True}, + True, ), ( - 'Test OK when system_required set to true, it is STAFF and correct OrgId and membership provided.', + "Test OK when system_required set to true, it is STAFF and correct OrgId and membership provided.", does_not_raise(), - {'equals_role': 'ADMIN', 'system_required': True}, - True + {"equals_role": "ADMIN", "system_required": True}, + True, ), - ]) + ], +) def test_check_auth_staff_path(session, monkeypatch, test_desc, test_expect, additional_kwargs, add_org_id): """Assert and document scenarios for check_auth when STAFF path is concerned.""" jwt_claims = TestJwtClaims.staff_role if add_org_id: user = factory_user_model(TestUserInfo.user_test) - jwt_claims['sub'] = str(user.keycloak_guid) - jwt_claims['idp_userid'] = user.idp_userid + jwt_claims["sub"] = str(user.keycloak_guid) + jwt_claims["idp_userid"] = user.idp_userid org = factory_org_model() factory_membership_model(user.id, org.id) - additional_kwargs['org_id'] = 
org.id + additional_kwargs["org_id"] = org.id patch_token_info(jwt_claims, monkeypatch) with test_expect: @@ -281,49 +295,81 @@ def test_check_auth_staff_path(session, monkeypatch, test_desc, test_expect, add @pytest.mark.parametrize( - 'test_desc,test_expect,additional_kwargs,is_org_member,is_entity_affiliated,product_code_in_jwt', + "test_desc,test_expect,additional_kwargs,is_org_member,is_entity_affiliated,product_code_in_jwt", [ ( - 'Test 403 when no role checks provided in kwargs, and no org_id or business_identifier.', - pytest.raises(Forbidden), {}, False, False, ProductCode.BUSINESS.value - ), - ( - 'Test OK when no role checks provided in kwargs, but has ALL product in jwt. (bypass all checks).', - does_not_raise(), {}, False, False, 'ALL' + "Test 403 when no role checks provided in kwargs, and no org_id or business_identifier.", + pytest.raises(Forbidden), + {}, + False, + False, + ProductCode.BUSINESS.value, ), ( - 'Test OK when business identifier for affiliated entity and member of org.', - does_not_raise(), {}, True, True, ProductCode.BUSINESS.value + "Test OK when no role checks provided in kwargs, but has ALL product in jwt. 
(bypass all checks).", + does_not_raise(), + {}, + False, + False, + "ALL", ), ( - 'Test OK when business identifier for affiliated entity provided.', - does_not_raise(), {}, False, True, ProductCode.BUSINESS.value + "Test OK when business identifier for affiliated entity and member of org.", + does_not_raise(), + {}, + True, + True, + ProductCode.BUSINESS.value, ), ( - 'Test OK when member of the org.', - does_not_raise(), {}, True, False, ProductCode.BUSINESS.value + "Test OK when business identifier for affiliated entity provided.", + does_not_raise(), + {}, + False, + True, + ProductCode.BUSINESS.value, ), + ("Test OK when member of the org.", does_not_raise(), {}, True, False, ProductCode.BUSINESS.value), ( - 'Test OK when business identifier provided, not affiliated...', - does_not_raise(), {'business_identifier': 'SOME_NOT_AFFILIATED'}, False, False, ProductCode.BUSINESS.value + "Test OK when business identifier provided, not affiliated...", + does_not_raise(), + {"business_identifier": "SOME_NOT_AFFILIATED"}, + False, + False, + ProductCode.BUSINESS.value, ), ( - 'Test OK when org_id provided, not member...', - does_not_raise(), {'org_id': 123}, False, False, ProductCode.BUSINESS.value + "Test OK when org_id provided, not member...", + does_not_raise(), + {"org_id": 123}, + False, + False, + ProductCode.BUSINESS.value, ), ( - 'Test OK when org_id provided, not member, and not affiliated business_identifier...', - does_not_raise(), {'org_id': 123, 'business_identifier': 'SOME_NOT_AFFILIATED'}, - False, False, ProductCode.BUSINESS.value + "Test OK when org_id provided, not member, and not affiliated business_identifier...", + does_not_raise(), + {"org_id": 123, "business_identifier": "SOME_NOT_AFFILIATED"}, + False, + False, + ProductCode.BUSINESS.value, ), - ] + ], ) -def test_check_auth_system_path(session, monkeypatch, test_desc, test_expect, additional_kwargs, - is_org_member, is_entity_affiliated, product_code_in_jwt): +def test_check_auth_system_path( + 
session, + monkeypatch, + test_desc, + test_expect, + additional_kwargs, + is_org_member, + is_entity_affiliated, + product_code_in_jwt, +): """Assert and document scenarios for check_auth when calls are made by SYSTEM ROLE.""" jwt_claims = TestJwtClaims.system_role user = factory_user_model() - jwt_claims['sub'] = str(user.keycloak_guid) + jwt_claims["sub"] = str(user.keycloak_guid) org1 = factory_org_model(org_info=TestOrgInfo.org1) org3 = factory_org_model(org_info=TestOrgInfo.org4) entity1 = factory_entity_model(entity_info=TestEntityInfo.entity1) @@ -334,21 +380,21 @@ def test_check_auth_system_path(session, monkeypatch, test_desc, test_expect, ad factory_membership_model(user.id, org1.id) factory_product_model(org1.id, product_code=ProductCode.BUSINESS.value) factory_affiliation_model(entity1.id, org1.id) - additional_kwargs['org_id'] = org1.id - additional_kwargs['business_identifier'] = entity1.business_identifier + additional_kwargs["org_id"] = org1.id + additional_kwargs["business_identifier"] = entity1.business_identifier elif is_org_member: factory_membership_model(user.id, org1.id) factory_product_model(org1.id, product_code=ProductCode.BUSINESS.value) - additional_kwargs['org_id'] = org1.id + additional_kwargs["org_id"] = org1.id elif is_entity_affiliated: factory_membership_model(user.id, org1.id) factory_product_model(org1.id, product_code=ProductCode.BUSINESS.value) factory_affiliation_model(entity1.id, org1.id) - additional_kwargs['business_identifier'] = entity1.business_identifier + additional_kwargs["business_identifier"] = entity1.business_identifier - jwt_claims['product_code'] = product_code_in_jwt + jwt_claims["product_code"] = product_code_in_jwt patch_token_info(jwt_claims, monkeypatch) with test_expect: @@ -356,72 +402,109 @@ def test_check_auth_system_path(session, monkeypatch, test_desc, test_expect, ad @pytest.mark.parametrize( - 'test_desc,test_expect,additional_kwargs,is_org_member,is_entity_affiliated', + 
"test_desc,test_expect,additional_kwargs,is_org_member,is_entity_affiliated", [ ( - 'Test UnboundLocalError (403) when no role checks provided in kwargs.', - pytest.raises(UnboundLocalError), {}, False, False - ), - ( - 'Test 403 when org member, but no role checks provided in kwargs.', - pytest.raises(Forbidden), {}, True, False + "Test UnboundLocalError (403) when no role checks provided in kwargs.", + pytest.raises(UnboundLocalError), + {}, + False, + False, ), + ("Test 403 when org member, but no role checks provided in kwargs.", pytest.raises(Forbidden), {}, True, False), ( - 'Test 403 when entity affiliated, but no role checks provided in kwargs.', - pytest.raises(Forbidden), {}, False, True + "Test 403 when entity affiliated, but no role checks provided in kwargs.", + pytest.raises(Forbidden), + {}, + False, + True, ), ( - 'Test OK when org member ADMIN and checked for ADMIN role.', - does_not_raise(), {'equals_role': 'ADMIN'}, True, False + "Test OK when org member ADMIN and checked for ADMIN role.", + does_not_raise(), + {"equals_role": "ADMIN"}, + True, + False, ), ( - 'Test OK when affiliated entity and checked for ADMIN role.', - does_not_raise(), {'equals_role': 'ADMIN'}, False, True + "Test OK when affiliated entity and checked for ADMIN role.", + does_not_raise(), + {"equals_role": "ADMIN"}, + False, + True, ), ( - 'Test OK when org member ADMIN and checked for ADMIN role.', - does_not_raise(), {'one_of_roles': {'ADMIN'}}, True, False + "Test OK when org member ADMIN and checked for ADMIN role.", + does_not_raise(), + {"one_of_roles": {"ADMIN"}}, + True, + False, ), ( - 'Test OK when affiliated entity and checked for ADMIN role.', - does_not_raise(), {'one_of_roles': {'ADMIN'}}, False, True + "Test OK when affiliated entity and checked for ADMIN role.", + does_not_raise(), + {"one_of_roles": {"ADMIN"}}, + False, + True, ), ( - 'Test OK when org member ADMIN and checked for ADMIN role.', - does_not_raise(), {'one_of_roles': {'ADMIN'}}, True, False + 
"Test OK when org member ADMIN and checked for ADMIN role.", + does_not_raise(), + {"one_of_roles": {"ADMIN"}}, + True, + False, ), ( - 'Test OK when affiliated entity and checked for ADMIN role.', - does_not_raise(), {'one_of_roles': {'ADMIN'}}, False, True + "Test OK when affiliated entity and checked for ADMIN role.", + does_not_raise(), + {"one_of_roles": {"ADMIN"}}, + False, + True, ), ( - 'Test OK when org member ADMIN and checked for ADMIN role.', - does_not_raise(), {'disabled_roles': {'STAFF'}}, True, False + "Test OK when org member ADMIN and checked for ADMIN role.", + does_not_raise(), + {"disabled_roles": {"STAFF"}}, + True, + False, ), ( - 'Test OK when affiliated entity and checked for ADMIN role.', - does_not_raise(), {'disabled_roles': {'STAFF'}}, False, True + "Test OK when affiliated entity and checked for ADMIN role.", + does_not_raise(), + {"disabled_roles": {"STAFF"}}, + False, + True, ), ( - 'Test 403 when org member ADMIN and checked for STAFF role.', - pytest.raises(Forbidden), {'one_of_roles': {'STAFF'}}, True, True + "Test 403 when org member ADMIN and checked for STAFF role.", + pytest.raises(Forbidden), + {"one_of_roles": {"STAFF"}}, + True, + True, ), ( - 'Test 403 when affiliated entity and disabled ADMIN role.', - pytest.raises(Forbidden), {'disabled_roles': {'ADMIN'}}, True, True + "Test 403 when affiliated entity and disabled ADMIN role.", + pytest.raises(Forbidden), + {"disabled_roles": {"ADMIN"}}, + True, + True, ), ( - 'Test 403 when affiliated entity and disabled STAFF role.', - pytest.raises(Forbidden), {'equals_role': {'STAFF'}}, True, True + "Test 403 when affiliated entity and disabled STAFF role.", + pytest.raises(Forbidden), + {"equals_role": {"STAFF"}}, + True, + True, ), - ] + ], ) -def test_check_auth_public_user_path(session, monkeypatch, test_desc, test_expect, additional_kwargs, - is_org_member, is_entity_affiliated): +def test_check_auth_public_user_path( + session, monkeypatch, test_desc, test_expect, 
additional_kwargs, is_org_member, is_entity_affiliated +): """Assert and document scenarios for check_auth when calls are made by PUBLIC USER ROLE.""" jwt_claims = TestJwtClaims.public_user_role user = factory_user_model(user_info=TestUserInfo.user_tester) - jwt_claims['sub'] = str(user.keycloak_guid) + jwt_claims["sub"] = str(user.keycloak_guid) org1 = factory_org_model(org_info=TestOrgInfo.org1) org3 = factory_org_model(org_info=TestOrgInfo.org4) entity1 = factory_entity_model(entity_info=TestEntityInfo.entity1) @@ -432,19 +515,19 @@ def test_check_auth_public_user_path(session, monkeypatch, test_desc, test_expec factory_membership_model(user.id, org1.id) factory_product_model(org1.id, product_code=ProductCode.BUSINESS.value) factory_affiliation_model(entity1.id, org1.id) - additional_kwargs['org_id'] = org1.id - additional_kwargs['business_identifier'] = entity1.business_identifier + additional_kwargs["org_id"] = org1.id + additional_kwargs["business_identifier"] = entity1.business_identifier elif is_org_member: factory_membership_model(user.id, org1.id) factory_product_model(org1.id, product_code=ProductCode.BUSINESS.value) - additional_kwargs['org_id'] = org1.id + additional_kwargs["org_id"] = org1.id elif is_entity_affiliated: factory_membership_model(user.id, org1.id) factory_product_model(org1.id, product_code=ProductCode.BUSINESS.value) factory_affiliation_model(entity1.id, org1.id) - additional_kwargs['business_identifier'] = entity1.business_identifier + additional_kwargs["business_identifier"] = entity1.business_identifier patch_token_info(jwt_claims, monkeypatch) with test_expect: @@ -461,7 +544,7 @@ def test_check_auth_for_service_account_valid_with_org_id(session, monkeypatch): factory_affiliation_model(entity.id, org.id) # Test for service account with CP corp type - patch_token_info({'realm_access': {'roles': ['system']}, 'product_code': ProductCode.BUSINESS.value}, monkeypatch) + patch_token_info({"realm_access": {"roles": ["system"]}, 
"product_code": ProductCode.BUSINESS.value}, monkeypatch) check_auth(org_id=org.id) @@ -475,11 +558,11 @@ def test_check_auth_for_service_account_valid_with_business_id(session, monkeypa factory_affiliation_model(entity.id, org.id) # Test for service account with CP corp type - patch_token_info({'realm_access': {'roles': ['system']}, 'product_code': ProductCode.BUSINESS.value}, monkeypatch) + patch_token_info({"realm_access": {"roles": ["system"]}, "product_code": ProductCode.BUSINESS.value}, monkeypatch) check_auth(business_identifier=entity.business_identifier) -@pytest.mark.skip(reason='the approach changed;should be fixed later') +@pytest.mark.skip(reason="the approach changed;should be fixed later") def test_check_auth_for_service_account_invalid(session): # pylint:disable=unused-argument """Assert that check_auth is working as expected and throws exception.""" user = factory_user_model() @@ -511,59 +594,52 @@ def test_get_account_authorizations_for_product(session, monkeypatch): # pylint factory_membership_model(user.id, org.id) patch_token_info(TestJwtClaims.get_test_real_user(user.keycloak_guid), monkeypatch) - authorization = Authorization.get_account_authorizations_for_product(org.id, 'PPR') + authorization = Authorization.get_account_authorizations_for_product(org.id, "PPR") assert authorization is not None - assert len(authorization.get('roles')) == 0 + assert len(authorization.get("roles")) == 0 # Now add some product subscription for the org patch_token_info(TestJwtClaims.get_test_real_user(user.keycloak_guid), monkeypatch) factory_product_model(org.id) - authorization = Authorization.get_account_authorizations_for_product(org.id, 'PPR') + authorization = Authorization.get_account_authorizations_for_product(org.id, "PPR") assert authorization is not None - assert len(authorization.get('roles')) > 0 + assert len(authorization.get("roles")) > 0 # Create another org and assert that the roles are empty org = factory_org_model(org_info=TestOrgInfo.org2, 
org_type_info=TestOrgTypeInfo.implicit) factory_membership_model(user.id, org.id) patch_token_info(TestJwtClaims.get_test_real_user(user.keycloak_guid), monkeypatch) - authorization = Authorization.get_account_authorizations_for_product(org.id, 'PPR') + authorization = Authorization.get_account_authorizations_for_product(org.id, "PPR") assert authorization is not None - assert len(authorization.get('roles')) == 0 + assert len(authorization.get("roles")) == 0 factory_product_model(org.id) patch_token_info(TestJwtClaims.get_test_real_user(user.keycloak_guid), monkeypatch) - authorization = Authorization.get_account_authorizations_for_product(org.id, 'PPR') + authorization = Authorization.get_account_authorizations_for_product(org.id, "PPR") assert authorization is not None - assert len(authorization.get('roles')) > 0 + assert len(authorization.get("roles")) > 0 -def test_get_user_authorizations_for_entity_with_multiple_affiliations(session, # pylint:disable=unused-argument - monkeypatch): +def test_get_user_authorizations_for_entity_with_multiple_affiliations( + session, monkeypatch # pylint:disable=unused-argument +): """Assert that user authorizations for entity is working.""" user = factory_user_model() org = factory_org_model() membership = factory_membership_model(user.id, org.id) entity = factory_entity_model() factory_affiliation_model(entity.id, org.id) - patch_token_info({ - 'sub': str(user.keycloak_guid), - 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + patch_token_info({"sub": str(user.keycloak_guid), "realm_access": {"roles": ["basic"]}}, monkeypatch) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert authorization is not None - assert authorization.get('orgMembership', None) == membership.membership_type_code + assert authorization.get("orgMembership", None) == membership.membership_type_code # Affiliate same entity to another org and user, and assert both authorizations works user_2 = 
factory_user_model(user_info=TestUserInfo.user2) org_2 = factory_org_model(org_info=TestOrgInfo.org2) membership = factory_membership_model(user_2.id, org_2.id) factory_affiliation_model(entity.id, org_2.id) - patch_token_info({ - 'sub': str(user_2.keycloak_guid), - 'realm_access': { - 'roles': ['basic'] - }}, monkeypatch) + patch_token_info({"sub": str(user_2.keycloak_guid), "realm_access": {"roles": ["basic"]}}, monkeypatch) authorization = Authorization.get_user_authorizations_for_entity(entity.business_identifier) assert authorization is not None - assert authorization.get('orgMembership', None) == membership.membership_type_code + assert authorization.get("orgMembership", None) == membership.membership_type_code diff --git a/auth-api/tests/unit/services/test_codes.py b/auth-api/tests/unit/services/test_codes.py index 2db24c3df7..6d3b2c7bcb 100644 --- a/auth-api/tests/unit/services/test_codes.py +++ b/auth-api/tests/unit/services/test_codes.py @@ -27,58 +27,58 @@ def test_fetch_data_model(session): # pylint: disable=unused-argument """Assert that code type details can be fetch by table name.""" - code_type = 'membership_types' + code_type = "membership_types" code_result = CodesService.fetch_data_model(code_type) assert code_result is not None def test_fetch_data_model_not_found(session): # pylint: disable=unused-argument """Assert that code type details can be fetch by table name.""" - code_type = 'membership_type1' + code_type = "membership_type1" code_result = CodesService.fetch_data_model(code_type) assert not code_result - code_type = 'user' + code_type = "user" code_result = CodesService.fetch_data_model(code_type) assert not code_result - code_type = '' + code_type = "" code_result = CodesService.fetch_data_model(code_type) assert not code_result def test_fetch_codes(session): # pylint: disable=unused-argument """Assert that code type details can be fetch by table name.""" - code_type = 'membership_types' + code_type = "membership_types" data = 
CodesService.fetch_codes(code_type) assert data is not None - assert data[0]['name'] == 'USER' + assert data[0]["name"] == "USER" def test_fetch_codes_not_found(session): # pylint: disable=unused-argument """Assert that code type details can not be fetch by table name.""" # Table is not exists - code_type = 'membership_type1' + code_type = "membership_type1" data = CodesService.fetch_codes(code_type) assert not data data = CodesService.fetch_codes(None) assert not data - data = CodesService.fetch_codes('') + data = CodesService.fetch_codes("") assert not data # The table is not the code, type or status table. - code_type = 'user' + code_type = "user" data = CodesService.fetch_codes(code_type) assert not data def test_fetch_codes_with_exception(session): # pylint: disable=unused-argument """Assert that code type details can not be fetch by table name.""" - code_type = 'membership_types' - with patch.object(importlib, 'import_module', side_effect=Exception(Error.UNDEFINED_ERROR, None)): + code_type = "membership_types" + with patch.object(importlib, "import_module", side_effect=Exception(Error.UNDEFINED_ERROR, None)): with pytest.raises(BusinessException) as exception: CodesService.fetch_codes(code_type) - assert exception.value.code == 'UNDEFINED_ERROR' + assert exception.value.code == "UNDEFINED_ERROR" diff --git a/auth-api/tests/unit/services/test_documents.py b/auth-api/tests/unit/services/test_documents.py index c47d6d3f61..b8cf00dea6 100644 --- a/auth-api/tests/unit/services/test_documents.py +++ b/auth-api/tests/unit/services/test_documents.py @@ -24,37 +24,37 @@ def test_as_dict(session): # pylint: disable=unused-argument """Assert that a document is rendered correctly as a dictionary.""" - _model = DocumentsModel.fetch_latest_document_by_type('termsofuse') + _model = DocumentsModel.fetch_latest_document_by_type("termsofuse") termsofuse = DocumentService(_model) dictionary = termsofuse.as_dict() - assert dictionary['type'] == 'termsofuse' + assert 
dictionary["type"] == "termsofuse" def test_with_valid_type(session): # pylint: disable=unused-argument """Assert that a document is rendered correctly as a dictionary.""" - terms_of_use = DocumentService.fetch_latest_document('termsofuse') + terms_of_use = DocumentService.fetch_latest_document("termsofuse") assert terms_of_use is not None def test_with_no_valid_type(session): # pylint: disable=unused-argument """Assert that a document is rendered correctly as a dictionary.""" - terms_of_use = DocumentService.fetch_latest_document('sometype') + terms_of_use = DocumentService.fetch_latest_document("sometype") assert terms_of_use is None def test_find_latest_version_by_invalid_type(session): # pylint: disable=unused-argument """Assert that a document is rendered correctly as a dictionary.""" - terms_of_use = DocumentService.find_latest_version_by_type('sometype') + terms_of_use = DocumentService.find_latest_version_by_type("sometype") assert terms_of_use is None def test_find_latest_version_by_type(session): # pylint: disable=unused-argument """Assert that a document is rendered correctly as a dictionary.""" - terms_of_use = DocumentService.find_latest_version_by_type('termsofuse') + terms_of_use = DocumentService.find_latest_version_by_type("termsofuse") assert terms_of_use == get_tos_latest_version() def test_find_latest_version_for_director_search(session): # pylint: disable=unused-argument """Assert that a document is rendered correctly as a dictionary.""" - terms_of_use = DocumentService.find_latest_version_by_type('termsofuse_directorsearch') - assert terms_of_use == 'd1' + terms_of_use = DocumentService.find_latest_version_by_type("termsofuse_directorsearch") + assert terms_of_use == "d1" diff --git a/auth-api/tests/unit/services/test_entity.py b/auth-api/tests/unit/services/test_entity.py index e27fd4a1d9..060ad0b067 100644 --- a/auth-api/tests/unit/services/test_entity.py +++ b/auth-api/tests/unit/services/test_entity.py @@ -23,7 +23,11 @@ from 
auth_api.services.entity import Entity as EntityService from tests.utilities.factory_scenarios import TestContactInfo, TestEntityInfo, TestJwtClaims, TestUserInfo from tests.utilities.factory_utils import ( - factory_contact_model, factory_entity_model, factory_org_service, patch_token_info) + factory_contact_model, + factory_entity_model, + factory_org_service, + patch_token_info, +) def test_as_dict(session): # pylint:disable=unused-argument @@ -32,107 +36,116 @@ def test_as_dict(session): # pylint:disable=unused-argument entity = EntityService(entity_model) dictionary = entity.as_dict() - assert dictionary['business_identifier'] == TestEntityInfo.entity1['businessIdentifier'] + assert dictionary["business_identifier"] == TestEntityInfo.entity1["businessIdentifier"] def test_save_entity_new(session): # pylint:disable=unused-argument """Assert that an Entity can be created from a dictionary.""" - entity = EntityService.save_entity({ - 'businessIdentifier': TestEntityInfo.entity_passcode['businessIdentifier'], - 'businessNumber': TestEntityInfo.entity_passcode['businessNumber'], - 'passCode': TestEntityInfo.entity_passcode['passCode'], - 'name': TestEntityInfo.entity_passcode['name'], - 'corpTypeCode': TestEntityInfo.entity_passcode['corpTypeCode'] - }) + entity = EntityService.save_entity( + { + "businessIdentifier": TestEntityInfo.entity_passcode["businessIdentifier"], + "businessNumber": TestEntityInfo.entity_passcode["businessNumber"], + "passCode": TestEntityInfo.entity_passcode["passCode"], + "name": TestEntityInfo.entity_passcode["name"], + "corpTypeCode": TestEntityInfo.entity_passcode["corpTypeCode"], + } + ) assert entity is not None dictionary = entity.as_dict() - assert dictionary['business_identifier'] == TestEntityInfo.entity_passcode['businessIdentifier'] + assert dictionary["business_identifier"] == TestEntityInfo.entity_passcode["businessIdentifier"] def test_save_entity_existing(session): # pylint:disable=unused-argument """Assert that an Entity 
can be updated from a dictionary.""" - entity = EntityService.save_entity({ - 'businessIdentifier': TestEntityInfo.entity_passcode['businessIdentifier'], - 'businessNumber': TestEntityInfo.entity_passcode['businessNumber'], - 'passCode': TestEntityInfo.entity_passcode['passCode'], - 'name': TestEntityInfo.entity_passcode['name'], - 'corpTypeCode': TestEntityInfo.entity_passcode['corpTypeCode'] - }) + entity = EntityService.save_entity( + { + "businessIdentifier": TestEntityInfo.entity_passcode["businessIdentifier"], + "businessNumber": TestEntityInfo.entity_passcode["businessNumber"], + "passCode": TestEntityInfo.entity_passcode["passCode"], + "name": TestEntityInfo.entity_passcode["name"], + "corpTypeCode": TestEntityInfo.entity_passcode["corpTypeCode"], + } + ) assert entity updated_entity_info = { - 'businessIdentifier': TestEntityInfo.entity_passcode2['businessIdentifier'], - 'businessNumber': TestEntityInfo.entity_passcode2['businessNumber'], - 'passCode': TestEntityInfo.entity_passcode['passCode'], - 'name': TestEntityInfo.entity_passcode['name'], - 'corpTypeCode': TestEntityInfo.entity_passcode['corpTypeCode'] + "businessIdentifier": TestEntityInfo.entity_passcode2["businessIdentifier"], + "businessNumber": TestEntityInfo.entity_passcode2["businessNumber"], + "passCode": TestEntityInfo.entity_passcode["passCode"], + "name": TestEntityInfo.entity_passcode["name"], + "corpTypeCode": TestEntityInfo.entity_passcode["corpTypeCode"], } updated_entity = EntityService.save_entity(updated_entity_info) assert updated_entity - assert updated_entity.as_dict()['name'] == updated_entity_info['name'] - assert updated_entity.as_dict()['business_number'] == updated_entity_info['businessNumber'] + assert updated_entity.as_dict()["name"] == updated_entity_info["name"] + assert updated_entity.as_dict()["business_number"] == updated_entity_info["businessNumber"] def test_update_entity_existing_success(session, monkeypatch): # pylint:disable=unused-argument """Assert that an 
Entity can be updated from a dictionary.""" - entity = EntityService.save_entity({ - 'businessIdentifier': TestEntityInfo.bc_entity_passcode3['businessIdentifier'], - 'businessNumber': TestEntityInfo.bc_entity_passcode3['businessNumber'], - 'passCode': TestEntityInfo.bc_entity_passcode3['passCode'], - 'name': TestEntityInfo.bc_entity_passcode3['name'], - 'corpTypeCode': TestEntityInfo.bc_entity_passcode3['corpTypeCode'] - }) + entity = EntityService.save_entity( + { + "businessIdentifier": TestEntityInfo.bc_entity_passcode3["businessIdentifier"], + "businessNumber": TestEntityInfo.bc_entity_passcode3["businessNumber"], + "passCode": TestEntityInfo.bc_entity_passcode3["passCode"], + "name": TestEntityInfo.bc_entity_passcode3["name"], + "corpTypeCode": TestEntityInfo.bc_entity_passcode3["corpTypeCode"], + } + ) assert entity - assert entity.as_dict()['corp_type']['code'] == 'BC' + assert entity.as_dict()["corp_type"]["code"] == "BC" updated_entity_info = { - 'businessIdentifier': TestEntityInfo.bc_entity_passcode4['businessIdentifier'], - 'businessNumber': TestEntityInfo.bc_entity_passcode4['businessNumber'], - 'name': TestEntityInfo.bc_entity_passcode4['name'], - 'corpTypeCode': TestEntityInfo.bc_entity_passcode4['corpTypeCode'] + "businessIdentifier": TestEntityInfo.bc_entity_passcode4["businessIdentifier"], + "businessNumber": TestEntityInfo.bc_entity_passcode4["businessNumber"], + "name": TestEntityInfo.bc_entity_passcode4["name"], + "corpTypeCode": TestEntityInfo.bc_entity_passcode4["corpTypeCode"], } user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] - patch_token_info({'loginSource': '', 'realm_access': {'roles': ['system']}, 'corp_type': 'BC'}, monkeypatch) - updated_entity = EntityService.update_entity(entity.as_dict().get('business_identifier'), updated_entity_info) + patch_token_info({"loginSource": "", "realm_access": 
{"roles": ["system"]}, "corp_type": "BC"}, monkeypatch) + updated_entity = EntityService.update_entity(entity.as_dict().get("business_identifier"), updated_entity_info) assert updated_entity - assert updated_entity.as_dict()['name'] == updated_entity_info['name'] - assert updated_entity.as_dict()['business_number'] == updated_entity_info['businessNumber'] + assert updated_entity.as_dict()["name"] == updated_entity_info["name"] + assert updated_entity.as_dict()["business_number"] == updated_entity_info["businessNumber"] def test_update_entity_existing_failures(session, monkeypatch): # pylint:disable=unused-argument """Assert that an Entity can be updated from a dictionary.""" - entity = EntityService.save_entity({ - 'businessIdentifier': TestEntityInfo.bc_entity_passcode3['businessIdentifier'], - 'businessNumber': TestEntityInfo.bc_entity_passcode3['businessNumber'], - 'passCode': TestEntityInfo.bc_entity_passcode3['passCode'], - 'name': TestEntityInfo.bc_entity_passcode3['name'], - 'corpTypeCode': TestEntityInfo.bc_entity_passcode3['corpTypeCode'] - }) + entity = EntityService.save_entity( + { + "businessIdentifier": TestEntityInfo.bc_entity_passcode3["businessIdentifier"], + "businessNumber": TestEntityInfo.bc_entity_passcode3["businessNumber"], + "passCode": TestEntityInfo.bc_entity_passcode3["passCode"], + "name": TestEntityInfo.bc_entity_passcode3["name"], + "corpTypeCode": TestEntityInfo.bc_entity_passcode3["corpTypeCode"], + } + ) assert entity - assert entity.as_dict()['corp_type']['code'] == 'BC' + assert entity.as_dict()["corp_type"]["code"] == "BC" updated_entity_info = { - 'businessIdentifier': TestEntityInfo.bc_entity_passcode4['businessIdentifier'], - 'businessNumber': TestEntityInfo.bc_entity_passcode4['businessNumber'], - 'name': TestEntityInfo.bc_entity_passcode4['name'], - 'corpTypeCode': TestEntityInfo.bc_entity_passcode4['corpTypeCode'] + "businessIdentifier": TestEntityInfo.bc_entity_passcode4["businessIdentifier"], + "businessNumber": 
TestEntityInfo.bc_entity_passcode4["businessNumber"], + "name": TestEntityInfo.bc_entity_passcode4["name"], + "corpTypeCode": TestEntityInfo.bc_entity_passcode4["corpTypeCode"], } user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] with pytest.raises(BusinessException) as exception: - patch_token_info({'loginSource': '', 'realm_access': {'roles': ['system']}, - 'corp_type': 'INVALID_CP'}, monkeypatch) - EntityService.update_entity('invalidbusinessnumber', updated_entity_info) + patch_token_info( + {"loginSource": "", "realm_access": {"roles": ["system"]}, "corp_type": "INVALID_CP"}, monkeypatch + ) + EntityService.update_entity("invalidbusinessnumber", updated_entity_info) assert exception.value.code == Error.DATA_NOT_FOUND.name @@ -147,16 +160,16 @@ def test_save_entity_no_input(session): # pylint:disable=unused-argument def test_entity_find_by_business_id(session, auth_mock): # pylint:disable=unused-argument """Assert that an Entity can be retrieved by business identifier.""" factory_entity_model() - entity = EntityService.find_by_business_identifier(TestEntityInfo.entity1['businessIdentifier']) + entity = EntityService.find_by_business_identifier(TestEntityInfo.entity1["businessIdentifier"]) assert entity is not None dictionary = entity.as_dict() - assert dictionary['business_identifier'] == TestEntityInfo.entity1['businessIdentifier'] + assert dictionary["business_identifier"] == TestEntityInfo.entity1["businessIdentifier"] def test_entity_find_by_business_id_no_model(session, auth_mock): # pylint:disable=unused-argument """Assert that an Entity which does not exist cannot be retrieved.""" - entity = EntityService.find_by_business_identifier(TestEntityInfo.entity1['businessIdentifier']) + entity = EntityService.find_by_business_identifier(TestEntityInfo.entity1["businessIdentifier"]) assert entity is None @@ -170,7 +183,7 @@ 
def test_entity_find_by_entity_id(session, auth_mock): # pylint:disable=unused- assert entity is not None dictionary = entity.as_dict() - assert dictionary['business_identifier'] == TestEntityInfo.entity1['businessIdentifier'] + assert dictionary["business_identifier"] == TestEntityInfo.entity1["businessIdentifier"] def test_entity_find_by_entity_id_no_id(session, auth_mock): # pylint:disable=unused-argument @@ -191,9 +204,9 @@ def test_add_contact(session): # pylint:disable=unused-argument entity.add_contact(TestContactInfo.contact1) dictionary = entity.as_dict() - assert dictionary['contacts'] - assert len(dictionary['contacts']) == 1 - assert dictionary['contacts'][0]['email'] == TestContactInfo.contact1['email'] + assert dictionary["contacts"] + assert len(dictionary["contacts"]) == 1 + assert dictionary["contacts"][0]["email"] == TestContactInfo.contact1["email"] def test_add_contact_duplicate(session): # pylint:disable=unused-argument @@ -214,15 +227,15 @@ def test_update_contact(session): # pylint:disable=unused-argument entity.add_contact(TestContactInfo.contact1) dictionary = entity.as_dict() - assert len(dictionary['contacts']) == 1 - assert dictionary['contacts'][0]['email'] == TestContactInfo.contact1['email'] + assert len(dictionary["contacts"]) == 1 + assert dictionary["contacts"][0]["email"] == TestContactInfo.contact1["email"] entity.update_contact(TestContactInfo.contact2) dictionary = None dictionary = entity.as_dict() - assert len(dictionary['contacts']) == 1 - assert dictionary['contacts'][0]['email'] == TestContactInfo.contact2['email'] + assert len(dictionary["contacts"]) == 1 + assert dictionary["contacts"][0]["email"] == TestContactInfo.contact2["email"] def test_update_contact_no_contact(session): # pylint:disable=unused-argument @@ -243,7 +256,7 @@ def test_get_contact_by_business_identifier(session): # pylint:disable=unused-a contact = entity.get_contact() assert contact is not None - assert contact.email == 
TestContactInfo.contact1['email'] + assert contact.email == TestContactInfo.contact1["email"] def test_get_contact_by_business_identifier_no_contact(session): # pylint:disable=unused-argument @@ -262,7 +275,7 @@ def test_delete_contact(session): # pylint:disable=unused-argument updated_entity = entity.delete_contact() dictionary = updated_entity.as_dict() - assert not dictionary['contacts'] + assert not dictionary["contacts"] def test_delete_contact_no_entity(session, auth_mock): # pylint:disable=unused-argument @@ -286,7 +299,7 @@ def test_delete_contact_entity_link(session, auth_mock): # pylint:disable=unuse org = factory_org_service() org_dictionary = org.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] contact = factory_contact_model() @@ -294,13 +307,13 @@ def test_delete_contact_entity_link(session, auth_mock): # pylint:disable=unuse contact_link.contact = contact contact_link.entity = entity._model # pylint:disable=protected-access contact_link.org = org._model # pylint:disable=protected-access - contact_link.commit() + contact_link.save() updated_entity = entity.delete_contact() dictionary = None dictionary = updated_entity.as_dict() - assert len(dictionary['contacts']) == 0 + assert len(dictionary["contacts"]) == 0 delete_contact_link = ContactLinkModel.find_by_entity_id(entity.identifier) assert not delete_contact_link @@ -323,7 +336,7 @@ def test_validate_invalid_pass_code(app, session): # pylint:disable=unused-argu entity_model = factory_entity_model(entity_info=TestEntityInfo.entity_passcode) entity = EntityService(entity_model) - validated = entity.validate_pass_code('222222222') + validated = entity.validate_pass_code("222222222") assert not validated @@ -340,7 +353,7 @@ def test_delete_entity(app, session): # pylint:disable=unused-argument contact_link.contact = contact contact_link.entity = entity._model # pylint:disable=protected-access contact_link.org = org._model # pylint:disable=protected-access - contact_link.commit() 
+ contact_link.save() entity.delete() @@ -356,7 +369,7 @@ def test_reset_pass_code(app, session, monkeypatch): # pylint:disable=unused-ar entity = EntityService(entity_model) old_passcode = entity.pass_code patch_token_info(TestJwtClaims.user_test, monkeypatch) - entity.reset_passcode(entity.business_identifier, '') + entity.reset_passcode(entity.business_identifier, "") new_passcode = entity.pass_code assert old_passcode != new_passcode diff --git a/auth-api/tests/unit/services/test_flags.py b/auth-api/tests/unit/services/test_flags.py index e64d92dcc2..806a556233 100644 --- a/auth-api/tests/unit/services/test_flags.py +++ b/auth-api/tests/unit/services/test_flags.py @@ -16,8 +16,8 @@ import pytest from flask import Flask -from auth_api.services import Flags from auth_api.models import User +from auth_api.services import Flags app = None @@ -27,7 +27,7 @@ def setup(): """Initialize app with dev env for testing.""" global app app = Flask(__name__) - app.env = 'testing' + app.config["ENV"] = "testing" def test_flags_constructor_no_app(setup): @@ -41,67 +41,70 @@ def test_flags_constructor_with_app(setup): with app.app_context(): flags = Flags(app) assert flags - assert app.extensions['featureflags'] + assert app.extensions["featureflags"] def test_init_app_dev_with_key(setup): """Ensure that extension can be initialized with a key in dev.""" - app.config['AUTH_LD_SDK_KEY'] = 'https://no.flag/avail' + app.config["AUTH_LD_SDK_KEY"] = "https://no.flag/avail" with app.app_context(): flags = Flags() flags.init_app(app) assert flags - assert app.extensions['featureflags'] - assert app.extensions['featureflags'].get_sdk_key() == 'https://no.flag/avail' + assert app.extensions["featureflags"] + assert app.extensions["featureflags"].get_sdk_key() == "https://no.flag/avail" def test_init_app_dev_no_key(setup): """Ensure that extension can be initialized with no key in dev.""" - app.config['AUTH_LD_SDK_KEY'] = None + app.config["AUTH_LD_SDK_KEY"] = None with app.app_context(): 
flags = Flags() flags.init_app(app) assert flags - assert app.extensions['featureflags'] + assert app.extensions["featureflags"] def test_init_app_prod_with_key(setup): """Ensure that extension can be initialized with a key in prod.""" - app.env = 'production' - app.config['AUTH_LD_SDK_KEY'] = 'https://no.flag/avail' + app.config["ENV"] = "production" + app.config["AUTH_LD_SDK_KEY"] = "https://no.flag/avail" with app.app_context(): flags = Flags() flags.init_app(app) assert flags - assert app.extensions['featureflags'] - assert app.extensions['featureflags'].get_sdk_key() == 'https://no.flag/avail' + assert app.extensions["featureflags"] + assert app.extensions["featureflags"].get_sdk_key() == "https://no.flag/avail" def test_init_app_prod_no_key(setup): """Ensure that extension can be initialized with no key in prod.""" - app.env = 'production' - app.config['AUTH_LD_SDK_KEY'] = None + app.config["ENV"] = "production" + app.config["AUTH_LD_SDK_KEY"] = None with app.app_context(): flags = Flags() flags.init_app(app) with pytest.raises(KeyError): - client = app.extensions['featureflags'] + client = app.extensions["featureflags"] assert not client assert flags -@pytest.mark.parametrize('test_name,flag_name,expected', [ - ('boolean flag', 'bool-flag', True), - ('string flag', 'string-flag', 'a string value'), - ('integer flag', 'integer-flag', 10), -]) +@pytest.mark.parametrize( + "test_name,flag_name,expected", + [ + ("boolean flag", "bool-flag", True), + ("string flag", "string-flag", "a string value"), + ("integer flag", "integer-flag", 10), + ], +) def test_flags_read_from_json(setup, test_name, flag_name, expected): """Ensure that is_on is TRUE when reading flags from local JSON file.""" - app.config['AUTH_LD_SDK_KEY'] = 'https://no.flag/avail' + app.config["AUTH_LD_SDK_KEY"] = "https://no.flag/avail" with app.app_context(): flags = Flags() @@ -112,24 +115,27 @@ def test_flags_read_from_json(setup, test_name, flag_name, expected): def 
test_flags_read_from_json_missing_flag(setup): """Ensure that is_on is FALSE when reading a flag that doesn't exist from local JSON file.""" - app.config['AUTH_LD_SDK_KEY'] = 'https://no.flag/avail' + app.config["AUTH_LD_SDK_KEY"] = "https://no.flag/avail" with app.app_context(): flags = Flags() flags.init_app(app) - flag_on = flags.is_on('missing flag') + flag_on = flags.is_on("missing flag") assert not flag_on -@pytest.mark.parametrize('test_name,flag_name,expected', [ - ('boolean flag', 'bool-flag', True), - ('string flag', 'string-flag', 'a string value'), - ('integer flag', 'integer-flag', 10), -]) +@pytest.mark.parametrize( + "test_name,flag_name,expected", + [ + ("boolean flag", "bool-flag", True), + ("string flag", "string-flag", "a string value"), + ("integer flag", "integer-flag", 10), + ], +) def test_flags_read_flag_values_from_json(setup, test_name, flag_name, expected): """Ensure that values read from JSON == expected values when no user is passed.""" - app.config['AUTH_LD_SDK_KEY'] = 'https://no.flag/avail' + app.config["AUTH_LD_SDK_KEY"] = "https://no.flag/avail" with app.app_context(): flags = Flags() @@ -139,16 +145,19 @@ def test_flags_read_flag_values_from_json(setup, test_name, flag_name, expected) assert val == expected -@pytest.mark.parametrize('test_name,flag_name,expected', [ - ('boolean flag', 'bool-flag', True), - ('string flag', 'string-flag', 'a string value'), - ('integer flag', 'integer-flag', 10), -]) +@pytest.mark.parametrize( + "test_name,flag_name,expected", + [ + ("boolean flag", "bool-flag", True), + ("string flag", "string-flag", "a string value"), + ("integer flag", "integer-flag", 10), + ], +) def test_flags_read_flag_values_unique_user(setup, test_name, flag_name, expected): """Ensure that values read from JSON == expected values when passed with a user.""" - app.config['AUTH_LD_SDK_KEY'] = 'https://no.flag/avail' + app.config["AUTH_LD_SDK_KEY"] = "https://no.flag/avail" - user = User(username='username', 
firstname='firstname', lastname='lastname', idp_userid='userid') + user = User(username="username", firstname="firstname", lastname="lastname", idp_userid="userid") with app.app_context(): flags = Flags() flags.init_app(app) diff --git a/auth-api/tests/unit/services/test_invitation.py b/auth-api/tests/unit/services/test_invitation.py index a8b2d83b6b..a1e757c44b 100644 --- a/auth-api/tests/unit/services/test_invitation.py +++ b/auth-api/tests/unit/services/test_invitation.py @@ -16,78 +16,84 @@ Test suite to ensure that the Invitation service routines are working as expected. """ from datetime import datetime, timedelta +from unittest import mock from unittest.mock import ANY, patch -import mock import pytest from freezegun import freeze_time -from auth_api.models.dataclass import Activity import auth_api.services.authorization as auth from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models import Invitation as InvitationModel from auth_api.models import InvitationStatus as InvitationStatusModel +from auth_api.models.dataclass import Activity from auth_api.services import ActivityLogPublisher from auth_api.services import Invitation as InvitationService from auth_api.services import Membership as MembershipService from auth_api.services import Org as OrgService from auth_api.services import User from auth_api.utils.enums import ActivityAction +from tests.conftest import mock_token from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo, TestUserInfo from tests.utilities.factory_utils import factory_invitation, factory_user_model, patch_token_info -from tests.conftest import mock_token -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_as_dict(session, auth_mock, keycloak_mock, monkeypatch): # 
pylint:disable=unused-argument """Assert that the Invitation is exported correctly as a dictionary.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): user = factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) - invitation = InvitationService.create_invitation(invitation_info, User(user), '') + invitation_info = factory_invitation(org_dictionary["id"]) + invitation = InvitationService.create_invitation(invitation_info, User(user), "") invitation_dictionary = invitation.as_dict() - assert invitation_dictionary['recipient_email'] == invitation_info['recipientEmail'] + assert invitation_dictionary["recipient_email"] == invitation_info["recipientEmail"] -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_invitation(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Invitation can be created.""" - with patch.object(InvitationService, 'send_invitation', return_value=None) as mock_notify: + with patch.object(InvitationService, "send_invitation", return_value=None) as mock_notify: user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = 
factory_invitation(org_dictionary['id']) - - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - invitation = InvitationService.create_invitation(invitation_info, User(user), '') - mock_alp.assert_called_with(Activity(action=ActivityAction.INVITE_TEAM_MEMBER.value, - org_id=ANY, name=invitation_info['recipientEmail'], id=ANY, - value='USER')) + invitation_info = factory_invitation(org_dictionary["id"]) + + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + invitation = InvitationService.create_invitation(invitation_info, User(user), "") + mock_alp.assert_called_with( + Activity( + action=ActivityAction.INVITE_TEAM_MEMBER.value, + org_id=ANY, + name=invitation_info["recipientEmail"], + id=ANY, + value="USER", + ) + ) invitation_dictionary = invitation.as_dict() - assert invitation_dictionary['recipient_email'] == invitation_info['recipientEmail'] - assert invitation_dictionary['id'] + assert invitation_dictionary["recipient_email"] == invitation_info["recipientEmail"] + assert invitation_dictionary["id"] mock_notify.assert_called() -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_find_invitation_by_id(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Find an existing invitation with the provided id.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() 
- invitation_info = factory_invitation(org_dictionary['id']) - new_invitation = InvitationService.create_invitation(invitation_info, User(user), '').as_dict() - invitation = InvitationService.find_invitation_by_id(new_invitation['id']).as_dict() + invitation_info = factory_invitation(org_dictionary["id"]) + new_invitation = InvitationService.create_invitation(invitation_info, User(user), "").as_dict() + invitation = InvitationService.find_invitation_by_id(new_invitation["id"]).as_dict() assert invitation - assert invitation['recipient_email'] == invitation_info['recipientEmail'] + assert invitation["recipient_email"] == invitation_info["recipientEmail"] def test_find_invitation_by_id_exception(session, auth_mock): # pylint:disable=unused-argument @@ -96,18 +102,18 @@ def test_find_invitation_by_id_exception(session, auth_mock): # pylint:disable= assert invitation is None -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_delete_invitation(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Delete the specified invitation.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) - new_invitation = InvitationService.create_invitation(invitation_info, User(user), '').as_dict() - InvitationService.delete_invitation(new_invitation['id']) - invitation = 
InvitationService.find_invitation_by_id(new_invitation['id']) + invitation_info = factory_invitation(org_dictionary["id"]) + new_invitation = InvitationService.create_invitation(invitation_info, User(user), "").as_dict() + InvitationService.delete_invitation(new_invitation["id"]) + invitation = InvitationService.find_invitation_by_id(new_invitation["id"]) assert invitation is None @@ -119,38 +125,40 @@ def test_delete_invitation_exception(session, auth_mock): # pylint:disable=unus assert exception.value.code == Error.DATA_NOT_FOUND.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_update_invitation(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Update the specified invitation with new data.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) - new_invitation = InvitationService.create_invitation(invitation_info, User(user), '') - updated_invitation = new_invitation.update_invitation(User(user), '').as_dict() - assert updated_invitation['status'] == 'PENDING' + invitation_info = factory_invitation(org_dictionary["id"]) + new_invitation = InvitationService.create_invitation(invitation_info, User(user), "") + updated_invitation = new_invitation.update_invitation(User(user), "").as_dict() + assert updated_invitation["status"] == "PENDING" 
-@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_update_invitation_verify_different_tokens(session, auth_mock, keycloak_mock, # pylint:disable=unused-argument - monkeypatch): +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_update_invitation_verify_different_tokens( + session, auth_mock, keycloak_mock, monkeypatch # pylint:disable=unused-argument +): """Update the specified invitation with new data.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) - new_invitation = InvitationService.create_invitation(invitation_info, User(user), '') - old_token = new_invitation.as_dict().get('token') + invitation_info = factory_invitation(org_dictionary["id"]) + new_invitation = InvitationService.create_invitation(invitation_info, User(user), "") + old_token = new_invitation.as_dict().get("token") with freeze_time( - lambda: datetime.now() + timedelta(seconds=1)): # to give time difference..or else token will be same.. - updated_invitation = new_invitation.update_invitation(User(user), '').as_dict() - new_token = updated_invitation.get('token') + lambda: datetime.now() + timedelta(seconds=1) + ): # to give time difference..or else token will be same.. 
+ updated_invitation = new_invitation.update_invitation(User(user), "").as_dict() + new_token = updated_invitation.get("token") assert old_token != new_token - assert updated_invitation['status'] == 'PENDING' + assert updated_invitation["status"] == "PENDING" def test_generate_confirmation_token(session): # pylint:disable=unused-argument @@ -159,34 +167,34 @@ def test_generate_confirmation_token(session): # pylint:disable=unused-argument assert confirmation_token is not None -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_validate_token_valid(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Validate the invitation token.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) - new_invitation = InvitationService.create_invitation(invitation_info, User(user), '').as_dict() - confirmation_token = InvitationService.generate_confirmation_token(new_invitation['id']) - invitation_id = InvitationService.validate_token(confirmation_token).as_dict().get('id') - assert invitation_id == new_invitation['id'] + invitation_info = factory_invitation(org_dictionary["id"]) + new_invitation = InvitationService.create_invitation(invitation_info, User(user), "").as_dict() + confirmation_token = InvitationService.generate_confirmation_token(new_invitation["id"]) + invitation_id = 
InvitationService.validate_token(confirmation_token).as_dict().get("id") + assert invitation_id == new_invitation["id"] -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_validate_token_accepted(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Validate invalid invitation token.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() user_invitee = factory_user_model(TestUserInfo.user1) - invitation_info = factory_invitation(org_dictionary['id']) - new_invitation = InvitationService.create_invitation(invitation_info, User(user_invitee), '').as_dict() - confirmation_token = InvitationService.generate_confirmation_token(new_invitation['id']) - InvitationService.accept_invitation(new_invitation['id'], User(user_invitee), '') + invitation_info = factory_invitation(org_dictionary["id"]) + new_invitation = InvitationService.create_invitation(invitation_info, User(user_invitee), "").as_dict() + confirmation_token = InvitationService.generate_confirmation_token(new_invitation["id"]) + InvitationService.accept_invitation(new_invitation["id"], User(user_invitee), "") with pytest.raises(BusinessException) as exception: InvitationService.validate_token(confirmation_token) @@ -202,119 +210,121 @@ def test_validate_token_exception(session): # pylint:disable=unused-argument assert exception.value.code == Error.EXPIRED_INVITATION.name 
-@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_accept_invitation(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Accept the invitation and add membership from the invitation to the org.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): - with patch.object(auth, 'check_auth', return_value=True): - with patch.object(InvitationService, 'notify_admin', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): + with patch.object(auth, "check_auth", return_value=True): + with patch.object(InvitationService, "notify_admin", return_value=None): user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = factory_user_model(user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) + invitation_info = factory_invitation(org_dictionary["id"]) user_with_token_invitee = TestUserInfo.user1 - user_with_token_invitee['keycloak_guid'] = TestJwtClaims.edit_role_2['sub'] + user_with_token_invitee["keycloak_guid"] = TestJwtClaims.edit_role_2["sub"] user_invitee = factory_user_model(user_with_token_invitee) - new_invitation = InvitationService.create_invitation(invitation_info, User(user_invitee), '') + new_invitation = 
InvitationService.create_invitation(invitation_info, User(user_invitee), "") new_invitation_dict = new_invitation.as_dict() - InvitationService.accept_invitation(new_invitation_dict['id'], User(user_invitee), '') + InvitationService.accept_invitation(new_invitation_dict["id"], User(user_invitee), "") patch_token_info(TestJwtClaims.public_user_role, monkeypatch) - members = MembershipService.get_members_for_org(org_dictionary['id'], - 'PENDING_APPROVAL') + members = MembershipService.get_members_for_org(org_dictionary["id"], "PENDING_APPROVAL") assert members assert len(members) == 1 -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_accept_invitation_for_govm(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Accept the invitation and add membership from the invitation to the org.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): - with patch.object(auth, 'check_auth', return_value=True): - with patch.object(InvitationService, 'notify_admin', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): + with patch.object(auth, "check_auth", return_value=True): + with patch.object(InvitationService, "notify_admin", return_value=None): user_with_token = TestUserInfo.user_staff_admin - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_with_token) patch_token_info(TestJwtClaims.staff_admin_role, monkeypatch) org = OrgService.create_org(TestOrgInfo.org_govm, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) + invitation_info = factory_invitation(org_dictionary["id"]) user_with_token_invitee = TestUserInfo.user1 - 
user_with_token_invitee['keycloak_guid'] = TestJwtClaims.edit_role_2['sub'] + user_with_token_invitee["keycloak_guid"] = TestJwtClaims.edit_role_2["sub"] user_invitee = factory_user_model(user_with_token_invitee) - new_invitation = InvitationService.create_invitation(invitation_info, User(user_invitee), '') + new_invitation = InvitationService.create_invitation(invitation_info, User(user_invitee), "") new_invitation_dict = new_invitation.as_dict() - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - InvitationService.accept_invitation(new_invitation_dict['id'], User(user_invitee), '') - mock_alp.assert_called_with(Activity(action=ActivityAction.APPROVE_TEAM_MEMBER.value, - org_id=ANY, name=ANY, id=ANY, - value=ANY)) - - members = MembershipService.get_members_for_org(org_dictionary['id'], 'ACTIVE') + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + InvitationService.accept_invitation(new_invitation_dict["id"], User(user_invitee), "") + mock_alp.assert_called_with( + Activity( + action=ActivityAction.APPROVE_TEAM_MEMBER.value, org_id=ANY, name=ANY, id=ANY, value=ANY + ) + ) + + members = MembershipService.get_members_for_org(org_dictionary["id"], "ACTIVE") assert members - assert len(members) == 1, 'user gets active membership' + assert len(members) == 1, "user gets active membership" -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_accept_invitation_exceptions(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Accept the invitation and add membership from the invitation to the org.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): - with patch.object(auth, 'check_auth', return_value=True): - with patch.object(InvitationService, 'notify_admin', 
return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): + with patch.object(auth, "check_auth", return_value=True): + with patch.object(InvitationService, "notify_admin", return_value=None): user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) + invitation_info = factory_invitation(org_dictionary["id"]) user_invitee = factory_user_model(TestUserInfo.user1) with pytest.raises(BusinessException) as exception: - InvitationService.accept_invitation(None, User(user_invitee), '') + InvitationService.accept_invitation(None, User(user_invitee), "") assert exception.value.code == Error.DATA_NOT_FOUND.name - new_invitation = InvitationService.create_invitation(invitation_info, User(user_invitee), '') + new_invitation = InvitationService.create_invitation(invitation_info, User(user_invitee), "") new_invitation_dict = new_invitation.as_dict() - InvitationService.accept_invitation(new_invitation_dict['id'], User(user_invitee), '') + InvitationService.accept_invitation(new_invitation_dict["id"], User(user_invitee), "") with pytest.raises(BusinessException) as exception: - InvitationService.accept_invitation(new_invitation_dict['id'], User(user_invitee), '') + InvitationService.accept_invitation(new_invitation_dict["id"], User(user_invitee), "") assert exception.value.code == Error.ACTIONED_INVITATION.name with pytest.raises(BusinessException) as exception: - expired_invitation: InvitationModel = InvitationModel \ - .find_invitation_by_id(new_invitation_dict['id']) - expired_invitation.invitation_status = InvitationStatusModel.get_status_by_code('EXPIRED') + expired_invitation: InvitationModel = 
InvitationModel.find_invitation_by_id( + new_invitation_dict["id"] + ) + expired_invitation.invitation_status = InvitationStatusModel.get_status_by_code("EXPIRED") expired_invitation.save() - InvitationService.accept_invitation(expired_invitation.id, User(user_invitee), '') + InvitationService.accept_invitation(expired_invitation.id, User(user_invitee), "") assert exception.value.code == Error.EXPIRED_INVITATION.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_get_invitations_by_org_id(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Find an existing invitation with the provided org id.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): patch_token_info(TestJwtClaims.public_user_role, monkeypatch) user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) org_dictionary = org.as_dict() - org_id = org_dictionary['id'] - invitation_info = factory_invitation(org_dictionary['id']) - InvitationService.create_invitation(invitation_info, User(user), '').as_dict() - invitations: list = InvitationService.get_invitations_for_org(org_id, - status='PENDING', - token_info=TestJwtClaims.public_user_role) + org_id = org_dictionary["id"] + invitation_info = factory_invitation(org_dictionary["id"]) + 
InvitationService.create_invitation(invitation_info, User(user), "").as_dict() + invitations: list = InvitationService.get_invitations_for_org( + org_id, status="PENDING", token_info=TestJwtClaims.public_user_role + ) assert invitations assert len(invitations) == 1 diff --git a/auth-api/tests/unit/services/test_invitation_auth.py b/auth-api/tests/unit/services/test_invitation_auth.py index f63df43eaf..9bad174c3f 100644 --- a/auth-api/tests/unit/services/test_invitation_auth.py +++ b/auth-api/tests/unit/services/test_invitation_auth.py @@ -15,9 +15,9 @@ Test suite to ensure that the Invitation service authentication / login source checks are correct. """ +from unittest import mock from unittest.mock import ANY, patch -import mock import pytest from auth_api.exceptions import BusinessException @@ -31,15 +31,15 @@ from auth_api.services import User from auth_api.utils.enums import AccessType, ActivityAction, InvitationStatus, LoginSource from auth_api.utils.user_context import UserContext, user_context +from tests.conftest import mock_token from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo, TestUserInfo from tests.utilities.factory_utils import factory_invitation, factory_user_model, patch_token_info -from tests.conftest import mock_token @user_context def assert_token_user_context(login_source: str = None, **kwargs): """Assert user_context information with token_info.""" - user_from_context: UserContext = kwargs['user_context'] + user_from_context: UserContext = kwargs["user_context"] assert user_from_context is not None @@ -48,15 +48,15 @@ def assert_token_user_context(login_source: str = None, **kwargs): # When trying to use patch_token_info take note of the token attribute naming as they differ from the user_context # e.g. 
loginSource --> login_source - assert user_from_context.login_source == token_info.get('loginSource') - assert user_from_context.first_name == token_info.get('firstname') - assert user_from_context.last_name == token_info.get('lastname') - assert user_from_context.sub == token_info.get('sub') + assert user_from_context.login_source == token_info.get("loginSource") + assert user_from_context.first_name == token_info.get("firstname") + assert user_from_context.last_name == token_info.get("lastname") + assert user_from_context.sub == token_info.get("sub") # Assert for a specific expected login_source if login_source: assert user_from_context.login_source == login_source - assert token_info.get('loginSource') == login_source + assert token_info.get("loginSource") == login_source def test_token_user_context(session, auth_mock, monkeypatch): @@ -76,22 +76,23 @@ def test_token_user_context(session, auth_mock, monkeypatch): # The sub or idp_userid is usually required to reference an existing user within service logic in queries etc # patch_token_info need to be called before your service call to set the user context / token info you are testing # against - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid, - 'loginSource': LoginSource.BCSC.value}, monkeypatch) + patch_token_info( + {"sub": user.keycloak_guid, "idp_userid": user.idp_userid, "loginSource": LoginSource.BCSC.value}, monkeypatch + ) assert_token_user_context(LoginSource.BCSC.value) # This usage of patching replaces the previous values, only loginSource will be set # Ensure all relevant values are part of your patch payload if you are getting user related errors - patch_token_info({'loginSource': LoginSource.BCEID.value}, monkeypatch) + patch_token_info({"loginSource": LoginSource.BCEID.value}, monkeypatch) assert_token_user_context(LoginSource.BCEID.value) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_change_authentication_subsequent_invites(session, auth_mock, keycloak_mock, monkeypatch): """Assert that changing org authentication method changes new invitation required login source.""" user_with_token = TestUserInfo.user_tester - user_with_token['keycloak_guid'] = TestJwtClaims.tester_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.tester_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.tester_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.tester_role["idp_userid"] inviter_user = factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.tester_role, monkeypatch) @@ -100,48 +101,74 @@ def test_change_authentication_subsequent_invites(session, auth_mock, keycloak_m org_dictionary = org.as_dict() # Org with access type of 'REGULAR' will create an invitation with login source BCSC - assert org_dictionary['access_type'] == AccessType.REGULAR.value + assert org_dictionary["access_type"] == AccessType.REGULAR.value # Change authentication method to BCSC - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - OrgService.update_login_option(org_dictionary['id'], LoginSource.BCSC.value) - mock_alp.assert_called_with(Activity(action=ActivityAction.AUTHENTICATION_METHOD_CHANGE.value, - org_id=ANY, name=ANY, id=ANY, value=LoginSource.BCSC.value)) - - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + OrgService.update_login_option(org_dictionary["id"], LoginSource.BCSC.value) + mock_alp.assert_called_with( + Activity( + action=ActivityAction.AUTHENTICATION_METHOD_CHANGE.value, + org_id=ANY, + name=ANY, + id=ANY, + value=LoginSource.BCSC.value, + ) + ) + + with patch.object(InvitationService, "send_invitation", return_value=None): # Create 
invitation that has BCSC login source - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - invitation_bcsc_info = factory_invitation(org_dictionary['id']) - invitation_bcsc = InvitationService.create_invitation(invitation_bcsc_info, User(inviter_user), '') - mock_alp.assert_called_with(Activity(action=ActivityAction.INVITE_TEAM_MEMBER.value, - org_id=ANY, name=invitation_bcsc_info['recipientEmail'], id=ANY, - value='USER')) + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + invitation_bcsc_info = factory_invitation(org_dictionary["id"]) + invitation_bcsc = InvitationService.create_invitation(invitation_bcsc_info, User(inviter_user), "") + mock_alp.assert_called_with( + Activity( + action=ActivityAction.INVITE_TEAM_MEMBER.value, + org_id=ANY, + name=invitation_bcsc_info["recipientEmail"], + id=ANY, + value="USER", + ) + ) # Should be BCSC login source - invitation_model = InvitationModel.find_invitation_by_id(invitation_bcsc.as_dict()['id']) + invitation_model = InvitationModel.find_invitation_by_id(invitation_bcsc.as_dict()["id"]) assert invitation_model.login_source == LoginSource.BCSC.value # Change authentication method to BCEID - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - OrgService.update_login_option(org_dictionary['id'], LoginSource.BCEID.value) - mock_alp.assert_called_with(Activity(action=ActivityAction.AUTHENTICATION_METHOD_CHANGE.value, - org_id=ANY, name=ANY, id=ANY, value=LoginSource.BCEID.value)) + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + OrgService.update_login_option(org_dictionary["id"], LoginSource.BCEID.value) + mock_alp.assert_called_with( + Activity( + action=ActivityAction.AUTHENTICATION_METHOD_CHANGE.value, + org_id=ANY, + name=ANY, + id=ANY, + value=LoginSource.BCEID.value, + ) + ) # Create another invitation and it should be BCEID login source - with 
patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - invitation_bceid_info = factory_invitation(org_dictionary['id']) - invitation_bceid_info['recipientEmail'] = 'test@test.com' - invitation_bceid = InvitationService.create_invitation(invitation_bceid_info, User(inviter_user), '') - mock_alp.assert_called_with(Activity(action=ActivityAction.INVITE_TEAM_MEMBER.value, - org_id=ANY, name=invitation_bceid_info['recipientEmail'], id=ANY, - value='USER')) + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + invitation_bceid_info = factory_invitation(org_dictionary["id"]) + invitation_bceid_info["recipientEmail"] = "test@test.com" + invitation_bceid = InvitationService.create_invitation(invitation_bceid_info, User(inviter_user), "") + mock_alp.assert_called_with( + Activity( + action=ActivityAction.INVITE_TEAM_MEMBER.value, + org_id=ANY, + name=invitation_bceid_info["recipientEmail"], + id=ANY, + value="USER", + ) + ) # Should be BCEID login source - invitation_model = InvitationModel.find_invitation_by_id(invitation_bceid.as_dict()['id']) + invitation_model = InvitationModel.find_invitation_by_id(invitation_bceid.as_dict()["id"]) assert invitation_model.login_source == LoginSource.BCEID.value -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_change_authentication_non_govm(session, auth_mock, keycloak_mock, monkeypatch): """Assert that non government ministry organization invites can be accepted by different login sources.""" # inviter/invitee user setup @@ -154,75 +181,97 @@ def test_change_authentication_non_govm(session, auth_mock, keycloak_mock, monke org_dictionary = org.as_dict() # Org with access type of 'REGULAR' will create an invitation with login source BCSC - assert org_dictionary['access_type'] == 
AccessType.REGULAR.value + assert org_dictionary["access_type"] == AccessType.REGULAR.value # Confirm that an invitation with BCSC login source can be accepted as a BCSC user - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): # Create invitation with BCSC source - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - invitation_info = factory_invitation(org_dictionary['id']) - invitation = InvitationService.create_invitation(invitation_info, User(inviter_user), '') + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + invitation_info = factory_invitation(org_dictionary["id"]) + invitation = InvitationService.create_invitation(invitation_info, User(inviter_user), "") invitation_dict = invitation.as_dict() - mock_alp.assert_called_with(Activity(action=ActivityAction.INVITE_TEAM_MEMBER.value, - org_id=ANY, name=invitation_info['recipientEmail'], id=ANY, - value='USER')) - - invitation_model = InvitationModel.find_invitation_by_id(invitation_dict['id']) + mock_alp.assert_called_with( + Activity( + action=ActivityAction.INVITE_TEAM_MEMBER.value, + org_id=ANY, + name=invitation_info["recipientEmail"], + id=ANY, + value="USER", + ) + ) + + invitation_model = InvitationModel.find_invitation_by_id(invitation_dict["id"]) assert invitation_model.login_source == LoginSource.BCSC.value assert invitation_model.invitation_status_code == InvitationStatus.PENDING.value - patch_token_info({'sub': invitee_bcsc_user.keycloak_guid, 'idp_userid': invitee_bcsc_user.idp_userid, - 'loginSource': LoginSource.BCSC.value}, monkeypatch) - InvitationService.accept_invitation(invitation_dict['id'], User(invitee_bcsc_user), '') - - invitation_model = InvitationModel.find_invitation_by_id(invitation_dict['id']) + patch_token_info( + { + "sub": invitee_bcsc_user.keycloak_guid, + "idp_userid": 
invitee_bcsc_user.idp_userid, + "loginSource": LoginSource.BCSC.value, + }, + monkeypatch, + ) + InvitationService.accept_invitation(invitation_dict["id"], User(invitee_bcsc_user), "") + + invitation_model = InvitationModel.find_invitation_by_id(invitation_dict["id"]) assert invitation_model.login_source == LoginSource.BCSC.value assert invitation_model.invitation_status_code == InvitationStatus.ACCEPTED.value patch_token_info(TestJwtClaims.tester_role, monkeypatch) - members = MembershipService.get_members_for_org(org_dictionary['id'], - 'PENDING_APPROVAL') + members = MembershipService.get_members_for_org(org_dictionary["id"], "PENDING_APPROVAL") assert members assert len(members) == 1 # Confirm that an invitation with BCSC login source can be accepted as another user login source and # updates the invitation login source based on the accepting user login source patch_token_info(TestJwtClaims.tester_role, monkeypatch) - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): # Create invitation with BCSC login source - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - invitation_info = factory_invitation(org_dictionary['id']) - invitation = InvitationService.create_invitation(invitation_info, User(inviter_user), '') + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + invitation_info = factory_invitation(org_dictionary["id"]) + invitation = InvitationService.create_invitation(invitation_info, User(inviter_user), "") invitation_dict = invitation.as_dict() - mock_alp.assert_called_with(Activity(action=ActivityAction.INVITE_TEAM_MEMBER.value, - org_id=ANY, name=invitation_info['recipientEmail'], id=ANY, - value='USER')) + mock_alp.assert_called_with( + Activity( + action=ActivityAction.INVITE_TEAM_MEMBER.value, + org_id=ANY, + name=invitation_info["recipientEmail"], + id=ANY, + 
value="USER", + ) + ) # Confirm invitation is BCSC as per org data - invitation_model = InvitationModel.find_invitation_by_id(invitation_dict['id']) + invitation_model = InvitationModel.find_invitation_by_id(invitation_dict["id"]) assert invitation_model.login_source == LoginSource.BCSC.value assert invitation_model.invitation_status_code == InvitationStatus.PENDING.value # Accept invitation as a BCEID user - patch_token_info({'sub': invitee_bceid_user.keycloak_guid, 'idp_userid': invitee_bceid_user.idp_userid, - 'loginSource': LoginSource.BCEID.value}, monkeypatch) - InvitationService.accept_invitation(invitation_dict['id'], User(invitee_bceid_user), '') + patch_token_info( + { + "sub": invitee_bceid_user.keycloak_guid, + "idp_userid": invitee_bceid_user.idp_userid, + "loginSource": LoginSource.BCEID.value, + }, + monkeypatch, + ) + InvitationService.accept_invitation(invitation_dict["id"], User(invitee_bceid_user), "") # Confirm invitation login source is updated to BCEID - invitation_model = InvitationModel.find_invitation_by_id(invitation_dict['id']) + invitation_model = InvitationModel.find_invitation_by_id(invitation_dict["id"]) assert invitation_model.login_source == LoginSource.BCEID.value assert invitation_model.invitation_status_code == InvitationStatus.ACCEPTED.value patch_token_info(TestJwtClaims.tester_role, monkeypatch) - members = MembershipService.get_members_for_org(org_dictionary['id'], - 'PENDING_APPROVAL') + members = MembershipService.get_members_for_org(org_dictionary["id"], "PENDING_APPROVAL") assert members assert len(members) == 2 -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_invitation_govm(session, auth_mock, keycloak_mock, monkeypatch): """Assert that government ministry organization invites can be accepted by IDIR only.""" # Users setup @@ -236,54 +285,77 @@ def 
test_invitation_govm(session, auth_mock, keycloak_mock, monkeypatch): org_dictionary = org.as_dict() # Org with access type is for government ministry - assert org_dictionary['access_type'] == AccessType.GOVM.value + assert org_dictionary["access_type"] == AccessType.GOVM.value # Confirm that an invitation with BCSC login source can be accepted as a BCSC user - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): # Create invitation with BCSC source - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - invitation_info = factory_invitation(org_dictionary['id']) - invitation = InvitationService.create_invitation(invitation_info, User(staff_user), '') + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + invitation_info = factory_invitation(org_dictionary["id"]) + invitation = InvitationService.create_invitation(invitation_info, User(staff_user), "") invitation_dict = invitation.as_dict() - mock_alp.assert_called_with(Activity(action=ActivityAction.INVITE_TEAM_MEMBER.value, - org_id=ANY, name=invitation_info['recipientEmail'], id=ANY, - value='USER')) - - invitation_model = InvitationModel.find_invitation_by_id(invitation_dict['id']) + mock_alp.assert_called_with( + Activity( + action=ActivityAction.INVITE_TEAM_MEMBER.value, + org_id=ANY, + name=invitation_info["recipientEmail"], + id=ANY, + value="USER", + ) + ) + + invitation_model = InvitationModel.find_invitation_by_id(invitation_dict["id"]) assert invitation_model.login_source == LoginSource.STAFF.value assert invitation_model.invitation_status_code == InvitationStatus.PENDING.value # Accept invitation as a BCEID user should raise business exception - patch_token_info({'sub': invitee_bceid_user.keycloak_guid, 'idp_userid': invitee_bceid_user.idp_userid, - 'loginSource': LoginSource.BCEID.value}, monkeypatch) + patch_token_info( + { 
+ "sub": invitee_bceid_user.keycloak_guid, + "idp_userid": invitee_bceid_user.idp_userid, + "loginSource": LoginSource.BCEID.value, + }, + monkeypatch, + ) with pytest.raises(BusinessException) as exception: - InvitationService.accept_invitation(invitation_dict['id'], User(invitee_bceid_user), '') + InvitationService.accept_invitation(invitation_dict["id"], User(invitee_bceid_user), "") assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name # Accept invitation as a BCSC user should raise business exception - patch_token_info({'sub': invitee_bcsc_user.keycloak_guid, 'idp_userid': invitee_bcsc_user.idp_userid, - 'loginSource': LoginSource.BCSC.value}, monkeypatch) + patch_token_info( + { + "sub": invitee_bcsc_user.keycloak_guid, + "idp_userid": invitee_bcsc_user.idp_userid, + "loginSource": LoginSource.BCSC.value, + }, + monkeypatch, + ) with pytest.raises(BusinessException) as exception: - InvitationService.accept_invitation(invitation_dict['id'], User(invitee_bcsc_user), '') + InvitationService.accept_invitation(invitation_dict["id"], User(invitee_bcsc_user), "") assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name # Accept invitation as a staff user should succeed - patch_token_info({'sub': staff_invitee_user.keycloak_guid, 'idp_userid': staff_invitee_user.idp_userid, - 'loginSource': LoginSource.STAFF.value}, monkeypatch) - InvitationService.accept_invitation(invitation_dict['id'], User(staff_invitee_user), '') - - invitation_model = InvitationModel.find_invitation_by_id(invitation_dict['id']) + patch_token_info( + { + "sub": staff_invitee_user.keycloak_guid, + "idp_userid": staff_invitee_user.idp_userid, + "loginSource": LoginSource.STAFF.value, + }, + monkeypatch, + ) + InvitationService.accept_invitation(invitation_dict["id"], User(staff_invitee_user), "") + + invitation_model = InvitationModel.find_invitation_by_id(invitation_dict["id"]) assert invitation_model.login_source == LoginSource.STAFF.value assert 
invitation_model.invitation_status_code == InvitationStatus.ACCEPTED.value - members = MembershipService.get_members_for_org(org_dictionary['id'], - 'ACTIVE') + members = MembershipService.get_members_for_org(org_dictionary["id"], "ACTIVE") assert members assert len(members) == 1 -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_invitation_anonymous(session, auth_mock, keycloak_mock, monkeypatch): """Assert that non government ministry organization invites can be accepted by different login sources.""" # inviter/invitee user setup @@ -295,34 +367,40 @@ def test_invitation_anonymous(session, auth_mock, keycloak_mock, monkeypatch): org_dictionary = org.as_dict() # Org with access type of 'REGULAR' will create an invitation with login source BCSC - assert org_dictionary['access_type'] == AccessType.REGULAR.value + assert org_dictionary["access_type"] == AccessType.REGULAR.value # Confirm that an invitation with BCSC login source can be accepted as a BCSC user - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): # Create invitation with BCSC source - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - invitation_info = factory_invitation(org_dictionary['id']) - invitation = InvitationService.create_invitation(invitation_info, User(inviter_user), '') + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + invitation_info = factory_invitation(org_dictionary["id"]) + invitation = InvitationService.create_invitation(invitation_info, User(inviter_user), "") invitation_dict = invitation.as_dict() - mock_alp.assert_called_with(Activity(action=ActivityAction.INVITE_TEAM_MEMBER.value, - org_id=ANY, 
name=invitation_info['recipientEmail'], id=ANY, - value='USER')) - - invitation_model = InvitationModel.find_invitation_by_id(invitation_dict['id']) + mock_alp.assert_called_with( + Activity( + action=ActivityAction.INVITE_TEAM_MEMBER.value, + org_id=ANY, + name=invitation_info["recipientEmail"], + id=ANY, + value="USER", + ) + ) + + invitation_model = InvitationModel.find_invitation_by_id(invitation_dict["id"]) assert invitation_model.login_source == LoginSource.BCSC.value assert invitation_model.invitation_status_code == InvitationStatus.PENDING.value - patch_token_info({'sub': invitee_bcsc_user.keycloak_guid, - 'idp_userid': invitee_bcsc_user.idp_userid}, monkeypatch) - InvitationService.accept_invitation(invitation_dict['id'], User(invitee_bcsc_user), '') + patch_token_info( + {"sub": invitee_bcsc_user.keycloak_guid, "idp_userid": invitee_bcsc_user.idp_userid}, monkeypatch + ) + InvitationService.accept_invitation(invitation_dict["id"], User(invitee_bcsc_user), "") - invitation_model = InvitationModel.find_invitation_by_id(invitation_dict['id']) + invitation_model = InvitationModel.find_invitation_by_id(invitation_dict["id"]) assert invitation_model.login_source is None assert invitation_model.invitation_status_code == InvitationStatus.ACCEPTED.value patch_token_info(TestJwtClaims.tester_role, monkeypatch) - members = MembershipService.get_members_for_org(org_dictionary['id'], - 'PENDING_APPROVAL') + members = MembershipService.get_members_for_org(org_dictionary["id"], "PENDING_APPROVAL") assert members assert len(members) == 1 diff --git a/auth-api/tests/unit/services/test_keycloak.py b/auth-api/tests/unit/services/test_keycloak.py index 42d09a60e2..c67b927b37 100644 --- a/auth-api/tests/unit/services/test_keycloak.py +++ b/auth-api/tests/unit/services/test_keycloak.py @@ -18,6 +18,7 @@ """ import pytest + from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models.dataclass import 
KeycloakGroupSubscription @@ -67,7 +68,7 @@ def test_keycloak_get_token(session): KEYCLOAK_SERVICE.add_user(request, return_if_exists=True) response = KEYCLOAK_SERVICE.get_token(request.user_name, request.password) - assert response.get('access_token') is not None + assert response.get("access_token") is not None KEYCLOAK_SERVICE.delete_user_by_username(request.user_name) @@ -76,7 +77,7 @@ def test_keycloak_get_token_user_not_exist(session): response = None # with app.app_context(): try: - response = KEYCLOAK_SERVICE.get_token('test', 'test') + response = KEYCLOAK_SERVICE.get_token("test", "test") except BusinessException as err: assert err.code == Error.INVALID_USER_CREDENTIALS.name assert response is None @@ -111,25 +112,27 @@ def test_join_users_group(app, session, monkeypatch): user = KEYCLOAK_SERVICE.get_user_by_username(request.user_name) user_id = user.id - patch_token_info({'sub': user_id, 'loginSource': LoginSource.BCSC.value, - 'realm_access': {'roles': []}}, monkeypatch) + patch_token_info( + {"sub": user_id, "loginSource": LoginSource.BCSC.value, "realm_access": {"roles": []}}, monkeypatch + ) KEYCLOAK_SERVICE.join_users_group() # Get the user groups and verify the public_users group is in the list user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_PUBLIC_USERS in groups # BCROS - patch_token_info({'sub': user_id, 'loginSource': LoginSource.BCROS.value, 'realm_access': {'roles': []}}, - monkeypatch) + patch_token_info( + {"sub": user_id, "loginSource": LoginSource.BCROS.value, "realm_access": {"roles": []}}, monkeypatch + ) KEYCLOAK_SERVICE.join_users_group() # Get the user groups and verify the public_users group is in the list user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert 
GROUP_ANONYMOUS_USERS in groups @@ -140,14 +143,15 @@ def test_join_users_group_for_staff_users(session, app, monkeypatch): KEYCLOAK_SERVICE.add_user(request, return_if_exists=True) user = KEYCLOAK_SERVICE.get_user_by_username(request.user_name) user_id = user.id - patch_token_info({'sub': user_id, 'loginSource': LoginSource.STAFF.value, 'realm_access': {'roles': []}}, - monkeypatch) + patch_token_info( + {"sub": user_id, "loginSource": LoginSource.STAFF.value, "realm_access": {"roles": []}}, monkeypatch + ) KEYCLOAK_SERVICE.join_users_group() # Get the user groups and verify the public_users group is in the list user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_PUBLIC_USERS not in groups @@ -159,14 +163,15 @@ def test_join_users_group_for_existing_users(session, monkeypatch): user_id = user.id patch_token_info( - {'sub': user_id, 'loginSource': LoginSource.BCSC.value, 'realm_access': {'roles': [Role.EDITOR.value]}}, - monkeypatch) + {"sub": user_id, "loginSource": LoginSource.BCSC.value, "realm_access": {"roles": [Role.EDITOR.value]}}, + monkeypatch, + ) KEYCLOAK_SERVICE.join_users_group() # Get the user groups and verify the public_users group is in the list user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_PUBLIC_USERS in groups @@ -181,7 +186,7 @@ def test_join_account_holders_group(session): user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS in groups @@ -193,14 +198,14 @@ def test_join_account_holders_group_from_token(session, monkeypatch): user_id = user.id # Patch token info - patch_token_info({'sub': user_id}, monkeypatch) + 
patch_token_info({"sub": user_id}, monkeypatch) KEYCLOAK_SERVICE.join_account_holders_group() # Get the user groups and verify the public_users group is in the list user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS in groups @@ -215,14 +220,14 @@ def test_remove_from_account_holders_group(session, monkeypatch): user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS in groups patch_token_info(TestJwtClaims.gov_account_holder_user, monkeypatch) KEYCLOAK_SERVICE.remove_from_account_holders_group(keycloak_guid=user_id) user_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user_id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS not in groups @@ -241,15 +246,28 @@ def test_add_remove_group_bulk(session): """Assert that the users' groups can be updated in bulk.""" user1 = KEYCLOAK_SERVICE.add_user(KeycloakScenario.create_user_request(), return_if_exists=True) user2 = KEYCLOAK_SERVICE.add_user(KeycloakScenario.create_user_request(), return_if_exists=True) - kgs = [KeycloakGroupSubscription(user_guid=user1.id, product_code='ppr', group_name='ppr', - group_action=KeycloakGroupActions.ADD_TO_GROUP.value), - KeycloakGroupSubscription(user_guid=user2.id, product_code='bca', group_name='bca', - group_action=KeycloakGroupActions.ADD_TO_GROUP.value), - KeycloakGroupSubscription(user_guid=user2.id, product_code='bca', group_name='bca', - group_action=KeycloakGroupActions.REMOVE_FROM_GROUP.value), - ] + kgs = [ + KeycloakGroupSubscription( + user_guid=user1.id, + product_code="ppr", + group_name="ppr", + group_action=KeycloakGroupActions.ADD_TO_GROUP.value, + ), + KeycloakGroupSubscription( + 
user_guid=user2.id, + product_code="bca", + group_name="bca", + group_action=KeycloakGroupActions.ADD_TO_GROUP.value, + ), + KeycloakGroupSubscription( + user_guid=user2.id, + product_code="bca", + group_name="bca", + group_action=KeycloakGroupActions.REMOVE_FROM_GROUP.value, + ), + ] KeycloakService.add_or_remove_product_keycloak_groups(kgs) user1_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user1.id) user2_groups = KEYCLOAK_SERVICE.get_user_groups(user_id=user2.id) - assert 'ppr' in ['ppr' for user_group in user1_groups if user_group.get('name') == 'ppr'] - assert 'bca' not in ['bca' for user_group in user2_groups if user_group.get('name') == 'bca'] + assert "ppr" in ["ppr" for user_group in user1_groups if user_group.get("name") == "ppr"] + assert "bca" not in ["bca" for user_group in user2_groups if user_group.get("name") == "bca"] diff --git a/auth-api/tests/unit/services/test_membership.py b/auth-api/tests/unit/services/test_membership.py index d7911e1d12..c60a344796 100644 --- a/auth-api/tests/unit/services/test_membership.py +++ b/auth-api/tests/unit/services/test_membership.py @@ -16,8 +16,9 @@ Test suite to ensure that the Membership service routines are working as expected. 
""" -import mock +from unittest import mock from unittest.mock import ANY, patch + from auth_api.models import MembershipStatusCode as MembershipStatusCodeModel from auth_api.models.dataclass import Activity from auth_api.services import ActivityLogPublisher @@ -26,12 +27,12 @@ from auth_api.services.keycloak import KeycloakService from auth_api.utils.constants import GROUP_ACCOUNT_HOLDERS from auth_api.utils.enums import ActivityAction, ProductCode, Status +from tests.conftest import mock_token from tests.utilities.factory_scenarios import KeycloakScenario, TestOrgInfo, TestUserInfo from tests.utilities.factory_utils import factory_membership_model, factory_product_model, factory_user_model -from tests.conftest import mock_token -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_accept_invite_adds_group_to_the_user(session, monkeypatch): # pylint:disable=unused-argument """Assert that accepting an invite adds group to the user.""" # Create a user in keycloak @@ -44,18 +45,14 @@ def test_accept_invite_adds_group_to_the_user(session, monkeypatch): # pylint:d # Patch token info def token_info(): # pylint: disable=unused-argument; mocks of library methods return { - 'sub': str(kc_user.id), - 'idp_userid': str(kc_user.id), - 'username': 'public_user', - 'realm_access': { - 'roles': [ - 'edit' - ] - }, - 'product_code': ProductCode.BUSINESS.value + "sub": str(kc_user.id), + "idp_userid": str(kc_user.id), + "username": "public_user", + "realm_access": {"roles": ["edit"]}, + "product_code": ProductCode.BUSINESS.value, } - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', token_info) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", token_info) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) # Create another user request = 
KeycloakScenario.create_user_request() @@ -64,25 +61,25 @@ def token_info(): # pylint: disable=unused-argument; mocks of library methods user2 = factory_user_model(TestUserInfo.get_user_with_kc_guid(kc_guid=kc_user2.id)) # Add a membership to the user for the org created - factory_membership_model(user2.id, org.as_dict().get('id'), member_type='COORDINATOR', member_status=4) + factory_membership_model(user2.id, org.as_dict().get("id"), member_type="COORDINATOR", member_status=4) # Add a product to org - factory_product_model(org.as_dict().get('id'), product_code=ProductCode.BUSINESS.value) + factory_product_model(org.as_dict().get("id"), product_code=ProductCode.BUSINESS.value) # Find the membership and update to ACTIVE - membership = MembershipService.get_membership_for_org_and_user(org.as_dict().get('id'), user2.id) + membership = MembershipService.get_membership_for_org_and_user(org.as_dict().get("id"), user2.id) active_membership_status = MembershipStatusCodeModel.get_membership_status_by_code(Status.ACTIVE.name) - updated_fields = {'membership_status': active_membership_status} + updated_fields = {"membership_status": active_membership_status} MembershipService(membership).update_membership(updated_fields=updated_fields, token_info=token_info()) user_groups = keycloak_service.get_user_groups(user_id=kc_user2.id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS in groups -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_remove_member_removes_group_to_the_user(session, monkeypatch): # pylint:disable=unused-argument """Assert that accepting an invite adds group to the user.""" # Create a user in keycloak @@ -95,19 +92,14 @@ def test_remove_member_removes_group_to_the_user(session, monkeypatch): # pylin # 
Patch token info def token_info(): # pylint: disable=unused-argument; mocks of library methods return { - 'sub': str(kc_user.id), - 'idp_userid': str(kc_user.id), - 'username': 'public_user', - 'realm_access': { - 'roles': [ - 'edit', - 'account_holder' - ] - }, - 'product_code': ProductCode.BUSINESS.value + "sub": str(kc_user.id), + "idp_userid": str(kc_user.id), + "username": "public_user", + "realm_access": {"roles": ["edit", "account_holder"]}, + "product_code": ProductCode.BUSINESS.value, } - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', token_info) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", token_info) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) # Create another user request = KeycloakScenario.create_user_request() @@ -116,51 +108,51 @@ def token_info(): # pylint: disable=unused-argument; mocks of library methods user2 = factory_user_model(TestUserInfo.get_user_with_kc_guid(kc_guid=kc_user2.id)) # Add a membership to the user for the org created - factory_membership_model(user2.id, org.as_dict().get('id'), member_type='COORDINATOR', member_status=4) + factory_membership_model(user2.id, org.as_dict().get("id"), member_type="COORDINATOR", member_status=4) # Add a product to org - factory_product_model(org.as_dict().get('id'), product_code=ProductCode.BUSINESS.value) + factory_product_model(org.as_dict().get("id"), product_code=ProductCode.BUSINESS.value) # Find the membership and update to ACTIVE - membership = MembershipService.get_membership_for_org_and_user(org.as_dict().get('id'), user2.id) + membership = MembershipService.get_membership_for_org_and_user(org.as_dict().get("id"), user2.id) active_membership_status = MembershipStatusCodeModel.get_membership_status_by_code(Status.ACTIVE.name) - updated_fields = {'membership_status': active_membership_status} - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + updated_fields = {"membership_status": 
active_membership_status} + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: MembershipService(membership).update_membership(updated_fields=updated_fields, token_info=token_info()) - mock_alp.assert_called_with(Activity(action=ActivityAction.APPROVE_TEAM_MEMBER.value, - org_id=ANY, name=ANY, id=ANY, - value=ANY)) + mock_alp.assert_called_with( + Activity(action=ActivityAction.APPROVE_TEAM_MEMBER.value, org_id=ANY, name=ANY, id=ANY, value=ANY) + ) user_groups = keycloak_service.get_user_groups(user_id=kc_user2.id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS in groups # Deactivate Membership - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: MembershipService(membership).deactivate_membership(token_info=token_info()) - mock_alp.assert_called_with(Activity(action=ActivityAction.REMOVE_TEAM_MEMBER.value, - org_id=ANY, name=ANY, id=ANY, - value=ANY)) + mock_alp.assert_called_with( + Activity(action=ActivityAction.REMOVE_TEAM_MEMBER.value, org_id=ANY, name=ANY, id=ANY, value=ANY) + ) # ACTIVE active_membership_status = MembershipStatusCodeModel.get_membership_status_by_code(Status.ACTIVE.name) - updated_fields = {'membership_status': active_membership_status} + updated_fields = {"membership_status": active_membership_status} MembershipService(membership).update_membership(updated_fields=updated_fields, token_info=token_info()) # Find the membership and update to INACTIVE active_membership_status = MembershipStatusCodeModel.get_membership_status_by_code(Status.INACTIVE.name) - updated_fields = {'membership_status': active_membership_status} - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + updated_fields = {"membership_status": active_membership_status} + 
with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: MembershipService(membership).update_membership(updated_fields=updated_fields, token_info=token_info()) - mock_alp.assert_called_with(Activity(action=ActivityAction.REMOVE_TEAM_MEMBER.value, - org_id=ANY, name=ANY, id=ANY, - value=ANY)) + mock_alp.assert_called_with( + Activity(action=ActivityAction.REMOVE_TEAM_MEMBER.value, org_id=ANY, name=ANY, id=ANY, value=ANY) + ) user_groups = keycloak_service.get_user_groups(user_id=kc_user2.id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS not in groups MembershipService(membership).deactivate_membership() diff --git a/auth-api/tests/unit/services/test_minio.py b/auth-api/tests/unit/services/test_minio.py index 311ed1487b..c79b60991e 100644 --- a/auth-api/tests/unit/services/test_minio.py +++ b/auth-api/tests/unit/services/test_minio.py @@ -25,26 +25,26 @@ def test_create_signed_put_url(session): # pylint:disable=unused-argument """Assert that the a PUT url can be pre-signed.""" - file_name = 'affidavit-test.pdf' - signed_url = MinioService.create_signed_put_url(file_name, prefix_key='Test') + file_name = "affidavit-test.pdf" + signed_url = MinioService.create_signed_put_url(file_name, prefix_key="Test") assert signed_url - assert signed_url.get('key').startswith('Test/') - assert signed_url.get('key').endswith('.pdf') + assert signed_url.get("key").startswith("Test/") + assert signed_url.get("key").endswith(".pdf") def test_create_signed_get_url(session, tmpdir): # pylint:disable=unused-argument """Assert that a GET url can be pre-signed.""" - d = tmpdir.mkdir('subdir') - fh = d.join('test-file.txt') - fh.write('Test File') + d = tmpdir.mkdir("subdir") + fh = d.join("test-file.txt") + fh.write("Test File") filename = os.path.join(fh.dirname, fh.basename) - test_file = open(filename, 'rb') - files = {'upload_file': test_file} + test_file = 
open(filename, "rb") + files = {"upload_file": test_file} file_name = fh.basename - signed_url = MinioService.create_signed_put_url(file_name, prefix_key='Test') - key = signed_url.get('key') - pre_signed_put = signed_url.get('preSignedUrl') + signed_url = MinioService.create_signed_put_url(file_name, prefix_key="Test") + key = signed_url.get("key") + pre_signed_put = signed_url.get("preSignedUrl") requests.put(pre_signed_put, files=files) pre_signed_get = MinioService.create_signed_get_url(key) diff --git a/auth-api/tests/unit/services/test_org.py b/auth-api/tests/unit/services/test_org.py index e36f54855b..13bb24f123 100644 --- a/auth-api/tests/unit/services/test_org.py +++ b/auth-api/tests/unit/services/test_org.py @@ -16,9 +16,9 @@ Test suite to ensure that the Org service routines are working as expected. """ from http import HTTPStatus +from unittest import mock from unittest.mock import ANY, Mock, patch -import mock import pytest from requests import Response from werkzeug.exceptions import HTTPException @@ -44,17 +44,50 @@ from auth_api.services.rest_service import RestService from auth_api.utils.constants import GROUP_ACCOUNT_HOLDERS from auth_api.utils.enums import ( - AccessType, ActivityAction, LoginSource, OrgStatus, OrgType, PatchActions, PaymentMethod, ProductSubscriptionStatus, - SuspensionReasonCode, TaskAction, TaskRelationshipStatus, TaskRelationshipType, TaskStatus) + AccessType, + ActivityAction, + LoginSource, + OrgStatus, + OrgType, + PatchActions, + PaymentMethod, + ProductSubscriptionStatus, + SuspensionReasonCode, + TaskAction, + TaskRelationshipStatus, + TaskRelationshipType, + TaskStatus, +) +from tests.conftest import mock_token from tests.utilities.factory_scenarios import ( - KeycloakScenario, TestAffidavit, TestBCOLInfo, TestContactInfo, TestEntityInfo, TestJwtClaims, TestOrgInfo, - TestOrgProductsInfo, TestOrgTypeInfo, TestPaymentMethodInfo, TestUserInfo) + KeycloakScenario, + TestAffidavit, + TestBCOLInfo, + TestContactInfo, + 
TestEntityInfo, + TestJwtClaims, + TestOrgInfo, + TestOrgProductsInfo, + TestOrgTypeInfo, + TestPaymentMethodInfo, + TestUserInfo, +) from tests.utilities.factory_utils import ( - convert_org_to_staff_org, factory_contact_model, factory_entity_model, factory_entity_service, factory_invitation, - factory_membership_model, factory_org_model, factory_org_service, factory_user_model, - factory_user_model_with_contact, patch_pay_account_delete, patch_pay_account_post, patch_pay_account_put, - patch_token_info) -from tests.conftest import mock_token + convert_org_to_staff_org, + factory_contact_model, + factory_entity_model, + factory_entity_service, + factory_invitation, + factory_membership_model, + factory_org_model, + factory_org_service, + factory_user_model, + factory_user_model_with_contact, + patch_pay_account_delete, + patch_pay_account_post, + patch_pay_account_put, + patch_token_info, +) # noqa: I005 @@ -65,91 +98,92 @@ def test_as_dict(session): # pylint:disable=unused-argument dictionary = org.as_dict() assert dictionary - assert dictionary['name'] == TestOrgInfo.org1['name'] + assert dictionary["name"] == TestOrgInfo.org1["name"] -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" user = factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] + assert dictionary["name"] == TestOrgInfo.org1["name"] 
-@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_products(session, keycloak_mock, monkeypatch): """Assert that an Org with products can be created.""" user = factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: org = OrgService.create_org(TestOrgInfo.org_with_products, user_id=user.id) - mock_alp.assert_called_with(Activity(action=ActivityAction.ADD_PRODUCT_AND_SERVICE.value, - org_id=ANY, value=ANY, id=ANY, name='Business Registry & Name Request')) + mock_alp.assert_called_with( + Activity( + action=ActivityAction.ADD_PRODUCT_AND_SERVICE.value, + org_id=ANY, + value=ANY, + id=ANY, + name="Business Registry & Name Request", + ) + ) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_with_products['name'] + assert dictionary["name"] == TestOrgInfo.org_with_products["name"] -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_basic_org_assert_pay_request_is_correct(session, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_basic_org_assert_pay_request_is_correct( + session, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that while org creation , pay-api gets called with proper data for basic accounts.""" user = factory_user_model() - with patch.object(RestService, 'post') as mock_post: - 
patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + with patch.object(RestService, "post") as mock_post: + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] + assert dictionary["name"] == TestOrgInfo.org1["name"] mock_post.assert_called() - actual_data = mock_post.call_args.kwargs.get('data') + actual_data = mock_post.call_args.kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': dictionary.get('name'), - 'branchName': '', - 'paymentInfo': { - 'methodOfPayment': OrgService._get_default_payment_method_for_creditcard() - } - + "accountId": dictionary.get("id"), + "accountName": dictionary.get("name"), + "branchName": "", + "paymentInfo": {"methodOfPayment": OrgService._get_default_payment_method_for_creditcard()}, } assert expected_data == actual_data -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_pay_request_is_correct_with_branch_name(session, - keycloak_mock, monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_pay_request_is_correct_with_branch_name(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that while org creation , pay-api gets called with proper data for basic accounts.""" user = factory_user_model() - with patch.object(RestService, 'post') as mock_post: - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + with patch.object(RestService, "post") as mock_post: + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org_branch_name, user_id=user.id) assert org 
dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_branch_name['name'] + assert dictionary["name"] == TestOrgInfo.org_branch_name["name"] mock_post.assert_called() - actual_data = mock_post.call_args.kwargs.get('data') + actual_data = mock_post.call_args.kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': f"{dictionary.get('name')}-{TestOrgInfo.org_branch_name['branchName']}", - 'branchName': TestOrgInfo.org_branch_name['branchName'], - 'paymentInfo': { - 'methodOfPayment': OrgService._get_default_payment_method_for_creditcard() - } - + "accountId": dictionary.get("id"), + "accountName": f"{dictionary.get('name')}-{TestOrgInfo.org_branch_name['branchName']}", + "branchName": TestOrgInfo.org_branch_name["branchName"], + "paymentInfo": {"methodOfPayment": OrgService._get_default_payment_method_for_creditcard()}, } assert expected_data == actual_data -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_update_basic_org_assert_pay_request_activity(session, keycloak_mock, monkeypatch): """Assert that while org payment update touches activity log.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) # Have to patch this because the pay spec is wrong and returns 201, not 202 or 200. 
patch_pay_account_post(monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) @@ -159,40 +193,44 @@ def test_update_basic_org_assert_pay_request_activity(session, keycloak_mock, mo # Have to patch this because the pay spec is wrong and returns 201, not 202 or 200. patch_pay_account_put(monkeypatch) - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: org = OrgService.update_org(org, new_payment_method) - mock_alp.assert_called_with(Activity(action=ActivityAction.PAYMENT_INFO_CHANGE.value, - org_id=ANY, name=ANY, id=ANY, - value=PaymentMethod.ONLINE_BANKING.value)) - - -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_update_basic_org_assert_pay_request_is_correct(session, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument + mock_alp.assert_called_with( + Activity( + action=ActivityAction.PAYMENT_INFO_CHANGE.value, + org_id=ANY, + name=ANY, + id=ANY, + value=PaymentMethod.ONLINE_BANKING.value, + ) + ) + + +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_update_basic_org_assert_pay_request_is_correct( + session, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that while org updation , pay-api gets called with proper data for basic accounts.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) - with 
patch.object(RestService, 'put') as mock_put: + with patch.object(RestService, "put") as mock_put: new_payment_method = TestPaymentMethodInfo.get_payment_method_input(PaymentMethod.ONLINE_BANKING) patch_token_info(TestJwtClaims.public_user_role, monkeypatch) org = OrgService.update_org(org, new_payment_method) assert org dictionary = org.as_dict() mock_put.assert_called() - actual_data = mock_put.call_args.kwargs.get('data') + actual_data = mock_put.call_args.kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': dictionary.get('name'), - 'branchName': '', - 'paymentInfo': { - 'methodOfPayment': PaymentMethod.ONLINE_BANKING.value - } - + "accountId": dictionary.get("id"), + "accountName": dictionary.get("name"), + "branchName": "", + "paymentInfo": {"methodOfPayment": PaymentMethod.ONLINE_BANKING.value}, } - assert expected_data == actual_data, 'updating to Online Banking works.' + assert expected_data == actual_data, "updating to Online Banking works." new_payment_method = TestPaymentMethodInfo.get_payment_method_input(PaymentMethod.DIRECT_PAY) patch_token_info(TestJwtClaims.public_user_role, monkeypatch) @@ -200,307 +238,318 @@ def test_update_basic_org_assert_pay_request_is_correct(session, keycloak_mock, assert org dictionary = org.as_dict() mock_put.assert_called() - actual_data = mock_put.call_args.kwargs.get('data') + actual_data = mock_put.call_args.kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': dictionary.get('name'), - 'branchName': '', - 'paymentInfo': { - 'methodOfPayment': PaymentMethod.DIRECT_PAY.value - } - + "accountId": dictionary.get("id"), + "accountName": dictionary.get("name"), + "branchName": "", + "paymentInfo": {"methodOfPayment": PaymentMethod.DIRECT_PAY.value}, } - assert expected_data == actual_data, 'updating bank to Credit Card works.' + assert expected_data == actual_data, "updating bank to Credit Card works." 
-@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_basic_org_assert_pay_request_is_correct_online_banking(session, - keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_basic_org_assert_pay_request_is_correct_online_banking( + session, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that while org creation , pay-api gets called with proper data for basic accounts.""" user = factory_user_model() - with patch.object(RestService, 'post') as mock_post: - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + with patch.object(RestService, "post") as mock_post: + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org_onlinebanking, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] + assert dictionary["name"] == TestOrgInfo.org1["name"] mock_post.assert_called() - actual_data = mock_post.call_args.kwargs.get('data') + actual_data = mock_post.call_args.kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': dictionary.get('name'), - 'branchName': '', - 'paymentInfo': { - 'methodOfPayment': PaymentMethod.ONLINE_BANKING.value - } - + "accountId": dictionary.get("id"), + "accountName": dictionary.get("name"), + "branchName": "", + "paymentInfo": {"methodOfPayment": PaymentMethod.ONLINE_BANKING.value}, } assert expected_data == actual_data -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_basic_org_assert_pay_request_is_govm(session, - keycloak_mock, staff_user_mock, - monkeypatch): # pylint:disable=unused-argument 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_basic_org_assert_pay_request_is_govm( + session, keycloak_mock, staff_user_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that while org creation , pay-api gets called with proper data for basic accounts.""" user = factory_user_model() - token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value, - roles=['create_accounts']) - with patch.object(RestService, 'post') as mock_post: + token_info = TestJwtClaims.get_test_user( + sub=user.keycloak_guid, source=LoginSource.STAFF.value, roles=["create_accounts"] + ) + with patch.object(RestService, "post") as mock_post: patch_token_info(token_info, monkeypatch) org = OrgService.create_org(TestOrgInfo.org_govm, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_govm['name'] + assert dictionary["name"] == TestOrgInfo.org_govm["name"] mock_post.assert_called() - actual_data = mock_post.call_args.kwargs.get('data') + actual_data = mock_post.call_args.kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': dictionary.get('name') + '-' + dictionary.get('branch_name'), - 'branchName': dictionary.get('branch_name'), - 'paymentInfo': { - 'methodOfPayment': PaymentMethod.EJV.value - } - + "accountId": dictionary.get("id"), + "accountName": dictionary.get("name") + "-" + dictionary.get("branch_name"), + "branchName": dictionary.get("branch_name"), + "paymentInfo": {"methodOfPayment": PaymentMethod.EJV.value}, } assert expected_data == actual_data -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_put_basic_org_assert_pay_request_is_govm(session, - keycloak_mock, staff_user_mock, - monkeypatch): # pylint:disable=unused-argument 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_put_basic_org_assert_pay_request_is_govm( + session, keycloak_mock, staff_user_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that while org creation , pay-api gets called with proper data for basic accounts.""" user = factory_user_model() - staff_token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value, - roles=['create_accounts'], idp_userid=user.idp_userid) + staff_token_info = TestJwtClaims.get_test_user( + sub=user.keycloak_guid, source=LoginSource.STAFF.value, roles=["create_accounts"], idp_userid=user.idp_userid + ) user2 = factory_user_model(TestUserInfo.user2) - public_token_info = TestJwtClaims.get_test_user(sub=user2.keycloak_guid, source=LoginSource.STAFF.value, - roles=['gov_account_user'], idp_userid=user2.idp_userid) + public_token_info = TestJwtClaims.get_test_user( + sub=user2.keycloak_guid, source=LoginSource.STAFF.value, roles=["gov_account_user"], idp_userid=user2.idp_userid + ) patch_token_info(staff_token_info, monkeypatch) org: OrgService = OrgService.create_org(TestOrgInfo.org_govm, user_id=user.id) assert org - with patch.object(RestService, 'put') as mock_post: + with patch.object(RestService, "put") as mock_post: payment_details = TestPaymentMethodInfo.get_payment_method_input_with_revenue() - org_body = { - 'mailingAddress': TestOrgInfo.get_mailing_address(), - **payment_details - - } + org_body = {"mailingAddress": TestOrgInfo.get_mailing_address(), **payment_details} patch_token_info(public_token_info, monkeypatch) - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: org = OrgService.update_org(org, org_body) - mock_alp.assert_called_with(Activity(action=ActivityAction.ACCOUNT_ADDRESS_CHANGE.value, - org_id=ANY, name=ANY, id=ANY, - 
value=ANY)) + mock_alp.assert_called_with( + Activity(action=ActivityAction.ACCOUNT_ADDRESS_CHANGE.value, org_id=ANY, name=ANY, id=ANY, value=ANY) + ) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_govm['name'] + assert dictionary["name"] == TestOrgInfo.org_govm["name"] mock_post.assert_called() - actual_data = mock_post.call_args.kwargs.get('data') + actual_data = mock_post.call_args.kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': dictionary.get('name') + '-' + dictionary.get('branch_name'), - 'branchName': dictionary.get('branch_name'), - 'paymentInfo': { - 'methodOfPayment': 'EJV', - 'revenueAccount': payment_details.get('paymentInfo').get('revenueAccount') + "accountId": dictionary.get("id"), + "accountName": dictionary.get("name") + "-" + dictionary.get("branch_name"), + "branchName": dictionary.get("branch_name"), + "paymentInfo": { + "methodOfPayment": "EJV", + "revenueAccount": payment_details.get("paymentInfo").get("revenueAccount"), }, - 'contactInfo': TestOrgInfo.get_mailing_address() - + "contactInfo": TestOrgInfo.get_mailing_address(), } assert expected_data == actual_data -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_premium_org_assert_pay_request_is_correct(session, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_premium_org_assert_pay_request_is_correct( + session, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that while org creation , pay-api gets called with proper data for basic accounts.""" bcol_response = Mock(spec=Response) - bcol_response.json.return_value = {'userId': 'PB25020', 'accountNumber': '180670', - 'orgName': 'BC ONLINE TECHNICAL TEAM DEVL'} + bcol_response.json.return_value = { + "userId": "PB25020", + 
"accountNumber": "180670", + "orgName": "BC ONLINE TECHNICAL TEAM DEVL", + } bcol_response.status_code = 200 pay_api_response = Mock(spec=Response) pay_api_response.status_code = 201 - with patch.object(RestService, 'post', side_effect=[bcol_response, pay_api_response]) as mock_post: + with patch.object(RestService, "post", side_effect=[bcol_response, pay_api_response]) as mock_post: user = factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.bcol_linked(), user_id=user.id) assert org dictionary = org.as_dict() mock_post.assert_called() - actual_data = mock_post.call_args_list[1].kwargs.get('data') + actual_data = mock_post.call_args_list[1].kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': TestOrgInfo.bcol_linked().get('name'), - 'branchName': '', - 'paymentInfo': { - 'methodOfPayment': PaymentMethod.BCOL.value - }, - 'bcolAccountNumber': dictionary.get('bcol_account_id'), - 'bcolUserId': dictionary.get('bcol_user_id'), - 'contactInfo': TestOrgInfo.bcol_linked().get('mailingAddress') - + "accountId": dictionary.get("id"), + "accountName": TestOrgInfo.bcol_linked().get("name"), + "branchName": "", + "paymentInfo": {"methodOfPayment": PaymentMethod.BCOL.value}, + "bcolAccountNumber": dictionary.get("bcol_account_id"), + "bcolUserId": dictionary.get("bcol_user_id"), + "contactInfo": TestOrgInfo.bcol_linked().get("mailingAddress"), } assert actual_data == expected_data -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_assert_payment_types(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" user = 
factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] - assert dictionary.get('bcol_user_id', None) is None - assert dictionary.get('bcol_user_name', None) is None - assert dictionary.get('bcol_account_id', None) is None + assert dictionary["name"] == TestOrgInfo.org1["name"] + assert dictionary.get("bcol_user_id", None) is None + assert dictionary.get("bcol_user_name", None) is None + assert dictionary.get("bcol_account_id", None) is None -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_product_single_subscription(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] user = factory_user_model(user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] + assert dictionary["name"] == TestOrgInfo.org1["name"] patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) - subscriptions = ProductService.create_product_subscription(dictionary['id'], - TestOrgProductsInfo.org_products1, - skip_auth=True) - assert next(prod for prod in 
subscriptions - if prod.get('code') == TestOrgProductsInfo.org_products1['subscriptions'][0]['productCode']) - - -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_product_single_subscription_duplicate_error(session, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument + subscriptions = ProductService.create_product_subscription( + dictionary["id"], TestOrgProductsInfo.org_products1, skip_auth=True + ) + assert next( + prod + for prod in subscriptions + if prod.get("code") == TestOrgProductsInfo.org_products1["subscriptions"][0]["productCode"] + ) + + +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_product_single_subscription_duplicate_error( + session, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that an Org can be created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] user = factory_user_model(user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] + assert dictionary["name"] == TestOrgInfo.org1["name"] patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) - subscriptions = ProductService.create_product_subscription(dictionary['id'], - TestOrgProductsInfo.org_products_business, - skip_auth=True) - assert next(prod for prod in subscriptions - if prod.get('code') == TestOrgProductsInfo.org_products_business['subscriptions'][0]['productCode']) + subscriptions = ProductService.create_product_subscription( + 
dictionary["id"], TestOrgProductsInfo.org_products_business, skip_auth=True + ) + assert next( + prod + for prod in subscriptions + if prod.get("code") == TestOrgProductsInfo.org_products_business["subscriptions"][0]["productCode"] + ) with pytest.raises(BusinessException) as exception: - ProductService.create_product_subscription(dictionary['id'], - TestOrgProductsInfo.org_products_business, - skip_auth=True) + ProductService.create_product_subscription( + dictionary["id"], TestOrgProductsInfo.org_products_business, skip_auth=True + ) assert exception.value.code == Error.PRODUCT_SUBSCRIPTION_EXISTS.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_product_multiple_subscription(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model(user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] + assert dictionary["name"] == TestOrgInfo.org1["name"] patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) - subscriptions = ProductService.create_product_subscription(dictionary['id'], - TestOrgProductsInfo.org_products2, - skip_auth=True) - assert 
next(prod for prod in subscriptions - if prod.get('code') == TestOrgProductsInfo.org_products2['subscriptions'][0]['productCode']) - assert next(prod for prod in subscriptions - if prod.get('code') == TestOrgProductsInfo.org_products2['subscriptions'][1]['productCode']) - - -@pytest.mark.parametrize( - 'org_type', [(OrgType.STAFF.value), (OrgType.SBC_STAFF.value)] -) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) + subscriptions = ProductService.create_product_subscription( + dictionary["id"], TestOrgProductsInfo.org_products2, skip_auth=True + ) + assert next( + prod + for prod in subscriptions + if prod.get("code") == TestOrgProductsInfo.org_products2["subscriptions"][0]["productCode"] + ) + assert next( + prod + for prod in subscriptions + if prod.get("code") == TestOrgProductsInfo.org_products2["subscriptions"][1]["productCode"] + ) + + +@pytest.mark.parametrize("org_type", [(OrgType.STAFF.value), (OrgType.SBC_STAFF.value)]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_product_subscription_staff(session, keycloak_mock, org_type, monkeypatch): """Assert that updating product subscription works for staff.""" user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) # Clearing the event listeners here, because we can't change the type_code. 
convert_org_to_staff_org(org._model.id, org_type) - subscriptions = ProductService.create_product_subscription(org._model.id, - TestOrgProductsInfo.org_products2, - skip_auth=True) + subscriptions = ProductService.create_product_subscription( + org._model.id, TestOrgProductsInfo.org_products2, skip_auth=True + ) - assert next(prod for prod in subscriptions - if prod.get('code') == TestOrgProductsInfo.org_products2['subscriptions'][0]['productCode']) - assert next(prod for prod in subscriptions - if prod.get('code') == TestOrgProductsInfo.org_products2['subscriptions'][1]['productCode']) + assert next( + prod + for prod in subscriptions + if prod.get("code") == TestOrgProductsInfo.org_products2["subscriptions"][0]["productCode"] + ) + assert next( + prod + for prod in subscriptions + if prod.get("code") == TestOrgProductsInfo.org_products2["subscriptions"][1]["productCode"] + ) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_product_subscription_nds(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert a product subscription for NDS can be created with a system admin token.""" # setup an org user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org org_dict = org.as_dict() # add the NDS product subscription via system admin token patch_token_info(TestJwtClaims.system_admin_role, monkeypatch) - subscriptions = ProductService.create_product_subscription(org_dict['id'], - TestOrgProductsInfo.org_products_nds, - skip_auth=False) - assert next(prod for prod in subscriptions - if prod.get('code') == 
TestOrgProductsInfo.org_products_nds['subscriptions'][0]['productCode']) - - -@pytest.mark.parametrize('test_name,token_info', [ - ('test_public_user', TestJwtClaims.public_user_role), - ('test_public_bceid_user', TestJwtClaims.public_bceid_user), - ('test_gov_user', TestJwtClaims.gov_account_holder_user), - ('test_bcol_admin_user', TestJwtClaims.bcol_admin_role), - ('test_staff_manage_business', TestJwtClaims.staff_manage_business), - ('test_staff_view_accounts', TestJwtClaims.staff_view_accounts_role), - ('test_staff_manage_accounts', TestJwtClaims.staff_manage_accounts_role), - ('test_staff_admin', TestJwtClaims.staff_admin_role) -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_product_subscription_nds_unauthorized(session, # pylint:disable=unused-argument - keycloak_mock, - monkeypatch, - test_name, - token_info): + subscriptions = ProductService.create_product_subscription( + org_dict["id"], TestOrgProductsInfo.org_products_nds, skip_auth=False + ) + assert next( + prod + for prod in subscriptions + if prod.get("code") == TestOrgProductsInfo.org_products_nds["subscriptions"][0]["productCode"] + ) + + +@pytest.mark.parametrize( + "test_name,token_info", + [ + ("test_public_user", TestJwtClaims.public_user_role), + ("test_public_bceid_user", TestJwtClaims.public_bceid_user), + ("test_gov_user", TestJwtClaims.gov_account_holder_user), + ("test_bcol_admin_user", TestJwtClaims.bcol_admin_role), + ("test_staff_manage_business", TestJwtClaims.staff_manage_business), + ("test_staff_view_accounts", TestJwtClaims.staff_view_accounts_role), + ("test_staff_manage_accounts", TestJwtClaims.staff_manage_accounts_role), + ("test_staff_admin", TestJwtClaims.staff_admin_role), + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_product_subscription_nds_unauthorized( + session, keycloak_mock, monkeypatch, test_name, 
token_info # pylint:disable=unused-argument +): """Assert a product subscription for NDS cannot be created by a non system token.""" # setup an org user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org org_dict = org.as_dict() # attempt to add the NDS product subscription to it patch_token_info(token_info, monkeypatch) with pytest.raises(HTTPException) as exception: - ProductService.create_product_subscription(org_dict['id'], - TestOrgProductsInfo.org_products_nds, - skip_auth=False) + ProductService.create_product_subscription( + org_dict["id"], TestOrgProductsInfo.org_products_nds, skip_auth=False + ) assert exception.value.code == HTTPStatus.FORBIDDEN @@ -512,66 +561,65 @@ def test_create_org_with_duplicate_name(session, monkeypatch): # pylint:disable factory_org_model(org_info=TestOrgInfo.org2, org_type_info=TestOrgTypeInfo.implicit) with pytest.raises(BusinessException) as exception: - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org.create_org(TestOrgInfo.org2, user_id=user.id) assert exception.value.code == Error.DATA_CONFLICT.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_with_similar_name(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org with similar name can be created.""" user = factory_user_model() org = factory_org_service() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) - new_org = 
org.create_org({'name': 'My Test'}, user_id=user.id) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) + new_org = org.create_org({"name": "My Test"}, user_id=user.id) dictionary = new_org.as_dict() - assert dictionary['name'] == 'My Test' + assert dictionary["name"] == "My Test" def test_create_org_with_duplicate_name_bcol(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org linking to bcol retrun exception if there's duplicated names.""" org = factory_org_service() - factory_org_model({'name': 'BC ONLINE TECHNICAL TEAM DEVL'}, org_type_info=TestOrgTypeInfo.implicit) + factory_org_model({"name": "BC ONLINE TECHNICAL TEAM DEVL"}, org_type_info=TestOrgTypeInfo.implicit) bcol_response = Mock(spec=Response) - bcol_response.json.return_value = {'userId': 'PB25020', 'accountNumber': '180670', - 'orgName': 'BC ONLINE TECHNICAL TEAM DEVL'} + bcol_response.json.return_value = { + "userId": "PB25020", + "accountNumber": "180670", + "orgName": "BC ONLINE TECHNICAL TEAM DEVL", + } bcol_response.status_code = 200 pay_api_response = Mock(spec=Response) pay_api_response.status_code = 201 - with patch.object(RestService, 'post', side_effect=[bcol_response, pay_api_response]): + with patch.object(RestService, "post", side_effect=[bcol_response, pay_api_response]): user = factory_user_model() with pytest.raises(BusinessException) as exception: - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org.create_org(TestOrgInfo.bcol_linked(), user_id=user.id) assert exception.value.code == Error.DATA_CONFLICT.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_update_org_name(session, monkeypatch): # 
pylint:disable=unused-argument """Assert that an Org name cannot be updated.""" org = factory_org_service() - with patch.object(RestService, 'put') as mock_put: - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - org = org.update_org({'name': 'My Test'}) - mock_alp.assert_called_with(Activity(action=ActivityAction.ACCOUNT_NAME_CHANGE.value, - org_id=ANY, value='My Test', id=ANY, - name=ANY)) + with patch.object(RestService, "put") as mock_put: + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + org = org.update_org({"name": "My Test"}) + mock_alp.assert_called_with( + Activity(action=ActivityAction.ACCOUNT_NAME_CHANGE.value, org_id=ANY, value="My Test", id=ANY, name=ANY) + ) assert org dictionary = org.as_dict() mock_put.assert_called() - actual_data = mock_put.call_args.kwargs.get('data') - expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': dictionary.get('name'), - 'branchName': '' - } - assert expected_data == actual_data, 'name update work.' + actual_data = mock_put.call_args.kwargs.get("data") + expected_data = {"accountId": dictionary.get("id"), "accountName": dictionary.get("name"), "branchName": ""} + assert expected_data == actual_data, "name update work." 
def test_update_org(session, monkeypatch): # pylint:disable=unused-argument @@ -580,7 +628,7 @@ def test_update_org(session, monkeypatch): # pylint:disable=unused-argument org.update_org(TestOrgInfo.update_org_with_business_type) dictionary = org.as_dict() - assert dictionary['business_type'] == TestOrgInfo.update_org_with_business_type['businessType'] + assert dictionary["business_type"] == TestOrgInfo.update_org_with_business_type["businessType"] def test_suspend_org(session, monkeypatch): # pylint:disable=unused-argument @@ -588,29 +636,28 @@ def test_suspend_org(session, monkeypatch): # pylint:disable=unused-argument org = factory_org_service() user = factory_user_model_with_contact() token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) patch_token_info(token_info, monkeypatch) - updated_org = org.change_org_status(OrgStatus.SUSPENDED.value, - SuspensionReasonCode.OWNER_CHANGE.name) - assert updated_org.as_dict()['status_code'] == OrgStatus.SUSPENDED.value - assert updated_org.as_dict()['suspension_reason_code'] == SuspensionReasonCode.OWNER_CHANGE.name + updated_org = org.change_org_status(OrgStatus.SUSPENDED.value, SuspensionReasonCode.OWNER_CHANGE.name) + assert updated_org.as_dict()["status_code"] == OrgStatus.SUSPENDED.value + assert updated_org.as_dict()["suspension_reason_code"] == SuspensionReasonCode.OWNER_CHANGE.name - updated_org = org.change_org_status(OrgStatus.ACTIVE.value, - SuspensionReasonCode.DISPUTE.name) - assert updated_org.as_dict()['status_code'] == OrgStatus.ACTIVE.value + updated_org = org.change_org_status(OrgStatus.ACTIVE.value, SuspensionReasonCode.DISPUTE.name) + assert updated_org.as_dict()["status_code"] == OrgStatus.ACTIVE.value def test_find_org_by_id(session, auth_mock): # pylint:disable=unused-argument """Assert that an org can be retrieved by its id.""" org = 
factory_org_service() dictionary = org.as_dict() - org_id = dictionary['id'] + org_id = dictionary["id"] found_org = OrgService.find_by_org_id(org_id) assert found_org dictionary = found_org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] + assert dictionary["name"] == TestOrgInfo.org1["name"] def test_find_org_by_id_no_org(session, auth_mock): # pylint:disable=unused-argument @@ -623,15 +670,15 @@ def test_find_org_by_name(session, auth_mock): # pylint:disable=unused-argument """Assert that an org can be retrieved by its name.""" org_service = factory_org_service() dictionary = org_service.as_dict() - org_name = dictionary['name'] + org_name = dictionary["name"] found_org = OrgService.find_by_org_name(org_name) assert found_org - assert found_org.get('orgs')[0].get('name') == org_name + assert found_org.get("orgs")[0].get("name") == org_name # does not return rejected orgs - org = OrgModel.find_by_org_id(dictionary['id']) + org = OrgModel.find_by_org_id(dictionary["id"]) org.status_code = OrgStatus.REJECTED.value org.save() @@ -643,38 +690,38 @@ def test_find_org_by_name_branch_name(session, auth_mock): # pylint:disable=unu """Assert that an org can be retrieved by its name annd branch nanme.""" org = factory_org_service(org_info=TestOrgInfo.org2) dictionary = org.as_dict() - org_name = dictionary['name'] - branch_name = dictionary['branch_name'] + org_name = dictionary["name"] + branch_name = dictionary["branch_name"] found_org = OrgService.find_by_org_name(org_name) assert found_org - assert found_org.get('orgs')[0].get('name') == org_name + assert found_org.get("orgs")[0].get("name") == org_name found_org = OrgService.find_by_org_name(org_name, branch_name=branch_name) assert found_org - assert found_org.get('orgs')[0].get('name') == org_name - assert found_org.get('orgs')[0].get('branch_name') == branch_name + assert found_org.get("orgs")[0].get("name") == org_name + assert found_org.get("orgs")[0].get("branch_name") == branch_name def 
test_add_contact(session): # pylint:disable=unused-argument """Assert that a contact can be added to an org.""" org = factory_org_service() org_dictionary = org.as_dict() - contact = OrgService.add_contact(org_dictionary['id'], TestContactInfo.contact1) + contact = OrgService.add_contact(org_dictionary["id"], TestContactInfo.contact1) dictionary = contact.as_dict() - assert dictionary['email'] == TestContactInfo.contact1['email'] + assert dictionary["email"] == TestContactInfo.contact1["email"] def test_add_contact_duplicate(session): # pylint:disable=unused-argument """Assert that a contact cannot be added to an Org if that Org already has a contact.""" org = factory_org_service() org_dictionary = org.as_dict() - OrgService.add_contact(org_dictionary['id'], TestContactInfo.contact1) + OrgService.add_contact(org_dictionary["id"], TestContactInfo.contact1) with pytest.raises(BusinessException) as exception: - OrgService.add_contact(org_dictionary['id'], TestContactInfo.contact2) + OrgService.add_contact(org_dictionary["id"], TestContactInfo.contact2) assert exception.value.code == Error.DATA_ALREADY_EXISTS.name @@ -682,15 +729,15 @@ def test_update_contact(session): # pylint:disable=unused-argument """Assert that a contact for an existing Org can be updated.""" org = factory_org_service() org_dictionary = org.as_dict() - contact = OrgService.add_contact(org_dictionary['id'], TestContactInfo.contact1) + contact = OrgService.add_contact(org_dictionary["id"], TestContactInfo.contact1) dictionary = contact.as_dict() - assert dictionary['email'] == TestContactInfo.contact1['email'] + assert dictionary["email"] == TestContactInfo.contact1["email"] - updated_contact = OrgService.update_contact(org_dictionary['id'], TestContactInfo.contact2) + updated_contact = OrgService.update_contact(org_dictionary["id"], TestContactInfo.contact2) dictionary = updated_contact.as_dict() - assert dictionary['email'] == TestContactInfo.contact2['email'] + assert dictionary["email"] == 
TestContactInfo.contact2["email"] def test_update_contact_no_contact(session): # pylint:disable=unused-argument @@ -699,135 +746,132 @@ def test_update_contact_no_contact(session): # pylint:disable=unused-argument org_dictionary = org.as_dict() with pytest.raises(BusinessException) as exception: - OrgService.update_contact(org_dictionary['id'], TestContactInfo.contact2) + OrgService.update_contact(org_dictionary["id"], TestContactInfo.contact2) assert exception.value.code == Error.DATA_NOT_FOUND.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_get_members(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that members for an org can be retrieved.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user.id) org_dictionary = org.as_dict() patch_token_info(TestJwtClaims.public_user_role, monkeypatch) - response = MembershipService.get_members_for_org(org_dictionary['id'], - status='ACTIVE') + response = MembershipService.get_members_for_org(org_dictionary["id"], status="ACTIVE") assert response assert len(response) == 1 - assert response[0].membership_type_code == 'ADMIN' + assert response[0].membership_type_code == "ADMIN" 
-@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_get_invitations(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that invitations for an org can be retrieved.""" - with patch.object(InvitationService, 'send_invitation', return_value=None): + with patch.object(InvitationService, "send_invitation", return_value=None): user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user.id) org_dictionary = org.as_dict() - invitation_info = factory_invitation(org_dictionary['id']) + invitation_info = factory_invitation(org_dictionary["id"]) - invitation = InvitationService.create_invitation(invitation_info, UserService(user), '') + invitation = InvitationService.create_invitation(invitation_info, UserService(user), "") patch_token_info(TestJwtClaims.public_user_role, monkeypatch) - response = InvitationService.get_invitations_for_org(org_dictionary['id'], 'PENDING') + response = InvitationService.get_invitations_for_org(org_dictionary["id"], "PENDING") assert response assert len(response) == 1 - assert response[0].recipient_email == invitation.as_dict()['recipient_email'] + assert response[0].recipient_email == invitation.as_dict()["recipient_email"] 
-@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_get_owner_count_one_owner(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that count of owners is correct.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user.id) assert MembershipService.get_owner_count(MembershipService, org._model) == 1 -@pytest.mark.parametrize( - 'staff_org', [(TestOrgInfo.staff_org), (TestOrgInfo.sbc_staff_org)] -) +@pytest.mark.parametrize("staff_org", [(TestOrgInfo.staff_org), (TestOrgInfo.sbc_staff_org)]) def test_create_staff_org_failure(session, keycloak_mock, staff_org, monkeypatch): # pylint:disable=unused-argument """Assert that staff org cannot be created.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) with pytest.raises(BusinessException) as exception: OrgService.create_org(TestOrgInfo.staff_org, user.id) assert exception.value.code == Error.INVALID_INPUT.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_get_owner_count_two_owner_with_admins(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert wrong org cannot be created.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user.id) user2 = factory_user_model(user_info=TestUserInfo.user2) - factory_membership_model(user2.id, org._model.id, member_type='COORDINATOR') + factory_membership_model(user2.id, org._model.id, member_type="COORDINATOR") user3 = factory_user_model(user_info=TestUserInfo.user3) - factory_membership_model(user3.id, org._model.id, member_type='ADMIN') + factory_membership_model(user3.id, org._model.id, member_type="ADMIN") assert MembershipService.get_owner_count(MembershipService, org._model) == 2 -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_delete_org_with_members(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an org can be deleted.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = 
factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user.id) user2 = factory_user_model(user_info=TestUserInfo.user2) - factory_membership_model(user2.id, org._model.id, member_type='COORDINATOR') + factory_membership_model(user2.id, org._model.id, member_type="COORDINATOR") user3 = factory_user_model(user_info=TestUserInfo.user3) - factory_membership_model(user3.id, org._model.id, member_type='ADMIN') + factory_membership_model(user3.id, org._model.id, member_type="ADMIN") patch_token_info(TestJwtClaims.public_user_role, monkeypatch) patch_pay_account_delete(monkeypatch) - org_id = org.as_dict()['id'] + org_id = org.as_dict()["id"] OrgService.delete_org(org_id) assert len(MembershipService.get_members_for_org(org_id)) == 0 -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_org_with_affiliation(session, auth_mock, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_org_with_affiliation(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an org cannot be deleted.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] user = factory_user_model(user_info=user_with_token) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user.id) - org_id = org.as_dict()['id'] + org_id = 
org.as_dict()["id"] entity_service = factory_entity_service(entity_info=TestEntityInfo.entity_lear_mock) entity_dictionary = entity_service.as_dict() - business_identifier = entity_dictionary['business_identifier'] - AffiliationService.create_affiliation(org_id, business_identifier, None, - TestEntityInfo.entity_lear_mock['passCode']) + business_identifier = entity_dictionary["business_identifier"] + AffiliationService.create_affiliation( + org_id, business_identifier, None, TestEntityInfo.entity_lear_mock["passCode"] + ) patch_token_info(TestJwtClaims.public_user_role, monkeypatch) patch_pay_account_delete(monkeypatch) @@ -836,34 +880,35 @@ def test_delete_org_with_affiliation(session, auth_mock, keycloak_mock, assert len(AffiliationService.find_visible_affiliations_by_org_id(org_id)) == 0 -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_org_with_members_success(session, auth_mock, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_org_with_members_success( + session, auth_mock, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that an org can be deleted.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user = factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.public_user_role, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user.id) patch_pay_account_delete(monkeypatch) - OrgService.delete_org(org.as_dict()['id']) - org_inactive = OrgService.find_by_org_id(org.as_dict()['id']) - assert 
org_inactive.as_dict()['org_status'] == 'INACTIVE' + OrgService.delete_org(org.as_dict()["id"]) + org_inactive = OrgService.find_by_org_id(org.as_dict()["id"]) + assert org_inactive.as_dict()["org_status"] == "INACTIVE" def test_delete_contact_no_org(session, auth_mock): # pylint:disable=unused-argument """Assert that a contact can not be deleted if it doesn't exist.""" org = factory_org_service() org_dictionary = org.as_dict() - OrgService.add_contact(org_dictionary['id'], TestContactInfo.contact1) + OrgService.add_contact(org_dictionary["id"], TestContactInfo.contact1) - OrgService.delete_contact(org_dictionary['id']) + OrgService.delete_contact(org_dictionary["id"]) with pytest.raises(BusinessException) as exception: - OrgService.delete_contact(org_dictionary['id']) + OrgService.delete_contact(org_dictionary["id"]) assert exception.value.code == Error.DATA_NOT_FOUND.name @@ -875,7 +920,7 @@ def test_delete_contact_org_link(session, auth_mock): # pylint:disable=unused-a org = factory_org_service() org_dictionary = org.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] contact = factory_contact_model() @@ -883,13 +928,13 @@ def test_delete_contact_org_link(session, auth_mock): # pylint:disable=unused-a contact_link.contact = contact contact_link.entity = entity._model # pylint:disable=protected-access contact_link.org = org._model # pylint:disable=protected-access - contact_link.commit() + contact_link.save() OrgService.delete_contact(org_id=org_id) OrgService.find_by_org_id(org_id) response = OrgService.get_contacts(org_id) - assert len(response['contacts']) == 0 + assert len(response["contacts"]) == 0 delete_contact_link = ContactLinkModel.find_by_entity_id(entity.identifier) assert delete_contact_link @@ -898,7 +943,7 @@ def test_delete_contact_org_link(session, auth_mock): # pylint:disable=unused-a assert not exist_contact_link -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_adds_user_to_account_holders_group(session, monkeypatch): # pylint:disable=unused-argument """Assert that an Org creation adds the user to account holders group.""" # Create a user in keycloak @@ -908,19 +953,20 @@ def test_create_org_adds_user_to_account_holders_group(session, monkeypatch): # kc_user = keycloak_service.get_user_by_username(request.user_name) user = factory_user_model(TestUserInfo.get_user_with_kc_guid(kc_guid=kc_user.id)) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) OrgService.create_org(TestOrgInfo.org1, user_id=user.id) user_groups = keycloak_service.get_user_groups(user_id=kc_user.id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS in groups -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_org_removes_user_from_account_holders_group(session, auth_mock, - monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_org_removes_user_from_account_holders_group( + session, auth_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that an Org deletion removes the user from account holders group.""" # Create a user in keycloak keycloak_service = KeycloakService() @@ -929,22 +975,23 @@ def test_delete_org_removes_user_from_account_holders_group(session, auth_mock, kc_user = keycloak_service.get_user_by_username(request.user_name) user = factory_user_model(TestUserInfo.get_user_with_kc_guid(kc_guid=kc_user.id)) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + 
patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) patch_pay_account_delete(monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) patch_token_info(TestJwtClaims.gov_account_holder_user, monkeypatch) - OrgService.delete_org(org.as_dict().get('id')) + OrgService.delete_org(org.as_dict().get("id")) user_groups = keycloak_service.get_user_groups(user_id=kc_user.id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS not in groups -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_does_not_remove_user_from_account_holder_group(session, monkeypatch, - auth_mock): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_does_not_remove_user_from_account_holder_group( + session, monkeypatch, auth_mock +): # pylint:disable=unused-argument """Assert that if the user has multiple Orgs, and deleting one doesn't remove account holders group.""" # Create a user in keycloak keycloak_service = KeycloakService() @@ -953,74 +1000,75 @@ def test_delete_does_not_remove_user_from_account_holder_group(session, monkeypa kc_user = keycloak_service.get_user_by_username(request.user_name) user = factory_user_model(TestUserInfo.get_user_with_kc_guid(kc_guid=kc_user.id)) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) patch_pay_account_delete(monkeypatch) org1 = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) OrgService.create_org(TestOrgInfo.org2, user_id=user.id) - OrgService.delete_org(org1.as_dict().get('id')) + OrgService.delete_org(org1.as_dict().get("id")) user_groups = keycloak_service.get_user_groups(user_id=kc_user.id) groups = 
[] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) assert GROUP_ACCOUNT_HOLDERS in groups -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_with_linked_bcol_account(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" user = factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.bcol_linked(), user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.bcol_linked()['name'] - assert dictionary['org_type'] == OrgType.PREMIUM.value - assert dictionary['bcol_user_id'] is not None - assert dictionary['bcol_account_id'] is not None - assert dictionary['bcol_account_name'] is not None + assert dictionary["name"] == TestOrgInfo.bcol_linked()["name"] + assert dictionary["type_code"] == OrgType.PREMIUM.value + assert dictionary["bcol_user_id"] is not None + assert dictionary["bcol_account_id"] is not None + assert dictionary["bcol_account_name"] is not None def test_bcol_account_exists(session): # pylint:disable=unused-argument """Assert that the BCOL account is exists.""" factory_org_service(bcol_info=TestBCOLInfo.bcol1) - check_result = OrgService.bcol_account_link_check(TestBCOLInfo.bcol1['bcol_account_id']) + check_result = OrgService.bcol_account_link_check(TestBCOLInfo.bcol1["bcol_account_id"]) assert check_result -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_org_with_different_name_than_bcol_account(session, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_org_with_different_name_than_bcol_account( + session, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that an Org can be created.""" user = factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.bcol_linked_different_name(), user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.bcol_linked_different_name()['name'] - assert dictionary['org_type'] == OrgType.PREMIUM.value - assert dictionary['bcol_user_id'] is not None - assert dictionary['bcol_account_id'] is not None - assert dictionary['bcol_account_name'] is not None + assert dictionary["name"] == TestOrgInfo.bcol_linked_different_name()["name"] + assert dictionary["type_code"] == OrgType.PREMIUM.value + assert dictionary["bcol_user_id"] is not None + assert dictionary["bcol_account_id"] is not None + assert dictionary["bcol_account_name"] is not None def test_bcol_account_not_exists(session): # pylint:disable=unused-argument """Assert that the BCOL account is not exists.""" factory_org_service(bcol_info=TestBCOLInfo.bcol1) - check_result = OrgService.bcol_account_link_check(TestBCOLInfo.bcol2['bcol_account_id']) + check_result = OrgService.bcol_account_link_check(TestBCOLInfo.bcol2["bcol_account_id"]) assert not check_result -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_with_a_linked_bcol_details(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that org creation with an existing linked BCOL account fails.""" user = 
factory_user_model() - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.bcol_linked(), user_id=user.id) assert org # Create again @@ -1030,39 +1078,41 @@ def test_create_org_with_a_linked_bcol_details(session, keycloak_mock, monkeypat assert exception.value.code == Error.BCOL_ACCOUNT_ALREADY_LINKED.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_by_bceid_user(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" user = factory_user_model_with_contact() token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) patch_token_info(token_info, monkeypatch) - with patch.object(OrgService, 'send_staff_review_account_reminder', return_value=None) as mock_notify: + with patch.object(OrgService, "send_staff_review_account_reminder", return_value=None) as mock_notify: org = OrgService.create_org(TestOrgInfo.org1, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] - assert dictionary['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value - assert dictionary['access_type'] == AccessType.EXTRA_PROVINCIAL.value + assert dictionary["name"] == TestOrgInfo.org1["name"] + assert dictionary["org_status"] == OrgStatus.PENDING_STAFF_REVIEW.value + assert dictionary["access_type"] == AccessType.EXTRA_PROVINCIAL.value mock_notify.assert_called() -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_by_in_province_bceid_user(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" user = factory_user_model_with_contact() token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', lambda: token_info) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", lambda: token_info) - with patch.object(OrgService, 'send_staff_review_account_reminder', return_value=None) as mock_notify: + with patch.object(OrgService, "send_staff_review_account_reminder", return_value=None) as mock_notify: org = OrgService.create_org(TestOrgInfo.org_regular_bceid, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org1['name'] - assert dictionary['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value - assert dictionary['access_type'] == AccessType.REGULAR_BCEID.value + assert dictionary["name"] == TestOrgInfo.org1["name"] + assert dictionary["org_status"] == OrgStatus.PENDING_STAFF_REVIEW.value + assert dictionary["access_type"] == AccessType.REGULAR_BCEID.value mock_notify.assert_called() @@ -1070,14 +1120,15 @@ def test_create_org_invalid_access_type_user(session, keycloak_mock, monkeypatch """Assert that an Org cannot be created by providing wrong access type.""" user = factory_user_model_with_contact() token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', lambda: token_info) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) + 
monkeypatch.setattr("auth_api.utils.user_context._get_token_info", lambda: token_info) with pytest.raises(BusinessException) as exception: OrgService.create_org(TestOrgInfo.org_regular, user_id=user.id) assert exception.value.code == Error.USER_CANT_CREATE_REGULAR_ORG.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_by_verified_bceid_user(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" # Steps @@ -1087,29 +1138,30 @@ def test_create_org_by_verified_bceid_user(session, keycloak_mock, monkeypatch): # 4. Same user create new org, which should be ACTIVE. user = factory_user_model_with_contact(user_info=TestUserInfo.user_bceid_tester) token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) patch_token_info(token_info, monkeypatch) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() AffidavitService.create_affidavit(affidavit_info=affidavit_info) org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value + assert org_dict["org_status"] == OrgStatus.PENDING_STAFF_REVIEW.value - task_model = TaskModel.find_by_task_for_account(org_dict['id'], status=TaskStatus.OPEN.value) - assert task_model.relationship_id == org_dict['id'] + task_model = TaskModel.find_by_task_for_account(org_dict["id"], status=TaskStatus.OPEN.value) + assert task_model.relationship_id == org_dict["id"] assert task_model.action == TaskAction.AFFIDAVIT_REVIEW.value task_info = { - 'status': TaskStatus.OPEN.value, - 'relationshipStatus': 
TaskRelationshipStatus.ACTIVE.value, + "status": TaskStatus.OPEN.value, + "relationshipStatus": TaskRelationshipStatus.ACTIVE.value, } TaskService.update_task(TaskService(task_model), task_info) - org_result: OrgModel = OrgModel.find_by_org_id(org_dict['id']) + org_result: OrgModel = OrgModel.find_by_org_id(org_dict["id"]) assert org_result.status_code == OrgStatus.ACTIVE.value -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_org_by_rejected_bceid_user(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an Org can be created.""" # Steps @@ -1119,34 +1171,35 @@ def test_create_org_by_rejected_bceid_user(session, keycloak_mock, monkeypatch): # 4. Same user create new org, which should be PENDING_STAFF_REVIEW. user = factory_user_model_with_contact(user_info=TestUserInfo.user_bceid_tester) token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) patch_token_info(token_info, monkeypatch) affidavit_info = TestAffidavit.get_test_affidavit_with_contact() AffidavitService.create_affidavit(affidavit_info=affidavit_info) - with patch.object(OrgService, 'send_staff_review_account_reminder', return_value=None) as mock_notify: + with patch.object(OrgService, "send_staff_review_account_reminder", return_value=None) as mock_notify: org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value + assert org_dict["org_status"] == OrgStatus.PENDING_STAFF_REVIEW.value org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value + assert org_dict["org_status"] == 
OrgStatus.PENDING_STAFF_REVIEW.value - task_model = TaskModel.find_by_task_for_account(org_dict['id'], status=TaskStatus.OPEN.value) - assert task_model.relationship_id == org_dict['id'] + task_model = TaskModel.find_by_task_for_account(org_dict["id"], status=TaskStatus.OPEN.value) + assert task_model.relationship_id == org_dict["id"] assert task_model.action == TaskAction.AFFIDAVIT_REVIEW.value task_info = { - 'status': TaskStatus.OPEN.value, - 'relationshipStatus': TaskRelationshipStatus.REJECTED.value, + "status": TaskStatus.OPEN.value, + "relationshipStatus": TaskRelationshipStatus.REJECTED.value, } TaskService.update_task(TaskService(task_model), task_info) - org_result: OrgModel = OrgModel.find_by_org_id(org_dict['id']) + org_result: OrgModel = OrgModel.find_by_org_id(org_dict["id"]) assert org_result.status_code == OrgStatus.REJECTED.value - org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(name='Test 123'), user_id=user.id) + org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(name="Test 123"), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value + assert org_dict["org_status"] == OrgStatus.PENDING_STAFF_REVIEW.value mock_notify.assert_called() @@ -1158,7 +1211,7 @@ def test_change_org_access_type(session, monkeypatch): # pylint:disable=unused- patch_token_info(token_info, monkeypatch) updated_org = org.change_org_access_type(AccessType.GOVN.value) - assert updated_org.as_dict()['access_type'] == AccessType.GOVN.value + assert updated_org.as_dict()["access_type"] == AccessType.GOVN.value def test_patch_org_status(session, monkeypatch, auth_mock): # pylint:disable=unused-argument @@ -1166,37 +1219,47 @@ def test_patch_org_status(session, monkeypatch, auth_mock): # pylint:disable=un org = factory_org_service() user = factory_user_model_with_contact() token_info = TestJwtClaims.get_test_user( - sub=user.keycloak_guid, source=LoginSource.BCEID.value, 
idp_userid=user.idp_userid) + sub=user.keycloak_guid, source=LoginSource.BCEID.value, idp_userid=user.idp_userid + ) patch_token_info(token_info, monkeypatch) # Validate and update org status patch_info = { - 'action': PatchActions.UPDATE_STATUS.value, - 'statusCode': OrgStatus.SUSPENDED.value, + "action": PatchActions.UPDATE_STATUS.value, + "statusCode": OrgStatus.SUSPENDED.value, } with pytest.raises(BusinessException) as exception: org.patch_org(PatchActions.UPDATE_STATUS.value, patch_info) assert exception.value.code == Error.INVALID_INPUT.name - patch_info['suspensionReasonCode'] = SuspensionReasonCode.OWNER_CHANGE.name - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + patch_info["suspensionReasonCode"] = SuspensionReasonCode.OWNER_CHANGE.name + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: updated_org = org.patch_org(PatchActions.UPDATE_STATUS.value, patch_info) - mock_alp.assert_called_with(Activity(action=ActivityAction.ACCOUNT_SUSPENSION.value, - org_id=ANY, name=ANY, id=ANY, - value=SuspensionReasonCode.OWNER_CHANGE.value)) - assert updated_org['status_code'] == OrgStatus.SUSPENDED.value + mock_alp.assert_called_with( + Activity( + action=ActivityAction.ACCOUNT_SUSPENSION.value, + org_id=ANY, + name=ANY, + id=ANY, + value=SuspensionReasonCode.OWNER_CHANGE.value, + ) + ) + assert updated_org["status_code"] == OrgStatus.SUSPENDED.value patch_info = { - 'action': PatchActions.UPDATE_STATUS.value, - 'statusCode': OrgStatus.ACTIVE.value, + "action": PatchActions.UPDATE_STATUS.value, + "statusCode": OrgStatus.ACTIVE.value, } updated_org = org.patch_org(PatchActions.UPDATE_STATUS.value, patch_info) - assert updated_org['status_code'] == OrgStatus.ACTIVE.value + assert updated_org["status_code"] == OrgStatus.ACTIVE.value - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: - OrgService.update_login_option(org._model.id, 'BCROS') - 
mock_alp.assert_called_with(Activity(action=ActivityAction.AUTHENTICATION_METHOD_CHANGE.value, - org_id=ANY, name=ANY, id=ANY, value='BCROS')) + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: + OrgService.update_login_option(org._model.id, "BCROS") + mock_alp.assert_called_with( + Activity( + action=ActivityAction.AUTHENTICATION_METHOD_CHANGE.value, org_id=ANY, name=ANY, id=ANY, value="BCROS" + ) + ) def test_patch_org_access_type(session, monkeypatch): # pylint:disable=unused-argument @@ -1207,25 +1270,23 @@ def test_patch_org_access_type(session, monkeypatch): # pylint:disable=unused-a patch_token_info(token_info, monkeypatch) # Validate and update org status - patch_info = { - 'action': PatchActions.UPDATE_ACCESS_TYPE.value - } + patch_info = {"action": PatchActions.UPDATE_ACCESS_TYPE.value} with pytest.raises(BusinessException) as exception: org.patch_org(PatchActions.UPDATE_ACCESS_TYPE.value, patch_info) assert exception.value.code == Error.INVALID_INPUT.name - patch_info['accessType'] = AccessType.GOVM.value + patch_info["accessType"] = AccessType.GOVM.value with pytest.raises(BusinessException) as exception: org.patch_org(PatchActions.UPDATE_ACCESS_TYPE.value, patch_info) assert exception.value.code == Error.INVALID_INPUT.name - patch_info['accessType'] = AccessType.GOVN.value + patch_info["accessType"] = AccessType.GOVN.value updated_org = org.patch_org(PatchActions.UPDATE_ACCESS_TYPE.value, patch_info) - assert updated_org['access_type'] == AccessType.GOVN.value + assert updated_org["access_type"] == AccessType.GOVN.value @pytest.mark.asyncio -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_create_product_single_subscription_qs(session, monkeypatch): """Assert that qualified supplier sub product subscriptions can be created.""" # Create a 
user in keycloak @@ -1234,73 +1295,71 @@ def test_create_product_single_subscription_qs(session, monkeypatch): keycloak_service.add_user(request, return_if_exists=True) kc_user = keycloak_service.get_user_by_username(request.user_name) user = factory_user_model(TestUserInfo.get_bceid_user_with_kc_guid(kc_guid=kc_user.id)) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = TestOrgProductsInfo.mhr_qs_lawyer_and_notaries['subscriptions'][0]['productCode'] - external_source_id = TestOrgProductsInfo.mhr_qs_lawyer_and_notaries['subscriptions'][0]['externalSourceId'] + product_code = TestOrgProductsInfo.mhr_qs_lawyer_and_notaries["subscriptions"][0]["productCode"] + external_source_id = TestOrgProductsInfo.mhr_qs_lawyer_and_notaries["subscriptions"][0]["externalSourceId"] - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], subscription_data=TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, skip_auth=True + ) - org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary['id']]) - org_prod_sub = next(prod for prod in org_subscriptions - if prod.product_code == product_code) + org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary["id"]]) + org_prod_sub = next(prod for prod in org_subscriptions if prod.product_code == product_code) token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, monkeypatch) - all_subs = 
ProductService.get_all_product_subscription(org_id=dictionary['id']) + all_subs = ProductService.get_all_product_subscription(org_id=dictionary["id"]) - prod_sub = next(sub for sub in all_subs if sub.get('code') == product_code) - parent_prod_sub = next(sub for sub in all_subs if sub.get('code') == 'MHR') + prod_sub = next(sub for sub in all_subs if sub.get("code") == product_code) + parent_prod_sub = next(sub for sub in all_subs if sub.get("code") == "MHR") # MHR Qualified Supplier product and parent product should be in pending staff review assert prod_sub - assert prod_sub['code'] == product_code - assert prod_sub['needReview'] - assert prod_sub['parentCode'] == 'MHR' - assert prod_sub['subscriptionStatus'] == ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value - assert prod_sub['keycloak_group'] == 'mhr_qualified_user' + assert prod_sub["code"] == product_code + assert prod_sub["needReview"] + assert prod_sub["parentCode"] == "MHR" + assert prod_sub["subscriptionStatus"] == ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value + assert prod_sub["keycloak_group"] == "mhr_qualified_user" # Parent Product MHR should also be pending staff review assert parent_prod_sub - assert parent_prod_sub['code'] == 'MHR' - assert not parent_prod_sub['needReview'] - assert not parent_prod_sub.get('parentCode') - assert parent_prod_sub['subscriptionStatus'] == ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value - assert parent_prod_sub['keycloak_group'] == 'mhr_search_user' + assert parent_prod_sub["code"] == "MHR" + assert not parent_prod_sub["needReview"] + assert not parent_prod_sub.get("parentCode") + assert parent_prod_sub["subscriptionStatus"] == ProductSubscriptionStatus.PENDING_STAFF_REVIEW.value + assert parent_prod_sub["keycloak_group"] == "mhr_search_user" # Staff review task should have been created task = TaskModel.find_by_task_relationship_id( - task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id) + 
task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id + ) assert task - assert task.account_id == dictionary['id'] + assert task.account_id == dictionary["id"] assert task.external_source_id == external_source_id assert task.relationship_type == TaskRelationshipType.PRODUCT.value assert task.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value assert task.relationship_id == org_prod_sub.id assert task.action == TaskAction.QUALIFIED_SUPPLIER_REVIEW.value - task_info = { - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value - } + task_info = {"relationshipStatus": TaskRelationshipStatus.ACTIVE.value} # Approve task and update org keycloak groups TaskService.update_task(TaskService(task), task_info=task_info) - ProductService.update_org_product_keycloak_groups(dictionary['id']) + ProductService.update_org_product_keycloak_groups(dictionary["id"]) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) user_groups = keycloak_service.get_user_groups(user_id=kc_user.id) groups = [] for group in user_groups: - groups.append(group.get('name')) + groups.append(group.get("name")) # Confirm account has the expected groups - assert 'mhr_search_user' in groups - assert 'mhr_qualified_user' in groups + assert "mhr_search_user" in groups + assert "mhr_qualified_user" in groups diff --git a/auth-api/tests/unit/services/test_permissions.py b/auth-api/tests/unit/services/test_permissions.py index fd8ff563c7..9426e2bc2e 100644 --- a/auth-api/tests/unit/services/test_permissions.py +++ b/auth-api/tests/unit/services/test_permissions.py @@ -26,14 +26,14 @@ def test_build_all_permission_cache(session): # pylint: disable=unused-argument """Assert that building cache works.""" PermissionService.build_all_permission_cache() assert cache is not None - assert cache.get((None, 'ADMIN')) is not None + 
assert cache.get((None, "ADMIN")) is not None def test_get_permissions_for_membership_cache_miss(session): # pylint: disable=unused-argument """Assert the cache miss and hit.""" PermissionService.build_all_permission_cache() - with patch('auth_api.models.Permissions.get_permissions_by_membership') as method: - PermissionService.get_permissions_for_membership('ACTIVE', 'ADMIN') - assert not method.called, 'Should Not miss the Cache' - PermissionService.get_permissions_for_membership('invalid', 'invalid') - assert method.called, 'Should miss the Cache' + with patch("auth_api.models.Permissions.get_permissions_by_membership") as method: + PermissionService.get_permissions_for_membership("ACTIVE", "ADMIN") + assert not method.called, "Should Not miss the Cache" + PermissionService.get_permissions_for_membership("invalid", "invalid") + assert method.called, "Should miss the Cache" diff --git a/auth-api/tests/unit/services/test_product.py b/auth-api/tests/unit/services/test_product.py index 2c8f0d5637..190a787fe8 100644 --- a/auth-api/tests/unit/services/test_product.py +++ b/auth-api/tests/unit/services/test_product.py @@ -15,10 +15,9 @@ Test suite to ensure that the Product service routines are working as expected. 
""" +from unittest import mock from unittest.mock import ANY, patch -from tests.conftest import mock_token -import mock import pytest from auth_api.models.contact_link import ContactLink as ContactLinkModel @@ -26,17 +25,21 @@ from auth_api.models.membership import Membership from auth_api.models.product_code import ProductCode as ProductCodeModel from auth_api.models.product_subscription import ProductSubscription -from auth_api.services.keycloak import KeycloakService -from auth_api.services import Product as ProductService from auth_api.services import Org +from auth_api.services import Product as ProductService from auth_api.services import User as UserService from auth_api.services.activity_log_publisher import ActivityLogPublisher - +from auth_api.services.keycloak import KeycloakService from auth_api.utils.enums import ActivityAction, KeycloakGroupActions, ProductCode, ProductSubscriptionStatus, Status from auth_api.utils.notifications import ProductSubscriptionInfo +from tests.conftest import mock_token from tests.utilities.factory_scenarios import KeycloakScenario, TestOrgInfo, TestUserInfo from tests.utilities.factory_utils import ( - factory_membership_model, factory_product_model, factory_user_model, patch_token_info) + factory_membership_model, + factory_product_model, + factory_user_model, + patch_token_info, +) def test_get_products(session): # pylint:disable=unused-argument @@ -45,51 +48,62 @@ def test_get_products(session): # pylint:disable=unused-argument assert response # assert the structure is correct by checking for name, description properties in each element for item in response: - assert item['code'] and item['description'] + assert item["code"] and item["description"] -@pytest.mark.parametrize('test_name, has_contact', [ - ('has_contact', True), - ('no_contact', False), -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@pytest.mark.parametrize( + "test_name, has_contact", + [ + 
("has_contact", True), + ("no_contact", False), + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_update_product_subscription(session, keycloak_mock, monkeypatch, test_name, has_contact): """Assert that updating product subscription works.""" user = factory_user_model(TestUserInfo.user_test) - patch_token_info({'sub': user.keycloak_guid, 'idp_userid': user.idp_userid}, monkeypatch) + patch_token_info({"sub": user.keycloak_guid, "idp_userid": user.idp_userid}, monkeypatch) org = Org.create_org(TestOrgInfo.org1, user_id=user.id) - product_subscription = ProductSubscription(org_id=org._model.id, - product_code='PPR', - status_code=ProductSubscriptionStatus.ACTIVE.value - ).flush() + product_subscription = ProductSubscription( + org_id=org._model.id, product_code="PPR", status_code=ProductSubscriptionStatus.ACTIVE.value + ).flush() class MockContact(object): - email = '' + email = "" class MockPerson(object): def __init__(self, contact: MockContact): self.contact = contact - with patch.object(ActivityLogPublisher, 'publish_activity', return_value=None) as mock_alp: + with patch.object(ActivityLogPublisher, "publish_activity", return_value=None) as mock_alp: if has_contact: - with patch.object(ContactLinkModel, 'find_by_user_id', return_value=MockPerson(contact=MockContact())): + with patch.object(ContactLinkModel, "find_by_user_id", return_value=MockPerson(contact=MockContact())): ProductService.update_product_subscription( - ProductSubscriptionInfo(product_subscription_id=product_subscription.id, - is_approved=True, - org_id=org._model.id)) + ProductSubscriptionInfo( + product_subscription_id=product_subscription.id, is_approved=True, org_id=org._model.id + ) + ) else: - assert UserService.get_admin_emails_for_org(org.as_dict()['id']) == '' + assert UserService.get_admin_emails_for_org(org.as_dict()["id"]) == "" ProductService.update_product_subscription( - 
ProductSubscriptionInfo(product_subscription_id=product_subscription.id, - is_approved=True, - org_id=org._model.id)) - - mock_alp.assert_called_with(Activity(action=ActivityAction.ADD_PRODUCT_AND_SERVICE.value, - org_id=ANY, value=ANY, id=ANY, name='Personal Property Registry')) - - -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) + ProductSubscriptionInfo( + product_subscription_id=product_subscription.id, is_approved=True, org_id=org._model.id + ) + ) + + mock_alp.assert_called_with( + Activity( + action=ActivityAction.ADD_PRODUCT_AND_SERVICE.value, + org_id=ANY, + value=ANY, + id=ANY, + name="Personal Property Registry", + ) + ) + + +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_get_users_product_subscriptions_kc_groups(session, keycloak_mock, monkeypatch): """Assert that our keycloak groups are returned correctly.""" # Used these to test without the keycloak_mock. @@ -97,47 +111,48 @@ def test_get_users_product_subscriptions_kc_groups(session, keycloak_mock, monke user = KeycloakService.add_user(request, return_if_exists=True) # Set keycloak groups, because empty gets filtered out. 
- bca_code = ProductCodeModel.find_by_code('BCA') - bca_code.keycloak_group = 'bca' + bca_code = ProductCodeModel.find_by_code("BCA") + bca_code.keycloak_group = "bca" bca_code.save() - ppr_code = ProductCodeModel.find_by_code('PPR') - ppr_code.keycloak_group = 'ppr' + ppr_code = ProductCodeModel.find_by_code("PPR") + ppr_code.keycloak_group = "ppr" ppr_code.save() - vs_code = ProductCodeModel.find_by_code('VS') - vs_code.keycloak_group = 'vs' + vs_code = ProductCodeModel.find_by_code("VS") + vs_code.keycloak_group = "vs" vs_code.save() # Filter MHR out - testing separately - mhr_code = ProductCodeModel.find_by_code('MHR') + mhr_code = ProductCodeModel.find_by_code("MHR") mhr_code.keycloak_group = None mhr_code.save() - mhr_qsln_code = ProductCodeModel.find_by_code('MHR_QSLN') + mhr_qsln_code = ProductCodeModel.find_by_code("MHR_QSLN") mhr_qsln_code.keycloak_group = None mhr_qsln_code.save() - mhr_qshm_code = ProductCodeModel.find_by_code('MHR_QSHM') + mhr_qshm_code = ProductCodeModel.find_by_code("MHR_QSHM") mhr_qshm_code.keycloak_group = None mhr_qshm_code.save() - mhr_qshd_code = ProductCodeModel.find_by_code('MHR_QSHD') + mhr_qshd_code = ProductCodeModel.find_by_code("MHR_QSHD") mhr_qshd_code.keycloak_group = None mhr_qshd_code.save() user1 = factory_user_model(TestUserInfo.get_user_with_kc_guid(user.id)) - patch_token_info({'sub': user.id, 'idp_userid': user1.idp_userid}, monkeypatch) + patch_token_info({"sub": user.id, "idp_userid": user1.idp_userid}, monkeypatch) org = Org.create_org(TestOrgInfo.org1, user_id=user1.id) - org_id1 = org.as_dict().get('id') + org_id1 = org.as_dict().get("id") factory_membership_model(user1.id, org_id1, member_status=Status.ACTIVE.value) - factory_product_model(org_id1, product_code=ProductCode.PPR.value, - status_code=ProductSubscriptionStatus.ACTIVE.value) + factory_product_model( + org_id1, product_code=ProductCode.PPR.value, status_code=ProductSubscriptionStatus.ACTIVE.value + ) kc_groups = 
ProductService.get_users_product_subscriptions_kc_groups([user1.id]) assert kc_groups[0].user_guid == user1.keycloak_guid - assert kc_groups[0].group_name == 'bca' + assert kc_groups[0].group_name == "bca" assert kc_groups[0].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value assert kc_groups[1].user_guid == user1.keycloak_guid - assert kc_groups[1].group_name == 'ppr' + assert kc_groups[1].group_name == "ppr" assert kc_groups[1].group_action == KeycloakGroupActions.ADD_TO_GROUP.value assert kc_groups[2].user_guid == user1.keycloak_guid - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value # Create a user with a membership row that is INACTIVE, subscription ACTIVE. @@ -148,25 +163,25 @@ def test_get_users_product_subscriptions_kc_groups(session, keycloak_mock, monke factory_membership_model(user2.id, org_id1, member_status=Status.PENDING_STAFF_REVIEW.value) kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user2.id]) - assert kc_groups[0].group_name == 'bca' + assert kc_groups[0].group_name == "bca" assert kc_groups[0].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[1].group_name == 'ppr' + assert kc_groups[1].group_name == "ppr" assert kc_groups[1].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value # Create a user with a membership row that is INACTIVE, ACTIVE user3 = factory_user_model(TestUserInfo.user3) - patch_token_info({'sub': user3.keycloak_guid, 'idp_userid': user3.idp_userid}, monkeypatch) + patch_token_info({"sub": user3.keycloak_guid, "idp_userid": user3.idp_userid}, monkeypatch) factory_membership_model(user3.id, org_id1, member_status=Status.INACTIVE.value) factory_membership_model(user3.id, org_id1, 
member_status=Status.ACTIVE.value) kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user3.id]) - assert kc_groups[0].group_name == 'bca' + assert kc_groups[0].group_name == "bca" assert kc_groups[0].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[1].group_name == 'ppr' + assert kc_groups[1].group_name == "ppr" assert kc_groups[1].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value # Create a user with a membership row that is ACTIVE, INACTIVE @@ -175,79 +190,85 @@ def test_get_users_product_subscriptions_kc_groups(session, keycloak_mock, monke factory_membership_model(user4.id, org_id1, member_status=Status.INACTIVE.value) kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user4.id]) - assert kc_groups[0].group_name == 'bca' + assert kc_groups[0].group_name == "bca" assert kc_groups[0].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[1].group_name == 'ppr' + assert kc_groups[1].group_name == "ppr" assert kc_groups[1].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value # Create a product subscription that is INACTIVE, ACTIVE (should use ACTIVE row, it's latest) - factory_product_model(org_id1, product_code=ProductCode.BCA.value, - status_code=ProductSubscriptionStatus.INACTIVE.value) - factory_product_model(org_id1, product_code=ProductCode.BCA.value, - status_code=ProductSubscriptionStatus.ACTIVE.value) + factory_product_model( + org_id1, product_code=ProductCode.BCA.value, status_code=ProductSubscriptionStatus.INACTIVE.value + ) + factory_product_model( + org_id1, product_code=ProductCode.BCA.value, 
status_code=ProductSubscriptionStatus.ACTIVE.value + ) factory_membership_model(user4.id, org_id1, member_status=Status.ACTIVE.value) kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user4.id]) - assert kc_groups[0].group_name == 'bca' + assert kc_groups[0].group_name == "bca" assert kc_groups[0].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[1].group_name == 'ppr' + assert kc_groups[1].group_name == "ppr" assert kc_groups[1].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value # Create a product subscription that is ACTIVE, INACTIVE (should use INACTIVE ROW, it's latest) - factory_product_model(org_id1, product_code=ProductCode.VS.value, - status_code=ProductSubscriptionStatus.ACTIVE.value) - factory_product_model(org_id1, product_code=ProductCode.VS.value, - status_code=ProductSubscriptionStatus.INACTIVE.value) + factory_product_model( + org_id1, product_code=ProductCode.VS.value, status_code=ProductSubscriptionStatus.ACTIVE.value + ) + factory_product_model( + org_id1, product_code=ProductCode.VS.value, status_code=ProductSubscriptionStatus.INACTIVE.value + ) kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user4.id]) assert kc_groups[2].user_guid == user4.keycloak_guid - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value # Create a user with 2 ORG memberships that are opposites org = Org.create_org(TestOrgInfo.org2, user_id=user1.id) - org_id2 = org.as_dict().get('id') - factory_product_model(org_id2, product_code=ProductCode.PPR.value, - status_code=ProductSubscriptionStatus.INACTIVE.value) - factory_product_model(org_id2, product_code=ProductCode.VS.value, - status_code=ProductSubscriptionStatus.ACTIVE.value) + org_id2 
= org.as_dict().get("id") + factory_product_model( + org_id2, product_code=ProductCode.PPR.value, status_code=ProductSubscriptionStatus.INACTIVE.value + ) + factory_product_model( + org_id2, product_code=ProductCode.VS.value, status_code=ProductSubscriptionStatus.ACTIVE.value + ) factory_membership_model(user1.id, org_id2, member_status=Status.ACTIVE.value) # BCA and PPR (Org1) and VS (Org2) should be active. kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user1.id]) - assert kc_groups[0].group_name == 'bca' + assert kc_groups[0].group_name == "bca" assert kc_groups[0].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[1].group_name == 'ppr' + assert kc_groups[1].group_name == "ppr" assert kc_groups[1].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.ADD_TO_GROUP.value # BCA + PPR (Org1) only should be active. [membership.delete() for membership in Membership.find_members_by_org_id(org_id2)] kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user1.id]) - assert kc_groups[0].group_name == 'bca' + assert kc_groups[0].group_name == "bca" assert kc_groups[0].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[1].group_name == 'ppr' + assert kc_groups[1].group_name == "ppr" assert kc_groups[1].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value # No product subscriptions should be active. 
[membership.delete() for membership in Membership.find_members_by_org_id(org_id1)] kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user1.id]) - assert kc_groups[0].group_name == 'bca' + assert kc_groups[0].group_name == "bca" assert kc_groups[0].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[1].group_name == 'ppr' + assert kc_groups[1].group_name == "ppr" assert kc_groups[1].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[2].group_name == 'vs' + assert kc_groups[2].group_name == "vs" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_get_users_sub_product_subscriptions_kc_groups(session, keycloak_mock, monkeypatch): """Assert that our keycloak groups are returned correctly for sub products.""" # Used these to test without the keycloak_mock. 
@@ -255,98 +276,101 @@ def test_get_users_sub_product_subscriptions_kc_groups(session, keycloak_mock, m user = KeycloakService.add_user(request, return_if_exists=True) # Filter out types that are not parents or children - bca_code = ProductCodeModel.find_by_code('BCA') + bca_code = ProductCodeModel.find_by_code("BCA") bca_code.keycloak_group = None bca_code.save() - ppr_code = ProductCodeModel.find_by_code('PPR') + ppr_code = ProductCodeModel.find_by_code("PPR") ppr_code.keycloak_group = None ppr_code.save() - vs_code = ProductCodeModel.find_by_code('VS') + vs_code = ProductCodeModel.find_by_code("VS") vs_code.keycloak_group = None vs_code.save() # Set up parent product and sub products - mhr_code = ProductCodeModel.find_by_code('MHR') - mhr_code.keycloak_group = 'mhr' + mhr_code = ProductCodeModel.find_by_code("MHR") + mhr_code.keycloak_group = "mhr" mhr_code.save() - mhr_qsln_code = ProductCodeModel.find_by_code('MHR_QSLN') - mhr_qsln_code.keycloak_group = 'mhr_qsln' - mhr_qsln_code.parent_code = 'MHR' + mhr_qsln_code = ProductCodeModel.find_by_code("MHR_QSLN") + mhr_qsln_code.keycloak_group = "mhr_qsln" + mhr_qsln_code.parent_code = "MHR" mhr_qsln_code.save() - mhr_qshm_code = ProductCodeModel.find_by_code('MHR_QSHM') - mhr_qshm_code.keycloak_group = 'mhr_qshm' - mhr_qshm_code.parent_code = 'MHR' + mhr_qshm_code = ProductCodeModel.find_by_code("MHR_QSHM") + mhr_qshm_code.keycloak_group = "mhr_qshm" + mhr_qshm_code.parent_code = "MHR" mhr_qshm_code.save() - mhr_qshd_code = ProductCodeModel.find_by_code('MHR_QSHD') - mhr_qshd_code.keycloak_group = 'mhr_qshd' - mhr_qshd_code.parent_code = 'MHR' + mhr_qshd_code = ProductCodeModel.find_by_code("MHR_QSHD") + mhr_qshd_code.keycloak_group = "mhr_qshd" + mhr_qshd_code.parent_code = "MHR" mhr_qshd_code.save() user1 = factory_user_model(TestUserInfo.get_user_with_kc_guid(user.id)) - patch_token_info({'sub': user.id, 'idp_userid': user1.idp_userid}, monkeypatch) + patch_token_info({"sub": user.id, "idp_userid": 
user1.idp_userid}, monkeypatch) # Validate no associations kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user1.id]) assert kc_groups[0].user_guid == user1.keycloak_guid - assert kc_groups[0].group_name == 'mhr' + assert kc_groups[0].group_name == "mhr" assert kc_groups[0].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value assert kc_groups[1].user_guid == user1.keycloak_guid - assert kc_groups[1].group_name == 'mhr_qshd' + assert kc_groups[1].group_name == "mhr_qshd" assert kc_groups[1].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value assert kc_groups[2].user_guid == user1.keycloak_guid - assert kc_groups[2].group_name == 'mhr_qshm' + assert kc_groups[2].group_name == "mhr_qshm" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value assert kc_groups[3].user_guid == user1.keycloak_guid - assert kc_groups[3].group_name == 'mhr_qsln' + assert kc_groups[3].group_name == "mhr_qsln" assert kc_groups[3].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value org = Org.create_org(TestOrgInfo.org1, user_id=user1.id) - org_id1 = org.as_dict().get('id') + org_id1 = org.as_dict().get("id") factory_membership_model(user1.id, org_id1, member_status=Status.ACTIVE.value) - factory_product_model(org_id1, product_code=ProductCode.MHR_QSHM.value, - status_code=ProductSubscriptionStatus.ACTIVE.value) + factory_product_model( + org_id1, product_code=ProductCode.MHR_QSHM.value, status_code=ProductSubscriptionStatus.ACTIVE.value + ) # Validate sub product subscription kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user1.id]) assert kc_groups[0].user_guid == user1.keycloak_guid - assert kc_groups[0].group_name == 'mhr' + assert kc_groups[0].group_name == "mhr" assert kc_groups[0].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value assert kc_groups[1].user_guid == user1.keycloak_guid - assert kc_groups[1].group_name == 'mhr_qshd' + assert kc_groups[1].group_name == "mhr_qshd" assert 
kc_groups[1].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value assert kc_groups[2].user_guid == user1.keycloak_guid - assert kc_groups[2].group_name == 'mhr_qshm' + assert kc_groups[2].group_name == "mhr_qshm" assert kc_groups[2].group_action == KeycloakGroupActions.ADD_TO_GROUP.value assert kc_groups[3].user_guid == user1.keycloak_guid - assert kc_groups[3].group_name == 'mhr_qsln' + assert kc_groups[3].group_name == "mhr_qsln" assert kc_groups[3].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value # Validate subscription for Qualified Supplier - Lawyers and Notaries # Include parent as well - factory_product_model(org_id1, product_code=ProductCode.MHR_QSLN.value, - status_code=ProductSubscriptionStatus.ACTIVE.value) - factory_product_model(org_id1, product_code=mhr_qsln_code.parent_code, - status_code=ProductSubscriptionStatus.ACTIVE.value) + factory_product_model( + org_id1, product_code=ProductCode.MHR_QSLN.value, status_code=ProductSubscriptionStatus.ACTIVE.value + ) + factory_product_model( + org_id1, product_code=mhr_qsln_code.parent_code, status_code=ProductSubscriptionStatus.ACTIVE.value + ) kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user1.id]) - assert kc_groups[0].group_name == 'mhr' + assert kc_groups[0].group_name == "mhr" assert kc_groups[0].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[1].group_name == 'mhr_qshd' + assert kc_groups[1].group_name == "mhr_qshd" assert kc_groups[1].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[2].group_name == 'mhr_qshm' + assert kc_groups[2].group_name == "mhr_qshm" assert kc_groups[2].group_action == KeycloakGroupActions.ADD_TO_GROUP.value - assert kc_groups[3].group_name == 'mhr_qsln' + assert kc_groups[3].group_name == "mhr_qsln" assert kc_groups[3].group_action == KeycloakGroupActions.ADD_TO_GROUP.value # Validate there should be no active subscriptions [membership.delete() for membership in 
Membership.find_members_by_org_id(org_id1)] kc_groups = ProductService.get_users_product_subscriptions_kc_groups([user1.id]) - assert kc_groups[0].group_name == 'mhr' + assert kc_groups[0].group_name == "mhr" assert kc_groups[0].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[1].group_name == 'mhr_qshd' + assert kc_groups[1].group_name == "mhr_qshd" assert kc_groups[1].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[2].group_name == 'mhr_qshm' + assert kc_groups[2].group_name == "mhr_qshm" assert kc_groups[2].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value - assert kc_groups[3].group_name == 'mhr_qsln' + assert kc_groups[3].group_name == "mhr_qsln" assert kc_groups[3].group_action == KeycloakGroupActions.REMOVE_FROM_GROUP.value diff --git a/auth-api/tests/unit/services/test_product_notifications.py b/auth-api/tests/unit/services/test_product_notifications.py index 8cb7ba7ec6..1b3e95e2f2 100644 --- a/auth-api/tests/unit/services/test_product_notifications.py +++ b/auth-api/tests/unit/services/test_product_notifications.py @@ -16,9 +16,9 @@ Test suite to ensure that the correct product notifications are generated. 
""" +from unittest import mock from unittest.mock import patch -import mock import pytest from sbc_common_components.utils.enums import QueueMessageTypes @@ -32,22 +32,24 @@ from auth_api.services.user import User as UserService from auth_api.utils.enums import LoginSource, TaskAction, TaskRelationshipStatus, TaskRelationshipType, TaskStatus from auth_api.utils.notifications import ( - NotificationAttachmentType, ProductAccessDescriptor, ProductCategoryDescriptor, ProductSubjectDescriptor) + NotificationAttachmentType, + ProductAccessDescriptor, + ProductCategoryDescriptor, + ProductSubjectDescriptor, +) +from tests.conftest import mock_token from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo, TestOrgProductsInfo, TestUserInfo from tests.utilities.factory_utils import factory_user_model_with_contact, patch_token_info -from tests.conftest import mock_token -@pytest.mark.parametrize('org_product_info', [ - TestOrgProductsInfo.org_products_vs -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@patch.object(auth_api.services.products, 'publish_to_mailer') +@pytest.mark.parametrize("org_product_info", [TestOrgProductsInfo.org_products_vs]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@patch.object(auth_api.services.products, "publish_to_mailer") def test_default_approved_notification(mock_mailer, session, auth_mock, keycloak_mock, monkeypatch, org_product_info): """Assert product approved notification default is created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = 
factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -55,68 +57,60 @@ def test_default_approved_notification(mock_mailer, session, auth_mock, keycloak org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = org_product_info['subscriptions'][0]['productCode'] + product_code = org_product_info["subscriptions"][0]["productCode"] # Subscribe to product - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], subscription_data=org_product_info, skip_auth=True + ) - org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary['id']]) - org_prod_sub = next(prod for prod in org_subscriptions - if prod.product_code == product_code) + org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary["id"]]) + org_prod_sub = next(prod for prod in org_subscriptions if prod.product_code == product_code) # Fetch products and confirm product subscription is present token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, monkeypatch) - all_subs = ProductService.get_all_product_subscription(org_id=dictionary['id']) + all_subs = ProductService.get_all_product_subscription(org_id=dictionary["id"]) - prod_sub = next(sub for sub in all_subs if sub.get('code') == product_code) + prod_sub = next(sub for sub in all_subs if sub.get("code") == product_code) assert prod_sub - assert prod_sub['code'] == product_code + assert prod_sub["code"] == product_code # Staff review task should have been created task = TaskModel.find_by_task_relationship_id( - 
task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id) + task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id + ) assert task - assert task.account_id == dictionary['id'] + assert task.account_id == dictionary["id"] assert task.relationship_type == TaskRelationshipType.PRODUCT.value assert task.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value assert task.relationship_id == org_prod_sub.id assert task.action == TaskAction.PRODUCT_REVIEW.value # Approve task and check for publish to mailer - task_info = { - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value - } + task_info = {"relationshipStatus": TaskRelationshipStatus.ACTIVE.value} product_code_model = ProductCodeModel.find_by_code(product_code) - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): + with patch.object(UserService, "get_admin_emails_for_org", return_value="test@test.com"): TaskService.update_task(TaskService(task), task_info=task_info) - expected_data = { - 'productName': product_code_model.description, - 'emailAddresses': 'test@test.com' - } - mock_mailer.assert_called_with(QueueMessageTypes.PROD_PACKAGE_APPROVED_NOTIFICATION.value, - data=expected_data) + expected_data = {"productName": product_code_model.description, "emailAddresses": "test@test.com"} + mock_mailer.assert_called_with(QueueMessageTypes.PROD_PACKAGE_APPROVED_NOTIFICATION.value, data=expected_data) -@pytest.mark.parametrize('org_product_info', [ - TestOrgProductsInfo.org_products_vs -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@patch.object(auth_api.services.products, 'publish_to_mailer') +@pytest.mark.parametrize("org_product_info", [TestOrgProductsInfo.org_products_vs]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@patch.object(auth_api.services.products, 
"publish_to_mailer") def test_default_rejected_notification(mock_mailer, session, auth_mock, keycloak_mock, monkeypatch, org_product_info): """Assert product rejected notification default is created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -124,70 +118,67 @@ def test_default_rejected_notification(mock_mailer, session, auth_mock, keycloak org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = org_product_info['subscriptions'][0]['productCode'] + product_code = org_product_info["subscriptions"][0]["productCode"] # Subscribe to product - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], subscription_data=org_product_info, skip_auth=True + ) - org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary['id']]) - org_prod_sub = next(prod for prod in org_subscriptions - if prod.product_code == product_code) + org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary["id"]]) + org_prod_sub = next(prod for prod in org_subscriptions if prod.product_code == product_code) # Fetch products and confirm product subscription is present token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, 
monkeypatch) - all_subs = ProductService.get_all_product_subscription(org_id=dictionary['id']) + all_subs = ProductService.get_all_product_subscription(org_id=dictionary["id"]) - prod_sub = next(sub for sub in all_subs if sub.get('code') == product_code) + prod_sub = next(sub for sub in all_subs if sub.get("code") == product_code) assert prod_sub - assert prod_sub['code'] == product_code + assert prod_sub["code"] == product_code # Staff review task should have been created task = TaskModel.find_by_task_relationship_id( - task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id) + task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id + ) assert task - assert task.account_id == dictionary['id'] + assert task.account_id == dictionary["id"] assert task.relationship_type == TaskRelationshipType.PRODUCT.value assert task.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value assert task.relationship_id == org_prod_sub.id assert task.action == TaskAction.PRODUCT_REVIEW.value # Approve task and check for publish to mailer - task_info = { - 'relationshipStatus': TaskRelationshipStatus.REJECTED.value - } + task_info = {"relationshipStatus": TaskRelationshipStatus.REJECTED.value} product_code_model = ProductCodeModel.find_by_code(product_code) - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): + with patch.object(UserService, "get_admin_emails_for_org", return_value="test@test.com"): TaskService.update_task(TaskService(task), task_info=task_info) - expected_data = { - 'productName': product_code_model.description, - 'emailAddresses': 'test@test.com' - } - mock_mailer.assert_called_with(QueueMessageTypes.PROD_PACKAGE_REJECTED_NOTIFICATION.value, - data=expected_data) + expected_data = {"productName": product_code_model.description, "emailAddresses": "test@test.com"} + 
mock_mailer.assert_called_with(QueueMessageTypes.PROD_PACKAGE_REJECTED_NOTIFICATION.value, data=expected_data) -@pytest.mark.parametrize('org_product_info', [ - TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, - TestOrgProductsInfo.mhr_qs_home_manufacturers, - TestOrgProductsInfo.mhr_qs_home_dealers -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@patch.object(auth_api.services.products, 'publish_to_mailer') +@pytest.mark.parametrize( + "org_product_info", + [ + TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, + TestOrgProductsInfo.mhr_qs_home_manufacturers, + TestOrgProductsInfo.mhr_qs_home_dealers, + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@patch.object(auth_api.services.products, "publish_to_mailer") def test_detailed_approved_notification(mock_mailer, session, auth_mock, keycloak_mock, monkeypatch, org_product_info): """Assert product approved notification with details is created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -195,83 +186,87 @@ def test_detailed_approved_notification(mock_mailer, session, auth_mock, keycloa org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = org_product_info['subscriptions'][0]['productCode'] + product_code = org_product_info["subscriptions"][0]["productCode"] 
product_code_model = ProductCodeModel.find_by_code(product_code) if product_code_model.parent_code: # Create parent product subscription - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data={'subscriptions': [ - {'productCode': product_code_model.parent_code}]}, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], + subscription_data={"subscriptions": [{"productCode": product_code_model.parent_code}]}, + skip_auth=True, + ) # Subscribe to product - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], subscription_data=org_product_info, skip_auth=True + ) - org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary['id']]) - org_prod_sub = next(prod for prod in org_subscriptions - if prod.product_code == product_code) + org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary["id"]]) + org_prod_sub = next(prod for prod in org_subscriptions if prod.product_code == product_code) # Fetch products and confirm product subscription is present token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, monkeypatch) - all_subs = ProductService.get_all_product_subscription(org_id=dictionary['id']) + all_subs = ProductService.get_all_product_subscription(org_id=dictionary["id"]) - prod_sub = next(sub for sub in all_subs if sub.get('code') == product_code) + prod_sub = next(sub for sub in all_subs if sub.get("code") == product_code) assert prod_sub - assert prod_sub['code'] == product_code + assert prod_sub["code"] == product_code # Staff review task should have been created task = TaskModel.find_by_task_relationship_id( - task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id) + 
task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id + ) assert task - assert task.account_id == dictionary['id'] + assert task.account_id == dictionary["id"] assert task.relationship_type == TaskRelationshipType.PRODUCT.value assert task.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value assert task.relationship_id == org_prod_sub.id assert task.action == TaskAction.QUALIFIED_SUPPLIER_REVIEW.value # Approve task and check for publish to mailer - task_info = { - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value - } + task_info = {"relationshipStatus": TaskRelationshipStatus.ACTIVE.value} product_code_model = ProductCodeModel.find_by_code(product_code) - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): + with patch.object(UserService, "get_admin_emails_for_org", return_value="test@test.com"): TaskService.update_task(TaskService(task), task_info=task_info) expected_data = { - 'subjectDescriptor': ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'productAccessDescriptor': ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'categoryDescriptor': ProductCategoryDescriptor.MHR.value, - 'isReapproved': False, - 'productName': product_code_model.description, - 'emailAddresses': 'test@test.com' + "subjectDescriptor": ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "productAccessDescriptor": ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "categoryDescriptor": ProductCategoryDescriptor.MHR.value, + "isReapproved": False, + "productName": product_code_model.description, + "emailAddresses": "test@test.com", } - mock_mailer.assert_called_with(QueueMessageTypes.PRODUCT_APPROVED_NOTIFICATION_DETAILED.value, - data=expected_data) - - -@pytest.mark.parametrize('org_product_info, contact_type', [ - (TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, 'BCOL'), - (TestOrgProductsInfo.mhr_qs_home_manufacturers, 'BCREG'), - 
(TestOrgProductsInfo.mhr_qs_home_dealers, 'BCREG') -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@patch.object(auth_api.services.products, 'publish_to_mailer') -def test_detailed_rejected_notification(mock_mailer, session, auth_mock, keycloak_mock, - monkeypatch, org_product_info, contact_type): + mock_mailer.assert_called_with( + QueueMessageTypes.PRODUCT_APPROVED_NOTIFICATION_DETAILED.value, data=expected_data + ) + + +@pytest.mark.parametrize( + "org_product_info, contact_type", + [ + (TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, "BCOL"), + (TestOrgProductsInfo.mhr_qs_home_manufacturers, "BCREG"), + (TestOrgProductsInfo.mhr_qs_home_dealers, "BCREG"), + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@patch.object(auth_api.services.products, "publish_to_mailer") +def test_detailed_rejected_notification( + mock_mailer, session, auth_mock, keycloak_mock, monkeypatch, org_product_info, contact_type +): """Assert product rejected notification with details is created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -279,86 +274,88 @@ def test_detailed_rejected_notification(mock_mailer, session, auth_mock, keycloa org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = 
org_product_info['subscriptions'][0]['productCode'] + product_code = org_product_info["subscriptions"][0]["productCode"] product_code_model = ProductCodeModel.find_by_code(product_code) if product_code_model.parent_code: # Create parent product subscription - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data={'subscriptions': [ - {'productCode': product_code_model.parent_code}]}, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], + subscription_data={"subscriptions": [{"productCode": product_code_model.parent_code}]}, + skip_auth=True, + ) # Subscribe to product - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], subscription_data=org_product_info, skip_auth=True + ) - org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary['id']]) - org_prod_sub = next(prod for prod in org_subscriptions - if prod.product_code == product_code) + org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary["id"]]) + org_prod_sub = next(prod for prod in org_subscriptions if prod.product_code == product_code) # Fetch products and confirm product subscription is present token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, monkeypatch) - all_subs = ProductService.get_all_product_subscription(org_id=dictionary['id']) + all_subs = ProductService.get_all_product_subscription(org_id=dictionary["id"]) - prod_sub = next(sub for sub in all_subs if sub.get('code') == product_code) + prod_sub = next(sub for sub in all_subs if sub.get("code") == product_code) assert prod_sub - assert prod_sub['code'] == product_code + assert prod_sub["code"] == product_code # Staff review task should have been created task = TaskModel.find_by_task_relationship_id( - 
task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id) + task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id + ) assert task - assert task.account_id == dictionary['id'] + assert task.account_id == dictionary["id"] assert task.relationship_type == TaskRelationshipType.PRODUCT.value assert task.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value assert task.relationship_id == org_prod_sub.id assert task.action == TaskAction.QUALIFIED_SUPPLIER_REVIEW.value # Approve task and check for publish to mailer - task_info = { - 'relationshipStatus': TaskRelationshipStatus.REJECTED.value, - 'remarks': ['Test remark'] - } + task_info = {"relationshipStatus": TaskRelationshipStatus.REJECTED.value, "remarks": ["Test remark"]} product_code_model = ProductCodeModel.find_by_code(product_code) - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): + with patch.object(UserService, "get_admin_emails_for_org", return_value="test@test.com"): task = TaskService.update_task(TaskService(task), task_info=task_info) task_dict = task.as_dict() expected_data = { - 'subjectDescriptor': ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'productAccessDescriptor': ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'categoryDescriptor': ProductCategoryDescriptor.MHR.value, - 'accessDisclaimer': True, - 'productName': product_code_model.description, - 'emailAddresses': 'test@test.com', - 'contactType': contact_type, - 'remarks': task_dict['remarks'][0] + "subjectDescriptor": ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "productAccessDescriptor": ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "categoryDescriptor": ProductCategoryDescriptor.MHR.value, + "accessDisclaimer": True, + "productName": product_code_model.description, + "emailAddresses": "test@test.com", + "contactType": contact_type, + "remarks": task_dict["remarks"][0], } - 
mock_mailer.assert_called_with(QueueMessageTypes.PRODUCT_REJECTED_NOTIFICATION_DETAILED.value, - data=expected_data) - - -@pytest.mark.parametrize('org_product_info', [ - TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, - TestOrgProductsInfo.mhr_qs_home_manufacturers, - TestOrgProductsInfo.mhr_qs_home_dealers -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@patch.object(auth_api.services.products, 'publish_to_mailer') + mock_mailer.assert_called_with( + QueueMessageTypes.PRODUCT_REJECTED_NOTIFICATION_DETAILED.value, data=expected_data + ) + + +@pytest.mark.parametrize( + "org_product_info", + [ + TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, + TestOrgProductsInfo.mhr_qs_home_manufacturers, + TestOrgProductsInfo.mhr_qs_home_dealers, + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@patch.object(auth_api.services.products, "publish_to_mailer") def test_hold_notification(mock_mailer, session, auth_mock, keycloak_mock, monkeypatch, org_product_info): """Assert product notification is not created for on hold state.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -366,43 +363,44 @@ def test_hold_notification(mock_mailer, session, auth_mock, keycloak_mock, monke org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = 
org_product_info['subscriptions'][0]['productCode'] + product_code = org_product_info["subscriptions"][0]["productCode"] product_code_model = ProductCodeModel.find_by_code(product_code) if product_code_model.parent_code: # Create parent product subscription - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data={'subscriptions': [ - {'productCode': product_code_model.parent_code}]}, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], + subscription_data={"subscriptions": [{"productCode": product_code_model.parent_code}]}, + skip_auth=True, + ) # Subscribe to product - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], subscription_data=org_product_info, skip_auth=True + ) - org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary['id']]) - org_prod_sub = next(prod for prod in org_subscriptions - if prod.product_code == product_code) + org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary["id"]]) + org_prod_sub = next(prod for prod in org_subscriptions if prod.product_code == product_code) # Fetch products and confirm product subscription is present token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, monkeypatch) - all_subs = ProductService.get_all_product_subscription(org_id=dictionary['id']) + all_subs = ProductService.get_all_product_subscription(org_id=dictionary["id"]) - prod_sub = next(sub for sub in all_subs if sub.get('code') == product_code) + prod_sub = next(sub for sub in all_subs if sub.get("code") == product_code) assert prod_sub - assert prod_sub['code'] == product_code + assert prod_sub["code"] == product_code # Staff review task should have been created task = TaskModel.find_by_task_relationship_id( - 
task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id) + task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id + ) assert task - assert task.account_id == dictionary['id'] + assert task.account_id == dictionary["id"] assert task.relationship_type == TaskRelationshipType.PRODUCT.value assert task.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value assert task.relationship_id == org_prod_sub.id @@ -410,28 +408,32 @@ def test_hold_notification(mock_mailer, session, auth_mock, keycloak_mock, monke # Hold task and check publish to mailer is not called task_info = { - 'relationshipStatus': TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, - 'status': TaskStatus.HOLD.value + "relationshipStatus": TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + "status": TaskStatus.HOLD.value, } - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): + with patch.object(UserService, "get_admin_emails_for_org", return_value="test@test.com"): TaskService.update_task(TaskService(task), task_info=task_info) mock_mailer.assert_not_called -@pytest.mark.parametrize('org_product_info, contact_type', [ - (TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, 'BCOL'), - (TestOrgProductsInfo.mhr_qs_home_manufacturers, 'BCREG'), - (TestOrgProductsInfo.mhr_qs_home_dealers, 'BCREG') -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@patch.object(auth_api.services.products, 'publish_to_mailer') -def test_confirmation_notification(mock_mailer, session, auth_mock, keycloak_mock, - monkeypatch, org_product_info, contact_type): +@pytest.mark.parametrize( + "org_product_info, contact_type", + [ + (TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, "BCOL"), + (TestOrgProductsInfo.mhr_qs_home_manufacturers, "BCREG"), + (TestOrgProductsInfo.mhr_qs_home_dealers, "BCREG"), + ], +) 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@patch.object(auth_api.services.products, "publish_to_mailer") +def test_confirmation_notification( + mock_mailer, session, auth_mock, keycloak_mock, monkeypatch, org_product_info, contact_type +): """Assert product confirmation notification is properly created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -439,49 +441,47 @@ def test_confirmation_notification(mock_mailer, session, auth_mock, keycloak_moc org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = org_product_info['subscriptions'][0]['productCode'] + product_code = org_product_info["subscriptions"][0]["productCode"] product_code_model = ProductCodeModel.find_by_code(product_code) if product_code_model.parent_code: # Create parent product subscription - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data={'subscriptions': [ - {'productCode': product_code_model.parent_code}]}, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], + subscription_data={"subscriptions": [{"productCode": product_code_model.parent_code}]}, + skip_auth=True, + ) - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): + with patch.object(UserService, "get_admin_emails_for_org", 
return_value="test@test.com"): # Subscribe to product - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], subscription_data=org_product_info, skip_auth=True + ) expected_data = { - 'subjectDescriptor': ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'productAccessDescriptor': ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'categoryDescriptor': ProductCategoryDescriptor.MHR.value, - 'productName': product_code_model.description, - 'emailAddresses': 'test@test.com', - 'contactType': contact_type, - 'hasAgreementAttachment': True, - 'attachmentType': NotificationAttachmentType.MHR_QS.value + "subjectDescriptor": ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "productAccessDescriptor": ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "categoryDescriptor": ProductCategoryDescriptor.MHR.value, + "productName": product_code_model.description, + "emailAddresses": "test@test.com", + "contactType": contact_type, + "hasAgreementAttachment": True, + "attachmentType": NotificationAttachmentType.MHR_QS.value, } - mock_mailer.assert_called_with(QueueMessageTypes.PRODUCT_CONFIRMATION_NOTIFICATION.value, - data=expected_data) + mock_mailer.assert_called_with(QueueMessageTypes.PRODUCT_CONFIRMATION_NOTIFICATION.value, data=expected_data) -@pytest.mark.parametrize('org_product_info', [ - TestOrgProductsInfo.org_products_vs -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@patch.object(auth_api.services.products, 'publish_to_mailer') +@pytest.mark.parametrize("org_product_info", [TestOrgProductsInfo.org_products_vs]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@patch.object(auth_api.services.products, "publish_to_mailer") def test_no_confirmation_notification(mock_mailer, session, 
auth_mock, keycloak_mock, monkeypatch, org_product_info): """Assert product confirmation notification not created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -489,39 +489,44 @@ def test_no_confirmation_notification(mock_mailer, session, auth_mock, keycloak_ org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = org_product_info['subscriptions'][0]['productCode'] + product_code = org_product_info["subscriptions"][0]["productCode"] product_code_model = ProductCodeModel.find_by_code(product_code) if product_code_model.parent_code: # Create parent product subscription - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data={'subscriptions': [ - {'productCode': product_code_model.parent_code}]}, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], + subscription_data={"subscriptions": [{"productCode": product_code_model.parent_code}]}, + skip_auth=True, + ) - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): + with patch.object(UserService, "get_admin_emails_for_org", return_value="test@test.com"): # Subscribe to product - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], 
subscription_data=org_product_info, skip_auth=True + ) mock_mailer.assert_not_called() -@pytest.mark.parametrize('org_product_info, contact_type', [ - (TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, 'BCOL'), - (TestOrgProductsInfo.mhr_qs_home_manufacturers, 'BCREG'), - (TestOrgProductsInfo.mhr_qs_home_dealers, 'BCREG') -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@patch.object(auth_api.services.products, 'publish_to_mailer') -def test_resubmission_notification(mock_mailer, session, auth_mock, keycloak_mock, - monkeypatch, org_product_info, contact_type): +@pytest.mark.parametrize( + "org_product_info, contact_type", + [ + (TestOrgProductsInfo.mhr_qs_lawyer_and_notaries, "BCOL"), + (TestOrgProductsInfo.mhr_qs_home_manufacturers, "BCREG"), + (TestOrgProductsInfo.mhr_qs_home_dealers, "BCREG"), + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@patch.object(auth_api.services.products, "publish_to_mailer") +def test_resubmission_notification( + mock_mailer, session, auth_mock, keycloak_mock, monkeypatch, org_product_info, contact_type +): """Assert product resubmission notifications are created.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -529,105 +534,102 @@ def test_resubmission_notification(mock_mailer, session, auth_mock, keycloak_moc org = OrgService.create_org(TestOrgInfo.org_premium, user_id=user.id) assert org dictionary = org.as_dict() - assert dictionary['name'] == 
TestOrgInfo.org_premium['name'] + assert dictionary["name"] == TestOrgInfo.org_premium["name"] - product_code = org_product_info['subscriptions'][0]['productCode'] + product_code = org_product_info["subscriptions"][0]["productCode"] product_code_model = ProductCodeModel.find_by_code(product_code) if product_code_model.parent_code: # Create parent product subscription - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data={'subscriptions': [ - {'productCode': product_code_model.parent_code}]}, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], + subscription_data={"subscriptions": [{"productCode": product_code_model.parent_code}]}, + skip_auth=True, + ) # Subscribe to product - ProductService.create_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + ProductService.create_product_subscription( + org_id=dictionary["id"], subscription_data=org_product_info, skip_auth=True + ) - org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary['id']]) - org_prod_sub = next(prod for prod in org_subscriptions - if prod.product_code == product_code) + org_subscriptions = ProductSubscriptionModel.find_by_org_ids(org_ids=[dictionary["id"]]) + org_prod_sub = next(prod for prod in org_subscriptions if prod.product_code == product_code) # Fetch products and confirm product subscription is present token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, monkeypatch) - all_subs = ProductService.get_all_product_subscription(org_id=dictionary['id']) + all_subs = ProductService.get_all_product_subscription(org_id=dictionary["id"]) - prod_sub = next(sub for sub in all_subs if sub.get('code') == product_code) + prod_sub = next(sub for sub in all_subs if sub.get("code") == product_code) assert prod_sub - assert prod_sub['code'] == product_code + assert prod_sub["code"] == 
product_code # Staff review task should have been created task = TaskModel.find_by_task_relationship_id( - task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id) + task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id + ) assert task - assert task.account_id == dictionary['id'] + assert task.account_id == dictionary["id"] assert task.relationship_type == TaskRelationshipType.PRODUCT.value assert task.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value assert task.relationship_id == org_prod_sub.id assert task.action == TaskAction.QUALIFIED_SUPPLIER_REVIEW.value # Reject task and check for publish to mailer - task_info = { - 'relationshipStatus': TaskRelationshipStatus.REJECTED.value, - 'remarks': ['Test remark'] - } + task_info = {"relationshipStatus": TaskRelationshipStatus.REJECTED.value, "remarks": ["Test remark"]} TaskService.update_task(TaskService(task), task_info=task_info) # Resubmit product subscription - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): - ProductService.resubmit_product_subscription(org_id=dictionary['id'], - subscription_data=org_product_info, - skip_auth=True) + with patch.object(UserService, "get_admin_emails_for_org", return_value="test@test.com"): + ProductService.resubmit_product_subscription( + org_id=dictionary["id"], subscription_data=org_product_info, skip_auth=True + ) expected_data = { - 'subjectDescriptor': ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'productAccessDescriptor': ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'categoryDescriptor': ProductCategoryDescriptor.MHR.value, - 'productName': product_code_model.description, - 'emailAddresses': 'test@test.com', - 'contactType': contact_type, - 'hasAgreementAttachment': True, - 'attachmentType': NotificationAttachmentType.MHR_QS.value + "subjectDescriptor": ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, + 
"productAccessDescriptor": ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "categoryDescriptor": ProductCategoryDescriptor.MHR.value, + "productName": product_code_model.description, + "emailAddresses": "test@test.com", + "contactType": contact_type, + "hasAgreementAttachment": True, + "attachmentType": NotificationAttachmentType.MHR_QS.value, } # Assert that confirmation email is re-sent on re-submission - mock_mailer.assert_called_with(QueueMessageTypes.PRODUCT_CONFIRMATION_NOTIFICATION.value, - data=expected_data) + mock_mailer.assert_called_with(QueueMessageTypes.PRODUCT_CONFIRMATION_NOTIFICATION.value, data=expected_data) # Staff review task should be back in review task = TaskModel.find_by_task_relationship_id( - task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id) + task_relationship_type=TaskRelationshipType.PRODUCT.value, relationship_id=org_prod_sub.id + ) assert task - assert task.account_id == dictionary['id'] + assert task.account_id == dictionary["id"] assert task.relationship_type == TaskRelationshipType.PRODUCT.value assert task.relationship_status == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value assert task.relationship_id == org_prod_sub.id assert task.action == TaskAction.QUALIFIED_SUPPLIER_REVIEW.value # Approve task and check for publish to mailer - task_info = { - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value - } + task_info = {"relationshipStatus": TaskRelationshipStatus.ACTIVE.value} product_code_model = ProductCodeModel.find_by_code(product_code) # Should use re-approved template - with patch.object(UserService, 'get_admin_emails_for_org', return_value='test@test.com'): + with patch.object(UserService, "get_admin_emails_for_org", return_value="test@test.com"): TaskService.update_task(TaskService(task), task_info=task_info) expected_data = { - 'subjectDescriptor': ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'productAccessDescriptor': 
ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, - 'categoryDescriptor': ProductCategoryDescriptor.MHR.value, - 'isReapproved': True, - 'productName': product_code_model.description, - 'emailAddresses': 'test@test.com' + "subjectDescriptor": ProductSubjectDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "productAccessDescriptor": ProductAccessDescriptor.MHR_QUALIFIED_SUPPLIER.value, + "categoryDescriptor": ProductCategoryDescriptor.MHR.value, + "isReapproved": True, + "productName": product_code_model.description, + "emailAddresses": "test@test.com", } - mock_mailer.assert_called_with(QueueMessageTypes.PRODUCT_APPROVED_NOTIFICATION_DETAILED.value, - data=expected_data) + mock_mailer.assert_called_with( + QueueMessageTypes.PRODUCT_APPROVED_NOTIFICATION_DETAILED.value, data=expected_data + ) diff --git a/auth-api/tests/unit/services/test_queues.py b/auth-api/tests/unit/services/test_queues.py index 78a4c14353..054821575b 100644 --- a/auth-api/tests/unit/services/test_queues.py +++ b/auth-api/tests/unit/services/test_queues.py @@ -1,20 +1,21 @@ """ADHOC unit tests for queues.""" + import pytest from dotenv import load_dotenv +from sbc_common_components.utils.enums import QueueMessageTypes from auth_api import create_app from auth_api.services.activity_log_publisher import Activity, ActivityLogPublisher from auth_api.utils.account_mailer import publish_to_mailer -from sbc_common_components.utils.enums import QueueMessageTypes -@pytest.mark.skip(reason='ADHOC only test.') +@pytest.mark.skip(reason="ADHOC only test.") def test_gcp_pubsub_connectivity(monkeypatch): """Test that a queue can publish to gcp pubsub.""" # We don't want any of the monkeypatches by the fixtures. 
monkeypatch.undo() - load_dotenv('.env') - app = create_app('production') + load_dotenv(".env") + app = create_app("production") with app.app_context(): - ActivityLogPublisher.publish_activity(Activity(org_id=1, name='hey', action='test')) - publish_to_mailer(QueueMessageTypes.RESET_PASSCODE.value, {'email': ''}) + ActivityLogPublisher.publish_activity(Activity(org_id=1, name="hey", action="test")) + publish_to_mailer(QueueMessageTypes.RESET_PASSCODE.value, {"email": ""}) diff --git a/auth-api/tests/unit/services/test_reset.py b/auth-api/tests/unit/services/test_reset.py deleted file mode 100644 index 7a7105d3df..0000000000 --- a/auth-api/tests/unit/services/test_reset.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tests to verify the reset data Service. - -Test-Suite to ensure that the reset data Service is working as expected. 
-""" - -import pytest - -from auth_api.exceptions import BusinessException -from auth_api.exceptions.errors import Error -from auth_api.services import Membership as MembershipService -from auth_api.services import Org as OrgService -from auth_api.services import ResetTestData as ResetDataService -from auth_api.services import User as UserService -from auth_api.services.entity import Entity as EntityService -from auth_api.services.keycloak import KeycloakService -from tests.utilities.factory_scenarios import KeycloakScenario, TestEntityInfo, TestJwtClaims, TestUserInfo -from tests.utilities.factory_utils import ( - factory_entity_model, factory_membership_model, factory_org_model, factory_user_model, patch_token_info) - - -def test_reset(session, auth_mock, monkeypatch): # pylint: disable=unused-argument - """Assert that can be reset data by the provided token.""" - user_with_token = TestUserInfo.user_tester - user_with_token['keycloak_guid'] = TestJwtClaims.tester_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.tester_role['idp_userid'] - user = factory_user_model(user_info=user_with_token) - org = factory_org_model(user_id=user.id) - factory_membership_model(user.id, org.id) - entity = factory_entity_model(user_id=user.id) - - patch_token_info(TestJwtClaims.tester_role, monkeypatch) - ResetDataService.reset() - - with pytest.raises(BusinessException) as exception: - patch_token_info(user_with_token, monkeypatch) - UserService.find_by_jwt_token() - assert exception.value.code == Error.DATA_NOT_FOUND.name - - found_org = OrgService.find_by_org_id(org.id) - assert found_org is None - - found_entity = EntityService.find_by_entity_id(entity.id) - assert found_entity is not None - dictionary = found_entity.as_dict() - assert dictionary['business_identifier'] == TestEntityInfo.entity1['businessIdentifier'] - assert not dictionary['pass_code_claimed'] - - found_memeber = MembershipService.get_members_for_org(org.id) - assert found_memeber is None - - -def 
test_reset_user_notexists(session, auth_mock, monkeypatch): # pylint: disable=unused-argument - """Assert that can not be reset data by the provided token not exists in database.""" - patch_token_info(TestJwtClaims.tester_role, monkeypatch) - response = ResetDataService.reset() - assert response is None - - -def test_reset_user_without_tester_role(session, auth_mock, monkeypatch): # pylint: disable=unused-argument - """Assert that can not be reset data by the user doesn't have tester role.""" - user_with_token = TestUserInfo.user_tester - user_with_token['keycloak_guid'] = TestJwtClaims.tester_role['sub'] - user = factory_user_model(user_info=user_with_token) - org = factory_org_model(user_id=user.id) - - patch_token_info(TestJwtClaims.public_user_role, monkeypatch) - response = ResetDataService.reset() - assert response is None - - found_org = OrgService.find_by_org_id(org.id) - assert found_org is not None - - -def test_reset_bceid_user(session, auth_mock, monkeypatch): # pylint: disable=unused-argument - """Assert that reset data from a bceid user.""" - keycloak_service = KeycloakService() - patch_token_info(TestJwtClaims.tester_bceid_role, monkeypatch) - - request = KeycloakScenario.create_user_by_user_info(TestJwtClaims.tester_bceid_role) - keycloak_service.add_user(request, return_if_exists=True) - user = keycloak_service.get_user_by_username(request.user_name) - assert user is not None - user_id = user.id - user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = user_id - user_with_token['idp_userid'] = user_id - user = factory_user_model(user_info=user_with_token) - org = factory_org_model(user_id=user.id) - - patch_token_info(TestJwtClaims.get_test_user(user_id, 'BCEID'), monkeypatch) - response = ResetDataService.reset() - assert response is None - - found_org = OrgService.find_by_org_id(org.id) - assert found_org is None diff --git a/auth-api/tests/unit/services/test_task.py b/auth-api/tests/unit/services/test_task.py index 
c180611636..06fe3b9f9a 100644 --- a/auth-api/tests/unit/services/test_task.py +++ b/auth-api/tests/unit/services/test_task.py @@ -15,15 +15,16 @@ Test suite to ensure that the Task service routines are working as expected. """ -import mock -import pytest from datetime import datetime +from unittest import mock from unittest.mock import patch +import pytest + from auth_api.models import ContactLink as ContactLinkModel from auth_api.models import ProductCode as ProductCodeModel -from auth_api.models import User as UserModel from auth_api.models import Task as TaskModel +from auth_api.models import User as UserModel from auth_api.models.dataclass import TaskSearch from auth_api.services import Affidavit as AffidavitService from auth_api.services import Org as OrgService @@ -31,13 +32,30 @@ from auth_api.services import User as UserService from auth_api.services.rest_service import RestService from auth_api.utils.enums import ( - LoginSource, OrgStatus, TaskAction, TaskRelationshipStatus, TaskRelationshipType, TaskStatus, TaskTypePrefix) + LoginSource, + OrgStatus, + TaskAction, + TaskRelationshipStatus, + TaskRelationshipType, + TaskStatus, + TaskTypePrefix, +) +from tests.conftest import mock_token from tests.utilities.factory_scenarios import ( - TestAffidavit, TestJwtClaims, TestOrgInfo, TestPaymentMethodInfo, TestUserInfo) + TestAffidavit, + TestJwtClaims, + TestOrgInfo, + TestPaymentMethodInfo, + TestUserInfo, +) from tests.utilities.factory_utils import ( - factory_org_model, factory_product_model, factory_task_service, factory_user_model, factory_user_model_with_contact, - patch_token_info) -from tests.conftest import mock_token + factory_org_model, + factory_product_model, + factory_task_service, + factory_user_model, + factory_user_model_with_contact, + patch_token_info, +) def test_fetch_tasks(session, auth_mock): # pylint:disable=unused-argument @@ -45,19 +63,15 @@ def test_fetch_tasks(session, auth_mock): # pylint:disable=unused-argument user = 
factory_user_model() task = factory_task_service(user.id) dictionary = task.as_dict() - name = dictionary['name'] + name = dictionary["name"] - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 - ) + task_search = TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) fetched_task = TaskService.fetch_tasks(task_search) - assert fetched_task['tasks'] - for item in fetched_task['tasks']: - assert item['name'] == name + assert fetched_task["tasks"] + for item in fetched_task["tasks"]: + assert item["name"] == name def test_create_task_org(session, keycloak_mock): # pylint:disable=unused-argument @@ -66,21 +80,21 @@ def test_create_task_org(session, keycloak_mock): # pylint:disable=unused-argum test_org = factory_org_model() task_type_new_account = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value test_task_info = { - 'name': test_org.name, - 'relationshipId': test_org.id, - 'relatedTo': user.id, - 'dateSubmitted': datetime.today(), - 'relationshipType': TaskRelationshipType.ORG.value, - 'type': task_type_new_account, - 'status': [TaskStatus.OPEN.value], - 'relationship_status': TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, - 'action': TaskAction.AFFIDAVIT_REVIEW.value + "name": test_org.name, + "relationshipId": test_org.id, + "relatedTo": user.id, + "dateSubmitted": datetime.today(), + "relationshipType": TaskRelationshipType.ORG.value, + "type": task_type_new_account, + "status": [TaskStatus.OPEN.value], + "relationship_status": TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + "action": TaskAction.AFFIDAVIT_REVIEW.value, } task = TaskService.create_task(test_task_info) assert task dictionary = task.as_dict() - assert dictionary['name'] == test_org.name - assert dictionary['action'] == test_task_info['action'] + assert dictionary["name"] == test_org.name + assert dictionary["action"] == test_task_info["action"] def test_create_task_product(session, keycloak_mock): # pylint:disable=unused-argument @@ -90,34 +104,37 @@ def 
test_create_task_product(session, keycloak_mock): # pylint:disable=unused-a test_product = factory_product_model(org_id=test_org.id) product: ProductCodeModel = ProductCodeModel.find_by_code(test_product.product_code) test_task_info = { - 'name': test_org.name, - 'relationshipId': test_product.id, - 'relatedTo': user.id, - 'dateSubmitted': datetime.today(), - 'relationshipType': TaskRelationshipType.PRODUCT.value, - 'type': product.description, - 'status': [TaskStatus.OPEN.value], - 'accountId': test_org.id, - 'relationship_status': TaskRelationshipStatus.PENDING_STAFF_REVIEW.value + "name": test_org.name, + "relationshipId": test_product.id, + "relatedTo": user.id, + "dateSubmitted": datetime.today(), + "relationshipType": TaskRelationshipType.PRODUCT.value, + "type": product.description, + "status": [TaskStatus.OPEN.value], + "accountId": test_org.id, + "relationship_status": TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, } task = TaskService.create_task(test_task_info) assert task dictionary = task.as_dict() - assert dictionary['name'] == test_task_info['name'] - assert dictionary['account_id'] == test_org.id - assert dictionary['relationship_type'] == TaskRelationshipType.PRODUCT.value - - -@pytest.mark.parametrize('test_name, rmv_contact', [ - ('has_contact', False), - ('no_contact', True), -]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) + assert dictionary["name"] == test_task_info["name"] + assert dictionary["account_id"] == test_org.id + assert dictionary["relationship_type"] == TaskRelationshipType.PRODUCT.value + + +@pytest.mark.parametrize( + "test_name, rmv_contact", + [ + ("has_contact", False), + ("no_contact", True), + ], +) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_update_task(session, keycloak_mock, monkeypatch, test_name, rmv_contact): # pylint:disable=unused-argument """Assert that a task can be updated.""" 
user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -125,7 +142,7 @@ def test_update_task(session, keycloak_mock, monkeypatch, test_name, rmv_contact AffidavitService.create_affidavit(affidavit_info=affidavit_info) org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value + assert org_dict["status_code"] == OrgStatus.PENDING_STAFF_REVIEW.value if rmv_contact: # remove contact link @@ -133,39 +150,33 @@ def test_update_task(session, keycloak_mock, monkeypatch, test_name, rmv_contact contact_link.user_id = None session.add(contact_link) session.commit() - assert UserService.get_admin_emails_for_org(org_dict['id']) == '' + assert UserService.get_admin_emails_for_org(org_dict["id"]) == "" token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, monkeypatch) - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 - ) + task_search = TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) tasks = TaskService.fetch_tasks(task_search) - fetched_tasks = tasks['tasks'] + fetched_tasks = tasks["tasks"] fetched_task = fetched_tasks[0] - task_info = { - 'relationshipStatus': TaskRelationshipStatus.ACTIVE.value - } - task: TaskModel = TaskModel.find_by_task_id(fetched_task['id']) + task_info = {"relationshipStatus": TaskRelationshipStatus.ACTIVE.value} + task: TaskModel = TaskModel.find_by_task_id(fetched_task["id"]) task = 
TaskService.update_task(TaskService(task), task_info=task_info) dictionary = task.as_dict() user = UserModel.find_by_id(user.id) - assert dictionary['status'] == TaskStatus.COMPLETED.value - assert dictionary['relationship_status'] == TaskRelationshipStatus.ACTIVE.value + assert dictionary["status"] == TaskStatus.COMPLETED.value + assert dictionary["relationship_status"] == TaskRelationshipStatus.ACTIVE.value assert user.verified -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_hold_task(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a task can be updated.""" user_with_token = TestUserInfo.user_bceid_tester - user_with_token['keycloak_guid'] = TestJwtClaims.public_bceid_user['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_bceid_user['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_bceid_user["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_bceid_user["idp_userid"] user = factory_user_model_with_contact(user_with_token) patch_token_info(TestJwtClaims.public_bceid_user, monkeypatch) @@ -173,90 +184,77 @@ def test_hold_task(session, keycloak_mock, monkeypatch): # pylint:disable=unuse AffidavitService.create_affidavit(affidavit_info=affidavit_info) org = OrgService.create_org(TestOrgInfo.org_with_mailing_address(), user_id=user.id) org_dict = org.as_dict() - assert org_dict['org_status'] == OrgStatus.PENDING_STAFF_REVIEW.value + assert org_dict["status_code"] == OrgStatus.PENDING_STAFF_REVIEW.value token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value) patch_token_info(token_info, monkeypatch) - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 - ) + task_search = TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) tasks = 
TaskService.fetch_tasks(task_search) - fetched_tasks = tasks['tasks'] + fetched_tasks = tasks["tasks"] fetched_task = fetched_tasks[0] task_info = { - 'relationshipStatus': TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, - 'status': TaskStatus.HOLD.value, - 'remarks': ['Test Remark'] - + "relationshipStatus": TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + "status": TaskStatus.HOLD.value, + "remarks": ["Test Remark"], } - task: TaskModel = TaskModel.find_by_task_id(fetched_task['id']) + task: TaskModel = TaskModel.find_by_task_id(fetched_task["id"]) task = TaskService.update_task(TaskService(task), task_info=task_info) dictionary = task.as_dict() - assert dictionary['status'] == TaskStatus.HOLD.value - assert dictionary['relationship_status'] == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value - assert dictionary['remarks'] == ['Test Remark'] + assert dictionary["status"] == TaskStatus.HOLD.value + assert dictionary["relationship_status"] == TaskRelationshipStatus.PENDING_STAFF_REVIEW.value + assert dictionary["remarks"] == ["Test Remark"] -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_create_task_govm(session, - keycloak_mock, monkeypatch): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_create_task_govm(session, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a task can be created when updating a GOVM account.""" user = factory_user_model() - token_info = TestJwtClaims.get_test_user(sub=user.keycloak_guid, source=LoginSource.STAFF.value, - roles=['create_accounts'], idp_userid=user.idp_userid) + token_info = TestJwtClaims.get_test_user( + sub=user.keycloak_guid, source=LoginSource.STAFF.value, roles=["create_accounts"], idp_userid=user.idp_userid + ) user2 = factory_user_model(TestUserInfo.user2) - public_token_info = 
TestJwtClaims.get_test_user(sub=user2.keycloak_guid, source=LoginSource.STAFF.value, - roles=['gov_account_user'], idp_userid=user2.idp_userid) + public_token_info = TestJwtClaims.get_test_user( + sub=user2.keycloak_guid, source=LoginSource.STAFF.value, roles=["gov_account_user"], idp_userid=user2.idp_userid + ) patch_token_info(token_info, monkeypatch) org: OrgService = OrgService.create_org(TestOrgInfo.org_govm, user_id=user.id) assert org - with patch.object(RestService, 'put') as mock_post: + with patch.object(RestService, "put") as mock_post: payment_details = TestPaymentMethodInfo.get_payment_method_input_with_revenue() - org_body = { - 'mailingAddress': TestOrgInfo.get_mailing_address(), - **payment_details - - } + org_body = {"mailingAddress": TestOrgInfo.get_mailing_address(), **payment_details} patch_token_info(public_token_info, monkeypatch) org = OrgService.update_org(org, org_body) assert org dictionary = org.as_dict() - assert dictionary['name'] == TestOrgInfo.org_govm['name'] + assert dictionary["name"] == TestOrgInfo.org_govm["name"] mock_post.assert_called() - actual_data = mock_post.call_args.kwargs.get('data') + actual_data = mock_post.call_args.kwargs.get("data") expected_data = { - 'accountId': dictionary.get('id'), - 'accountName': dictionary.get('name') + '-' + dictionary.get('branch_name'), - 'branchName': dictionary.get('branch_name'), - 'paymentInfo': { - 'methodOfPayment': 'EJV', - 'revenueAccount': payment_details.get('paymentInfo').get('revenueAccount') + "accountId": dictionary.get("id"), + "accountName": dictionary.get("name") + "-" + dictionary.get("branch_name"), + "branchName": dictionary.get("branch_name"), + "paymentInfo": { + "methodOfPayment": "EJV", + "revenueAccount": payment_details.get("paymentInfo").get("revenueAccount"), }, - 'contactInfo': TestOrgInfo.get_mailing_address() - + "contactInfo": TestOrgInfo.get_mailing_address(), } assert expected_data == actual_data # Assert the task that is created 
patch_token_info(token_info, monkeypatch) - task_search = TaskSearch( - status=[TaskStatus.OPEN.value], - page=1, - limit=10 - ) + task_search = TaskSearch(status=[TaskStatus.OPEN.value], page=1, limit=10) fetched_task = TaskService.fetch_tasks(task_search) - for item in fetched_task['tasks']: - assert item['name'] == dictionary['name'] - assert item['type'] == TaskTypePrefix.GOVM_REVIEW.value - assert item['status'] == TaskStatus.OPEN.value - assert item['relationship_id'] == dictionary['id'] + for item in fetched_task["tasks"]: + assert item["name"] == dictionary["name"] + assert item["type"] == TaskTypePrefix.GOVM_REVIEW.value + assert item["status"] == TaskStatus.OPEN.value + assert item["relationship_id"] == dictionary["id"] diff --git a/auth-api/tests/unit/services/test_user.py b/auth-api/tests/unit/services/test_user.py index 8539aee444..ece0913bf2 100644 --- a/auth-api/tests/unit/services/test_user.py +++ b/auth-api/tests/unit/services/test_user.py @@ -17,9 +17,9 @@ Test-Suite to ensure that the User Service is working as expected. 
""" import json +from unittest import mock from unittest.mock import patch -import mock import pytest from werkzeug.exceptions import HTTPException @@ -35,13 +35,26 @@ from auth_api.services.keycloak_user import KeycloakUser from auth_api.utils.enums import IdpHint, ProductCode, Status from auth_api.utils.roles import ADMIN, COORDINATOR, USER, Role +from tests.conftest import mock_token from tests.utilities.factory_scenarios import ( - KeycloakScenario, TestAnonymousMembership, TestContactInfo, TestEntityInfo, TestJwtClaims, TestOrgInfo, - TestUserInfo) + KeycloakScenario, + TestAnonymousMembership, + TestContactInfo, + TestEntityInfo, + TestJwtClaims, + TestOrgInfo, + TestUserInfo, +) from tests.utilities.factory_utils import ( - factory_contact_model, factory_entity_model, factory_membership_model, factory_org_model, factory_product_model, - factory_user_model, get_tos_latest_version, patch_token_info) -from tests.conftest import mock_token + factory_contact_model, + factory_entity_model, + factory_membership_model, + factory_org_model, + factory_product_model, + factory_user_model, + get_tos_latest_version, + patch_token_info, +) def test_as_dict(session): # pylint: disable=unused-argument @@ -50,7 +63,7 @@ def test_as_dict(session): # pylint: disable=unused-argument user = UserService(user_model) dictionary = user.as_dict() - assert dictionary['username'] == TestUserInfo.user1['username'] + assert dictionary["username"] == TestUserInfo.user1["username"] def test_user_save_by_token(session, monkeypatch): # pylint: disable=unused-argument @@ -59,8 +72,8 @@ def test_user_save_by_token(session, monkeypatch): # pylint: disable=unused-arg user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() - assert dictionary['username'] == TestJwtClaims.user_test['preferred_username'] - assert dictionary['keycloak_guid'] == TestJwtClaims.user_test['sub'] + assert dictionary["username"] == TestJwtClaims.user_test["preferred_username"] + assert 
dictionary["keycloak_guid"] == TestJwtClaims.user_test["sub"] def test_bcros_user_save_by_token(session, monkeypatch): # pylint: disable=unused-argument @@ -69,8 +82,8 @@ def test_bcros_user_save_by_token(session, monkeypatch): # pylint: disable=unus user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() - assert dictionary['username'] == TestJwtClaims.anonymous_bcros_role['preferred_username'] - assert dictionary['keycloak_guid'] == TestJwtClaims.anonymous_bcros_role['sub'] + assert dictionary["username"] == TestJwtClaims.anonymous_bcros_role["preferred_username"] + assert dictionary["keycloak_guid"] == TestJwtClaims.anonymous_bcros_role["sub"] def test_bcros_user_update_by_token(session, monkeypatch): # pylint: disable=unused-argument @@ -78,14 +91,14 @@ def test_bcros_user_update_by_token(session, monkeypatch): # pylint: disable=un user_model = factory_user_model(TestUserInfo.user_bcros) user = UserService(user_model) dictionary = user.as_dict() - assert dictionary.get('keycloak_guid', None) is None + assert dictionary.get("keycloak_guid", None) is None patch_token_info(TestJwtClaims.anonymous_bcros_role, monkeypatch) user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() - assert dictionary['username'] == TestJwtClaims.anonymous_bcros_role['preferred_username'] - assert dictionary['keycloak_guid'] == TestJwtClaims.anonymous_bcros_role['sub'] + assert dictionary["username"] == TestJwtClaims.anonymous_bcros_role["preferred_username"] + assert dictionary["keycloak_guid"] == TestJwtClaims.anonymous_bcros_role["sub"] def test_user_save_by_token_no_token(session): # pylint: disable=unused-argument @@ -94,15 +107,16 @@ def test_user_save_by_token_no_token(session): # pylint: disable=unused-argumen assert user is None -def test_create_user_and_add_membership_owner_skip_auth_mode(session, auth_mock, - keycloak_mock): # pylint:disable=unused-argument +def 
test_create_user_and_add_membership_owner_skip_auth_mode( + session, auth_mock, keycloak_mock +): # pylint:disable=unused-argument """Assert that an owner can be added as anonymous.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) - assert len(users['users']) == 1 - assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] - assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name + assert len(users["users"]) == 1 + assert users["users"][0]["username"] == IdpHint.BCROS.value + "/" + membership[0]["username"] + assert users["users"][0]["type"] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) @@ -123,11 +137,11 @@ def test_reset_password(session, auth_mock, keycloak_mock, monkeypatch): # pyli membership = [TestAnonymousMembership.generate_random_user(USER)] users = UserService.create_user_and_add_membership(membership, org.id) - user_name = users['users'][0]['username'] - user_info = {'username': user_name, 'password': 'password'} + user_name = users["users"][0]["username"] + user_info = {"username": user_name, "password": "password"} kc_user = UserService.reset_password_for_anon_user(user_info, user_name) # cant assert anything else since password wont be gotten back - assert kc_user.user_name == user_name.replace(f'{IdpHint.BCROS.value}/', '').lower() + assert kc_user.user_name == user_name.replace(f"{IdpHint.BCROS.value}/", "").lower() def test_reset_password_by_member(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument @@ -141,8 +155,8 @@ def test_reset_password_by_member(session, auth_mock, keycloak_mock, monkeypatch patch_token_info(admin_claims, monkeypatch) users = UserService.create_user_and_add_membership(membership, org.id) - user_name = users['users'][0]['username'] - user_info = 
{'username': user_name, 'password': 'password'} + user_name = users["users"][0]["username"] + user_info = {"username": user_name, "password": "password"} with pytest.raises(HTTPException) as excinfo: patch_token_info(TestJwtClaims.public_user_role, monkeypatch) UserService.reset_password_for_anon_user(user_info, user_name) @@ -159,7 +173,7 @@ def test_delete_otp_for_user(session, auth_mock, keycloak_mock, monkeypatch): membership = [TestAnonymousMembership.generate_random_user(USER)] keycloak_service = KeycloakService() request = KeycloakScenario.create_user_request() - request.user_name = membership[0]['username'] + request.user_name = membership[0]["username"] keycloak_service.add_user(request) user = kc_service.get_user_by_username(request.user_name) user = factory_user_model(TestUserInfo.get_bceid_user_with_kc_guid(user.id)) @@ -168,94 +182,99 @@ def test_delete_otp_for_user(session, auth_mock, keycloak_mock, monkeypatch): patch_token_info(admin_claims, monkeypatch) UserService.delete_otp_for_user(user.username) user1 = kc_service.get_user_by_username(request.user_name) - assert 'CONFIGURE_TOTP' in json.loads(user1.value()).get('requiredActions') + assert "CONFIGURE_TOTP" in json.loads(user1.value()).get("requiredActions") -def test_create_user_and_add_same_user_name_error_in_kc(session, auth_mock, - keycloak_mock): # pylint:disable=unused-argument +def test_create_user_and_add_same_user_name_error_in_kc( + session, auth_mock, keycloak_mock +): # pylint:disable=unused-argument """Assert that same user name cannot be added twice.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] keycloak_service = KeycloakService() request = KeycloakScenario.create_user_request() - request.user_name = membership[0]['username'] + request.user_name = membership[0]["username"] keycloak_service.add_user(request) users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) - 
assert users['users'][0]['http_status'] == 409 - assert users['users'][0]['error'] == 'The username is already taken' + assert users["users"][0]["http_status"] == 409 + assert users["users"][0]["error"] == "The username is already taken" -def test_create_user_and_add_same_user_name_error_in_db(session, auth_mock, - keycloak_mock): # pylint:disable=unused-argument +def test_create_user_and_add_same_user_name_error_in_db( + session, auth_mock, keycloak_mock +): # pylint:disable=unused-argument """Assert that same user name cannot be added twice.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model(TestUserInfo.user_bcros) factory_membership_model(user.id, org.id) new_members = TestAnonymousMembership.generate_random_user(ADMIN) - new_members['username'] = user.username.replace(f'{IdpHint.BCROS.value}/', '') + new_members["username"] = user.username.replace(f"{IdpHint.BCROS.value}/", "") membership = [new_members] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) - assert users['users'][0]['http_status'] == 409 - assert users['users'][0]['error'] == 'The username is already taken' + assert users["users"][0]["http_status"] == 409 + assert users["users"][0]["error"] == "The username is already taken" -def test_create_user_and_add_transaction_membership(session, auth_mock, - keycloak_mock): # pylint:disable=unused-argument +def test_create_user_and_add_transaction_membership( + session, auth_mock, keycloak_mock +): # pylint:disable=unused-argument """Assert transactions works fine.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] - with patch('auth_api.models.Membership.flush', side_effect=Exception('mocked error')): + with patch("auth_api.models.Membership.flush", side_effect=Exception("mocked error")): users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) - user_name = 
IdpHint.BCROS.value + '/' + membership[0]['username'] - assert len(users['users']) == 1 - assert users['users'][0]['username'] == membership[0]['username'] - assert users['users'][0]['http_status'] == 500 - assert users['users'][0]['error'] == 'Adding User Failed' + user_name = IdpHint.BCROS.value + "/" + membership[0]["username"] + assert len(users["users"]) == 1 + assert users["users"][0]["username"] == membership[0]["username"] + assert users["users"][0]["http_status"] == 500 + assert users["users"][0]["error"] == "Adding User Failed" # make sure no records are created user = UserModel.find_by_username(user_name) assert user is None - user = UserModel.find_by_username(membership[0]['username']) + user = UserModel.find_by_username(membership[0]["username"]) assert user is None members = MembershipModel.find_members_by_org_id(org.id) # only one member should be there since its a STAFF created org assert len(members) == 0 -def test_create_user_and_add_transaction_membership_1(session, auth_mock, - keycloak_mock): # pylint:disable=unused-argument +def test_create_user_and_add_transaction_membership_1( + session, auth_mock, keycloak_mock +): # pylint:disable=unused-argument """Assert transactions works fine.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] - with patch('auth_api.models.User.flush', side_effect=Exception('mocked error')): + with patch("auth_api.models.User.flush", side_effect=Exception("mocked error")): users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) - user_name = IdpHint.BCROS.value + '/' + membership[0]['username'] - assert len(users['users']) == 1 - assert users['users'][0]['username'] == membership[0]['username'] - assert users['users'][0]['http_status'] == 500 - assert users['users'][0]['error'] == 'Adding User Failed' + user_name = IdpHint.BCROS.value + "/" + membership[0]["username"] + assert len(users["users"]) == 1 + 
assert users["users"][0]["username"] == membership[0]["username"] + assert users["users"][0]["http_status"] == 500 + assert users["users"][0]["error"] == "Adding User Failed" # make sure no records are created user = UserModel.find_by_username(user_name) assert user is None - user = UserModel.find_by_username(membership[0]['username']) + user = UserModel.find_by_username(membership[0]["username"]) assert user is None members = MembershipModel.find_members_by_org_id(org.id) # only one member should be there since its a STAFF created org assert len(members) == 0 -def test_create_user_and_add_membership_admin_skip_auth_mode(session, auth_mock, - keycloak_mock): # pylint:disable=unused-argument +def test_create_user_and_add_membership_admin_skip_auth_mode( + session, auth_mock, keycloak_mock +): # pylint:disable=unused-argument """Assert that an admin can be added as anonymous.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(COORDINATOR)] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) - assert len(users['users']) == 1 - assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] - assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name + assert len(users["users"]) == 1 + assert users["users"][0]["username"] == IdpHint.BCROS.value + "/" + membership[0]["username"] + assert users["users"][0]["type"] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) @@ -264,8 +283,9 @@ def test_create_user_and_add_membership_admin_skip_auth_mode(session, auth_mock, assert members[0].membership_type_code == COORDINATOR -def test_create_user_and_add_membership_admin_bulk_mode(session, auth_mock, - keycloak_mock, monkeypatch): # pylint:disable=unused-argument +def test_create_user_and_add_membership_admin_bulk_mode( + session, auth_mock, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument 
"""Assert that an admin can add a member.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() @@ -277,9 +297,9 @@ def test_create_user_and_add_membership_admin_bulk_mode(session, auth_mock, membership = [TestAnonymousMembership.generate_random_user(USER)] users = UserService.create_user_and_add_membership(membership, org.id) - assert len(users['users']) == 1 - assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] - assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name + assert len(users["users"]) == 1 + assert users["users"][0]["username"] == IdpHint.BCROS.value + "/" + membership[0]["username"] + assert users["users"][0]["type"] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) @@ -287,8 +307,9 @@ def test_create_user_and_add_membership_admin_bulk_mode(session, auth_mock, assert len(members) == 2 -def test_create_user_add_membership_reenable(session, auth_mock, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +def test_create_user_add_membership_reenable( + session, auth_mock, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that an admin can add a member.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() @@ -300,10 +321,10 @@ def test_create_user_add_membership_reenable(session, auth_mock, keycloak_mock, anon_member = TestAnonymousMembership.generate_random_user(USER) membership = [anon_member] users = UserService.create_user_and_add_membership(membership, org.id) - user_name = IdpHint.BCROS.value + '/' + membership[0]['username'] - assert len(users['users']) == 1 - assert users['users'][0]['username'] == user_name - assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name + user_name = IdpHint.BCROS.value + "/" + membership[0]["username"] + assert len(users["users"]) == 1 + assert users["users"][0]["username"] == user_name + assert users["users"][0]["type"] == 
Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) @@ -312,8 +333,8 @@ def test_create_user_add_membership_reenable(session, auth_mock, keycloak_mock, # assert cant be readded users = UserService.create_user_and_add_membership(membership, org.id) - assert users['users'][0]['http_status'] == 409 - assert users['users'][0]['error'] == 'The username is already taken' + assert users["users"][0]["http_status"] == 409 + assert users["users"][0]["error"] == "The username is already taken" # deactivate everything and try again @@ -324,28 +345,28 @@ def test_create_user_add_membership_reenable(session, auth_mock, keycloak_mock, membership_model.status = Status.INACTIVE.value update_user_request = KeycloakUser() - update_user_request.user_name = membership[0]['username'] + update_user_request.user_name = membership[0]["username"] update_user_request.enabled = False KeycloakService.update_user(update_user_request) - org2 = factory_org_model(org_info=TestOrgInfo.org_anonymous_2, org_type_info={'code': 'BASIC'}) + org2 = factory_org_model(org_info=TestOrgInfo.org_anonymous_2, org_type_info={"code": "BASIC"}) factory_membership_model(user.id, org2.id) factory_product_model(org2.id, product_code=ProductCode.DIR_SEARCH.value) users = UserService.create_user_and_add_membership(membership, org2.id) - assert users['users'][0]['http_status'] == 409 - assert users['users'][0]['error'] == 'The username is already taken' + assert users["users"][0]["http_status"] == 409 + assert users["users"][0]["error"] == "The username is already taken" # add to same org.Should work users = UserService.create_user_and_add_membership(membership, org.id) - assert len(users['users']) == 1 - assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] - assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name + assert len(users["users"]) == 1 + assert users["users"][0]["username"] == IdpHint.BCROS.value + "/" + membership[0]["username"] + 
assert users["users"][0]["type"] == Role.ANONYMOUS_USER.name -def test_create_user_and_add_membership_admin_bulk_mode_unauthorised(session, auth_mock, - keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +def test_create_user_and_add_membership_admin_bulk_mode_unauthorised( + session, auth_mock, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that bulk operation cannot be performed by unauthorised users.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() @@ -358,26 +379,28 @@ def test_create_user_and_add_membership_admin_bulk_mode_unauthorised(session, au assert excinfo.value.code == 403 -def test_create_user_and_add_membership_admin_bulk_mode_multiple(session, auth_mock, - keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +def test_create_user_and_add_membership_admin_bulk_mode_multiple( + session, auth_mock, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that an admin can add a group of members.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) - membership = [TestAnonymousMembership.generate_random_user(USER), - TestAnonymousMembership.generate_random_user(COORDINATOR)] + membership = [ + TestAnonymousMembership.generate_random_user(USER), + TestAnonymousMembership.generate_random_user(COORDINATOR), + ] patch_token_info(claims, monkeypatch) users = UserService.create_user_and_add_membership(membership, org.id) - assert len(users['users']) == 2 - assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] - assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name - assert users['users'][1]['username'] == IdpHint.BCROS.value + '/' + membership[1]['username'] - assert users['users'][1]['type'] == 
Role.ANONYMOUS_USER.name + assert len(users["users"]) == 2 + assert users["users"][0]["username"] == IdpHint.BCROS.value + "/" + membership[0]["username"] + assert users["users"][0]["type"] == Role.ANONYMOUS_USER.name + assert users["users"][1]["username"] == IdpHint.BCROS.value + "/" + membership[1]["username"] + assert users["users"][1]["type"] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) @@ -385,8 +408,9 @@ def test_create_user_and_add_membership_admin_bulk_mode_multiple(session, auth_m assert len(members) == 3 -def test_create_user_and_add_membership_member_error_skip_auth_mode(session, auth_mock, - keycloak_mock): # pylint:disable=unused-argument +def test_create_user_and_add_membership_member_error_skip_auth_mode( + session, auth_mock, keycloak_mock +): # pylint:disable=unused-argument """Assert that an member cannot be added as anonymous in single_mode mode.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(USER)] @@ -395,12 +419,15 @@ def test_create_user_and_add_membership_member_error_skip_auth_mode(session, aut assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name -def test_create_user_and_add_membership_multiple_error_skip_auth_mode(session, auth_mock, keycloak_mock, - monkeypatch): # pylint:disable=unused-argument +def test_create_user_and_add_membership_multiple_error_skip_auth_mode( + session, auth_mock, keycloak_mock, monkeypatch +): # pylint:disable=unused-argument """Assert that multiple user cannot be created in single_mode mode.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) - membership = [TestAnonymousMembership.generate_random_user(USER), - TestAnonymousMembership.generate_random_user(COORDINATOR)] + membership = [ + TestAnonymousMembership.generate_random_user(USER), + TestAnonymousMembership.generate_random_user(COORDINATOR), + ] with pytest.raises(BusinessException) as exception: 
patch_token_info(TestJwtClaims.public_user_role, monkeypatch) UserService.create_user_and_add_membership(membership, org.id, single_mode=True) @@ -409,7 +436,7 @@ def test_create_user_and_add_membership_multiple_error_skip_auth_mode(session, a def test_user_save_by_token_fail(session, monkeypatch): # pylint: disable=unused-argument """Assert that a user cannot not be created.""" - with patch.object(UserModel, 'create_from_jwt_token', return_value=None): + with patch.object(UserModel, "create_from_jwt_token", return_value=None): patch_token_info(TestJwtClaims.user_test, monkeypatch) user = UserService.save_from_jwt_token() assert user is None @@ -418,16 +445,16 @@ def test_user_save_by_token_fail(session, monkeypatch): # pylint: disable=unuse def test_add_contact_to_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact can be added to a user.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] - user_with_token['idp_userid'] = TestJwtClaims.user_test['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.user_test["sub"] + user_with_token["idp_userid"] = TestJwtClaims.user_test["idp_userid"] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) contact = UserService.add_contact(TestContactInfo.contact1).as_dict() - assert contact['email'] == TestContactInfo.contact1['email'] - assert contact['phone'] == TestContactInfo.contact1['phone'] - assert contact['phone_extension'] == TestContactInfo.contact1['phoneExtension'] + assert contact["email"] == TestContactInfo.contact1["email"] + assert contact["phone"] == TestContactInfo.contact1["phone"] + assert contact["phone_extension"] == TestContactInfo.contact1["phoneExtension"] def test_add_contact_user_no_user(session, monkeypatch): # pylint: disable=unused-argument @@ -441,8 +468,8 @@ def test_add_contact_user_no_user(session, monkeypatch): # pylint: disable=unus def 
test_add_contact_to_user_already_exists(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact cannot be added to a user that already has a contact.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] - user_with_token['idp_userid'] = TestJwtClaims.user_test['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.user_test["sub"] + user_with_token["idp_userid"] = TestJwtClaims.user_test["idp_userid"] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) @@ -456,8 +483,8 @@ def test_add_contact_to_user_already_exists(session, monkeypatch): # pylint: di def test_update_contact_for_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact can be updated for a user.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] - user_with_token['idp_userid'] = TestJwtClaims.user_test['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.user_test["sub"] + user_with_token["idp_userid"] = TestJwtClaims.user_test["idp_userid"] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) @@ -468,7 +495,7 @@ def test_update_contact_for_user(session, monkeypatch): # pylint: disable=unuse updated_contact = UserService.update_contact(TestContactInfo.contact2).as_dict() assert updated_contact is not None - assert updated_contact['email'] == TestContactInfo.contact2['email'] + assert updated_contact["email"] == TestContactInfo.contact2["email"] def test_update_terms_of_use_for_user(session, monkeypatch): # pylint: disable=unused-argument @@ -478,7 +505,7 @@ def test_update_terms_of_use_for_user(session, monkeypatch): # pylint: disable= updated_user = UserService.update_terms_of_use(True, 1) dictionary = updated_user.as_dict() - assert dictionary['user_terms']['isTermsOfUseAccepted'] is True + assert 
dictionary["user_terms"]["isTermsOfUseAccepted"] is True def test_terms_of_service_prev_version(session, monkeypatch): # pylint: disable=unused-argument @@ -489,22 +516,22 @@ def test_terms_of_service_prev_version(session, monkeypatch): # pylint: disable # update TOS with old version updated_user = UserService.update_terms_of_use(True, 1) dictionary = updated_user.as_dict() - assert dictionary['user_terms']['isTermsOfUseAccepted'] is True + assert dictionary["user_terms"]["isTermsOfUseAccepted"] is True # accepted version from previous step was old.so comparison should return false updated_user = UserService.save_from_jwt_token() dictionary = updated_user.as_dict() - assert dictionary['user_terms']['isTermsOfUseAccepted'] is False + assert dictionary["user_terms"]["isTermsOfUseAccepted"] is False # update TOS with latest version updated_user = UserService.update_terms_of_use(True, get_tos_latest_version()) dictionary = updated_user.as_dict() - assert dictionary['user_terms']['isTermsOfUseAccepted'] is True + assert dictionary["user_terms"]["isTermsOfUseAccepted"] is True # accepted version from previous step is latest.so comparison should return true updated_user = UserService.save_from_jwt_token() dictionary = updated_user.as_dict() - assert dictionary['user_terms']['isTermsOfUseAccepted'] is True + assert dictionary["user_terms"]["isTermsOfUseAccepted"] is True def test_update_contact_for_user_no_user(session, monkeypatch): # pylint: disable=unused-argument @@ -525,12 +552,12 @@ def test_update_contact_for_user_no_contact(session, monkeypatch): # pylint: di assert exception.value.code == Error.DATA_NOT_FOUND.name -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_delete_contact_for_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact can be deleted for a 
user.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] - user_with_token['idp_userid'] = TestJwtClaims.user_test['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.user_test["sub"] + user_with_token["idp_userid"] = TestJwtClaims.user_test["idp_userid"] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) @@ -543,7 +570,7 @@ def test_delete_contact_for_user(session, monkeypatch): # pylint: disable=unuse assert deleted_contact is not None contacts = UserService.get_contacts() - assert contacts.get('contacts') == [] + assert contacts.get("contacts") == [] def test_delete_contact_for_user_no_user(session, monkeypatch): # pylint: disable=unused-argument @@ -570,7 +597,7 @@ def test_find_users(session): # pylint: disable=unused-argument factory_user_model(user_info=TestUserInfo.user2) - users = UserService.find_users(last_name='User') + users = UserService.find_users(last_name="User") assert users is not None assert len(users) == 2 @@ -578,8 +605,8 @@ def test_find_users(session): # pylint: disable=unused-argument def test_user_find_by_token(session, monkeypatch): # pylint: disable=unused-argument """Assert that a user can be found by token.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] - user_with_token['idp_userid'] = TestJwtClaims.user_test['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.user_test["sub"] + user_with_token["idp_userid"] = TestJwtClaims.user_test["idp_userid"] factory_user_model(user_info=user_with_token) found_user = UserService.find_by_jwt_token() @@ -591,15 +618,15 @@ def test_user_find_by_token(session, monkeypatch): # pylint: disable=unused-arg found_user = UserService.find_by_jwt_token() assert found_user is not None dictionary = found_user.as_dict() - assert dictionary['username'] == TestJwtClaims.user_test['preferred_username'] - assert 
dictionary['keycloak_guid'] == TestJwtClaims.user_test['sub'] - assert dictionary['user_terms']['isTermsOfUseAccepted'] is False + assert dictionary["username"] == TestJwtClaims.user_test["preferred_username"] + assert dictionary["keycloak_guid"] == TestJwtClaims.user_test["sub"] + assert dictionary["user_terms"]["isTermsOfUseAccepted"] is False # User accepted latest version terms and conditions should return True UserService.update_terms_of_use(True, get_tos_latest_version()) found_user = UserService.find_by_jwt_token() dictionary = found_user.as_dict() - assert dictionary['user_terms']['isTermsOfUseAccepted'] is True + assert dictionary["user_terms"]["isTermsOfUseAccepted"] is True def test_user_find_by_username(session): # pylint: disable=unused-argument @@ -610,15 +637,15 @@ def test_user_find_by_username(session): # pylint: disable=unused-argument user = UserService.find_by_username(None) assert user is None - user = UserService.find_by_username(TestUserInfo.user1['username']) + user = UserService.find_by_username(TestUserInfo.user1["username"]) assert user is not None dictionary = user.as_dict() - assert dictionary['username'] == TestUserInfo.user1['username'] + assert dictionary["username"] == TestUserInfo.user1["username"] def test_user_find_by_username_no_model_object(session): # pylint: disable=unused-argument """Assert that the business can't be found with no model.""" - username = TestUserInfo.user_test['username'] + username = TestUserInfo.user_test["username"] user = UserService.find_by_username(username) @@ -630,17 +657,17 @@ def test_user_find_by_username_missing_username(session): # pylint: disable=unu user_model = factory_user_model(user_info=TestUserInfo.user_test) user = UserService(user_model) - user = UserService.find_by_username('foo') + user = UserService.find_by_username("foo") assert user is None -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) 
+@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_delete_contact_user_link(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a contact can not be deleted if contact link exists.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user_model = factory_user_model(user_info=user_with_token) user = UserService(user_model) @@ -648,7 +675,7 @@ def test_delete_contact_user_link(session, auth_mock, keycloak_mock, monkeypatch org = OrgService.create_org(TestOrgInfo.org1, user_id=user.identifier) org_dictionary = org.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] contact = factory_contact_model() @@ -657,7 +684,7 @@ def test_delete_contact_user_link(session, auth_mock, keycloak_mock, monkeypatch contact_link.user = user_model contact_link.org = org._model # pylint:disable=protected-access contact_link = contact_link.flush() - contact_link.commit() + contact_link.save() deleted_contact = UserService.delete_contact() @@ -670,18 +697,18 @@ def test_delete_contact_user_link(session, auth_mock, keycloak_mock, monkeypatch assert exist_contact_link -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_delete_user(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a user can be deleted.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] - user_with_token['idp_userid'] = 
TestJwtClaims.user_test['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.user_test["sub"] + user_with_token["idp_userid"] = TestJwtClaims.user_test["idp_userid"] user_model = factory_user_model(user_info=user_with_token) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model - contact_link.commit() + contact_link.save() patch_token_info(TestJwtClaims.user_test, monkeypatch) @@ -693,13 +720,14 @@ def test_delete_user(session, auth_mock, keycloak_mock, monkeypatch): # pylint: user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: - assert org.status_code == 'INACTIVE' + assert org.status_code == "INACTIVE" -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -@pytest.mark.parametrize('environment', ['test', None]) -def test_delete_user_where_org_has_affiliations(session, auth_mock, keycloak_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +@pytest.mark.parametrize("environment", ["test", None]) +def test_delete_user_where_org_has_affiliations( + session, auth_mock, keycloak_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Assert that a user can be deleted.""" user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() @@ -707,11 +735,11 @@ def test_delete_user_where_org_has_affiliations(session, auth_mock, keycloak_moc contact_link.contact = contact contact_link.user = user_model contact_link = contact_link.flush() - contact_link.commit() + contact_link.save() patch_token_info(TestJwtClaims.user_test, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id).as_dict() - org_id = org['id'] + org_id = org["id"] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) @@ 
-727,13 +755,14 @@ def test_delete_user_where_org_has_affiliations(session, auth_mock, keycloak_moc user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: - assert org.status_code == 'ACTIVE' + assert org.status_code == "ACTIVE" -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_user_where_user_is_member_on_org(session, auth_mock, keycloak_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_user_where_user_is_member_on_org( + session, auth_mock, keycloak_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Assert that a user can be deleted.""" # Create a user and org user_model = factory_user_model(user_info=TestUserInfo.user_test) @@ -741,13 +770,14 @@ def test_delete_user_where_user_is_member_on_org(session, auth_mock, keycloak_mo contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model - contact_link.commit() + contact_link.save() - patch_token_info(TestJwtClaims.get_test_user(user_model.keycloak_guid, - idp_userid=user_model.idp_userid), monkeypatch) + patch_token_info( + TestJwtClaims.get_test_user(user_model.keycloak_guid, idp_userid=user_model.idp_userid), monkeypatch + ) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) org_dictionary = org.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id, environment=environment) @@ -759,14 +789,16 @@ def test_delete_user_where_user_is_member_on_org(session, auth_mock, keycloak_mo contact_link = ContactLinkModel() contact_link.contact 
= contact contact_link.user = user_model2 - contact_link.commit() + contact_link.save() - membership = MembershipModel(org_id=org_id, user_id=user_model2.id, membership_type_code='USER', - membership_type_status=Status.ACTIVE.value) + membership = MembershipModel( + org_id=org_id, user_id=user_model2.id, membership_type_code="USER", membership_type_status=Status.ACTIVE.value + ) membership.save() - patch_token_info(TestJwtClaims.get_test_user(user_model2.keycloak_guid, - idp_userid=user_model2.idp_userid), monkeypatch) + patch_token_info( + TestJwtClaims.get_test_user(user_model2.keycloak_guid, idp_userid=user_model2.idp_userid), monkeypatch + ) UserService.delete_user() updated_user = UserModel.find_by_jwt_token() @@ -774,13 +806,14 @@ def test_delete_user_where_user_is_member_on_org(session, auth_mock, keycloak_mo user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: - assert org.status_code == 'INACTIVE' + assert org.status_code == "INACTIVE" -@pytest.mark.parametrize('environment', ['test', None]) -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) -def test_delete_user_where_org_has_another_owner(session, auth_mock, keycloak_mock, - monkeypatch, environment): # pylint:disable=unused-argument +@pytest.mark.parametrize("environment", ["test", None]) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) +def test_delete_user_where_org_has_another_owner( + session, auth_mock, keycloak_mock, monkeypatch, environment +): # pylint:disable=unused-argument """Assert that a user can be deleted.""" # Create a user and org user_model = factory_user_model(user_info=TestUserInfo.user_test) @@ -788,13 +821,14 @@ def test_delete_user_where_org_has_another_owner(session, auth_mock, keycloak_mo contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model - contact_link.commit() + contact_link.save() - 
patch_token_info(TestJwtClaims.get_test_user(user_model.keycloak_guid, - idp_userid=user_model.idp_userid), monkeypatch) + patch_token_info( + TestJwtClaims.get_test_user(user_model.keycloak_guid, idp_userid=user_model.idp_userid), monkeypatch + ) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) org_dictionary = org.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id, environment=environment) @@ -806,16 +840,18 @@ def test_delete_user_where_org_has_another_owner(session, auth_mock, keycloak_mo contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model2 - contact_link.commit() + contact_link.save() - membership = MembershipModel(org_id=org_id, user_id=user_model2.id, membership_type_code='ADMIN', - membership_type_status=Status.ACTIVE.value) + membership = MembershipModel( + org_id=org_id, user_id=user_model2.id, membership_type_code="ADMIN", membership_type_status=Status.ACTIVE.value + ) membership.save() membership.commit() # with pytest.raises(BusinessException) as exception: - patch_token_info(TestJwtClaims.get_test_user(user_model2.keycloak_guid, - idp_userid=user_model2.idp_userid), monkeypatch) + patch_token_info( + TestJwtClaims.get_test_user(user_model2.keycloak_guid, idp_userid=user_model2.idp_userid), monkeypatch + ) UserService.delete_user() updated_user = UserModel.find_by_jwt_token() @@ -823,4 +859,4 @@ def test_delete_user_where_org_has_another_owner(session, auth_mock, keycloak_mo user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: - assert org.status_code == 'INACTIVE' + assert org.status_code == "INACTIVE" diff --git a/auth-api/tests/unit/services/test_user_settings.py b/auth-api/tests/unit/services/test_user_settings.py index 7a023fc70e..944038f3f2 100644 --- 
a/auth-api/tests/unit/services/test_user_settings.py +++ b/auth-api/tests/unit/services/test_user_settings.py @@ -17,36 +17,37 @@ Test-Suite to ensure that the User Service is working as expected. """ -import mock +from unittest import mock + from auth_api.services import Org as OrgService from auth_api.services import User as UserService from auth_api.services import UserSettings as UserSettingsService +from tests.conftest import mock_token from tests.utilities.factory_scenarios import TestJwtClaims, TestOrgInfo, TestUserInfo from tests.utilities.factory_utils import factory_user_model, patch_token_info -from tests.conftest import mock_token -@mock.patch('auth_api.services.affiliation_invitation.RestService.get_service_account_token', mock_token) +@mock.patch("auth_api.services.affiliation_invitation.RestService.get_service_account_token", mock_token) def test_user_settings(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a contact can not be deleted if contact link exists.""" user_with_token = TestUserInfo.user_test - user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] - user_with_token['idp_userid'] = TestJwtClaims.public_user_role['idp_userid'] + user_with_token["keycloak_guid"] = TestJwtClaims.public_user_role["sub"] + user_with_token["idp_userid"] = TestJwtClaims.public_user_role["idp_userid"] user_model = factory_user_model(user_info=user_with_token) user = UserService(user_model) patch_token_info(TestJwtClaims.public_user_role, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.identifier) org_dictionary = org.as_dict() - org_id = org_dictionary['id'] + org_id = org_dictionary["id"] usersettings = UserSettingsService.fetch_user_settings(user.identifier) assert usersettings is not None - org = [x for x in usersettings if x.type == 'ACCOUNT'] + org = [x for x in usersettings if x.type == "ACCOUNT"] assert len(usersettings) == 3 - assert org[0].label == 
TestOrgInfo.org1['name'] + assert org[0].label == TestOrgInfo.org1["name"] assert org[0].id == org_id - assert org[0].additional_label == '', 'no additional label' + assert org[0].additional_label == "", "no additional label" # add an org with branch name and assert additonal label org = OrgService.create_org(TestOrgInfo.org_branch_name, user_id=user.identifier) @@ -54,5 +55,5 @@ def test_user_settings(session, auth_mock, keycloak_mock, monkeypatch): # pylin usersettings = UserSettingsService.fetch_user_settings(user.identifier) assert len(usersettings) == 4 - org = [x for x in usersettings if x.type == 'ACCOUNT' and x.label == org_with_branch_dictionary.get('name')] - assert org[0].additional_label == org_with_branch_dictionary.get('branch_name'), 'additional label matches' + org = [x for x in usersettings if x.type == "ACCOUNT" and x.label == org_with_branch_dictionary.get("name")] + assert org[0].additional_label == org_with_branch_dictionary.get("branch_name"), "additional label matches" diff --git a/auth-api/tests/unit/utils/test_logging.py b/auth-api/tests/unit/utils/test_logging.py deleted file mode 100644 index a3304d53ba..0000000000 --- a/auth-api/tests/unit/utils/test_logging.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tests to assure the logging utilities. - -Test-Suite to ensure that the logging setup is working as expected. 
-""" - -import os - -from auth_api.utils.util_logging import setup_logging - - -def test_logging_with_file(capsys): - """Assert that logging is setup with the configuration file.""" - file_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.conf') - setup_logging(file_path) # important to do this first - - captured = capsys.readouterr() - - assert captured.out.startswith('Configure logging, from conf') - - -def test_logging_with_missing_file(capsys): - """Assert that a message is sent to STDERR when the configuration doesn't exist.""" - file_path = None - setup_logging(file_path) # important to do this first - - captured = capsys.readouterr() - - assert captured.err.startswith('Unable to configure logging') diff --git a/auth-api/tests/unit/utils/test_passcode.py b/auth-api/tests/unit/utils/test_passcode.py index bf37deb118..2bcd6f6897 100644 --- a/auth-api/tests/unit/utils/test_passcode.py +++ b/auth-api/tests/unit/utils/test_passcode.py @@ -21,7 +21,7 @@ def test_passcode_hash(): """Assert that passcode can be hashed.""" - pass_code: str = '111111111' + pass_code: str = "111111111" hashed_pass_code: str = passcode.passcode_hash(pass_code) assert hashed_pass_code @@ -35,7 +35,7 @@ def test_passcode_hash_fail(): def test_passcode_hash_different(): """Assert that the same passcode get different hash value by multiple running.""" - pass_code: str = '111111111' + pass_code: str = "111111111" hashed_pass_code: str = passcode.passcode_hash(pass_code) hashed_pass_code2: str = passcode.passcode_hash(pass_code) assert hashed_pass_code != hashed_pass_code2 @@ -43,16 +43,16 @@ def test_passcode_hash_different(): def test_validate_passcode(): """Assert that passcode can be validate.""" - pass_code: str = '111111111' + pass_code: str = "111111111" hashed_pass_code: str = passcode.passcode_hash(pass_code) - checked_pass_code: str = '111111111' + checked_pass_code: str = "111111111" validated: bool = passcode.validate_passcode(checked_pass_code, 
hashed_pass_code) assert validated def test_validate_passcode_empty_input(): """Assert that passcode can be validate.""" - pass_code: str = '111111111' + pass_code: str = "111111111" hashed_pass_code: str = passcode.passcode_hash(pass_code) checked_pass_code: str = None validated: bool = passcode.validate_passcode(checked_pass_code, hashed_pass_code) @@ -61,8 +61,8 @@ def test_validate_passcode_empty_input(): def test_validate_passcode_fail(): """Assert that passcode can be validate.""" - pass_code: str = '111111111' + pass_code: str = "111111111" hashed_pass_code: str = passcode.passcode_hash(pass_code) - checked_pass_code: str = '222222222' + checked_pass_code: str = "222222222" validated: bool = passcode.validate_passcode(checked_pass_code, hashed_pass_code) assert not validated diff --git a/auth-api/tests/unit/utils/test_role_validator.py b/auth-api/tests/unit/utils/test_role_validator.py index 2dde97c0cf..743debe058 100644 --- a/auth-api/tests/unit/utils/test_role_validator.py +++ b/auth-api/tests/unit/utils/test_role_validator.py @@ -17,27 +17,25 @@ import pytest -from auth_api.auth import jwt as _jwt +from auth_api.utils.auth import jwt as _jwt from auth_api.utils.role_validator import validate_roles -@patch.object(_jwt, 'requires_auth', side_effect=lambda f: f) -@pytest.mark.parametrize('allowed_roles,not_allowed_roles', [ - (['role1'], ['']), # just one allowed role - (['role1', 'role2'], ['']), # one more additional role - (['role1'], ['role3']), # one not allowed role which user doesnt have -]) +@patch.object(_jwt, "requires_auth", side_effect=lambda f: f) +@pytest.mark.parametrize( + "allowed_roles,not_allowed_roles", + [ + (["role1"], [""]), # just one allowed role + (["role1", "role2"], [""]), # one more additional role + (["role1"], ["role3"]), # one not allowed role which user doesnt have + ], +) def test_validate_roles_valid(jwt, monkeypatch, allowed_roles, not_allowed_roles): """Assert that valid roles yields proper response.""" token = { - 
'realm_access': { - 'roles': [ - 'role1' - ] - }, + "realm_access": {"roles": ["role1"]}, } - monkeypatch.setattr('auth_api.utils.role_validator._get_token_info', - lambda: token) + monkeypatch.setattr("auth_api.utils.role_validator._get_token_info", lambda: token) @validate_roles(allowed_roles=allowed_roles, not_allowed_roles=not_allowed_roles) def decorated(x): @@ -46,23 +44,19 @@ def decorated(x): assert decorated(1) == 1 -@patch.object(_jwt, 'requires_auth', side_effect=lambda f: f) -@pytest.mark.parametrize('allowed_roles,not_allowed_roles', [ - ([''], ['']), # no allowed role - (['role1'], ['role1']), # allowed role ;but not_allowed_roles prohibits access - ([''], ['role1']), # not_allowed_roles prohibits access -]) +@patch.object(_jwt, "requires_auth", side_effect=lambda f: f) +@pytest.mark.parametrize( + "allowed_roles,not_allowed_roles", + [ + ([""], [""]), # no allowed role + (["role1"], ["role1"]), # allowed role ;but not_allowed_roles prohibits access + ([""], ["role1"]), # not_allowed_roles prohibits access + ], +) def test_validate_roles_invalid(jwt, monkeypatch, allowed_roles, not_allowed_roles): """Assert that invalid roles yields error response.""" - token = { - 'realm_access': { - 'roles': [ - 'role1' - ] - } - } - monkeypatch.setattr('auth_api.utils.role_validator._get_token_info', - lambda: token) + token = {"realm_access": {"roles": ["role1"]}} + monkeypatch.setattr("auth_api.utils.role_validator._get_token_info", lambda: token) @validate_roles(allowed_roles=allowed_roles, not_allowed_roles=not_allowed_roles) def decorated(x): diff --git a/auth-api/tests/unit/utils/test_util.py b/auth-api/tests/unit/utils/test_util.py index a5cd0c55ae..3d2cd0e8c3 100644 --- a/auth-api/tests/unit/utils/test_util.py +++ b/auth-api/tests/unit/utils/test_util.py @@ -23,32 +23,28 @@ from auth_api.utils.util import camelback2snake, escape_wam_friendly_url, snake2camelback +TEST_CAMEL_DATA = {"loginSource": "PASSCODE", "userName": "test name", "realmAccess": {"roles": 
["basic"]}} -TEST_CAMEL_DATA = {'loginSource': 'PASSCODE', 'userName': 'test name', 'realmAccess': { - 'roles': ['basic'] -}} - -TEST_SNAKE_DATA = {'login_source': 'PASSCODE', 'user_name': 'test name', 'realm_access': { - 'roles': ['basic'] -}} +TEST_SNAKE_DATA = {"login_source": "PASSCODE", "user_name": "test name", "realm_access": {"roles": ["basic"]}} def test_camelback2snake(): """Assert that the options methos is added to the class and that the correct access controls are set.""" snake = camelback2snake(TEST_CAMEL_DATA) - assert snake['login_source'] == TEST_SNAKE_DATA['login_source'] + assert snake["login_source"] == TEST_SNAKE_DATA["login_source"] def test_snake2camelback(): """Assert that the options methos is added to the class and that the correct access controls are set.""" camel = snake2camelback(TEST_SNAKE_DATA) - assert camel['loginSource'] == TEST_CAMEL_DATA['loginSource'] + assert camel["loginSource"] == TEST_CAMEL_DATA["loginSource"] -@pytest.mark.parametrize('test_input,expected', [('foo', 'Zm9v'), ('foo-bar', 'Zm9vLWJhcg%3D%3D'), - ('foo bar.....', 'Zm9vIGJhci4uLi4u')]) +@pytest.mark.parametrize( + "test_input,expected", [("foo", "Zm9v"), ("foo-bar", "Zm9vLWJhcg%3D%3D"), ("foo bar.....", "Zm9vIGJhci4uLi4u")] +) def test_escape_wam_friendly_url_multiple(test_input, expected): """Assert manually calculated url encodings.""" assert escape_wam_friendly_url(test_input) == expected @@ -56,8 +52,8 @@ def test_escape_wam_friendly_url_multiple(test_input, expected): def test_escape_wam_friendly_url(): """Assert conversion back yields same string.""" - org_name = 'foo-bar helo ..' + org_name = "foo-bar helo .." 
org_name_encoded = escape_wam_friendly_url(org_name) param1 = unquote(org_name_encoded) - org_name_actual = base64.b64decode(bytes(param1, encoding='utf-8')).decode('utf-8') + org_name_actual = base64.b64decode(bytes(param1, encoding="utf-8")).decode("utf-8") assert org_name_actual == org_name diff --git a/auth-api/tests/utilities/decorators.py b/auth-api/tests/utilities/decorators.py index 88deec93f1..4cc9409052 100644 --- a/auth-api/tests/utilities/decorators.py +++ b/auth-api/tests/utilities/decorators.py @@ -17,8 +17,7 @@ import pytest from dotenv import find_dotenv, load_dotenv - # this will load all the envars from a .env file located in the project root (api) load_dotenv(find_dotenv()) -skip_in_pod = pytest.mark.skipif(os.getenv('POD_TESTING', False), reason='Skip test when running in pod') +skip_in_pod = pytest.mark.skipif(os.getenv("POD_TESTING", False), reason="Skip test when running in pod") diff --git a/auth-api/tests/utilities/factory_scenarios.py b/auth-api/tests/utilities/factory_scenarios.py index 1930f79389..9ab58900a8 100644 --- a/auth-api/tests/utilities/factory_scenarios.py +++ b/auth-api/tests/utilities/factory_scenarios.py @@ -28,936 +28,786 @@ fake = Faker() -CONFIG = get_named_config('testing') +CONFIG = get_named_config("testing") -JWT_HEADER = { - 'alg': CONFIG.JWT_OIDC_TEST_ALGORITHMS, - 'typ': 'JWT', - 'kid': CONFIG.JWT_OIDC_TEST_AUDIENCE -} +JWT_HEADER = {"alg": CONFIG.JWT_OIDC_TEST_ALGORITHMS, "typ": "JWT", "kid": CONFIG.JWT_OIDC_TEST_AUDIENCE} class TestJwtClaims(dict, Enum): """Test scenarios of jwt claims.""" + __test__ = False + no_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302065', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302065', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302065", + "idp_userid": 
"f7a4a1d3-73a8-4cbc-a40f-bb1145302065", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": []}, } invalid = { - 'sub': 'barfoo', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), + "sub": "barfoo", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), } public_user_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'public_user' - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["public_user"]}, } public_account_holder_user = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'public_user', - 'account_holder' - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["public_user", "account_holder"]}, } gov_account_holder_user = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 
'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'public_user', - 'account_holder', - 'gov_account_user' - ] - }, - 'loginSource': LoginSource.IDIR.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["public_user", "account_holder", "gov_account_user"]}, + "loginSource": LoginSource.IDIR.value, } public_bceid_user = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'public_user', - 'edit' - ] - }, - 'email': 'test@test.com', - 'loginSource': LoginSource.BCEID.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["public_user", "edit"]}, + "email": "test@test.com", + "loginSource": LoginSource.BCEID.value, } public_bceid_account_holder_user = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'public_user', - 'edit', - 'account_holder' - ] - }, - 'email': 'test@test.com', - 'loginSource': LoginSource.BCEID.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), 
+ "preferred_username": fake.user_name(), + "realm_access": {"roles": ["public_user", "edit", "account_holder"]}, + "email": "test@test.com", + "loginSource": LoginSource.BCEID.value, } edit_user_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'edit' - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["edit"]}, } edit_role_2 = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302075', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302075', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'edit' - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302075", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302075", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["edit"]}, } view_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'view' - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": 
{"roles": ["view"]}, } staff_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'staff', - 'edit' - ] - }, - 'loginSource': LoginSource.STAFF.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["staff", "edit"]}, + "loginSource": LoginSource.STAFF.value, } staff_manage_business = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'staff', - 'edit', - 'manage_business' - ] - }, - 'loginSource': LoginSource.STAFF.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["staff", "edit", "manage_business"]}, + "loginSource": LoginSource.STAFF.value, } staff_view_accounts_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'staff', - 'view_accounts' - ] - }, - 'loginSource': LoginSource.STAFF.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": 
"f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["staff", "view_accounts"]}, + "loginSource": LoginSource.STAFF.value, } staff_manage_accounts_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'staff', - 'view_accounts', - 'manage_accounts' - ] - }, - 'loginSource': LoginSource.STAFF.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["staff", "view_accounts", "manage_accounts"]}, + "loginSource": LoginSource.STAFF.value, } staff_admin_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'staff', - 'view_accounts', - 'create_accounts' - ] - }, - 'roles': [ - 'staff', 'edit', 'create_accounts' - ], - 'loginSource': LoginSource.STAFF.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["staff", "view_accounts", "create_accounts"]}, + "roles": ["staff", "edit", "create_accounts"], + "loginSource": LoginSource.STAFF.value, } manage_eft_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 
'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'staff', - 'view_accounts', - 'manage_eft' - ] - }, - 'loginSource': LoginSource.STAFF.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["staff", "view_accounts", "manage_eft"]}, + "loginSource": LoginSource.STAFF.value, } staff_admin_dir_search_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'staff', - 'create_accounts', - 'view_accounts', - 'edit' - ] - }, - 'roles': [ - 'staff', - 'create_accounts' - ], - 'loginSource': LoginSource.STAFF.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["staff", "create_accounts", "view_accounts", "edit"]}, + "roles": ["staff", "create_accounts"], + "loginSource": LoginSource.STAFF.value, } bcol_admin_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'staff', - 'manage_accounts', - 'view_accounts', - 'suspend_accounts' - ] - }, - 'roles': [ - 
'staff', - 'manage_accounts', - 'view_accounts' - ] + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["staff", "manage_accounts", "view_accounts", "suspend_accounts"]}, + "roles": ["staff", "manage_accounts", "view_accounts"], } system_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'system' - ] - }, - 'product_code': ProductCode.BUSINESS.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["system"]}, + "product_code": ProductCode.BUSINESS.value, } system_admin_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - 'system', - 'staff', - 'create_accounts' - ] - }, - 'product_code': 'ALL' + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": ["system", "staff", "create_accounts"]}, + "product_code": "ALL", } passcode = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 
'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'username': 'CP1234567', - 'realm_access': { - 'roles': [ - 'system' - ] - }, - 'roles': [ - 'staff', - 'system' - ], - 'loginSource': 'PASSCODE', - 'product_code': ProductCode.BUSINESS.value + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "username": "CP1234567", + "realm_access": {"roles": ["system"]}, + "roles": ["staff", "system"], + "loginSource": "PASSCODE", + "product_code": ProductCode.BUSINESS.value, } updated_test = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'realm_access': { - 'roles': [ - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "realm_access": {"roles": []}, } user_test = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': '1b20db59-19a0-4727-affe-c6f64309fd04', - 'idp_userid': '1b20db59-19a0-4727-affe-c6f64309fd04', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': 'CP1234567', - 'username': 'CP1234567', - 'realm_access': { - 'roles': [ - 'edit', 'uma_authorization', 'staff' - ] - }, - 'loginSource': 'PASSCODE' + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "1b20db59-19a0-4727-affe-c6f64309fd04", + "idp_userid": "1b20db59-19a0-4727-affe-c6f64309fd04", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + 
"preferred_username": "CP1234567", + "username": "CP1234567", + "realm_access": {"roles": ["edit", "uma_authorization", "staff"]}, + "loginSource": "PASSCODE", } tester_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': fake.user_name(), - 'loginSource': 'BCSC', - 'realm_access': { - 'roles': [ - 'tester' - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "loginSource": "BCSC", + "realm_access": {"roles": ["tester"]}, } anonymous_bcros_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302069', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302069', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': f'{IdpHint.BCROS.value}/{fake.user_name()}', - 'accessType': 'ANONYMOUS', - 'loginSource': 'BCROS', - 'realm_access': { - 'roles': [ - 'edit', - 'anonymous_user', - 'public_user' - ] - }, - 'product_code': 'DIR_SEARCH' + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302069", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302069", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": f"{IdpHint.BCROS.value}/{fake.user_name()}", + "accessType": "ANONYMOUS", + "loginSource": "BCROS", + "realm_access": {"roles": ["edit", "anonymous_user", "public_user"]}, + "product_code": "DIR_SEARCH", } tester_bceid_role = { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'idp_userid': 'f7a4a1d3-73a8-4cbc-a40f-bb1145302064', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': 
fake.user_name(), - 'user_name': fake.user_name(), - 'loginSource': 'BCEID', - 'realm_access': { - 'roles': [ - 'tester' - ] - } + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "idp_userid": "f7a4a1d3-73a8-4cbc-a40f-bb1145302064", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": fake.user_name(), + "user_name": fake.user_name(), + "loginSource": "BCEID", + "realm_access": {"roles": ["tester"]}, } @staticmethod - def get_test_real_user(sub, preferred_username=fake.user_name(), access_ype='', roles=[], idp_userid=None): + def get_test_real_user(sub, preferred_username=fake.user_name(), access_ype="", roles=[], idp_userid=None): """Produce a created user.""" return { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': str(sub), - 'idp_userid': idp_userid or str(sub), - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'accessType': access_ype, - 'preferred_username': preferred_username, - 'realm_access': { - 'roles': [ - 'edit', 'public_user', *roles - ] - }, - 'roles': [ - 'edit', 'public_user', *roles - ] + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": str(sub), + "idp_userid": idp_userid or str(sub), + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "accessType": access_ype, + "preferred_username": preferred_username, + "realm_access": {"roles": ["edit", "public_user", *roles]}, + "roles": ["edit", "public_user", *roles], } @staticmethod - def get_test_user(sub, source: str = 'PASSCODE', roles=['edit', 'staff', 'tester'], idp_userid=None): + def get_test_user(sub, source: str = "PASSCODE", roles=["edit", "staff", "tester"], idp_userid=None): """Return test user with subject from argument.""" return { - 'iss': CONFIG.JWT_OIDC_TEST_ISSUER, - 'sub': sub, - 'idp_userid': idp_userid or str(sub), - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'preferred_username': 'CP1234567', - 'username': 'CP1234567', - 'realm_access': { - 'roles': roles - 
}, - 'roles': roles, - 'loginSource': source + "iss": CONFIG.JWT_OIDC_TEST_ISSUER, + "sub": sub, + "idp_userid": idp_userid or str(sub), + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "preferred_username": "CP1234567", + "username": "CP1234567", + "realm_access": {"roles": roles}, + "roles": roles, + "loginSource": source, } class TestOrgTypeInfo(dict, Enum): """Test scenarios of org type.""" - test_type = {'code': 'TEST', 'desc': 'Test'} - implicit = {'code': 'BASIC', 'desc': 'BASIC'} + __test__ = False + + test_type = {"code": "TEST", "desc": "Test"} + implicit = {"code": "BASIC", "desc": "BASIC"} class TestPaymentTypeInfo(dict, Enum): """Test scenarios of payment type.""" - test_type = {'code': 'TEST', 'desc': 'Test'} + __test__ = False + + test_type = {"code": "TEST", "desc": "Test"} class TestPaymentMethodInfo(dict, Enum): """Test scenarios of payment type.""" + __test__ = False + @staticmethod def get_payment_method_input(payment_method: PaymentMethod = PaymentMethod.CREDIT_CARD): """Return payment info payload.""" - return {'paymentInfo': {'paymentMethod': payment_method.value}} + return {"paymentInfo": {"paymentMethod": payment_method.value}} @staticmethod def get_payment_method_input_with_revenue(payment_method: PaymentMethod = PaymentMethod.EJV): """Return payment info payload.""" revenue_account_details = { - 'client': '100', - 'projectCode': 1111111, - 'responsibilityCentre': '22222', - 'serviceLine': '1111111', - 'stob': '9000' + "client": "100", + "projectCode": 1111111, + "responsibilityCentre": "22222", + "serviceLine": "1111111", + "stob": "9000", } - return {'paymentInfo': {'paymentMethod': payment_method.value, 'revenueAccount': revenue_account_details}} + return {"paymentInfo": {"paymentMethod": payment_method.value, "revenueAccount": revenue_account_details}} class TestAnonymousMembership(dict, Enum): """Test scenarios of org status.""" + __test__ = False + @staticmethod def generate_random_user(membership: str): """Return 
user with keycloak guid.""" - return {'username': ''.join(choice(ascii_uppercase) for i in range(5)), 'password': 'firstuser', - 'membershipType': membership} + return { + "username": "".join(choice(ascii_uppercase) for i in range(5)), + "password": "firstuser", + "membershipType": membership, + } class TestOrgStatusInfo(dict, Enum): """Test scenarios of org status.""" - test_status = {'code': 'TEST', 'desc': 'Test'} + __test__ = False + + test_status = {"code": "TEST", "desc": "Test"} class TestOrgInfo(dict, Enum): """Test scenarios of org.""" - affiliation_from_org = {'name': 'Test Affiliation Invitation From Org'} - affiliation_to_org = {'name': 'Test Affiliation Invitation To Org'} - org1 = {'name': 'My Test Org'} - org_details = {'name': 'My test Org details'} - org_branch_name = {'name': 'Foo', 'branchName': 'Bar', } - - org_onlinebanking = {'name': 'My Test Org', 'paymentInfo': {'paymentMethod': 'ONLINE_BANKING'}} - org2 = {'name': 'My Test Updated Org'} - org3 = {'name': 'Third Orgs'} - org4 = {'name': 'fourth Orgs'} - org5 = {'name': 'fifth Orgs'} - org_anonymous = {'name': 'My Test Anon Org', 'accessType': 'ANONYMOUS'} - org_govm = {'name': 'My Test Anon Org', 'branchName': 'Bar', 'accessType': AccessType.GOVM.value} - org_anonymous_2 = {'name': 'Another test org', 'accessType': 'ANONYMOUS'} - org_premium = {'name': 'Another test org', 'typeCode': OrgType.PREMIUM.value} - invalid = {'foo': 'bar'} - invalid_name_space = {'name': ''} - invalid_name_spaces = {'name': ' '} - invalid_name_start_space = {'name': ' helo'} - invalid_name_end_space = {'name': ' helo '} - org_regular_bceid = { - 'name': 'My Test Org', - 'accessType': AccessType.REGULAR_BCEID.value - } - org_regular = { - 'name': 'My Test Org', - 'accessType': AccessType.REGULAR.value - } - - update_org_with_business_type = { - 'businessType': 'LAW', - 'businessSize': '2-5', - 'isBusinessAccount': True - } + __test__ = False + + affiliation_from_org = {"name": "Test Affiliation Invitation From Org"} 
+ affiliation_to_org = {"name": "Test Affiliation Invitation To Org"} + org1 = {"name": "My Test Org"} + org_details = {"name": "My test Org details"} + org_branch_name = { + "name": "Foo", + "branchName": "Bar", + } + + org_onlinebanking = {"name": "My Test Org", "paymentInfo": {"paymentMethod": "ONLINE_BANKING"}} + org2 = {"name": "My Test Updated Org"} + org3 = {"name": "Third Orgs"} + org4 = {"name": "fourth Orgs"} + org5 = {"name": "fifth Orgs"} + org_anonymous = {"name": "My Test Anon Org", "accessType": "ANONYMOUS"} + org_govm = {"name": "My Test Anon Org", "branchName": "Bar", "accessType": AccessType.GOVM.value} + org_anonymous_2 = {"name": "Another test org", "accessType": "ANONYMOUS"} + org_premium = {"name": "Another test org", "typeCode": OrgType.PREMIUM.value} + invalid = {"foo": "bar"} + invalid_name_space = {"name": ""} + invalid_name_spaces = {"name": " "} + invalid_name_start_space = {"name": " helo"} + invalid_name_end_space = {"name": " helo "} + org_regular_bceid = {"name": "My Test Org", "accessType": AccessType.REGULAR_BCEID.value} + org_regular = {"name": "My Test Org", "accessType": AccessType.REGULAR.value} + + update_org_with_business_type = {"businessType": "LAW", "businessSize": "2-5", "isBusinessAccount": True} org_with_products = { - 'name': 'My Test Org', - 'paymentInfo': { - 'paymentMethod': 'ONLINE_BANKING' - }, - 'productSubscriptions': [{'productCode': 'BUSINESS'}, {'productCode': 'VS'}] + "name": "My Test Org", + "paymentInfo": {"paymentMethod": "ONLINE_BANKING"}, + "productSubscriptions": [{"productCode": "BUSINESS"}, {"productCode": "VS"}], } org_with_all_info = { - 'name': 'My Test Org', - 'accessType': AccessType.REGULAR.value, - 'paymentInfo': { - 'paymentMethod': 'ONLINE_BANKING' - }, - 'productSubscriptions': [{'productCode': 'BUSINESS'}, {'productCode': 'VS'}], - 'businessType': 'LAW', - 'businessSize': '2-5', - 'isBusinessAccount': True + "name": "My Test Org", + "accessType": AccessType.REGULAR.value, + "paymentInfo": 
{"paymentMethod": "ONLINE_BANKING"}, + "productSubscriptions": [{"productCode": "BUSINESS"}, {"productCode": "VS"}], + "businessType": "LAW", + "businessSize": "2-5", + "isBusinessAccount": True, } update_org_with_all_info = { - 'accessType': AccessType.REGULAR.value, - 'paymentInfo': { - 'paymentMethod': 'ONLINE_BANKING' - }, - 'productSubscriptions': [{'productCode': 'BUSINESS'}, {'productCode': 'VS'}], - 'businessType': 'LAW', - 'businessSize': '2-5', - 'isBusinessAccount': True + "accessType": AccessType.REGULAR.value, + "paymentInfo": {"paymentMethod": "ONLINE_BANKING"}, + "productSubscriptions": [{"productCode": "BUSINESS"}, {"productCode": "VS"}], + "businessType": "LAW", + "businessSize": "2-5", + "isBusinessAccount": True, } bceid_org_with_all_info = { - 'name': 'My Test Org', - 'accessType': AccessType.REGULAR_BCEID.value, - 'paymentInfo': { - 'paymentMethod': 'ONLINE_BANKING' - }, - 'productSubscriptions': [{'productCode': 'BUSINESS'}, {'productCode': 'VS'}], - 'businessType': 'LAW', - 'businessSize': '2-5', - 'isBusinessAccount': True + "name": "My Test Org", + "accessType": AccessType.REGULAR_BCEID.value, + "paymentInfo": {"paymentMethod": "ONLINE_BANKING"}, + "productSubscriptions": [{"productCode": "BUSINESS"}, {"productCode": "VS"}], + "businessType": "LAW", + "businessSize": "2-5", + "isBusinessAccount": True, } - staff_org = {'name': 'My Test Org', 'typeCode': OrgType.STAFF.value} + staff_org = {"name": "My Test Org", "typeCode": OrgType.STAFF.value} - sbc_staff_org = {'name': 'My Test Org', 'typeCode': OrgType.SBC_STAFF.value} + sbc_staff_org = {"name": "My Test Org", "typeCode": OrgType.SBC_STAFF.value} @staticmethod def bcol_linked(): """Return org info for bcol linked info.""" return { - 'name': 'BC ONLINE TECHNICAL TEAM DEVL', - 'bcOnlineCredential': { - 'userId': 'test', - 'password': 'password' - }, - 'mailingAddress': TestOrgInfo.get_mailing_address(), - 'typeCode': OrgType.PREMIUM.value + "name": "BC ONLINE TECHNICAL TEAM DEVL", + 
"bcOnlineCredential": {"userId": "test", "password": "password"}, + "mailingAddress": TestOrgInfo.get_mailing_address(), + "typeCode": OrgType.PREMIUM.value, } @staticmethod def update_bcol_linked(): """Return org info for bcol linked info.""" return { - 'bcOnlineCredential': { - 'userId': 'test', - 'password': 'password' - }, - 'mailingAddress': TestOrgInfo.get_mailing_address(), - 'typeCode': OrgType.PREMIUM.value + "bcOnlineCredential": {"userId": "test", "password": "password"}, + "mailingAddress": TestOrgInfo.get_mailing_address(), + "typeCode": OrgType.PREMIUM.value, } @staticmethod def get_mailing_address(): """Return mailing Address.""" - return { - 'street': '1234 Abcd Street', - 'city': 'Test', - 'region': 'BC', - 'postalCode': 'T1T1T1', - 'country': 'CA' - } + return {"street": "1234 Abcd Street", "city": "Test", "region": "BC", "postalCode": "T1T1T1", "country": "CA"} @staticmethod - def org_with_mailing_address(name: str = 'BC ONLINE TECHNICAL TEAM DEVL'): + def org_with_mailing_address(name: str = "BC ONLINE TECHNICAL TEAM DEVL"): """Return org info for bcol linked info.""" - return { - 'name': name, - 'mailingAddress': TestOrgInfo.get_mailing_address() - } + return {"name": name, "mailingAddress": TestOrgInfo.get_mailing_address()} @staticmethod def update_org_with_mailing_address(): """Return org info for update org - bcol linked info.""" - return { - 'mailingAddress': TestOrgInfo.get_mailing_address() - } + return {"mailingAddress": TestOrgInfo.get_mailing_address()} @staticmethod def bcol_linked_incomplete_mailing_addrees(): """Return org info for bcol linked info.""" return { - 'name': 'BC ONLINE TECHNICAL TEAM DEVL', - 'bcOnlineCredential': { - 'userId': 'test', - 'password': 'password' - }, - 'mailingAddress': TestOrgInfo.get_mailing_address().pop('street') + "name": "BC ONLINE TECHNICAL TEAM DEVL", + "bcOnlineCredential": {"userId": "test", "password": "password"}, + "mailingAddress": TestOrgInfo.get_mailing_address().pop("street"), } 
@staticmethod def bcol_linked_different_name(): """Return org info for bcol linked info with different org name than bcol account name.""" return { - 'name': 'Test', - 'bcOnlineCredential': { - 'userId': 'test', - 'password': 'password' - }, - 'mailingAddress': TestOrgInfo.get_mailing_address(), - 'typeCode': OrgType.PREMIUM.value + "name": "Test", + "bcOnlineCredential": {"userId": "test", "password": "password"}, + "mailingAddress": TestOrgInfo.get_mailing_address(), + "typeCode": OrgType.PREMIUM.value, } class TestOrgProductsInfo(dict, Enum): """Test scenarios of attaching products to org.""" - org_products1 = {'subscriptions': [{'productCode': 'PPR'}]} - org_products2 = {'subscriptions': [{'productCode': 'VS'}, - {'productCode': 'PPR'}]} - org_products_vs = {'subscriptions': [{'productCode': 'VS'}]} - org_products_business = {'subscriptions': [{'productCode': 'BUSINESS'}]} - org_products_nds = {'subscriptions': [{'productCode': 'NDS'}]} - mhr = {'subscriptions': [{'productCode': 'MHR'}]} - mhr_qs_lawyer_and_notaries = {'subscriptions': [{'productCode': 'MHR_QSLN', 'externalSourceId': 'ABC101'}]} - mhr_qs_home_manufacturers = {'subscriptions': [{'productCode': 'MHR_QSHM', 'externalSourceId': 'ABC102'}]} - mhr_qs_home_dealers = {'subscriptions': [{'productCode': 'MHR_QSHD', 'externalSourceId': 'ABC103'}]} + __test__ = False + + org_products1 = {"subscriptions": [{"productCode": "PPR"}]} + org_products2 = {"subscriptions": [{"productCode": "VS"}, {"productCode": "PPR"}]} + org_products_vs = {"subscriptions": [{"productCode": "VS"}]} + org_products_business = {"subscriptions": [{"productCode": "BUSINESS"}]} + org_products_nds = {"subscriptions": [{"productCode": "NDS"}]} + mhr = {"subscriptions": [{"productCode": "MHR"}]} + mhr_qs_lawyer_and_notaries = {"subscriptions": [{"productCode": "MHR_QSLN", "externalSourceId": "ABC101"}]} + mhr_qs_home_manufacturers = {"subscriptions": [{"productCode": "MHR_QSHM", "externalSourceId": "ABC102"}]} + mhr_qs_home_dealers = 
{"subscriptions": [{"productCode": "MHR_QSHD", "externalSourceId": "ABC103"}]} class TestEntityInfo(dict, Enum): """Test scenarios of entity.""" - entity1 = {'businessIdentifier': 'CP1234567', - 'businessNumber': '791861073BC0001', - 'name': 'Foobar, Inc.', - 'passCode': '', - 'corpTypeCode': 'CP', - 'folioNumber': '1234'} - entity2 = {'businessIdentifier': 'CP1234568', - 'businessNumber': '791861079BC0001', - 'name': 'BarFoo, Inc.', - 'passCode': '', 'corpTypeCode': 'CP'} - entity_passcode = {'businessIdentifier': 'CP1234568', - 'businessNumber': '791861079BC0001', - 'name': 'Foobar, Inc.', - 'passCode': '111111111', 'corpTypeCode': 'CP'} - entity_passcode2 = {'businessIdentifier': 'CP1234568', - 'businessNumber': '791861078BC0001', - 'name': 'BarFoo, Inc.', - 'passCode': '222222222', 'corpTypeCode': 'CP'} - - bc_entity_passcode3 = {'businessIdentifier': 'CP123456890', - 'businessNumber': '791861078BC0002', - 'name': 'BarFoo, Inc.3', - 'passCode': '222222222', 'corpTypeCode': 'BC'} - - bc_entity_passcode4 = {'businessIdentifier': 'CP123456891', - 'businessNumber': '791861078BC0003', - 'name': 'BarFoo, Inc.4', - 'passCode': '222222222', 'corpTypeCode': 'BC'} - entity_reset_passcode = {'businessIdentifier': 'CP1234567', - 'resetPasscode': True, - 'passcodeResetEmail': 'abc@test.com'} - - invalid = {'foo': 'bar'} - - entity_lear_mock = {'businessIdentifier': 'CP0002103', - 'businessNumber': '791861078BC0001', - 'name': 'BarFoo, Inc.', - 'passCode': '222222222', 'corpTypeCode': 'CP'} - - entity_lear_mock2 = {'businessIdentifier': 'CP0002106', - 'businessNumber': '791861078BC0002', - 'name': 'Foobar, Inc.', - 'passCode': '222222222', 'corpTypeCode': 'CP'} - - entity_lear_mock3 = {'businessIdentifier': 'FM1000001', - 'businessNumber': '791861078BC0002', - 'name': 'Foobar, Inc.', - 'passCode': 'Horton, Connor', 'corpTypeCode': 'SP'} - - entity_folio_number = {'businessIdentifier': 'CP1234568', - 'folioNumber': '12345678'} - - name_request = { - 'businessIdentifier': 'NR 
1234567', - 'name': 'ABC Corp Inc.', - 'corpTypeCode': 'NR' + __test__ = False + + entity1 = { + "businessIdentifier": "CP1234567", + "businessNumber": "791861073BC0001", + "name": "Foobar, Inc.", + "passCode": "", + "corpTypeCode": "CP", + "folioNumber": "1234", + } + entity2 = { + "businessIdentifier": "CP1234568", + "businessNumber": "791861079BC0001", + "name": "BarFoo, Inc.", + "passCode": "", + "corpTypeCode": "CP", + } + entity_passcode = { + "businessIdentifier": "CP1234568", + "businessNumber": "791861079BC0001", + "name": "Foobar, Inc.", + "passCode": "111111111", + "corpTypeCode": "CP", + } + entity_passcode2 = { + "businessIdentifier": "CP1234568", + "businessNumber": "791861078BC0001", + "name": "BarFoo, Inc.", + "passCode": "222222222", + "corpTypeCode": "CP", + } + + bc_entity_passcode3 = { + "businessIdentifier": "CP123456890", + "businessNumber": "791861078BC0002", + "name": "BarFoo, Inc.3", + "passCode": "222222222", + "corpTypeCode": "BC", + } + + bc_entity_passcode4 = { + "businessIdentifier": "CP123456891", + "businessNumber": "791861078BC0003", + "name": "BarFoo, Inc.4", + "passCode": "222222222", + "corpTypeCode": "BC", + } + entity_reset_passcode = { + "businessIdentifier": "CP1234567", + "resetPasscode": True, + "passcodeResetEmail": "abc@test.com", + } + + invalid = {"foo": "bar"} + + entity_lear_mock = { + "businessIdentifier": "CP0002103", + "businessNumber": "791861078BC0001", + "name": "BarFoo, Inc.", + "passCode": "222222222", + "corpTypeCode": "CP", } - temp_business = { - 'businessIdentifier': 'QWERTYUIO', - 'name': 'NR 1234567', - 'corpTypeCode': 'TMP' + entity_lear_mock2 = { + "businessIdentifier": "CP0002106", + "businessNumber": "791861078BC0002", + "name": "Foobar, Inc.", + "passCode": "222222222", + "corpTypeCode": "CP", } - temp_business_incoporated = { - 'businessIdentifier': 'QWERTYUIO', - 'name': 'BC1234567890', - 'corpTypeCode': 'TMP' + entity_lear_mock3 = { + "businessIdentifier": "FM1000001", + "businessNumber": 
"791861078BC0002", + "name": "Foobar, Inc.", + "passCode": "Horton, Connor", + "corpTypeCode": "SP", } + entity_folio_number = {"businessIdentifier": "CP1234568", "folioNumber": "12345678"} + + name_request = {"businessIdentifier": "NR 1234567", "name": "ABC Corp Inc.", "corpTypeCode": "NR"} + + temp_business = {"businessIdentifier": "QWERTYUIO", "name": "NR 1234567", "corpTypeCode": "TMP"} + + temp_business_incoporated = {"businessIdentifier": "QWERTYUIO", "name": "BC1234567890", "corpTypeCode": "TMP"} + business_incoporated = { - 'businessIdentifier': 'BC1234567890', - 'name': 'My New Incorporated Company', - 'corpTypeCode': 'BC' + "businessIdentifier": "BC1234567890", + "name": "My New Incorporated Company", + "corpTypeCode": "BC", } class TestAffliationInfo(dict, Enum): """Test scenarios of affiliation.""" - affiliation1 = {'businessIdentifier': 'CP1234567'} - affiliation2 = {'businessIdentifier': 'CP1234568'} - affiliation3 = {'businessIdentifier': 'CP0002103', 'passCode': '222222222'} - affiliation4 = {'businessIdentifier': 'CP0002106', 'passCode': '222222222'} - nr_affiliation = {'businessIdentifier': 'NR 1234567', 'phone': '1112223333'} - new_business_affiliation = {'businessIdentifier': 'CP1234568', - 'certifiedByName': 'John Wick', 'phone': '1112223333', 'email': 'test@test.com'} - invalid = {'name': 'CP1234567'} + __test__ = False + + affiliation1 = {"businessIdentifier": "CP1234567"} + affiliation2 = {"businessIdentifier": "CP1234568"} + affiliation3 = {"businessIdentifier": "CP0002103", "passCode": "222222222"} + affiliation4 = {"businessIdentifier": "CP0002106", "passCode": "222222222"} + nr_affiliation = {"businessIdentifier": "NR 1234567", "phone": "1112223333"} + new_business_affiliation = { + "businessIdentifier": "CP1234568", + "certifiedByName": "John Wick", + "phone": "1112223333", + "email": "test@test.com", + } + invalid = {"name": "CP1234567"} class DeleteAffiliationPayload(dict, Enum): """Test scenarios of delete affiliation.""" - 
delete_affiliation1 = {'passcodeResetEmail': 'test@gmail.com', 'resetPasscode': True} - delete_affiliation2 = {'resetPasscode': False} + delete_affiliation1 = {"passcodeResetEmail": "test@gmail.com", "resetPasscode": True} + delete_affiliation2 = {"resetPasscode": False} class TestContactInfo(dict, Enum): """Test scenarios of contact.""" - contact1 = { - 'email': 'foo@bar.com', - 'phone': '(555) 555-5555', - 'phoneExtension': '123' - } + __test__ = False - contact2 = { - 'email': 'bar@foo.com', - 'phone': '(555) 555-5555', - 'phoneExtension': '123' - } + contact1 = {"email": "foo@bar.com", "phone": "(555) 555-5555", "phoneExtension": "123"} + + contact2 = {"email": "bar@foo.com", "phone": "(555) 555-5555", "phoneExtension": "123"} - invalid = {'email': 'bar'} + invalid = {"email": "bar"} # According to front end email validator and email address standard, the email below is valid email. - email_valid = {'email': "abc!#$%&'*+-/=?^_`{|.123@test-test.com"} + email_valid = {"email": "abc!#$%&'*+-/=?^_`{|.123@test-test.com"} class TestUserInfo(dict, Enum): """Test scenarios of user.""" + __test__ = False + user1 = { - 'username': fake.user_name(), - 'firstname': fake.first_name(), - 'lastname': 'User', - 'roles': '{edit, uma_authorization, staff}', - 'keycloak_guid': uuid.uuid4(), - 'idp_userid': uuid.uuid4() + "username": fake.user_name(), + "firstname": fake.first_name(), + "lastname": "User", + "roles": "{edit, uma_authorization, staff}", + "keycloak_guid": uuid.uuid4(), + "idp_userid": uuid.uuid4(), } user_staff_admin = { - 'username': fake.user_name(), - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, staff_admin}', - 'keycloak_guid': uuid.uuid4(), - 'idp_userid': uuid.uuid4(), - 'type': 'STAFF' - + "username": fake.user_name(), + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, staff_admin}", + "keycloak_guid": uuid.uuid4(), + "idp_userid": uuid.uuid4(), + 
"type": "STAFF", } user2 = { - 'username': fake.user_name(), - 'firstname': fake.first_name(), - 'lastname': 'User', - 'roles': '{edit, uma_authorization, staff}', - 'keycloak_guid': uuid.uuid4(), - 'idp_userid': uuid.uuid4() + "username": fake.user_name(), + "firstname": fake.first_name(), + "lastname": "User", + "roles": "{edit, uma_authorization, staff}", + "keycloak_guid": uuid.uuid4(), + "idp_userid": uuid.uuid4(), } user3 = { - 'username': fake.user_name(), - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, staff}', - 'keycloak_guid': uuid.uuid4(), - 'idp_userid': uuid.uuid4() + "username": fake.user_name(), + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, staff}", + "keycloak_guid": uuid.uuid4(), + "idp_userid": uuid.uuid4(), } user_test = { - 'username': 'CP1234567', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, staff}', - 'keycloak_guid': '1b20db59-19a0-4727-affe-c6f64309fd04', - 'idp_userid': '1b20db59-19a0-4727-affe-c6f64309fd04' + "username": "CP1234567", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, staff}", + "keycloak_guid": "1b20db59-19a0-4727-affe-c6f64309fd04", + "idp_userid": "1b20db59-19a0-4727-affe-c6f64309fd04", } user_tester = { - 'username': 'CP1234567', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, tester}', - 'keycloak_guid': '1b20db59-19a0-4727-affe-c6f64309fd04', - 'idp_userid': '1b20db59-19a0-4727-affe-c6f64309fd04' + "username": "CP1234567", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, tester}", + "keycloak_guid": "1b20db59-19a0-4727-affe-c6f64309fd04", + "idp_userid": "1b20db59-19a0-4727-affe-c6f64309fd04", } user_anonymous_1 = { - 'username': fake.user_name(), - 'password': 'Password@1234', + 
"username": fake.user_name(), + "password": "Password@1234", } user_bcros = { - 'username': f'{IdpHint.BCROS.value}/{fake.user_name()}', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, staff}' + "username": f"{IdpHint.BCROS.value}/{fake.user_name()}", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, staff}", # dont add a kc_guid } user_bcros_active = { - 'username': f'{IdpHint.BCROS.value}/{fake.user_name()}', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, staff}', - 'keycloak_guid': uuid.uuid4(), - 'idp_userid': uuid.uuid4(), - 'access_type': 'ANONYMOUS', + "username": f"{IdpHint.BCROS.value}/{fake.user_name()}", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, staff}", + "keycloak_guid": uuid.uuid4(), + "idp_userid": uuid.uuid4(), + "access_type": "ANONYMOUS", } user_bceid_tester = { - 'username': f'{fake.user_name()}@{IdpHint.BCEID.value}', - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, tester}', - 'keycloak_guid': uuid.uuid4(), - 'idp_userid': uuid.uuid4(), - 'access_type': 'BCEID', - 'loginSource': LoginSource.BCEID.value + "username": f"{fake.user_name()}@{IdpHint.BCEID.value}", + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, tester}", + "keycloak_guid": uuid.uuid4(), + "idp_userid": uuid.uuid4(), + "access_type": "BCEID", + "loginSource": LoginSource.BCEID.value, } @staticmethod def get_bceid_user_with_kc_guid(kc_guid: str): """Return user with keycloak guid.""" return { - 'username': fake.user_name(), - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, staff}', - 'keycloak_guid': kc_guid, - 'idp_userid': kc_guid, - 'access_type': 'BCEID', - 'login_source': 
LoginSource.BCEID.value + "username": fake.user_name(), + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, staff}", + "keycloak_guid": kc_guid, + "idp_userid": kc_guid, + "access_type": "BCEID", + "login_source": LoginSource.BCEID.value, } @staticmethod def get_user_with_kc_guid(kc_guid: str): """Return user with keycloak guid.""" return { - 'username': fake.user_name(), - 'firstname': fake.first_name(), - 'lastname': fake.last_name(), - 'roles': '{edit, uma_authorization, staff}', - 'keycloak_guid': kc_guid, - 'idp_userid': kc_guid + "username": fake.user_name(), + "firstname": fake.first_name(), + "lastname": fake.last_name(), + "roles": "{edit, uma_authorization, staff}", + "keycloak_guid": kc_guid, + "idp_userid": kc_guid, } @@ -968,13 +818,13 @@ class KeycloakScenario: def create_user_request(): """Return create user request.""" create_user_request = KeycloakUser() - user_name = ''.join(choice(ascii_lowercase) for i in range(5)) + user_name = "".join(choice(ascii_lowercase) for i in range(5)) create_user_request.user_name = user_name - create_user_request.password = 'Test@123' - create_user_request.first_name = 'test_first' - create_user_request.last_name = 'test_last' - create_user_request.email = f'{user_name}@gov.bc.ca' - create_user_request.attributes = {'corp_type': 'CP', 'source': 'BCSC'} + create_user_request.password = "Test@123" + create_user_request.first_name = "test_first" + create_user_request.last_name = "test_last" + create_user_request.email = f"{user_name}@gov.bc.ca" + create_user_request.attributes = {"corp_type": "CP", "source": "BCSC"} create_user_request.enabled = True return create_user_request @@ -982,11 +832,11 @@ def create_user_request(): def create_user_by_user_info(user_info: dict): """Return create user request.""" create_user_request = KeycloakUser() - create_user_request.user_name = user_info['preferred_username'] - create_user_request.password = 'Test@123' - 
create_user_request.first_name = user_info['firstname'] - create_user_request.last_name = user_info['lastname'] - create_user_request.attributes = {'source': user_info['loginSource']} + create_user_request.user_name = user_info["preferred_username"] + create_user_request.password = "Test@123" + create_user_request.first_name = user_info["firstname"] + create_user_request.last_name = user_info["lastname"] + create_user_request.attributes = {"source": user_info["loginSource"]} create_user_request.enabled = True return create_user_request @@ -994,69 +844,71 @@ def create_user_by_user_info(user_info: dict): @staticmethod def token_info(kc_guid: str): # pylint: disable=unused-argument; mocks of library methods """Return token info for test.""" - return { - 'sub': kc_guid, - 'username': 'public user', - 'realm_access': { - 'roles': [ - ] - } - } + return {"sub": kc_guid, "username": "public user", "realm_access": {"roles": []}} class BulkUserTestScenario: """Test scenarios of bulk users.""" + __test__ = False + @staticmethod def get_bulk_user1_for_org(org_id: str): """Generate a bulk user input.""" - return {'users': [ - {'username': ''.join(choice(ascii_uppercase) for i in range(5)), 'password': 'Test@12345', - 'membershipType': 'COORDINATOR'}, - {'username': ''.join(choice(ascii_uppercase) for i in range(5)), 'password': 'Test@12345', - 'membershipType': 'USER'} - ], - 'orgId': org_id + return { + "users": [ + { + "username": "".join(choice(ascii_uppercase) for i in range(5)), + "password": "Test@12345", + "membershipType": "COORDINATOR", + }, + { + "username": "".join(choice(ascii_uppercase) for i in range(5)), + "password": "Test@12345", + "membershipType": "USER", + }, + ], + "orgId": org_id, } class TestBCOLInfo(dict, Enum): """Test scenarios of org.""" - bcol1 = {'bcol_account_id': 'BCOL1'} - bcol2 = {'bcol_account_id': 'BCOL2'} + __test__ = False + + bcol1 = {"bcol_account_id": "BCOL1"} + bcol2 = {"bcol_account_id": "BCOL2"} class TestAffidavit: """Test affidavit 
scenarios.""" + __test__ = False + @staticmethod - def get_test_affidavit_with_contact(doc_id: str = '1234567890987654323456789876543456787654345678.txt', - issuer='ABC Notaries Inc.', - email='foo@bar.com'): + def get_test_affidavit_with_contact( + doc_id: str = "1234567890987654323456789876543456787654345678.txt", + issuer="ABC Notaries Inc.", + email="foo@bar.com", + ): """Return a dict for affidavit.""" return { - 'issuer': issuer, - 'documentId': doc_id, - 'contact': { - 'email': email, - 'phone': '(555) 555-5555', - 'phoneExtension': '123' - } + "issuer": issuer, + "documentId": doc_id, + "contact": {"email": email, "phone": "(555) 555-5555", "phoneExtension": "123"}, } @staticmethod - def get_test_affidavit_with_contact_rejected(doc_id: str = '1234567890987654323456789876543456787654345678.txt', - issuer='ABC Notaries Inc.', - email='foo@bar.com'): + def get_test_affidavit_with_contact_rejected( + doc_id: str = "1234567890987654323456789876543456787654345678.txt", + issuer="ABC Notaries Inc.", + email="foo@bar.com", + ): """Return a dict for affidavit.""" return { - 'issuer': issuer, - 'documentId': doc_id, - 'contact': { - 'email': email, - 'phone': '(555) 555-5555', - 'phoneExtension': '123' - }, - 'status_code': AffidavitStatus.REJECTED.value + "issuer": issuer, + "documentId": doc_id, + "contact": {"email": email, "phone": "(555) 555-5555", "phoneExtension": "123"}, + "status_code": AffidavitStatus.REJECTED.value, } diff --git a/auth-api/tests/utilities/factory_utils.py b/auth-api/tests/utilities/factory_utils.py index fc1a1696fe..6397ea5a21 100644 --- a/auth-api/tests/utilities/factory_utils.py +++ b/auth-api/tests/utilities/factory_utils.py @@ -16,9 +16,9 @@ Test Utility for creating model factory. 
""" import datetime -from sqlalchemy import event from requests.exceptions import HTTPError +from sqlalchemy import event from auth_api.models import ActivityLog as ActivityLogModel from auth_api.models import Affiliation as AffiliationModel @@ -39,17 +39,31 @@ from auth_api.services import Org as OrgService from auth_api.services import Task as TaskService from auth_api.utils.enums import ( - AccessType, InvitationType, OrgType, ProductSubscriptionStatus, TaskRelationshipStatus, TaskRelationshipType, - TaskStatus, TaskTypePrefix) + AccessType, + InvitationType, + OrgType, + ProductSubscriptionStatus, + TaskRelationshipStatus, + TaskRelationshipType, + TaskStatus, + TaskTypePrefix, +) from auth_api.utils.roles import Role from tests.utilities.factory_scenarios import ( - JWT_HEADER, TestBCOLInfo, TestContactInfo, TestEntityInfo, TestOrgInfo, TestOrgTypeInfo, TestUserInfo) + JWT_HEADER, + TestBCOLInfo, + TestContactInfo, + TestEntityInfo, + TestOrgInfo, + TestOrgTypeInfo, + TestUserInfo, +) from tests.utilities.sqlalchemy import clear_event_listeners def factory_auth_header(jwt, claims): """Produce JWT tokens for use in tests.""" - return {'Authorization': 'Bearer ' + jwt.create_jwt(claims=claims, header=JWT_HEADER)} + return {"Authorization": "Bearer " + jwt.create_jwt(claims=claims, header=JWT_HEADER)} def factory_entity_model(entity_info: dict = TestEntityInfo.entity1, user_id=None) -> EntityModel: @@ -69,23 +83,24 @@ def factory_entity_service(entity_info: dict = TestEntityInfo.entity1): def factory_user_model(user_info: dict = TestUserInfo.user1): """Produce a user model.""" - roles = user_info.get('roles', None) - if user_info.get('access_type', None) == AccessType.ANONYMOUS.value: + roles = user_info.get("roles", None) + if user_info.get("access_type", None) == AccessType.ANONYMOUS.value: user_type = Role.ANONYMOUS_USER.name elif Role.STAFF.value in roles: user_type = Role.STAFF.name else: user_type = None - user = UserModel(username=user_info['username'], - 
firstname=user_info['firstname'], - lastname=user_info['lastname'], - keycloak_guid=user_info.get('keycloak_guid', None), - type=user_type, - email='test@test.com', - login_source=user_info.get('login_source', None), - idp_userid=user_info.get('idp_userid', None) - ) + user = UserModel( + username=user_info["username"], + firstname=user_info["firstname"], + lastname=user_info["lastname"], + keycloak_guid=user_info.get("keycloak_guid", None), + type=user_type, + email="test@test.com", + login_source=user_info.get("login_source", None), + idp_userid=user_info.get("idp_userid", None), + ) user.save() return user @@ -93,16 +108,17 @@ def factory_user_model(user_info: dict = TestUserInfo.user1): def factory_user_model_with_contact(user_info: dict = TestUserInfo.user1, keycloak_guid=None): """Produce a user model.""" - user_type = Role.ANONYMOUS_USER.name if user_info.get('access_type', None) == AccessType.ANONYMOUS.value else None - user = UserModel(username=user_info.get('username', user_info.get('preferred_username')), - firstname=user_info['firstname'], - lastname=user_info['lastname'], - keycloak_guid=user_info.get('keycloak_guid', keycloak_guid), - type=user_type, - email='test@test.com', - login_source=user_info.get('loginSource'), - idp_userid=user_info.get('idp_userid', None) - ) + user_type = Role.ANONYMOUS_USER.name if user_info.get("access_type", None) == AccessType.ANONYMOUS.value else None + user = UserModel( + username=user_info.get("username", user_info.get("preferred_username")), + firstname=user_info["firstname"], + lastname=user_info["lastname"], + keycloak_guid=user_info.get("keycloak_guid", keycloak_guid), + type=user_type, + email="test@test.com", + login_source=user_info.get("loginSource"), + idp_userid=user_info.get("idp_userid", None), + ) user.save() @@ -116,56 +132,58 @@ def factory_user_model_with_contact(user_info: dict = TestUserInfo.user1, keyclo return user -def factory_membership_model(user_id, org_id, member_type='ADMIN', 
member_status=1): +def factory_membership_model(user_id, org_id, member_type="ADMIN", member_status=1): """Produce a Membership model.""" - membership = MembershipModel(user_id=user_id, - org_id=org_id, - membership_type_code=member_type, - membership_type_status=member_status) + membership = MembershipModel( + user_id=user_id, org_id=org_id, membership_type_code=member_type, membership_type_status=member_status + ) membership.created_by_id = user_id membership.save() return membership -def factory_org_model(org_info: dict = TestOrgInfo.org1, - org_type_info: dict = None, - org_status_info: dict = None, - user_id=None, - bcol_info: dict = TestBCOLInfo.bcol1): +def factory_org_model( + org_info: dict = TestOrgInfo.org1, + org_type_info: dict = None, + org_status_info: dict = None, + user_id=None, + bcol_info: dict = TestBCOLInfo.bcol1, +): """Produce a templated org model.""" org_type = OrgTypeModel.get_default_type() - if org_type_info and org_type_info['code'] != TestOrgTypeInfo.implicit['code']: - org_type = OrgTypeModel(code=org_type_info['code'], description=org_type_info['desc']) + if org_type_info and org_type_info["code"] != TestOrgTypeInfo.implicit["code"]: + org_type = OrgTypeModel(code=org_type_info["code"], description=org_type_info["desc"]) org_type.save() if org_status_info is not None: - org_status = OrgStatusModel(code=org_status_info['code'], description=org_status_info['desc']) + org_status = OrgStatusModel(code=org_status_info["code"], description=org_status_info["desc"]) org_status.save() else: org_status = OrgStatusModel.get_default_status() - org = OrgModel(name=org_info['name']) + org = OrgModel(name=org_info["name"]) org.org_type = org_type - org.access_type = org_info.get('accessType', '') + org.access_type = org_info.get("accessType", "") org.org_status = org_status org.created_by_id = user_id - org.bcol_account_id = bcol_info.get('bcol_account_id', '') - org.bcol_user_id = bcol_info.get('bcol_user_id', '') + org.bcol_account_id = 
bcol_info.get("bcol_account_id", "") + org.bcol_user_id = bcol_info.get("bcol_user_id", "") org.save() return org -def factory_org_service(org_info: dict = TestOrgInfo.org1, - org_type_info: dict = None, - org_status_info: dict = None, - bcol_info: dict = TestBCOLInfo.bcol1): +def factory_org_service( + org_info: dict = TestOrgInfo.org1, + org_type_info: dict = None, + org_status_info: dict = None, + bcol_info: dict = TestBCOLInfo.bcol1, +): """Produce a templated org service.""" - org_model = factory_org_model(org_info=org_info, - org_type_info=org_type_info, - org_status_info=org_status_info, - bcol_info=bcol_info) + org_model = factory_org_model( + org_info=org_info, org_type_info=org_type_info, org_status_info=org_status_info, bcol_info=bcol_info + ) org_service = OrgService(org_model) return org_service @@ -193,83 +211,70 @@ def factory_affiliation_service(entity_id, org_id): return affiliation_service -def factory_affiliation_invitation(from_org_id, - business_identifier, - to_org_id=None, - to_org_uuid=None, - invitation_type='EMAIL'): +def factory_affiliation_invitation( + from_org_id, business_identifier, to_org_id=None, to_org_uuid=None, invitation_type="EMAIL" +): """Produce an affiliation invitation for the given from/to org, business and email.""" affiliation_invitation_dict = { - 'fromOrgId': from_org_id, - 'toOrgId': to_org_id, - 'businessIdentifier': business_identifier, - 'type': invitation_type + "fromOrgId": from_org_id, + "toOrgId": to_org_id, + "businessIdentifier": business_identifier, + "type": invitation_type, } if to_org_uuid: - affiliation_invitation_dict['toOrgUuid'] = to_org_uuid + affiliation_invitation_dict["toOrgUuid"] = to_org_uuid return affiliation_invitation_dict def factory_contact_model(contact_info: dict = TestContactInfo.contact1): """Return a valid contact object with the provided fields.""" - contact = ContactModel(email=contact_info['email']) + contact = ContactModel(email=contact_info["email"]) contact.save() return 
contact -def factory_invitation(org_id, - email='abc123@email.com', - sent_date=datetime.datetime.now().strftime('Y-%m-%d %H:%M:%S'), - membership_type='USER'): +def factory_invitation( + org_id, + email="abc123@email.com", + sent_date=datetime.datetime.now().strftime("Y-%m-%d %H:%M:%S"), + membership_type="USER", +): """Produce an invite for the given org and email.""" return { - 'recipientEmail': email, - 'sentDate': sent_date, - 'membership': [ - { - 'membershipType': membership_type, - 'orgId': org_id - } - ] + "recipientEmail": email, + "sentDate": sent_date, + "membership": [{"membershipType": membership_type, "orgId": org_id}], } -def factory_invitation_anonymous(org_id, - email='abc123@email.com', - sent_date=datetime.datetime.now().strftime('Y-%m-%d %H:%M:%S'), - membership_type='ADMIN'): +def factory_invitation_anonymous( + org_id, + email="abc123@email.com", + sent_date=datetime.datetime.now().strftime("Y-%m-%d %H:%M:%S"), + membership_type="ADMIN", +): """Produce an invite for the given org and email.""" return { - 'recipientEmail': email, - 'sentDate': sent_date, - 'type': InvitationType.DIRECTOR_SEARCH.value, - 'membership': [ - { - 'membershipType': membership_type, - 'orgId': org_id - } - ] + "recipientEmail": email, + "sentDate": sent_date, + "type": InvitationType.DIRECTOR_SEARCH.value, + "membership": [{"membershipType": membership_type, "orgId": org_id}], } -def factory_document_model(version_id, doc_type, content, content_type='text/html'): +def factory_document_model(version_id, doc_type, content, content_type="text/html"): """Produce a Document model.""" - document = DocumentsModel(version_id=version_id, - type=doc_type, - content=content, - content_type=content_type) + document = DocumentsModel(version_id=version_id, type=doc_type, content=content, content_type=content_type) document.save() return document -def factory_product_model(org_id: str, - product_code: str = 'PPR', status_code=ProductSubscriptionStatus.ACTIVE.value): +def 
factory_product_model(org_id: str, product_code: str = "PPR", status_code=ProductSubscriptionStatus.ACTIVE.value): """Produce a templated product model.""" - subscription = ProductSubscriptionModel(org_id=org_id, product_code=product_code, - status_code=status_code) + subscription = ProductSubscriptionModel(org_id=org_id, product_code=product_code, status_code=status_code) subscription.save() return subscription @@ -282,20 +287,22 @@ def factory_task_service(user_id: int = 1, org_id: int = 1): return service -def factory_task_model(user_id: int = 1, org_id: int = 1, - modified_by_id: int = None, date_submitted: datetime = datetime.datetime.now()): +def factory_task_model( + user_id: int = 1, org_id: int = 1, modified_by_id: int = None, date_submitted: datetime = datetime.datetime.now() +): """Produce a Task model.""" task_type = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value - task = TaskModel(name='foo', - date_submitted=date_submitted, - relationship_type=TaskRelationshipType.ORG.value, - relationship_id=org_id, - type=task_type, - status=TaskStatus.OPEN.value, - related_to=user_id, - relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, - modified_by_id=modified_by_id - ) + task = TaskModel( + name="foo", + date_submitted=date_submitted, + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=org_id, + type=task_type, + status=TaskStatus.OPEN.value, + related_to=user_id, + relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + modified_by_id=modified_by_id, + ) task.save() return task @@ -304,19 +311,29 @@ def factory_task_models(count: int, user_id: int): """Produce a collection of Task models.""" task_type = TaskTypePrefix.NEW_ACCOUNT_STAFF_REVIEW.value for i in range(0, count): - task = TaskModel(name='TEST {}'.format(i), date_submitted=datetime.datetime.now(), - relationship_type=TaskRelationshipType.ORG.value, - relationship_id=10, type=task_type, - status=TaskStatus.OPEN.value, - related_to=user_id, - 
relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value) + task = TaskModel( + name="TEST {}".format(i), + date_submitted=datetime.datetime.now(), + relationship_type=TaskRelationshipType.ORG.value, + relationship_id=10, + type=task_type, + status=TaskStatus.OPEN.value, + related_to=user_id, + relationship_status=TaskRelationshipStatus.PENDING_STAFF_REVIEW.value, + ) task.save() -def factory_activity_log_model(actor: str, action: str, item_type: str = 'Account', item_name='Foo Bar', item_id=10, - item_value: str = 'Val', - org_id=10, - remote_addr=''): +def factory_activity_log_model( + actor: str, + action: str, + item_type: str = "Account", + item_name="Foo Bar", + item_id=10, + item_value: str = "Val", + org_id=10, + remote_addr="", +): """Create a Log Model.""" activity_log = ActivityLogModel( actor_id=actor, @@ -326,7 +343,7 @@ def factory_activity_log_model(actor: str, action: str, item_type: str = 'Accoun item_type=item_type, item_value=item_value, remote_addr=remote_addr, - org_id=org_id + org_id=org_id, ) activity_log.save() @@ -338,21 +355,22 @@ def token_info(): """Return token info.""" return claims - monkeypatch.setattr('auth_api.utils.user_context._get_token_info', token_info) + monkeypatch.setattr("auth_api.utils.user_context._get_token_info", token_info) def get_tos_latest_version(): """Return latest tos version.""" - return '5' + return "5" def get_tos_pad_latest_version(): """Return latest tos pad version.""" - return 'p1' + return "p1" def patch_pay_account_post(monkeypatch): """Patch pay account post success (200 or 201).""" + class MockPayResponse: def __init__(self, json_data, status_code): self.json_data = json_data @@ -365,12 +383,14 @@ def json(): def raise_for_status(self): pass - monkeypatch.setattr('auth_api.services.rest_service.RestService.post', lambda *args, - **kwargs: MockPayResponse(None, 200)) + monkeypatch.setattr( + "auth_api.services.rest_service.RestService.post", lambda *args, **kwargs: MockPayResponse(None, 
200) + ) def patch_pay_account_put(monkeypatch): """Patch pay account post success (200 or 201).""" + class MockPayResponse: def __init__(self, json_data, status_code): self.json_data = json_data @@ -383,12 +403,14 @@ def json(): def raise_for_status(self): pass - monkeypatch.setattr('auth_api.services.rest_service.RestService.put', lambda *args, - **kwargs: MockPayResponse(None, 200)) + monkeypatch.setattr( + "auth_api.services.rest_service.RestService.put", lambda *args, **kwargs: MockPayResponse(None, 200) + ) def patch_pay_account_delete(monkeypatch): """Patch pay account delete success.""" + class MockPayResponse: @staticmethod @@ -398,45 +420,41 @@ def json(): def raise_for_status(self): pass - monkeypatch.setattr('auth_api.services.rest_service.RestService.delete', lambda *args, **kwargs: MockPayResponse()) + monkeypatch.setattr("auth_api.services.rest_service.RestService.delete", lambda *args, **kwargs: MockPayResponse()) def patch_pay_account_delete_error(monkeypatch): """Patch pay account delete error.""" + class MockPayResponse: @staticmethod def json(): - return {'type': 'OUTSTANDING_CREDIT', 'title': 'OUTSTANDING_CREDIT'} + return {"type": "OUTSTANDING_CREDIT", "title": "OUTSTANDING_CREDIT"} def raise_for_status(self): - raise HTTPError('TEST ERROR') + raise HTTPError("TEST ERROR") - monkeypatch.setattr('auth_api.services.rest_service.RestService.delete', lambda *args, **kwargs: MockPayResponse()) + monkeypatch.setattr("auth_api.services.rest_service.RestService.delete", lambda *args, **kwargs: MockPayResponse()) def patch_get_firms_parties(monkeypatch): """Patch pay account delete success.""" + class MockPartiesResponse: @staticmethod def json(): return { - 'parties': [ + "parties": [ { - 'officer': { - 'email': 'test@email.com', - 'firstName': 'Connor', - 'lastName': 'Horton', - 'partyType': 'person' + "officer": { + "email": "test@email.com", + "firstName": "Connor", + "lastName": "Horton", + "partyType": "person", }, - 'roles': [ - { - 
'appointmentDate': '2022-03-01', - 'cessationDate': 'None', - 'roleType': 'Partner' - } - ] + "roles": [{"appointmentDate": "2022-03-01", "cessationDate": "None", "roleType": "Partner"}], } ] } @@ -444,7 +462,7 @@ def json(): def raise_for_status(self): pass - monkeypatch.setattr('auth_api.services.rest_service.RestService.get', lambda *args, **kwargs: MockPartiesResponse()) + monkeypatch.setattr("auth_api.services.rest_service.RestService.get", lambda *args, **kwargs: MockPartiesResponse()) def convert_org_to_staff_org(org_id: int, type_code: OrgType): @@ -454,5 +472,5 @@ def convert_org_to_staff_org(org_id: int, type_code: OrgType): org_db = OrgModel.find_by_id(org_id) org_db.type_code = type_code org_db.save() - event.listen(OrgModel, 'before_update', receive_before_update, raw=True) - event.listen(OrgModel, 'before_insert', receive_before_insert) + event.listen(OrgModel, "before_update", receive_before_update, raw=True) + event.listen(OrgModel, "before_insert", receive_before_insert) diff --git a/auth-api/tests/utilities/schema_assertions.py b/auth-api/tests/utilities/schema_assertions.py index 24d3d1f8e8..9eb11b7d08 100644 --- a/auth-api/tests/utilities/schema_assertions.py +++ b/auth-api/tests/utilities/schema_assertions.py @@ -30,7 +30,7 @@ def assert_valid_schema(data: dict, schema_file: dict): def _load_json_schema(filename: str): """Return the given schema file identified by filename.""" - relative_path = join('schemas', filename) + relative_path = join("schemas", filename) absolute_path = join(dirname(__file__), relative_path) with open(absolute_path) as schema_file: diff --git a/auth-api/tests/utilities/sqlalchemy.py b/auth-api/tests/utilities/sqlalchemy.py index d7315fa9ac..470190fa27 100644 --- a/auth-api/tests/utilities/sqlalchemy.py +++ b/auth-api/tests/utilities/sqlalchemy.py @@ -13,6 +13,7 @@ # limitations under the License. 
"""Utility to remove event listeners for models.""" import ctypes + from sqlalchemy import event diff --git a/auth-api/update_db.sh b/auth-api/update_db.sh new file mode 100755 index 0000000000..e3d9a0c79d --- /dev/null +++ b/auth-api/update_db.sh @@ -0,0 +1,4 @@ +#! /bin/sh +echo 'starting upgrade' +export DEPLOYMENT_ENV=migration +flask db upgrade \ No newline at end of file diff --git a/auth-api/wsgi.py b/auth-api/wsgi.py index d841635d28..94087305e9 100644 --- a/auth-api/wsgi.py +++ b/auth-api/wsgi.py @@ -14,12 +14,11 @@ """Provides the WSGI entry point for running the application """ import os -from auth_api import create_app +from auth_api import create_app -# Openshift s2i expects a lower case name of application -app = create_app() # pylint: disable=invalid-name +app = create_app() # pylint: disable=invalid-name if __name__ == "__main__": - server_port = os.environ.get('PORT', '8080') - app.run(debug=False, port=server_port, host='0.0.0.0') + server_port = os.environ.get("PORT", "5000") + app.run(debug=False, threaded=False, port=server_port, host="0.0.0.0") diff --git a/auth-web/.env.example b/auth-web/.env.example index e11aad1ba9..13aa5dbc96 100644 --- a/auth-web/.env.example +++ b/auth-web/.env.example @@ -69,4 +69,4 @@ VUE_APP_KEYCLOAK_CLIENTID="account-web" VUE_APP_SENTRY_DSN= #vaults hotjar -VUE_APP_HOTJAR_ID= +VUE_APP_HOTJAR_ID= \ No newline at end of file diff --git a/auth-web/.gitignore b/auth-web/.gitignore index 9611c7e601..f8076dbbf4 100644 --- a/auth-web/.gitignore +++ b/auth-web/.gitignore @@ -20,5 +20,6 @@ __mock__ .env .env.bak +!.env.example *.firebaserc .firebase \ No newline at end of file diff --git a/auth-web/Makefile b/auth-web/Makefile deleted file mode 100644 index d2a90b0bfd..0000000000 --- a/auth-web/Makefile +++ /dev/null @@ -1,85 +0,0 @@ -.PHONY: setup -.PHONY: ci cd - -PROJECT_NAME:=auth-web -DOCKER_NAME:=auth-web - -################################################################################# -# COMMANDS -- Setup -# 
expects the terminal to be openshift login -# expects export OPENSHIFT_REPOSITORY="" -################################################################################# -setup: ## Clean and Install npm dependencies - npm ci - -################################################################################# -# COMMANDS - CI # -################################################################################# -ci: lint test - -lint: ## Run linting ofcode. - npm run lint - -test: ## Unit testing -ifeq ($(shell test -s .env && echo -n yes),yes) - mv .env .env.bak; -endif - cp .env.example .env - npm run test:unit -ifeq ($(shell test -s .env.bak && echo -n yes),yes) - mv .env.bak .env; -endif - - -################################################################################# -# COMMANDS - CD -# expects the terminal to be openshift login -# expects export OPENSHIFT_DOCKER_REGISTRY="" -# expects export OPENSHIFT_SA_NAME="$(oc whoami)" -# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)" -# expects export OPENSHIFT_REPOSITORY="" -# expects export TAG_NAME="dev/test/prod" -################################################################################# -cd: ## CD flow -ifeq ($(TAG_NAME), test) -BUILD_TAG_NAME=test-latest -cd: build tag -else ifeq ($(TAG_NAME), prod) -BUILD_TAG_NAME=prod-latest -cd: build tag-production -else -BUILD_TAG_NAME=dev-latest -TAG_NAME=dev -cd: build tag -endif - -build: ## Build the docker container - docker build . -t $(DOCKER_NAME) \ - --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \ - --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -build-nc: ## Build the docker container without caching - docker build --no-cache -t $(DOCKER_NAME) . 
- -REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME) -push: #build ## Push the docker container to the registry & tag latest - @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\ - docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):$(BUILD_TAG_NAME) ;\ - docker push $(REGISTRY_IMAGE):$(BUILD_TAG_NAME) - -tag: push ## tag image - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(BUILD_TAG_NAME) $(DOCKER_NAME):$(TAG_NAME) - -tag-production: push ## tag image - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F) ;\ - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(BUILD_TAG_NAME) $(DOCKER_NAME):$(TAG_NAME) - -################################################################################# -# Self Documenting Commands # -################################################################################# -.PHONY: help - -.DEFAULT_GOAL := help - -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/auth-web/package.json b/auth-web/package.json index be87023bbc..3a710c8208 100644 --- a/auth-web/package.json +++ b/auth-web/package.json @@ -7,9 +7,11 @@ "scripts": { "dev": "vite", "build": "vite build", + "build-check": "vite build", "preview": "vite preview --port 8080", "lint": "eslint . --ext ts,vue --fix src", "lint:nofix": "eslint . 
--ext ts,vue --no-fix", + "test": "vitest run", "test:unit": "vitest run", "test:watch": "vitest watch", "test:coverage": "vitest run --coverage" diff --git a/build-deps/build_deps/__init__.py b/build-deps/build_deps/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/build-deps/poetry.lock b/build-deps/poetry.lock new file mode 100644 index 0000000000..ce86cc1bfb --- /dev/null +++ b/build-deps/poetry.lock @@ -0,0 +1,1540 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "attrs" +version = "24.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = 
"sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "cachecontrol" +version = "0.14.0" +description = "httplib2 caching for requests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, + {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, +] + +[package.dependencies] +msgpack = ">=0.5.2,<2.0.0" +requests = ">=2.16.0" + +[package.extras] +dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] +filecache = ["filelock (>=3.8.0)"] +redis = ["redis (>=2.10.5)"] + +[[package]] +name = "cachelib" +version = "0.13.0" +description = "A collection of cache libraries in the same API interface." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cachelib-0.13.0-py3-none-any.whl", hash = "sha256:8c8019e53b6302967d4e8329a504acf75e7bc46130291d30188a6e4e58162516"}, + {file = "cachelib-0.13.0.tar.gz", hash = "sha256:209d8996e3c57595bee274ff97116d1d73c4980b2fd9a34c7846cd07fd2e1a48"}, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + 
+[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "ecdsa" +version = "0.19.0" +description = "ECDSA cryptographic signature library (pure python)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" +files = [ + {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, + {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, +] + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + +[[package]] +name = "expiringdict" +version = "1.2.2" +description = "Dictionary with auto-expiring values for caching purposes" +optional = false +python-versions = "*" +files = [ + {file = 
"expiringdict-1.2.2-py3-none-any.whl", hash = "sha256:09a5d20bc361163e6432a874edd3179676e935eb81b925eccef48d409a8a45e8"}, + {file = "expiringdict-1.2.2.tar.gz", hash = "sha256:300fb92a7e98f15b05cf9a856c1415b3bc4f2e132be07daa326da6414c23ee09"}, +] + +[package.extras] +tests = ["coverage", "coveralls", "dill", "mock", "nose"] + +[[package]] +name = "flask" +version = "3.0.2" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"}, + {file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-jwt-oidc" +version = "0.7.0" +description = "Opinionated flask oidc client" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "flask_jwt_oidc-0.7.0-py3-none-any.whl", hash = "sha256:0fec097ea0557fe315ca576f4c9a795c874bf28ae62b21eadf4626c5c5ca9029"}, + {file = "flask_jwt_oidc-0.7.0.tar.gz", hash = "sha256:ba90e3557e931b1c5cbd37fb2ddc462a525f74e2a85aa7d684a733e68fa9a3e4"}, +] + +[package.dependencies] +cachelib = ">=0.13.0,<0.14.0" +Flask = ">=2" +python-jose = ">=3.3.0,<4.0.0" +six = ">=1.16.0,<2.0.0" + +[[package]] +name = "flask-opentracing" +version = "1.1.0" +description = "OpenTracing support for Flask applications" +optional = false +python-versions = "*" +files = [ + {file = "Flask-OpenTracing-1.1.0.tar.gz", hash = "sha256:a9a39d367fbe7e9ed9c77b90ac48159c1a3e82982a5abf84d3f4d710d24580ac"}, +] + +[package.dependencies] +Flask = "*" +opentracing = ">=2.0,<3" + +[package.extras] +tests = ["flake8", "flake8-quotes", "mock", "pytest", "pytest-cov"] + +[[package]] +name = 
"flask-sqlalchemy" +version = "3.1.1" +description = "Add SQLAlchemy support to your Flask application." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, + {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, +] + +[package.dependencies] +flask = ">=2.2.5" +sqlalchemy = ">=2.0.16" + +[[package]] +name = "gcp-queue" +version = "0.3.0" +description = "" +optional = false +python-versions = "^3.8" +files = [] +develop = false + +[package.dependencies] +flask = ">=1" +google-auth = "^2.28.2" +google-cloud-pubsub = "^2.20.2" +simple-cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py.git"} + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/gcp-queue" + +[[package]] +name = "google-api-core" +version = "2.20.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a"}, + {file = "google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} +grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || 
>4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-auth" +version = "2.35.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, + {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-cloud-pubsub" +version = "2.25.2" +description = "Google Cloud Pub/Sub API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_cloud_pubsub-2.25.2-py2.py3-none-any.whl", hash = "sha256:1e8470586136804b2a49c290f0e75082d949f778e1aaae2ebb9759741d829014"}, + {file = "google_cloud_pubsub-2.25.2.tar.gz", hash = "sha256:e0db6bfa52f04bf17347c4afbfea3dc094d31fc54259a1581407b4cd784da433"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +grpcio = ">=1.51.3,<2.0dev" +grpcio-status = ">=1.33.2" +opentelemetry-api = {version = ">=1.27.0", markers = "python_version >= \"3.8\""} +opentelemetry-sdk = {version = 
">=1.27.0", markers = "python_version >= \"3.8\""} +proto-plus = {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""} +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + +[package.extras] +libcst = ["libcst (>=0.3.10)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.65.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, + {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "greenlet" +version = "3.1.1" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = 
"greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = 
"greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = 
"greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.1" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, + {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + +[[package]] +name = "grpcio" +version = "1.66.2" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, + {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, + {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, + {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, + {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, + {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, + {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, + {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, + {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, + {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, + {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, + {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, + {file = 
"grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, + {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, + {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, + {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, + {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, + {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, + {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, + {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, + {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, + {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, + {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, + {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, + {file = "grpcio-1.66.2.tar.gz", hash = 
"sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.66.2)"] + +[[package]] +name = "grpcio-status" +version = "1.66.2" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio_status-1.66.2-py3-none-any.whl", hash = "sha256:e5fe189f6897d12aa9cd74408a17ca41e44fad30871cf84f5cbd17bd713d2455"}, + {file = "grpcio_status-1.66.2.tar.gz", hash = "sha256:fb55cbb5c2e67062f7a4d5c99e489d074fb57e98678d5c3c6692a2d74d89e9ae"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.66.2" +protobuf = ">=5.26.1,<6.0dev" + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "importlib-metadata" +version = "8.4.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs 
(>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + +[[package]] +name = "jaeger-client" +version = "4.8.0" +description = "Jaeger Python OpenTracing Tracer implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, +] + +[package.dependencies] +opentracing = ">=2.1,<3.0" +threadloop = ">=1,<2" +thrift = "*" +tornado = ">=4.3" + +[package.extras] +tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-imports", "mock", "mypy", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "pycurl", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-localserver", "pytest-timeout", "pytest-tornado", "tchannel (==2.1.0)"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "launchdarkly-eventsource" +version = "1.2.0" +description = "LaunchDarkly SSE Client" +optional = false +python-versions = ">=3.8" +files = [ + {file = "launchdarkly_eventsource-1.2.0-py3-none-any.whl", hash = "sha256:9b5ec7149e2ad9995be22ad5361deb480c229701e6b0cc799e94aa14f067b77b"}, + {file = "launchdarkly_eventsource-1.2.0.tar.gz", hash = "sha256:8cb3301ec0daeb5e17eaa37b3b65f6660fab851b317e69271185ef2fb42c2fde"}, +] + +[package.dependencies] +urllib3 = ">=1.26.0,<3" + +[[package]] +name = "launchdarkly-server-sdk" +version = "9.5.0" +description = "LaunchDarkly SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"launchdarkly_server_sdk-9.5.0-py3-none-any.whl", hash = "sha256:bf2cf213f9eb71cd43d5f20f2ac9ec9235c693036459e5038a69015a6648c035"}, + {file = "launchdarkly_server_sdk-9.5.0.tar.gz", hash = "sha256:af64d985621a03257107210266c563c5e268ca8320d1d71b5c18d9592d14fef7"}, +] + +[package.dependencies] +certifi = ">=2018.4.16" +expiringdict = ">=1.1.4" +launchdarkly-eventsource = ">=1.1.0,<2.0.0" +pyRFC3339 = ">=1.0" +semver = ">=2.10.2" +urllib3 = ">=1.26.0,<3" + +[package.extras] +consul = ["python-consul (>=1.0.1)"] +dynamodb = ["boto3 (>=1.9.71)"] +redis = ["redis (>=2.10.5)"] +test-filesource = ["pyyaml (>=5.3.1)", "watchdog (>=3.0.0)"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = 
"MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = 
"MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] + +[[package]] +name = "msgpack" +version = "1.1.0" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = 
"msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = 
"msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = 
"msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = 
"msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, +] + +[[package]] +name = "opentelemetry-api" +version = "1.27.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"}, + {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=8.4.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.27.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"}, + {file = 
"opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"}, +] + +[package.dependencies] +opentelemetry-api = "1.27.0" +opentelemetry-semantic-conventions = "0.48b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.48b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"}, + {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +opentelemetry-api = "1.27.0" + +[[package]] +name = "opentracing" +version = "2.4.0" +description = "OpenTracing API for Python. See documentation at http://opentracing.io" +optional = false +python-versions = "*" +files = [ + {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, +] + +[package.extras] +tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pytest", "pytest-cov", "pytest-mock", "six (>=1.10.0,<2.0)", "sphinx_rtd_theme", "tornado"] + +[[package]] +name = "proto-plus" +version = "1.24.0" +description = "Beautiful, Pythonic protocol buffers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<6.0.0dev" + +[package.extras] +testing = ["google-api-core (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "5.28.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, + {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, + {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"}, + {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"}, + {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"}, + {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"}, + {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"}, + {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"}, + {file = "protobuf-5.28.2.tar.gz", hash = 
"sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"}, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.1" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pyrfc3339" +version = "1.1" +description = "Generate and parse RFC 3339 timestamps" +optional = false +python-versions = "*" +files = [ + {file = "pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4"}, + {file = "pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"}, +] + +[package.dependencies] +pytz = "*" + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = 
"pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = 
"pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = 
"pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +optional = false +python-versions = "*" +files = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[package.dependencies] +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] 
+pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "sbc_common_components" +version = "0.0.0" +description = "" +optional = false +python-versions = "*" +files = [] +develop = false + +[package.dependencies] +flask = "*" +flask-jwt-oidc = ">=0.1.5" +Flask-OpenTracing = "1.1.0" +Flask-SQLAlchemy = "*" +jaeger-client = "*" + +[package.source] +type = "git" +url = "https://github.com/bolyachevets/sbc-common-components.git" +reference = "camel_case_empty_dict" +resolved_reference = 
"e856ba65f86461a4333753467dca2ccc5f15778c" +subdirectory = "python" + +[[package]] +name = "semver" +version = "3.0.2" +description = "Python helper for Semantic Versioning (https://semver.org)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, + {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, +] + +[[package]] +name = "simple-cloudevent" +version = "0.0.2" +description = "A short description of the project" +optional = false +python-versions = ">=3.8" +files = [] +develop = false + +[package.dependencies] +strict-rfc3339 = "*" + +[package.source] +type = "git" +url = "https://github.com/daxiom/simple-cloudevent.py.git" +reference = "HEAD" +resolved_reference = "447cabb988202206ac69e71177d7cd11b6c0b002" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.35" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = 
"SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = 
"SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = 
"sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = 
["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "strict-rfc3339" +version = "0.7" +description = "Strict, simple, lightweight RFC3339 functions" +optional = false +python-versions = "*" +files = [ + {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, +] + +[[package]] +name = "structlog" +version = "24.4.0" +description = "Structured Logging for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "structlog-24.4.0-py3-none-any.whl", hash = "sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610"}, + {file = "structlog-24.4.0.tar.gz", hash = "sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4"}, +] + +[package.extras] +dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "rich", "simplejson", "twisted"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"] +tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy (>=1.4)", "rich", "twisted"] + +[[package]] +name = "structured-logging" +version = "0.4.0" +description = "" +optional = false +python-versions = "^3.9" +files = [] +develop = false + +[package.dependencies] +flask = ">= 1" +structlog = "^24.1.0" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = 
"43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/structured-logging" + +[[package]] +name = "threadloop" +version = "1.0.2" +description = "Tornado IOLoop Backed Concurrent Futures" +optional = false +python-versions = "*" +files = [ + {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, + {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, +] + +[package.dependencies] +tornado = "*" + +[[package]] +name = "thrift" +version = "0.21.0" +description = "Python bindings for the Apache Thrift RPC system" +optional = false +python-versions = "*" +files = [ + {file = "thrift-0.21.0.tar.gz", hash = "sha256:5e6f7c50f936ebfa23e924229afc95eb219f8c8e5a83202dd4a391244803e402"}, +] + +[package.dependencies] +six = ">=1.7.2" + +[package.extras] +all = ["tornado (>=4.0)", "twisted"] +tornado = ["tornado (>=4.0)"] +twisted = ["twisted"] + +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "werkzeug" +version = "3.0.0" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.0-py3-none-any.whl", hash = "sha256:cbb2600f7eabe51dbc0502f58be0b3e1b96b893b05695ea2b35b43d4de2d9962"}, + {file = "werkzeug-3.0.0.tar.gz", hash = "sha256:3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = 
"sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "zipp" +version = "3.20.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = 
"fe2f0b0646423a1bc4157e7cc13f2e0c3fe2b21830cdf18b545ee5a2fa526b50" diff --git a/build-deps/pyproject.toml b/build-deps/pyproject.toml new file mode 100644 index 0000000000..fedd50be08 --- /dev/null +++ b/build-deps/pyproject.toml @@ -0,0 +1,29 @@ +[tool.poetry] +name = "build-deps" +version = "1.0.0" +description = "common dependencies for all auth tools" +authors = ["BC Registries and Online Services"] + +[tool.poetry.dependencies] +attrs = "24.2.0" +CacheControl = "0.14.0" +cachetools = "5.5.0" +certifi = "2024.8.30" +flask = "3.0.2" +itsdangerous = "2.1.2" +jinja2 = "3.1.3" +jsonschema = "4.17.3" +launchdarkly-server-sdk = "9.5.0" +MarkupSafe = "2.1.1" +python = "^3.12" +python-dotenv = "^1.0.1" +requests = "2.32.3" +Werkzeug = "3.0.0" + +sbc-common-components = { git = "https://github.com/bolyachevets/sbc-common-components.git", rev = "camel_case_empty_dict", subdirectory = "python" } +gcp-queue = { git = "https://github.com/bcgov/sbc-connect-common.git", subdirectory = "python/gcp-queue", branch = "main" } +structured-logging = { git = "https://github.com/bcgov/sbc-connect-common.git", subdirectory = "python/structured-logging", branch = "main" } + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/codecov.yaml b/codecov.yaml index 1e477fb935..8d5dc0b171 100644 --- a/codecov.yaml +++ b/codecov.yaml @@ -15,7 +15,6 @@ coverage: target: 80% flags: - authapi - - statusapi - eventlistenerqueue - accountmailer ui: @@ -44,10 +43,6 @@ flags: paths: - auth-api/src/auth_api carryforward: true - statusapi: - paths: - - status-api/src/status_api - carryforward: true authweb: paths: - auth-web/src diff --git a/queue_services/account-mailer/Dockerfile b/queue_services/account-mailer/Dockerfile index 80dba6df31..c6fb0a9caf 100644 --- a/queue_services/account-mailer/Dockerfile +++ b/queue_services/account-mailer/Dockerfile @@ -1,35 +1,80 @@ -FROM python:3.8.5-buster +FROM python:3.12.5-bullseye as development_build +USER 
root ARG VCS_REF="missing" ARG BUILD_DATE="missing" ENV VCS_REF=${VCS_REF} ENV BUILD_DATE=${BUILD_DATE} +ENV PORT=8080 LABEL org.label-schema.vcs-ref=${VCS_REF} \ org.label-schema.build-date=${BUILD_DATE} -USER root +LABEL vendor="BCROS" + +ARG APP_ENV \ + # Needed for fixing permissions of files created by Docker: + UID=1000 \ + GID=1000 + +ENV APP_ENV=${APP_ENV} \ + # python: + PYTHONFAULTHANDLER=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONHASHSEED=random \ + PYTHONDONTWRITEBYTECODE=1 \ + # pip: + PIP_NO_CACHE_DIR=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + PIP_DEFAULT_TIMEOUT=100 \ + PIP_ROOT_USER_ACTION=ignore \ + # poetry: + POETRY_VERSION=1.8.3 \ + POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_CREATE=false \ + POETRY_CACHE_DIR='/var/cache/pypoetry' \ + POETRY_HOME='/usr/local' + +SHELL ["/bin/bash", "-eo", "pipefail", "-c"] -# Create working directory -RUN mkdir /opt/app-root && chmod 755 /opt/app-root -WORKDIR /opt/app-root +RUN apt-get update && apt-get upgrade -y \ + && apt-get install --no-install-recommends -y \ + bash \ + build-essential \ + curl \ + git \ + libpq-dev \ + && curl -sSL 'https://install.python-poetry.org' | python3 - \ + && poetry --version \ + && poetry config installer.max-workers 1 \ + # Cleaning cache: + && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ + && apt-get clean -y && rm -rf /var/lib/apt/lists/* -# Install the requirements -COPY ./requirements.txt . +WORKDIR /code -RUN pip install --upgrade pip -RUN pip install --no-cache-dir -r requirements.txt +RUN groupadd -g "${GID}" -r web \ + && useradd -d '/code' -g web -l -r -u "${UID}" web \ + && chown web:web -R '/code' -COPY . . +COPY --chown=web:web ./poetry.lock ./pyproject.toml /code/ -RUN pip install . 
+COPY --chown=web:web ./src /code/src +COPY --chown=web:web ./README.md /code -USER 1001 +RUN --mount=type=cache,target="$POETRY_CACHE_DIR" \ + echo "$APP_ENV" \ + && poetry version \ + && poetry run pip install -U pip \ + && poetry install \ + $(if [ -z ${APP_ENV+x} ] || [ "$APP_ENV" = 'production' ]; then echo '--only main'; fi) \ + --no-interaction --no-ansi -# Set Python path -ENV PYTHONPATH=/opt/app-root/src +# Running as non-root user: +USER web -#EXPOSE 8080 +FROM development_build AS production_build +COPY --chown=web:web . /code -CMD ["gunicorn", "-b 0.0.0.0:8080", "app:app"] +CMD gunicorn --bind 0.0.0.0:${PORT} --config /code/gunicorn_config.py app:app diff --git a/queue_services/account-mailer/MANIFEST.in b/queue_services/account-mailer/MANIFEST.in deleted file mode 100644 index 1a342bdebf..0000000000 --- a/queue_services/account-mailer/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include requirements.txt -include config.py -include logging.conf -include LICENSE -include README.md \ No newline at end of file diff --git a/queue_services/account-mailer/Makefile b/queue_services/account-mailer/Makefile index 2d7772dd2f..0c8220463a 100644 --- a/queue_services/account-mailer/Makefile +++ b/queue_services/account-mailer/Makefile @@ -15,7 +15,8 @@ DOCKER_NAME:=account-mailer setup: clean install install-dev ## Setup the project clean: clean-build clean-pyc clean-test ## Clean the project - rm -rf venv/ + rm -rf .venv/ + rm -rf poetry.lock clean-build: ## Clean build files rm -fr build/ @@ -37,24 +38,18 @@ clean-test: ## clean test files rm -fr htmlcov/ build-req: clean ## Upgrade requirements - test -f venv/bin/activate || python3 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . 
venv/bin/activate ;\ - pip install --upgrade pip ;\ - pip install -Ur requirements/prod.txt ;\ - pip freeze | sort > requirements.txt ;\ - cat requirements/repo-libraries.txt >> requirements.txt ;\ - pip install -Ur requirements/repo-libraries.txt + update: ## Upgrade lock + poetry update install: clean ## Install python virtrual environment - test -f venv/bin/activate || python3 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . venv/bin/activate ;\ - pip install --upgrade pip ;\ - pip install -Ur requirements.txt + unset HOME ## unset HOME because it's in the DEV .env file, will cause permissions issues + pip install poetry ;\ + poetry config virtualenvs.in-project true ;\ + poetry install install-dev: ## Install local application - . venv/bin/activate ; \ - pip install -Ur requirements/dev.txt; \ - pip install -e . + poetry add --dev pylint astroid + poetry install --with dev ################################################################################# # COMMANDS - CI # @@ -62,15 +57,15 @@ install-dev: ## Install local application ci: lint flake8 test ## CI flow pylint: ## Linting with pylint - . venv/bin/activate && pylint --rcfile=setup.cfg src/$(PROJECT_NAME) + poetry run pylint --rcfile=setup.cfg src/$(PROJECT_NAME) flake8: ## Linting with flake8 - . venv/bin/activate && flake8 src/$(PROJECT_NAME) tests + poetry run flake8 src/$(PROJECT_NAME) tests lint: pylint flake8 ## run all lint type scripts test: ## Unit testing - . venv/bin/activate && pytest + poetry run pytest mac-cov: test ## Run the coverage report and display in a browser window (mac) @open -a "Google Chrome" htmlcov/index.html @@ -131,7 +126,7 @@ tag: push ## tag image ################################################################################# run: ## Run the project in local - . 
venv/bin/activate && python -m flask run -p 5002 + poetry run flask run -p 5000 ################################################################################# # Self Documenting Commands # diff --git a/queue_services/account-mailer/app.py b/queue_services/account-mailer/app.py index a0973ab7e5..724562ebb4 100755 --- a/queue_services/account-mailer/app.py +++ b/queue_services/account-mailer/app.py @@ -18,8 +18,10 @@ import os from account_mailer import create_app + + app = create_app() if __name__ == '__main__': - server_port = os.environ.get('PORT', '5002') + server_port = os.environ.get('PORT', '8080') app.run(debug=False, port=server_port, host='0.0.0.0') diff --git a/queue_services/account-mailer/devops/gcp/clouddeploy.yaml b/queue_services/account-mailer/devops/gcp/clouddeploy.yaml new file mode 100644 index 0000000000..00bbf5c7e3 --- /dev/null +++ b/queue_services/account-mailer/devops/gcp/clouddeploy.yaml @@ -0,0 +1,75 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: deploy.cloud.google.com/v1 +kind: DeliveryPipeline +metadata: + name: account-mailer-pipeline +description: Deployment pipeline +serialPipeline: + stages: + - targetId: gtksf3-dev + profiles: [dev] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "development" + deploy-project-id: "gtksf3-dev" + service-name: "account-mailer-dev" + container-name: "account-mailer-dev" + cloudsql-instances: "gtksf3-dev:northamerica-northeast1:auth-db-dev" + service-account: "sa-api@gtksf3-dev.iam.gserviceaccount.com" + - targetId: gtksf3-test + profiles: [test] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "development" + deploy-project-id: "gtksf3-test" + service-name: "account-mailer-test" + container-name: "account-mailer-test" + cloudsql-instances: "gtksf3-test:northamerica-northeast1:auth-db-test" + service-account: "sa-api@gtksf3-test.iam.gserviceaccount.com" + - targetId: gtksf3-sandbox + profiles: [sandbox] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "production" + deploy-project-id: "gtksf3-tools" + service-name: "account-mailer-sandbox" + container-name: "account-mailer-sandbox" + cloudsql-instances: "gtksf3-tools:northamerica-northeast1:auth-db-sandbox" + service-account: "sa-api@gtksf3-tools.iam.gserviceaccount.com" + - targetId: gtksf3-prod + profiles: [prod] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "production" + deploy-project-id: "gtksf3-prod" + service-name: "account-mailer-prod" + container-name: "account-mailer-prod" + cloudsql-instances: "gtksf3-prod:northamerica-northeast1:auth-db-prod" + service-account: "sa-api@gtksf3-prod.iam.gserviceaccount.com" + max-scale: "10" + container-concurrency: "20" diff --git a/queue_services/account-mailer/devops/vaults.gcp.env b/queue_services/account-mailer/devops/vaults.gcp.env new file mode 100644 index 0000000000..a7fb4e2464 --- /dev/null +++ 
b/queue_services/account-mailer/devops/vaults.gcp.env @@ -0,0 +1,33 @@ +AUTH_LD_SDK_KEY="op://launchdarkly/$APP_ENV/business-api/BUSINESS_API_LD_SDK_KEY" +DATABASE_USERNAME="op://database/$APP_ENV/auth-db-gcp/DATABASE_USERNAME" +DATABASE_PASSWORD="op://database/$APP_ENV/auth-db-gcp/DATABASE_PASSWORD" +DATABASE_PORT="op://database/$APP_ENV/auth-db-gcp/DATABASE_PORT" +DATABASE_NAME="op://database/$APP_ENV/auth-db-gcp/DATABASE_NAME" +DATABASE_UNIX_SOCKET="op://database/$APP_ENV/auth-db-gcp/DATABASE_UNIX_SOCKET" +JWT_OIDC_ISSUER="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_ISSUER" +KEYCLOAK_BASE_URL="op://keycloak/$APP_ENV/base/KEYCLOAK_BASE_URL" +KEYCLOAK_REALMNAME="op://keycloak/$APP_ENV/base/KEYCLOAK_REALMNAME" +SBC_AUTH_ADMIN_CLIENT_ID="op://keycloak/$APP_ENV/sbc-auth-admin/SBC_AUTH_ADMIN_CLIENT_ID" +SBC_AUTH_ADMIN_CLIENT_SECRET="op://keycloak/$APP_ENV/sbc-auth-admin/SBC_AUTH_ADMIN_CLIENT_SECRET" +PAY_API_URL="op://API/$APP_ENV/pay-api/PAY_API_URL" +PAY_API_VERSION="op://API/$APP_ENV/pay-api/PAY_API_VERSION" +NOTIFY_API_URL="op://API/$APP_ENV/notify-api/NOTIFY_API_URL" +NOTIFY_API_VERSION="op://API/$APP_ENV/notify-api/NOTIFY_API_VERSION" +REPORT_API_URL="op://API/$APP_ENV/report-api/REPORT_API_URL" +REPORT_API_VERSION="op://API/$APP_ENV/report-api/REPORT_API_VERSION" +VPC_CONNECTOR="op://CD/$APP_ENV/account-mailer/VPC_CONNECTOR" +ACCOUNT_MAILER_AUDIENCE_SUB="op://gcp-queue/$APP_ENV/authpay/ACCOUNT_MAILER_AUDIENCE_SUB" +AUTHPAY_SERVICE_ACCOUNT="op://gcp-queue/$APP_ENV/gtksf3/AUTHPAY_SERVICE_ACCOUNT" +MINIO_ENDPOINT="op://minio/$APP_ENV/base/MINIO_ENDPOINT" +MINIO_ACCESS_KEY="op://minio/$APP_ENV/base/MINIO_ACCESS_KEY" +MINIO_ACCESS_SECRET="op://minio/$APP_ENV/base/MINIO_ACCESS_SECRET" +MINIO_BUCKET="op://minio/$APP_ENV/account-mailer/MINIO_BUCKET" +REFUND_REQUEST_RECIPIENTS="op://relationship/$APP_ENV/account-mailer/REFUND_REQUEST_RECIPIENTS" +BCOL_REFUND_REQUEST_RECIPIENTS="op://relationship/$APP_ENV/account-mailer/BCOL_REFUND_REQUEST_RECIPIENTS" 
+PDF_TEMPLATE_PATH="op://relationship/$APP_ENV/account-mailer/PDF_TEMPLATE_PATH" +TEMPLATE_PATH="op://relationship/$APP_ENV/account-mailer/TEMPLATE_PATH" +HTTP_ORIGIN="op://relationship/$APP_ENV/account-mailer/HTTP_ORIGIN" +PAD_TOS_FILE="op://relationship/$APP_ENV/account-mailer/PAD_TOS_FILE" +WEB_APP_URL="op://relationship/$APP_ENV/account-mailer/WEB_APP_URL" +BCOL_ADMIN_EMAIL="op://relationship/$APP_ENV/account-mailer/BCOL_ADMIN_EMAIL" +DASHBOARD_URL="op://web-url/$APP_ENV/business/DASHBOARD_URL" \ No newline at end of file diff --git a/queue_services/account-mailer/devops/vaults.json b/queue_services/account-mailer/devops/vaults.json deleted file mode 100644 index 2d277685d4..0000000000 --- a/queue_services/account-mailer/devops/vaults.json +++ /dev/null @@ -1,51 +0,0 @@ -[ - { - "vault": "shared", - "application": [ - "api-endpoints" - ] - }, - { - "vault": "keycloak", - "application": [ - "base", - "jwt-base", - "sbc-auth-admin" - ] - }, - { - "vault": "minio", - "application": [ - "base", - "account-mailer" - ] - }, - { - "vault": "relationship", - "application": [ - "postgres-auth", - "account-mailer", - "jwt" - ] - }, - { - "vault": "sentry", - "application": [ - "relationship-api" - ] - }, - { - "vault": "launchdarkly", - "application": [ - "auth" - ] - }, - { - "vault": "gcp-queue", - "application": [ - "authpay", - "topics", - "gtksf3" - ] - } -] diff --git a/auth-api/src/auth_api/utils/util_logging.py b/queue_services/account-mailer/gunicorn_config.py similarity index 50% rename from auth-api/src/auth_api/utils/util_logging.py rename to queue_services/account-mailer/gunicorn_config.py index bbefdbb520..ffe23fde75 100644 --- a/auth-api/src/auth_api/utils/util_logging.py +++ b/queue_services/account-mailer/gunicorn_config.py @@ -1,4 +1,4 @@ -# Copyright © 2019 Province of British Columbia +# Copyright © 2024 Province of British Columbia # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance 
with the License. @@ -11,19 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Centralized setup of logging for the service.""" -import logging.config -import sys -from os import path +"""The configuration for gunicorn, which picks up the + runtime options from environment variables +""" +import os -def setup_logging(conf): - """Create the services logger. - TODO should be reworked to load in the proper loggers and remove others - """ - if conf and path.isfile(conf): - logging.config.fileConfig(conf) - print(f'Configure logging, from conf:{conf}', file=sys.stdout) - else: - print(f'Unable to configure logging, attempted conf:{conf}', file=sys.stderr) +workers = int(os.environ.get('GUNICORN_PROCESSES', '1')) # pylint: disable=invalid-name +threads = int(os.environ.get('GUNICORN_THREADS', '1')) # pylint: disable=invalid-name + +forwarded_allow_ips = '*' # pylint: disable=invalid-name +secure_scheme_headers = {'X-Forwarded-Proto': 'https'} # pylint: disable=invalid-name diff --git a/queue_services/account-mailer/poetry.lock b/queue_services/account-mailer/poetry.lock new file mode 100644 index 0000000000..3a944ff352 --- /dev/null +++ b/queue_services/account-mailer/poetry.lock @@ -0,0 +1,3127 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "aiohappyeyeballs" +version = "2.4.3" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, +] + +[[package]] +name = "aiohttp" +version = "3.10.8" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.10.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a1ba7bc139592339ddeb62c06486d0fa0f4ca61216e14137a40d626c81faf10c"}, + {file = "aiohttp-3.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85e4d7bd05d18e4b348441e7584c681eff646e3bf38f68b2626807f3add21aa2"}, + {file = "aiohttp-3.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69de056022e7abf69cb9fec795515973cc3eeaff51e3ea8d72a77aa933a91c52"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3587506898d4a404b33bd19689286ccf226c3d44d7a73670c8498cd688e42c"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe285a697c851734285369614443451462ce78aac2b77db23567507484b1dc6f"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10c7932337285a6bfa3a5fe1fd4da90b66ebfd9d0cbd1544402e1202eb9a8c3e"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd9716ef0224fe0d0336997eb242f40619f9f8c5c57e66b525a1ebf9f1d8cebe"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ceacea31f8a55cdba02bc72c93eb2e1b77160e91f8abd605969c168502fd71eb"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:9721554bfa9e15f6e462da304374c2f1baede3cb06008c36c47fa37ea32f1dc4"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:22cdeb684d8552490dd2697a5138c4ecb46f844892df437aaf94f7eea99af879"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e56bb7e31c4bc79956b866163170bc89fd619e0581ce813330d4ea46921a4881"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3a95d2686bc4794d66bd8de654e41b5339fab542b2bca9238aa63ed5f4f2ce82"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d82404a0e7b10e0d7f022cf44031b78af8a4f99bd01561ac68f7c24772fed021"}, + {file = "aiohttp-3.10.8-cp310-cp310-win32.whl", hash = "sha256:4e10b04542d27e21538e670156e88766543692a0a883f243ba8fad9ddea82e53"}, + {file = "aiohttp-3.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:680dbcff5adc7f696ccf8bf671d38366a1f620b5616a1d333d0cb33956065395"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:33a68011a38020ed4ff41ae0dbf4a96a202562ecf2024bdd8f65385f1d07f6ef"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c7efa6616a95e3bd73b8a69691012d2ef1f95f9ea0189e42f338fae080c2fc6"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb9b9764cfb4459acf01c02d2a59d3e5066b06a846a364fd1749aa168efa2be"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7f270f4ca92760f98a42c45a58674fff488e23b144ec80b1cc6fa2effed377"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6984dda9d79064361ab58d03f6c1e793ea845c6cfa89ffe1a7b9bb400dfd56bd"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f6d47e392c27206701565c8df4cac6ebed28fdf6dcaea5b1eea7a4631d8e6db"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a72f89aea712c619b2ca32c6f4335c77125ede27530ad9705f4f349357833695"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36074b26f3263879ba8e4dbd33db2b79874a3392f403a70b772701363148b9f"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e32148b4a745e70a255a1d44b5664de1f2e24fcefb98a75b60c83b9e260ddb5b"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5aa1a073514cf59c81ad49a4ed9b5d72b2433638cd53160fd2f3a9cfa94718db"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d3a79200a9d5e621c4623081ddb25380b713c8cf5233cd11c1aabad990bb9381"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e45fdfcb2d5bcad83373e4808825b7512953146d147488114575780640665027"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f78e2a78432c537ae876a93013b7bc0027ba5b93ad7b3463624c4b6906489332"}, + {file = "aiohttp-3.10.8-cp311-cp311-win32.whl", hash = "sha256:f8179855a4e4f3b931cb1764ec87673d3fbdcca2af496c8d30567d7b034a13db"}, + {file = "aiohttp-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:ef9b484604af05ca745b6108ca1aaa22ae1919037ae4f93aaf9a37ba42e0b835"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ab2d6523575fc98896c80f49ac99e849c0b0e69cc80bf864eed6af2ae728a52b"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f5d5d5401744dda50b943d8764508d0e60cc2d3305ac1e6420935861a9d544bc"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de23085cf90911600ace512e909114385026b16324fa203cc74c81f21fd3276a"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4618f0d2bf523043866a9ff8458900d8eb0a6d4018f251dae98e5f1fb699f3a8"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:21c1925541ca84f7b5e0df361c0a813a7d6a56d3b0030ebd4b220b8d232015f9"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:497a7d20caea8855c5429db3cdb829385467217d7feb86952a6107e033e031b9"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c887019dbcb4af58a091a45ccf376fffe800b5531b45c1efccda4bedf87747ea"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40d2d719c3c36a7a65ed26400e2b45b2d9ed7edf498f4df38b2ae130f25a0d01"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57359785f27394a8bcab0da6dcd46706d087dfebf59a8d0ad2e64a4bc2f6f94f"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a961ee6f2cdd1a2be4735333ab284691180d40bad48f97bb598841bfcbfb94ec"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:fe3d79d6af839ffa46fdc5d2cf34295390894471e9875050eafa584cb781508d"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a281cba03bdaa341c70b7551b2256a88d45eead149f48b75a96d41128c240b3"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c6769d71bfb1ed60321363a9bc05e94dcf05e38295ef41d46ac08919e5b00d19"}, + {file = "aiohttp-3.10.8-cp312-cp312-win32.whl", hash = "sha256:a3081246bab4d419697ee45e555cef5cd1def7ac193dff6f50be761d2e44f194"}, + {file = "aiohttp-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:ab1546fc8e00676febc81c548a876c7bde32f881b8334b77f84719ab2c7d28dc"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b1a012677b8e0a39e181e218de47d6741c5922202e3b0b65e412e2ce47c39337"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2df786c96c57cd6b87156ba4c5f166af7b88f3fc05f9d592252fdc83d8615a3c"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:8885ca09d3a9317219c0831276bfe26984b17b2c37b7bf70dd478d17092a4772"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dbf252ac19860e0ab56cd480d2805498f47c5a2d04f5995d8d8a6effd04b48c"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2036479b6b94afaaca7d07b8a68dc0e67b0caf5f6293bb6a5a1825f5923000"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:365783e1b7c40b59ed4ce2b5a7491bae48f41cd2c30d52647a5b1ee8604c68ad"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:270e653b5a4b557476a1ed40e6b6ce82f331aab669620d7c95c658ef976c9c5e"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8960fabc20bfe4fafb941067cda8e23c8c17c98c121aa31c7bf0cdab11b07842"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f21e8f2abed9a44afc3d15bba22e0dfc71e5fa859bea916e42354c16102b036f"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fecd55e7418fabd297fd836e65cbd6371aa4035a264998a091bbf13f94d9c44d"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:badb51d851358cd7535b647bb67af4854b64f3c85f0d089c737f75504d5910ec"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e860985f30f3a015979e63e7ba1a391526cdac1b22b7b332579df7867848e255"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:71462f8eeca477cbc0c9700a9464e3f75f59068aed5e9d4a521a103692da72dc"}, + {file = "aiohttp-3.10.8-cp313-cp313-win32.whl", hash = "sha256:177126e971782769b34933e94fddd1089cef0fe6b82fee8a885e539f5b0f0c6a"}, + {file = "aiohttp-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:98a4eb60e27033dee9593814ca320ee8c199489fbc6b2699d0f710584db7feb7"}, + {file = 
"aiohttp-3.10.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ffef3d763e4c8fc97e740da5b4d0f080b78630a3914f4e772a122bbfa608c1db"}, + {file = "aiohttp-3.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:597128cb7bc5f068181b49a732961f46cb89f85686206289d6ccb5e27cb5fbe2"}, + {file = "aiohttp-3.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f23a6c1d09de5de89a33c9e9b229106cb70dcfdd55e81a3a3580eaadaa32bc92"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da57af0c54a302b7c655fa1ccd5b1817a53739afa39924ef1816e7b7c8a07ccb"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7a6af57091056a79a35104d6ec29d98ec7f1fb7270ad9c6fff871b678d1ff8"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32710d6b3b6c09c60c794d84ca887a3a2890131c0b02b3cefdcc6709a2260a7c"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b91f4f62ad39a8a42d511d66269b46cb2fb7dea9564c21ab6c56a642d28bff5"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:471a8c47344b9cc309558b3fcc469bd2c12b49322b4b31eb386c4a2b2d44e44a"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc0e7f91705445d79beafba9bb3057dd50830e40fe5417017a76a214af54e122"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:85431c9131a9a0f65260dc7a65c800ca5eae78c4c9931618f18c8e0933a0e0c1"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:b91557ee0893da52794b25660d4f57bb519bcad8b7df301acd3898f7197c5d81"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:4954e6b06dd0be97e1a5751fc606be1f9edbdc553c5d9b57d72406a8fbd17f9d"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:a087c84b4992160ffef7afd98ef24177c8bd4ad61c53607145a8377457385100"}, + {file = "aiohttp-3.10.8-cp38-cp38-win32.whl", hash = "sha256:e1f0f7b27171b2956a27bd8f899751d0866ddabdd05cbddf3520f945130a908c"}, + {file = "aiohttp-3.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:c4916070e12ae140110aa598031876c1bf8676a36a750716ea0aa5bd694aa2e7"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5284997e3d88d0dfb874c43e51ae8f4a6f4ca5b90dcf22995035187253d430db"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9443d9ebc5167ce1fbb552faf2d666fb22ef5716a8750be67efd140a7733738c"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b667e2a03407d79a76c618dc30cedebd48f082d85880d0c9c4ec2faa3e10f43e"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98fae99d5c2146f254b7806001498e6f9ffb0e330de55a35e72feb7cb2fa399b"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8296edd99d0dd9d0eb8b9e25b3b3506eef55c1854e9cc230f0b3f885f680410b"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ce46dfb49cfbf9e92818be4b761d4042230b1f0e05ffec0aad15b3eb162b905"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c38cfd355fd86c39b2d54651bd6ed7d63d4fe3b5553f364bae3306e2445f847"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:713dff3f87ceec3bde4f3f484861464e722cf7533f9fa6b824ec82bb5a9010a7"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21a72f4a9c69a8567a0aca12042f12bba25d3139fd5dd8eeb9931f4d9e8599cd"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6d1ad868624f6cea77341ef2877ad4e71f7116834a6cd7ec36ec5c32f94ee6ae"}, + {file = 
"aiohttp-3.10.8-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a78ba86d5a08207d1d1ad10b97aed6ea48b374b3f6831d02d0b06545ac0f181e"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:aff048793d05e1ce05b62e49dccf81fe52719a13f4861530706619506224992b"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d088ca05381fd409793571d8e34eca06daf41c8c50a05aeed358d2d340c7af81"}, + {file = "aiohttp-3.10.8-cp39-cp39-win32.whl", hash = "sha256:ee97c4e54f457c366e1f76fbbf3e8effee9de57dae671084a161c00f481106ce"}, + {file = "aiohttp-3.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:d95ae4420669c871667aad92ba8cce6251d61d79c1a38504621094143f94a8b4"}, + {file = "aiohttp-3.10.8.tar.gz", hash = "sha256:21f8225f7dc187018e8433c9326be01477fb2810721e048b33ac49091b19fb4a"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.12.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "alembic" +version = "1.13.3" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"}, + {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "asn1crypto" +version = "1.5.1" +description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" +optional = false +python-versions = "*" +files = [ + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, +] + +[[package]] +name = "astroid" +version = "3.2.4" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, +] + +[[package]] +name = "attrs" +version = "24.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + +[[package]] +name = "auth-api" +version = "3.0.5" +description = "" +optional = false +python-versions = "^3.12" +files = [] +develop = false + +[package.dependencies] +aiohttp = "^3.10.2" +bcrypt = "^4.2.0" +build-deps = {git = "https://github.com/bcgov/sbc-auth.git", rev = "feature-gcp-migration", subdirectory = "build-deps"} +cachelib = "0.9.0" +cattrs = "^23.2.3" +flask-caching = "2.3.0" +flask-cors = "^5.0.0" +flask-jwt-oidc = {git = 
"https://github.com/seeker25/flask-jwt-oidc.git", branch = "main"} +flask-mail = "^0.10.0" +flask-marshmallow = "^1.2.1" +flask-migrate = "^4.0.7" +flask-moment = "^1.0.6" +flask-sqlalchemy = "^3.1.1" +gunicorn = "^22.0.0" +marshmallow-sqlalchemy = "^1.0.0" +minio = "^7.2.7" +orjson = "^3.10.7" +pg8000 = "^1.31.2" +psycopg2 = "^2.9.9" +pyhumps = "^3.8.0" +sql-versioning = {git = "https://github.com/bcgov/sbc-connect-common.git", branch = "main", subdirectory = "python/sql-versioning"} +sqlalchemy-utils = "^0.41.2" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-auth.git" +reference = "feature-gcp-migration" +resolved_reference = "450c8ee6f42aec444fc06e2b9fd39f60fa053d91" +subdirectory = "auth-api" + +[[package]] +name = "bcrypt" +version = "4.2.0" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, + {file = 
"bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, + {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, + {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, + {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, + {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = 
"sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, + {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, + {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "build-deps" +version = "1.0.0" +description = "common dependencies for all auth tools" +optional = false +python-versions = "^3.12" +files = [] +develop = false + +[package.dependencies] +attrs = "24.2.0" +CacheControl = "0.14.0" +cachetools = "5.5.0" +certifi = "2024.8.30" +flask = "3.0.2" +gcp-queue = {git = "https://github.com/bcgov/sbc-connect-common.git", branch = "main", subdirectory = "python/gcp-queue"} +itsdangerous = "2.1.2" +jinja2 = "3.1.3" +jsonschema = "4.17.3" +launchdarkly-server-sdk = "9.5.0" +MarkupSafe 
= "2.1.1" +python-dotenv = "^1.0.1" +requests = "2.32.3" +sbc-common-components = {git = "https://github.com/bolyachevets/sbc-common-components.git", rev = "camel_case_empty_dict", subdirectory = "python"} +structured-logging = {git = "https://github.com/bcgov/sbc-connect-common.git", branch = "main", subdirectory = "python/structured-logging"} +Werkzeug = "3.0.0" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-auth.git" +reference = "feature-gcp-migration" +resolved_reference = "450c8ee6f42aec444fc06e2b9fd39f60fa053d91" +subdirectory = "build-deps" + +[[package]] +name = "cachecontrol" +version = "0.14.0" +description = "httplib2 caching for requests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, + {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, +] + +[package.dependencies] +msgpack = ">=0.5.2,<2.0.0" +requests = ">=2.16.0" + +[package.extras] +dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] +filecache = ["filelock (>=3.8.0)"] +redis = ["redis (>=2.10.5)"] + +[[package]] +name = "cachelib" +version = "0.9.0" +description = "A collection of cache libraries in the same API interface." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, + {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", 
hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = 
"sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = 
"coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = 
"sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = 
"sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "dill" +version = "0.3.9" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "ecdsa" +version = "0.19.0" +description = "ECDSA cryptographic signature library (pure python)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" +files = [ + {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, + {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, +] + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + +[[package]] +name = "expiringdict" +version = "1.2.2" +description = "Dictionary with auto-expiring values for caching purposes" +optional = false +python-versions = "*" +files = [ + {file = "expiringdict-1.2.2-py3-none-any.whl", hash = "sha256:09a5d20bc361163e6432a874edd3179676e935eb81b925eccef48d409a8a45e8"}, + {file = "expiringdict-1.2.2.tar.gz", hash = "sha256:300fb92a7e98f15b05cf9a856c1415b3bc4f2e132be07daa326da6414c23ee09"}, +] + +[package.extras] +tests = ["coverage", "coveralls", "dill", "mock", "nose"] + +[[package]] +name = "flake8" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = 
"flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" + +[[package]] +name = "flake8-blind-except" +version = "0.2.1" +description = "A flake8 extension that checks for blind except: statements" +optional = false +python-versions = "*" +files = [ + {file = "flake8-blind-except-0.2.1.tar.gz", hash = "sha256:f25a575a9dcb3eeb3c760bf9c22db60b8b5a23120224ed1faa9a43f75dd7dd16"}, +] + +[[package]] +name = "flake8-debugger" +version = "4.1.2" +description = "ipdb/pdb statement checker plugin for flake8" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, + {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, +] + +[package.dependencies] +flake8 = ">=3.0" +pycodestyle = "*" + +[[package]] +name = "flake8-docstrings" +version = "1.7.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, +] + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-isort" +version = "6.1.1" +description = "flake8 plugin that integrates isort" +optional = false +python-versions = ">=3.8" +files = [ + {file = "flake8_isort-6.1.1-py3-none-any.whl", hash = "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"}, + {file = "flake8_isort-6.1.1.tar.gz", hash = 
"sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"}, +] + +[package.dependencies] +flake8 = "*" +isort = ">=5.0.0,<6" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "flake8-print" +version = "5.0.0" +description = "print statement checker plugin for flake8" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-print-5.0.0.tar.gz", hash = "sha256:76915a2a389cc1c0879636c219eb909c38501d3a43cc8dae542081c9ba48bdf9"}, + {file = "flake8_print-5.0.0-py3-none-any.whl", hash = "sha256:84a1a6ea10d7056b804221ac5e62b1cee1aefc897ce16f2e5c42d3046068f5d8"}, +] + +[package.dependencies] +flake8 = ">=3.0" +pycodestyle = "*" + +[[package]] +name = "flake8-quotes" +version = "3.4.0" +description = "Flake8 lint for quotes." +optional = false +python-versions = "*" +files = [ + {file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"}, +] + +[package.dependencies] +flake8 = "*" +setuptools = "*" + +[[package]] +name = "flask" +version = "3.0.2" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"}, + {file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-caching" +version = "2.3.0" +description = "Adds caching support to Flask applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Flask_Caching-2.3.0-py3-none-any.whl", hash = "sha256:51771c75682e5abc1483b78b96d9131d7941dc669b073852edfa319dd4e29b6e"}, + {file = "flask_caching-2.3.0.tar.gz", hash = "sha256:d7e4ca64a33b49feb339fcdd17e6ba25f5e01168cf885e53790e885f83a4d2cf"}, +] + +[package.dependencies] +cachelib = ">=0.9.0,<0.10.0" +Flask = "*" + +[[package]] +name = "flask-cors" +version = "5.0.0" +description = "A Flask extension adding a decorator for CORS support" +optional = false +python-versions = "*" +files = [ + {file = "Flask_Cors-5.0.0-py2.py3-none-any.whl", hash = "sha256:b9e307d082a9261c100d8fb0ba909eec6a228ed1b60a8315fd85f783d61910bc"}, + {file = "flask_cors-5.0.0.tar.gz", hash = "sha256:5aadb4b950c4e93745034594d9f3ea6591f734bb3662e16e255ffbf5e89c88ef"}, +] + +[package.dependencies] +Flask = ">=0.9" + +[[package]] +name = "flask-jwt-oidc" +version = "0.7.0" +description = "Opinionated flask oidc client" +optional = false +python-versions = "^3.9" +files = [] +develop = false + +[package.dependencies] +cachelib = "0.*" +Flask = ">=2" +python-jose = "^3.3.0" +six = "^1.16.0" + +[package.source] +type = "git" +url = "https://github.com/seeker25/flask-jwt-oidc.git" +reference = "main" +resolved_reference = "d208d4643e3b17358f7295bee0f955e67ba6ac88" + +[[package]] +name = "flask-mail" +version = "0.10.0" +description = "Flask extension for sending email" +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask_mail-0.10.0-py3-none-any.whl", hash = "sha256:a451e490931bb3441d9b11ebab6812a16bfa81855792ae1bf9c1e1e22c4e51e7"}, + {file = "flask_mail-0.10.0.tar.gz", hash = "sha256:44083e7b02bbcce792209c06252f8569dd5a325a7aaa76afe7330422bd97881d"}, +] + +[package.dependencies] +blinker = "*" +flask = "*" + +[[package]] +name = "flask-marshmallow" +version = "1.2.1" +description = "Flask + marshmallow for beautiful APIs" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"flask_marshmallow-1.2.1-py3-none-any.whl", hash = "sha256:10b5048ecfaa26f7c8d0aed7d81083164450e6be8e81c04b3d4a586b3f7b6678"}, + {file = "flask_marshmallow-1.2.1.tar.gz", hash = "sha256:00ee96399ed664963afff3b5d6ee518640b0f91dbc2aace2b5abcf32f40ef23a"}, +] + +[package.dependencies] +Flask = ">=2.2" +marshmallow = ">=3.0.0" + +[package.extras] +dev = ["flask-marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["Sphinx (==7.2.6)", "marshmallow-sqlalchemy (>=0.19.0)", "sphinx-issues (==4.0.0)"] +sqlalchemy = ["flask-sqlalchemy (>=3.0.0)", "marshmallow-sqlalchemy (>=0.29.0)"] +tests = ["flask-marshmallow[sqlalchemy]", "pytest"] + +[[package]] +name = "flask-migrate" +version = "4.0.7" +description = "SQLAlchemy database migrations for Flask applications using Alembic." +optional = false +python-versions = ">=3.6" +files = [ + {file = "Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622"}, + {file = "Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617"}, +] + +[package.dependencies] +alembic = ">=1.9.0" +Flask = ">=0.9" +Flask-SQLAlchemy = ">=1.0" + +[[package]] +name = "flask-moment" +version = "1.0.6" +description = "Formatting of dates and times in Flask templates using moment.js." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "Flask_Moment-1.0.6-py3-none-any.whl", hash = "sha256:3ae8baea20a41e99f457b9710ecd1368911dd5133f09a27583eb0dcb3491e31d"}, + {file = "flask_moment-1.0.6.tar.gz", hash = "sha256:2f8969907cbacde4a88319792e8f920ba5c9dd9d99ced2346cad563795302b88"}, +] + +[package.dependencies] +Flask = "*" +packaging = ">=14.1" + +[package.extras] +docs = ["sphinx"] + +[[package]] +name = "flask-opentracing" +version = "1.1.0" +description = "OpenTracing support for Flask applications" +optional = false +python-versions = "*" +files = [ + {file = "Flask-OpenTracing-1.1.0.tar.gz", hash = "sha256:a9a39d367fbe7e9ed9c77b90ac48159c1a3e82982a5abf84d3f4d710d24580ac"}, +] + +[package.dependencies] +Flask = "*" +opentracing = ">=2.0,<3" + +[package.extras] +tests = ["flake8", "flake8-quotes", "mock", "pytest", "pytest-cov"] + +[[package]] +name = "flask-sqlalchemy" +version = "3.1.1" +description = "Add SQLAlchemy support to your Flask application." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, + {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, +] + +[package.dependencies] +flask = ">=2.2.5" +sqlalchemy = ">=2.0.16" + +[[package]] +name = "freezegun" +version = "1.5.1" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, + {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements 
collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = 
"frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = 
"frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = 
"sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "gcp-queue" +version = "0.3.0" +description = "" +optional = false +python-versions = "^3.8" +files = [] +develop = false + +[package.dependencies] +flask = ">=1" +google-auth = "^2.28.2" +google-cloud-pubsub = "^2.20.2" +simple-cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py.git"} + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/gcp-queue" + +[[package]] +name = "google-api-core" +version = "1.34.1" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-1.34.1.tar.gz", hash = "sha256:3399c92887a97d33038baa4bfd3bf07acc05d474b0171f333e1f641c1364e552"}, + {file = "google_api_core-1.34.1-py3-none-any.whl", hash = "sha256:52bcc9d9937735f8a3986fa0bbf9135ae9cf5393a722387e5eced520e39c774a"}, +] + +[package.dependencies] +google-auth = ">=1.25.0,<3.0dev" +googleapis-common-protos = ">=1.56.2,<2.0dev" +grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.0.0dev" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] +grpcio-gcp = ["grpcio-gcp 
(>=0.2.2,<1.0dev)"] + +[[package]] +name = "google-auth" +version = "2.28.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.28.2.tar.gz", hash = "sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30"}, + {file = "google_auth-2.28.2-py2.py3-none-any.whl", hash = "sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-cloud-pubsub" +version = "2.20.2" +description = "Google Cloud Pub/Sub API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-pubsub-2.20.2.tar.gz", hash = "sha256:236046ea860230c788e4d4ea2d0f12299cdf1d94ac71ec42ed1a0ce1ba28d66f"}, + {file = "google_cloud_pubsub-2.20.2-py2.py3-none-any.whl", hash = "sha256:9607bb8f973cbd123b5fa2db9c0aa38501a1a42f18593739067cd307263d090f"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +grpcio = ">=1.51.3,<2.0dev" +grpcio-status = ">=1.33.2" +proto-plus = {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[package.extras] +libcst = ["libcst (>=0.3.10)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs 
used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "greenlet" +version = "3.1.1" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = 
"greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = 
"greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = 
"greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.0" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, + {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "grpcio" +version = "1.64.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio-1.64.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:3b09c3d9de95461214a11d82cc0e6a46a6f4e1f91834b50782f932895215e5db"}, + {file = "grpcio-1.64.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7e013428ab472892830287dd082b7d129f4d8afef49227a28223a77337555eaa"}, + {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:02cc9cc3f816d30f7993d0d408043b4a7d6a02346d251694d8ab1f78cc723e7e"}, + {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f5de082d936e0208ce8db9095821361dfa97af8767a6607ae71425ac8ace15c"}, + {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7b7bf346391dffa182fba42506adf3a84f4a718a05e445b37824136047686a1"}, + {file = "grpcio-1.64.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b2cbdfba18408389a1371f8c2af1659119e1831e5ed24c240cae9e27b4abc38d"}, + {file = "grpcio-1.64.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aca4f15427d2df592e0c8f3d38847e25135e4092d7f70f02452c0e90d6a02d6d"}, + {file = "grpcio-1.64.0-cp310-cp310-win32.whl", hash = "sha256:7c1f5b2298244472bcda49b599be04579f26425af0fd80d3f2eb5fd8bc84d106"}, + {file = "grpcio-1.64.0-cp310-cp310-win_amd64.whl", hash = "sha256:73f84f9e5985a532e47880b3924867de16fa1aa513fff9b26106220c253c70c5"}, + {file = "grpcio-1.64.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2a18090371d138a57714ee9bffd6c9c9cb2e02ce42c681aac093ae1e7189ed21"}, + {file = "grpcio-1.64.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:59c68df3a934a586c3473d15956d23a618b8f05b5e7a3a904d40300e9c69cbf0"}, + {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b52e1ec7185512103dd47d41cf34ea78e7a7361ba460187ddd2416b480e0938c"}, + {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d598b5d5e2c9115d7fb7e2cb5508d14286af506a75950762aa1372d60e41851"}, + {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01615bbcae6875eee8091e6b9414072f4e4b00d8b7e141f89635bdae7cf784e5"}, + {file = "grpcio-1.64.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0b2dfe6dcace264807d9123d483d4c43274e3f8c39f90ff51de538245d7a4145"}, + {file = "grpcio-1.64.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:7f17572dc9acd5e6dfd3014d10c0b533e9f79cd9517fc10b0225746f4c24b58e"}, + {file = "grpcio-1.64.0-cp311-cp311-win32.whl", hash = "sha256:6ec5ed15b4ffe56e2c6bc76af45e6b591c9be0224b3fb090adfb205c9012367d"}, + {file = "grpcio-1.64.0-cp311-cp311-win_amd64.whl", hash = "sha256:597191370951b477b7a1441e1aaa5cacebeb46a3b0bd240ec3bb2f28298c7553"}, + {file = "grpcio-1.64.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:1ce4cd5a61d4532651079e7aae0fedf9a80e613eed895d5b9743e66b52d15812"}, + {file = "grpcio-1.64.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:650a8150a9b288f40d5b7c1d5400cc11724eae50bd1f501a66e1ea949173649b"}, + {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:8de0399b983f8676a7ccfdd45e5b2caec74a7e3cc576c6b1eecf3b3680deda5e"}, + {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46b8b43ba6a2a8f3103f103f97996cad507bcfd72359af6516363c48793d5a7b"}, + {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a54362f03d4dcfae63be455d0a7d4c1403673498b92c6bfe22157d935b57c7a9"}, + {file = "grpcio-1.64.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1f8ea18b928e539046bb5f9c124d717fbf00cc4b2d960ae0b8468562846f5aa1"}, + {file = "grpcio-1.64.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c56c91bd2923ddb6e7ed28ebb66d15633b03e0df22206f22dfcdde08047e0a48"}, + {file = "grpcio-1.64.0-cp312-cp312-win32.whl", hash = "sha256:874c741c8a66f0834f653a69e7e64b4e67fcd4a8d40296919b93bab2ccc780ba"}, + {file = "grpcio-1.64.0-cp312-cp312-win_amd64.whl", hash = "sha256:0da1d921f8e4bcee307aeef6c7095eb26e617c471f8cb1c454fd389c5c296d1e"}, + {file = "grpcio-1.64.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:c46fb6bfca17bfc49f011eb53416e61472fa96caa0979b4329176bdd38cbbf2a"}, + {file = "grpcio-1.64.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3d2004e85cf5213995d09408501f82c8534700d2babeb81dfdba2a3bff0bb396"}, + {file = 
"grpcio-1.64.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6d5541eb460d73a07418524fb64dcfe0adfbcd32e2dac0f8f90ce5b9dd6c046c"}, + {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f279ad72dd7d64412e10f2443f9f34872a938c67387863c4cd2fb837f53e7d2"}, + {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85fda90b81da25993aa47fae66cae747b921f8f6777550895fb62375b776a231"}, + {file = "grpcio-1.64.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a053584079b793a54bece4a7d1d1b5c0645bdbee729215cd433703dc2532f72b"}, + {file = "grpcio-1.64.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:579dd9fb11bc73f0de061cab5f8b2def21480fd99eb3743ed041ad6a1913ee2f"}, + {file = "grpcio-1.64.0-cp38-cp38-win32.whl", hash = "sha256:23b6887bb21d77649d022fa1859e05853fdc2e60682fd86c3db652a555a282e0"}, + {file = "grpcio-1.64.0-cp38-cp38-win_amd64.whl", hash = "sha256:753cb58683ba0c545306f4e17dabf468d29cb6f6b11832e1e432160bb3f8403c"}, + {file = "grpcio-1.64.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:2186d76a7e383e1466e0ea2b0febc343ffeae13928c63c6ec6826533c2d69590"}, + {file = "grpcio-1.64.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0f30596cdcbed3c98024fb4f1d91745146385b3f9fd10c9f2270cbfe2ed7ed91"}, + {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:d9171f025a196f5bcfec7e8e7ffb7c3535f7d60aecd3503f9e250296c7cfc150"}, + {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf4c8daed18ae2be2f1fc7d613a76ee2a2e28fdf2412d5c128be23144d28283d"}, + {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3550493ac1d23198d46dc9c9b24b411cef613798dc31160c7138568ec26bc9b4"}, + {file = "grpcio-1.64.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3161a8f8bb38077a6470508c1a7301cd54301c53b8a34bb83e3c9764874ecabd"}, + {file = "grpcio-1.64.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:2e8fabe2cc57a369638ab1ad8e6043721014fdf9a13baa7c0e35995d3a4a7618"}, + {file = "grpcio-1.64.0-cp39-cp39-win32.whl", hash = "sha256:31890b24d47b62cc27da49a462efe3d02f3c120edb0e6c46dcc0025506acf004"}, + {file = "grpcio-1.64.0-cp39-cp39-win_amd64.whl", hash = "sha256:5a56797dea8c02e7d3a85dfea879f286175cf4d14fbd9ab3ef2477277b927baa"}, + {file = "grpcio-1.64.0.tar.gz", hash = "sha256:257baf07f53a571c215eebe9679c3058a313fd1d1f7c4eede5a8660108c52d9c"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.64.0)"] + +[[package]] +name = "grpcio-status" +version = "1.48.2" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.48.2.tar.gz", hash = "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"}, + {file = "grpcio_status-1.48.2-py3-none-any.whl", hash = "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.48.2" +protobuf = ">=3.12.0" + +[[package]] +name = "gunicorn" +version = "22.0.0" +description = "WSGI HTTP Server for UNIX" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, + {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = 
"sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-resources" +version = "5.13.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, + {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + +[[package]] +name = "jaeger-client" +version = "4.8.0" +description = "Jaeger Python OpenTracing Tracer implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, +] + +[package.dependencies] +opentracing = ">=2.1,<3.0" +threadloop = ">=1,<2" +thrift = "*" +tornado = ">=4.3" + +[package.extras] +tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-imports", "mock", "mypy", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "pycurl", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-localserver", "pytest-timeout", "pytest-tornado", "tchannel (==2.1.0)"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "launchdarkly-eventsource" +version = "1.2.0" +description = "LaunchDarkly SSE Client" +optional = false +python-versions = ">=3.8" +files = [ + {file = "launchdarkly_eventsource-1.2.0-py3-none-any.whl", hash = "sha256:9b5ec7149e2ad9995be22ad5361deb480c229701e6b0cc799e94aa14f067b77b"}, + {file = "launchdarkly_eventsource-1.2.0.tar.gz", hash = "sha256:8cb3301ec0daeb5e17eaa37b3b65f6660fab851b317e69271185ef2fb42c2fde"}, +] + +[package.dependencies] +urllib3 = ">=1.26.0,<3" + +[[package]] +name = "launchdarkly-server-sdk" +version = "9.5.0" +description = "LaunchDarkly SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"launchdarkly_server_sdk-9.5.0-py3-none-any.whl", hash = "sha256:bf2cf213f9eb71cd43d5f20f2ac9ec9235c693036459e5038a69015a6648c035"}, + {file = "launchdarkly_server_sdk-9.5.0.tar.gz", hash = "sha256:af64d985621a03257107210266c563c5e268ca8320d1d71b5c18d9592d14fef7"}, +] + +[package.dependencies] +certifi = ">=2018.4.16" +expiringdict = ">=1.1.4" +launchdarkly-eventsource = ">=1.1.0,<2.0.0" +pyRFC3339 = ">=1.0" +semver = ">=2.10.2" +urllib3 = ">=1.26.0,<3" + +[package.extras] +consul = ["python-consul (>=1.0.1)"] +dynamodb = ["boto3 (>=1.9.71)"] +redis = ["redis (>=2.10.5)"] +test-filesource = ["pyyaml (>=5.3.1)", "watchdog (>=3.0.0)"] + +[[package]] +name = "lovely-pytest-docker" +version = "1.0.0" +description = "Pytest testing utilities with docker containers." +optional = false +python-versions = "*" +files = [ + {file = "lovely_pytest_docker-1.0.0.tar.gz", hash = "sha256:7283abfe400c31ecc7155f9338c6f5af476f2ab506e1aadb9f7e9a5005e491d6"}, +] + +[package.dependencies] +pytest = "*" +six = "*" + +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = 
"MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = 
"sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] + +[[package]] +name = "marshmallow" +version = "3.22.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "marshmallow-sqlalchemy" +version = "1.1.0" +description = "SQLAlchemy integration with the marshmallow (de)serialization library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow_sqlalchemy-1.1.0-py3-none-any.whl", hash = "sha256:cce261148e4c6ec4ee275f3d29352933380a1afa2fd3933f5e9ecd02fdc16ade"}, + {file = "marshmallow_sqlalchemy-1.1.0.tar.gz", hash = "sha256:2ab092da269dafa8a05d51a58409af71a8d2183958ba47143127dd239e0359d8"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0" +SQLAlchemy = ">=1.4.40,<3.0" + +[package.extras] +dev = ["marshmallow-sqlalchemy[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==1.0.0)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)"] +tests = ["pytest (<9)", "pytest-lazy-fixtures"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe 
checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "minio" +version = "7.2.9" +description = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" +optional = false +python-versions = ">3.8" +files = [ + {file = "minio-7.2.9-py3-none-any.whl", hash = "sha256:fe5523d9c4a4d6cfc07e96905852841bccdb22b22770e1efca4bf5ae8b65774b"}, + {file = "minio-7.2.9.tar.gz", hash = "sha256:a83c2fcd981944602a8dc11e8e07543ed9cda0a9462264e3f46a13171c56bccb"}, +] + +[package.dependencies] +argon2-cffi = "*" +certifi = "*" +pycryptodome = "*" +typing-extensions = "*" +urllib3 = "*" + +[[package]] +name = "msgpack" +version = "1.0.8" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, 
+ {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, + {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, + {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, + {file = 
"msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, + {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, + {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, + {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, + {file = 
"msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, + {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, + {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, + {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, + {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, +] + +[[package]] +name = "multidict" +version = "6.1.0" +description = "multidict implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = 
"multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = 
"multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = 
"multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = 
"multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = 
"multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, 
+ {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, +] + +[[package]] +name = "opentracing" +version = "2.4.0" +description = "OpenTracing API for Python. See documentation at http://opentracing.io" +optional = false +python-versions = "*" +files = [ + {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, +] + +[package.extras] +tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pytest", "pytest-cov", "pytest-mock", "six (>=1.10.0,<2.0)", "sphinx_rtd_theme", "tornado"] + +[[package]] +name = "orjson" +version = "3.10.7" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = 
"orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", 
hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities 
for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pep8-naming" +version = "0.14.1" +description = "Check PEP-8 naming conventions, plugin for flake8" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pep8-naming-0.14.1.tar.gz", hash = "sha256:1ef228ae80875557eb6c1549deafed4dabbf3261cfcafa12f773fe0db9be8a36"}, + {file = "pep8_naming-0.14.1-py3-none-any.whl", hash = "sha256:63f514fc777d715f935faf185dedd679ab99526a7f2f503abb61587877f7b1c5"}, +] + +[package.dependencies] +flake8 = ">=5.0.0" + +[[package]] +name = "pg8000" +version = "1.31.2" +description = "PostgreSQL interface library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pg8000-1.31.2-py3-none-any.whl", hash = "sha256:436c771ede71af4d4c22ba867a30add0bc5c942d7ab27fadbb6934a487ecc8f6"}, + {file = "pg8000-1.31.2.tar.gz", hash = "sha256:1ea46cf09d8eca07fe7eaadefd7951e37bee7fabe675df164f1a572ffb300876"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.2" +scramp = ">=1.4.5" + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "proto-plus" +version = "1.24.0" +description = "Beautiful, Pythonic protocol buffers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<6.0.0dev" + +[package.extras] +testing = ["google-api-core (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "3.19.6" +description = "Protocol Buffers" +optional = false +python-versions = ">=3.5" +files = [ + {file = "protobuf-3.19.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:010be24d5a44be7b0613750ab40bc8b8cedc796db468eae6c779b395f50d1fa1"}, + {file = "protobuf-3.19.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11478547958c2dfea921920617eb457bc26867b0d1aa065ab05f35080c5d9eb6"}, + {file = "protobuf-3.19.6-cp310-cp310-win32.whl", hash = "sha256:559670e006e3173308c9254d63facb2c03865818f22204037ab76f7a0ff70b5f"}, + {file = "protobuf-3.19.6-cp310-cp310-win_amd64.whl", hash = "sha256:347b393d4dd06fb93a77620781e11c058b3b0a5289262f094379ada2920a3730"}, + {file = "protobuf-3.19.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a8ce5ae0de28b51dff886fb922012dad885e66176663950cb2344c0439ecb473"}, + {file = "protobuf-3.19.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90b0d02163c4e67279ddb6dc25e063db0130fc299aefabb5d481053509fae5c8"}, + {file = "protobuf-3.19.6-cp36-cp36m-win32.whl", hash = "sha256:30f5370d50295b246eaa0296533403961f7e64b03ea12265d6dfce3a391d8992"}, + {file = "protobuf-3.19.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0c0714b025ec057b5a7600cb66ce7c693815f897cfda6d6efb58201c472e3437"}, + {file = "protobuf-3.19.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5057c64052a1f1dd7d4450e9aac25af6bf36cfbfb3a1cd89d16393a036c49157"}, + {file = "protobuf-3.19.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:bb6776bd18f01ffe9920e78e03a8676530a5d6c5911934c6a1ac6eb78973ecb6"}, + {file = "protobuf-3.19.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a04134866861b11556a82dd91ea6daf1f4925746b992f277b84013a7cc1229"}, + {file = "protobuf-3.19.6-cp37-cp37m-win32.whl", hash = "sha256:4bc98de3cdccfb5cd769620d5785b92c662b6bfad03a202b83799b6ed3fa1fa7"}, + {file = "protobuf-3.19.6-cp37-cp37m-win_amd64.whl", hash = "sha256:aa3b82ca1f24ab5326dcf4ea00fcbda703e986b22f3d27541654f749564d778b"}, + {file = "protobuf-3.19.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2b2d2913bcda0e0ec9a784d194bc490f5dc3d9d71d322d070b11a0ade32ff6ba"}, + {file = "protobuf-3.19.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d0b635cefebd7a8a0f92020562dead912f81f401af7e71f16bf9506ff3bdbb38"}, + {file = "protobuf-3.19.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a552af4dc34793803f4e735aabe97ffc45962dfd3a237bdde242bff5a3de684"}, + {file = "protobuf-3.19.6-cp38-cp38-win32.whl", hash = "sha256:0469bc66160180165e4e29de7f445e57a34ab68f49357392c5b2f54c656ab25e"}, + {file = "protobuf-3.19.6-cp38-cp38-win_amd64.whl", hash = "sha256:91d5f1e139ff92c37e0ff07f391101df77e55ebb97f46bbc1535298d72019462"}, + {file = "protobuf-3.19.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c0ccd3f940fe7f3b35a261b1dd1b4fc850c8fde9f74207015431f174be5976b3"}, + {file = "protobuf-3.19.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:30a15015d86b9c3b8d6bf78d5b8c7749f2512c29f168ca259c9d7727604d0e39"}, + {file = "protobuf-3.19.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:878b4cd080a21ddda6ac6d1e163403ec6eea2e206cf225982ae04567d39be7b0"}, + {file = "protobuf-3.19.6-cp39-cp39-win32.whl", hash = "sha256:5a0d7539a1b1fb7e76bf5faa0b44b30f812758e989e59c40f77a7dab320e79b9"}, + {file = "protobuf-3.19.6-cp39-cp39-win_amd64.whl", hash = "sha256:bbf5cea5048272e1c60d235c7bd12ce1b14b8a16e76917f371c718bd3005f045"}, + {file = 
"protobuf-3.19.6-py2.py3-none-any.whl", hash = "sha256:14082457dc02be946f60b15aad35e9f5c69e738f80ebbc0900a19bc83734a5a4"}, + {file = "protobuf-3.19.6.tar.gz", hash = "sha256:5f5540d57a43042389e87661c6eaa50f47c19c6176e8cf1c4f287aeefeccb5c4"}, +] + +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = 
"sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.1" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pycodestyle" +version = "2.9.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] + +[[package]] +name = "pycountry" +version = "23.12.11" +description = "ISO country, subdivision, language, currency and script definitions and their translations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycountry-23.12.11-py3-none-any.whl", hash = "sha256:2ff91cff4f40ff61086e773d61e72005fe95de4a57bfc765509db05695dc50ab"}, + {file = "pycountry-23.12.11.tar.gz", hash = 
"sha256:00569d82eaefbc6a490a311bfa84a9c571cff9ddbf8b0a4f4e7b4f868b4ad925"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pycryptodome" +version = "3.21.0" +description = "Cryptographic library for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = "sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53"}, + {file = 
"pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c"}, + {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"}, +] + +[[package]] +name = "pydocstyle" +version = "6.3.0" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = 
"pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] + +[package.dependencies] +snowballstemmer = ">=2.2.0" + +[package.extras] +toml = ["tomli (>=1.2.3)"] + +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] + +[[package]] +name = "pyhamcrest" +version = "2.1.0" +description = "Hamcrest framework for matcher objects" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyhamcrest-2.1.0-py3-none-any.whl", hash = "sha256:f6913d2f392e30e0375b3ecbd7aee79e5d1faa25d345c8f4ff597665dcac2587"}, + {file = "pyhamcrest-2.1.0.tar.gz", hash = "sha256:c6acbec0923d0cb7e72c22af1926f3e7c97b8e8d69fc7498eabacaf7c975bd9c"}, +] + +[package.extras] +dev = ["black", "doc2dash", "flake8", "pyhamcrest[docs,tests]", "pytest-mypy", "towncrier", "tox", "tox-asdf", "twine"] +docs = ["alabaster (>=0.7,<1.0)", "sphinx (>=4.0,<5.0)"] +tests = ["coverage[toml]", "dataclasses", "mypy (!=0.940)", "pytest (>=5.0)", "pytest-mypy-plugins", "pytest-sugar", "pytest-xdist", "pyyaml", "types-dataclasses", "types-mock"] +tests-numpy = ["numpy", "pyhamcrest[tests]"] + +[[package]] +name = "pyhumps" +version = "3.8.0" +description = "🐫 Convert strings (and dictionary keys) between snake case, camel case and pascal case in Python. 
Inspired by Humps for Node" +optional = false +python-versions = "*" +files = [ + {file = "pyhumps-3.8.0-py3-none-any.whl", hash = "sha256:060e1954d9069f428232a1adda165db0b9d8dfdce1d265d36df7fbff540acfd6"}, + {file = "pyhumps-3.8.0.tar.gz", hash = "sha256:498026258f7ee1a8e447c2e28526c0bea9407f9a59c03260aee4bd6c04d681a3"}, +] + +[[package]] +name = "pylint" +version = "3.2.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"}, + {file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"}, +] + +[package.dependencies] +astroid = ">=3.2.2,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""} +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pylint-flask" +version = "0.6" +description = "pylint-flask is a Pylint plugin to aid Pylint in recognizing and understanding errors caused when using Flask" +optional = false +python-versions = "*" +files = [ + {file = "pylint-flask-0.6.tar.gz", hash = "sha256:f4d97de2216bf7bfce07c9c08b166e978fe9f2725de2a50a9845a97de7e31517"}, +] + +[package.dependencies] +pylint-plugin-utils = ">=0.2.1" + +[[package]] +name = "pylint-plugin-utils" +version = "0.8.2" +description = "Utilities and helpers for writing Pylint plugins" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "pylint_plugin_utils-0.8.2-py3-none-any.whl", hash = "sha256:ae11664737aa2effbf26f973a9e0b6779ab7106ec0adc5fe104b0907ca04e507"}, + {file = "pylint_plugin_utils-0.8.2.tar.gz", hash = 
"sha256:d3cebf68a38ba3fba23a873809155562571386d4c1b03e5b4c4cc26c3eee93e4"}, +] + +[package.dependencies] +pylint = ">=1.7" + +[[package]] +name = "pyrfc3339" +version = "1.1" +description = "Generate and parse RFC 3339 timestamps" +optional = false +python-versions = "*" +files = [ + {file = "pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4"}, + {file = "pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"}, +] + +[package.dependencies] +pytz = "*" + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "8.3.3" 
+description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.18.3" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.18.3.tar.gz", hash = "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91"}, + {file = "pytest_asyncio-0.18.3-1-py3-none-any.whl", hash = "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213"}, + {file = "pytest_asyncio-0.18.3-py3-none-any.whl", hash = "sha256:8fafa6c52161addfd41ee7ab35f11836c5a16ec208f93ee388f752bea3493a84"}, +] + +[package.dependencies] +pytest = ">=6.1.0" + +[package.extras] +testing = ["coverage (==6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (==0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = 
"sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +optional = false +python-versions = "*" +files = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[package.dependencies] +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "sbc_common_components" +version = "0.0.0" +description = "" +optional = false +python-versions = "*" +files = [] +develop = false + +[package.dependencies] +flask = "*" +flask-jwt-oidc = ">=0.1.5" +Flask-OpenTracing = "1.1.0" +Flask-SQLAlchemy = "*" +jaeger-client = "*" + +[package.source] +type = "git" +url = "https://github.com/bolyachevets/sbc-common-components.git" +reference = "camel_case_empty_dict" +resolved_reference = "20ce13be6d59946583385c857a5aca1c4c517ad0" +subdirectory = "python" + +[[package]] +name = "scramp" +version = "1.4.5" +description = "An implementation of the SCRAM protocol." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "scramp-1.4.5-py3-none-any.whl", hash = "sha256:50e37c464fc67f37994e35bee4151e3d8f9320e9c204fca83a5d313c121bbbe7"}, + {file = "scramp-1.4.5.tar.gz", hash = "sha256:be3fbe774ca577a7a658117dca014e5d254d158cecae3dd60332dfe33ce6d78e"}, +] + +[package.dependencies] +asn1crypto = ">=1.5.1" + +[[package]] +name = "semver" +version = "3.0.2" +description = "Python helper for Semantic Versioning (https://semver.org)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, + {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, +] + +[[package]] +name = "setuptools" +version = "75.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler 
(>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] + +[[package]] +name = "simple-cloudevent" +version = "0.0.2" +description = "A short description of the project" +optional = false +python-versions = ">=3.8" +files = [] +develop = false + +[package.dependencies] +strict-rfc3339 = "*" + +[package.source] +type = "git" +url = "https://github.com/daxiom/simple-cloudevent.py.git" +reference = "HEAD" +resolved_reference = "447cabb988202206ac69e71177d7cd11b6c0b002" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sql-versioning" +version = "0.1.0" +description = "" +optional = false +python-versions = "^3.10" +files = [] +develop = false + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/sql-versioning" + +[[package]] +name = "sqlalchemy" +version = "2.0.35" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = 
"sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = 
"SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = 
"SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = 
"SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlalchemy-utils" +version = "0.41.2" +description = "Various utility functions for SQLAlchemy." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, + {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, +] + +[package.dependencies] +SQLAlchemy = ">=1.3" + +[package.extras] +arrow = ["arrow (>=0.3.4)"] +babel = ["Babel (>=1.3)"] +color = ["colour (>=0.0.4)"] +encrypted = ["cryptography (>=0.6)"] +intervals = ["intervals (>=0.7.1)"] +password = ["passlib (>=1.6,<2.0)"] +pendulum = ["pendulum (>=2.0.5)"] +phone = ["phonenumbers (>=5.9.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +timezone = ["python-dateutil"] +url = ["furl (>=0.4.1)"] + +[[package]] +name = "strict-rfc3339" +version = "0.7" +description = "Strict, simple, lightweight RFC3339 functions" +optional = false +python-versions = "*" +files = [ + {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, +] + +[[package]] +name = "structlog" +version = "24.4.0" +description = "Structured Logging for Python" +optional 
= false +python-versions = ">=3.8" +files = [ + {file = "structlog-24.4.0-py3-none-any.whl", hash = "sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610"}, + {file = "structlog-24.4.0.tar.gz", hash = "sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4"}, +] + +[package.extras] +dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "rich", "simplejson", "twisted"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"] +tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy (>=1.4)", "rich", "twisted"] + +[[package]] +name = "structured-logging" +version = "0.4.0" +description = "" +optional = false +python-versions = "^3.9" +files = [] +develop = false + +[package.dependencies] +flask = ">= 1" +structlog = "^24.1.0" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/structured-logging" + +[[package]] +name = "threadloop" +version = "1.0.2" +description = "Tornado IOLoop Backed Concurrent Futures" +optional = false +python-versions = "*" +files = [ + {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, + {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, +] + +[package.dependencies] +tornado = "*" + +[[package]] +name = "thrift" +version = "0.20.0" +description = "Python bindings for the Apache Thrift RPC system" +optional = false +python-versions = "*" +files = [ + {file = "thrift-0.20.0.tar.gz", hash = "sha256:4dd662eadf6b8aebe8a41729527bd69adf6ceaa2a8681cbef64d1273b3e8feba"}, +] + +[package.dependencies] +six = ">=1.7.2" + +[package.extras] +all = 
["tornado (>=4.0)", "twisted"] +tornado = ["tornado (>=4.0)"] +twisted = ["twisted"] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "werkzeug" +version = "3.0.0" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.0-py3-none-any.whl", hash = "sha256:cbb2600f7eabe51dbc0502f58be0b3e1b96b893b05695ea2b35b43d4de2d9962"}, + {file = "werkzeug-3.0.0.tar.gz", hash = "sha256:3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "yarl" +version = "1.13.1" +description = "Yet another URL library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82e692fb325013a18a5b73a4fed5a1edaa7c58144dc67ad9ef3d604eccd451ad"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df4e82e68f43a07735ae70a2d84c0353e58e20add20ec0af611f32cd5ba43fb4"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec9dd328016d8d25702a24ee274932aebf6be9787ed1c28d021945d264235b3c"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5820bd4178e6a639b3ef1db8b18500a82ceab6d8b89309e121a6859f56585b05"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86c438ce920e089c8c2388c7dcc8ab30dfe13c09b8af3d306bcabb46a053d6f7"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3de86547c820e4f4da4606d1c8ab5765dd633189791f15247706a2eeabc783ae"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca53632007c69ddcdefe1e8cbc3920dd88825e618153795b57e6ebcc92e752a"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4ee1d240b84e2f213565f0ec08caef27a0e657d4c42859809155cf3a29d1735"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:c49f3e379177f4477f929097f7ed4b0622a586b0aa40c07ac8c0f8e40659a1ac"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5c5e32fef09ce101fe14acd0f498232b5710effe13abac14cd95de9c274e689e"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab9524e45ee809a083338a749af3b53cc7efec458c3ad084361c1dbf7aaf82a2"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b1481c048fe787f65e34cb06f7d6824376d5d99f1231eae4778bbe5c3831076d"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:31497aefd68036d8e31bfbacef915826ca2e741dbb97a8d6c7eac66deda3b606"}, + {file = "yarl-1.13.1-cp310-cp310-win32.whl", hash = "sha256:1fa56f34b2236f5192cb5fceba7bbb09620e5337e0b6dfe2ea0ddbd19dd5b154"}, + {file = "yarl-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:1bbb418f46c7f7355084833051701b2301092e4611d9e392360c3ba2e3e69f88"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:216a6785f296169ed52cd7dcdc2612f82c20f8c9634bf7446327f50398732a51"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40c6e73c03a6befb85b72da213638b8aaa80fe4136ec8691560cf98b11b8ae6e"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2430cf996113abe5aee387d39ee19529327205cda975d2b82c0e7e96e5fdabdc"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fb4134cc6e005b99fa29dbc86f1ea0a298440ab6b07c6b3ee09232a3b48f495"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309c104ecf67626c033845b860d31594a41343766a46fa58c3309c538a1e22b2"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f90575e9fe3aae2c1e686393a9689c724cd00045275407f71771ae5d690ccf38"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9d2e1626be8712333a9f71270366f4a132f476ffbe83b689dd6dc0d114796c74"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b66c87da3c6da8f8e8b648878903ca54589038a0b1e08dde2c86d9cd92d4ac9"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf1ad338620249f8dd6d4b6a91a69d1f265387df3697ad5dc996305cf6c26fb2"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9915300fe5a0aa663c01363db37e4ae8e7c15996ebe2c6cce995e7033ff6457f"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:703b0f584fcf157ef87816a3c0ff868e8c9f3c370009a8b23b56255885528f10"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1d8e3ca29f643dd121f264a7c89f329f0fcb2e4461833f02de6e39fef80f89da"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7055bbade838d68af73aea13f8c86588e4bcc00c2235b4b6d6edb0dbd174e246"}, + {file = "yarl-1.13.1-cp311-cp311-win32.whl", hash = "sha256:a3442c31c11088e462d44a644a454d48110f0588de830921fd201060ff19612a"}, + {file = "yarl-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:81bad32c8f8b5897c909bf3468bf601f1b855d12f53b6af0271963ee67fff0d2"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f452cc1436151387d3d50533523291d5f77c6bc7913c116eb985304abdbd9ec9"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9cec42a20eae8bebf81e9ce23fb0d0c729fc54cf00643eb251ce7c0215ad49fe"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d959fe96e5c2712c1876d69af0507d98f0b0e8d81bee14cfb3f6737470205419"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c837ab90c455f3ea8e68bee143472ee87828bff19ba19776e16ff961425b57"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:94a993f976cdcb2dc1b855d8b89b792893220db8862d1a619efa7451817c836b"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2442a415a5f4c55ced0fade7b72123210d579f7d950e0b5527fc598866e62c"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fdbf0418489525231723cdb6c79e7738b3cbacbaed2b750cb033e4ea208f220"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b7f6e699304717fdc265a7e1922561b02a93ceffdaefdc877acaf9b9f3080b8"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bcd5bf4132e6a8d3eb54b8d56885f3d3a38ecd7ecae8426ecf7d9673b270de43"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2a93a4557f7fc74a38ca5a404abb443a242217b91cd0c4840b1ebedaad8919d4"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:22b739f99c7e4787922903f27a892744189482125cc7b95b747f04dd5c83aa9f"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2db874dd1d22d4c2c657807562411ffdfabec38ce4c5ce48b4c654be552759dc"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4feaaa4742517eaceafcbe74595ed335a494c84634d33961214b278126ec1485"}, + {file = "yarl-1.13.1-cp312-cp312-win32.whl", hash = "sha256:bbf9c2a589be7414ac4a534d54e4517d03f1cbb142c0041191b729c2fa23f320"}, + {file = "yarl-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:d07b52c8c450f9366c34aa205754355e933922c79135125541daae6cbf31c799"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:95c6737f28069153c399d875317f226bbdea939fd48a6349a3b03da6829fb550"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cd66152561632ed4b2a9192e7f8e5a1d41e28f58120b4761622e0355f0fe034c"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:6a2acde25be0cf9be23a8f6cbd31734536a264723fca860af3ae5e89d771cd71"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18595e6a2ee0826bf7dfdee823b6ab55c9b70e8f80f8b77c37e694288f5de1"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a31d21089894942f7d9a8df166b495101b7258ff11ae0abec58e32daf8088813"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45f209fb4bbfe8630e3d2e2052535ca5b53d4ce2d2026bed4d0637b0416830da"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f722f30366474a99745533cc4015b1781ee54b08de73260b2bbe13316079851"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3bf60444269345d712838bb11cc4eadaf51ff1a364ae39ce87a5ca8ad3bb2c8"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:942c80a832a79c3707cca46bd12ab8aa58fddb34b1626d42b05aa8f0bcefc206"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:44b07e1690f010c3c01d353b5790ec73b2f59b4eae5b0000593199766b3f7a5c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:396e59b8de7e4d59ff5507fb4322d2329865b909f29a7ed7ca37e63ade7f835c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3bb83a0f12701c0b91112a11148b5217617982e1e466069d0555be9b372f2734"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c92b89bffc660f1274779cb6fbb290ec1f90d6dfe14492523a0667f10170de26"}, + {file = "yarl-1.13.1-cp313-cp313-win32.whl", hash = "sha256:269c201bbc01d2cbba5b86997a1e0f73ba5e2f471cfa6e226bcaa7fd664b598d"}, + {file = "yarl-1.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:1d0828e17fa701b557c6eaed5edbd9098eb62d8838344486248489ff233998b8"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_universal2.whl", hash 
= "sha256:8be8cdfe20787e6a5fcbd010f8066227e2bb9058331a4eccddec6c0db2bb85b2"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08d7148ff11cb8e886d86dadbfd2e466a76d5dd38c7ea8ebd9b0e07946e76e4b"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4afdf84610ca44dcffe8b6c22c68f309aff96be55f5ea2fa31c0c225d6b83e23"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0d12fe78dcf60efa205e9a63f395b5d343e801cf31e5e1dda0d2c1fb618073d"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298c1eecfd3257aa16c0cb0bdffb54411e3e831351cd69e6b0739be16b1bdaa8"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c14c16831b565707149c742d87a6203eb5597f4329278446d5c0ae7a1a43928e"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9bacedbb99685a75ad033fd4de37129449e69808e50e08034034c0bf063f99"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658e8449b84b92a4373f99305de042b6bd0d19bf2080c093881e0516557474a5"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:373f16f38721c680316a6a00ae21cc178e3a8ef43c0227f88356a24c5193abd6"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:45d23c4668d4925688e2ea251b53f36a498e9ea860913ce43b52d9605d3d8177"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f7917697bcaa3bc3e83db91aa3a0e448bf5cde43c84b7fc1ae2427d2417c0224"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5989a38ba1281e43e4663931a53fbf356f78a0325251fd6af09dd03b1d676a09"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11b3ca8b42a024513adce810385fcabdd682772411d95bbbda3b9ed1a4257644"}, + {file = "yarl-1.13.1-cp38-cp38-win32.whl", hash = 
"sha256:dcaef817e13eafa547cdfdc5284fe77970b891f731266545aae08d6cce52161e"}, + {file = "yarl-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:7addd26594e588503bdef03908fc207206adac5bd90b6d4bc3e3cf33a829f57d"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a0ae6637b173d0c40b9c1462e12a7a2000a71a3258fa88756a34c7d38926911c"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:576365c9f7469e1f6124d67b001639b77113cfd05e85ce0310f5f318fd02fe85"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78f271722423b2d4851cf1f4fa1a1c4833a128d020062721ba35e1a87154a049"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d74f3c335cfe9c21ea78988e67f18eb9822f5d31f88b41aec3a1ec5ecd32da5"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1891d69a6ba16e89473909665cd355d783a8a31bc84720902c5911dbb6373465"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb382fd7b4377363cc9f13ba7c819c3c78ed97c36a82f16f3f92f108c787cbbf"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8854b9f80693d20cec797d8e48a848c2fb273eb6f2587b57763ccba3f3bd4b"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbf2c3f04ff50f16404ce70f822cdc59760e5e2d7965905f0e700270feb2bbfc"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb9f59f3848edf186a76446eb8bcf4c900fe147cb756fbbd730ef43b2e67c6a7"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ef9b85fa1bc91c4db24407e7c4da93a5822a73dd4513d67b454ca7064e8dc6a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:098b870c18f1341786f290b4d699504e18f1cd050ed179af8123fd8232513424"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:8c723c91c94a3bc8033dd2696a0f53e5d5f8496186013167bddc3fb5d9df46a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44a4c40a6f84e4d5955b63462a0e2a988f8982fba245cf885ce3be7618f6aa7d"}, + {file = "yarl-1.13.1-cp39-cp39-win32.whl", hash = "sha256:84bbcdcf393139f0abc9f642bf03f00cac31010f3034faa03224a9ef0bb74323"}, + {file = "yarl-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:fc2931ac9ce9c61c9968989ec831d3a5e6fcaaff9474e7cfa8de80b7aff5a093"}, + {file = "yarl-1.13.1-py3-none-any.whl", hash = "sha256:6a5185ad722ab4dd52d5fb1f30dcc73282eb1ed494906a92d1a228d3f89607b0"}, + {file = "yarl-1.13.1.tar.gz", hash = "sha256:ec8cfe2295f3e5e44c51f57272afbd69414ae629ec7c6b27f5a410efc78b70a0"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.19.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, + {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "a6ed14626865fb0cc0c71c8d95ea8e83113f5c98737fdac7f05d95d9b26d3b60" diff --git a/queue_services/account-mailer/pyproject.toml b/queue_services/account-mailer/pyproject.toml new file mode 100644 index 0000000000..b3b7bd811d --- /dev/null +++ b/queue_services/account-mailer/pyproject.toml @@ -0,0 +1,71 @@ 
+[tool.poetry] +name = "account-mailer" +version = "0.1.0" +description = "" +authors = ["Avni Salhotra "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.12" +blinker = "1.8.2" +charset-normalizer = "3.3.2" +click = "8.1.7" +expiringdict = "1.2.2" +google-api-core = "1.34.1" +google-auth = "2.28.2" +google-cloud-pubsub = "2.20.2" +googleapis-common-protos = "1.63.0" +grpc-google-iam-v1 = "0.13.0" +grpcio-status = "1.48.2" +grpcio = "1.64.0" +idna = "3.7" +importlib-resources = "5.13.0" +jaeger-client = "4.8.0" +msgpack = "1.0.8" +opentracing = "2.4.0" +pkgutil-resolve-name = "1.3.10" +protobuf = ">=3.19.5,<3.20.0" +pyrfc3339 = "1.1" +pycountry = "23.12.11" +pyrsistent = "0.20.0" +pytz = "2024.1" +semver = "3.0.2" +six = "1.16.0" +threadloop = "1.0.2" +thrift = "0.20.0" +tornado = "6.4.1" +urllib3 = "2.2.2" +zipp = "3.19.1" + +# VCS dependencies +auth-api = { git = "https://github.com/bcgov/sbc-auth.git", rev = "feature-gcp-migration", subdirectory = "auth-api" } +simple-cloudevent = { git = "https://github.com/daxiom/simple-cloudevent.py.git" } +build-deps = { git = "https://github.com/bcgov/sbc-auth.git", rev = "feature-gcp-migration", subdirectory = "build-deps" } + +[tool.poetry.group.dev.dependencies] +psycopg2 = "^2.9.9" +pytest = "^8.3.2" +pytest-mock = "^3.14.0" +pyhamcrest = "^2.1.0" +pytest-cov = "^5.0.0" +freezegun = "^1.5.1" +flake8 = "5.0.4" +flake8-blind-except = "^0.2.1" +flake8-debugger = "^4.1.2" +flake8-docstrings = "^1.7.0" +flake8-isort = "^6.1.1" +flake8-print = "^5.0.0" +flake8-quotes = "^3.4.0" +pep8-naming = "^0.14.1" +coverage = "^7.6.1" +pylint = "3.2.3" +pylint-flask = "^0.6" +pydocstyle = "^6.3.0" +isort = "^5.13.2" +lovely-pytest-docker = "^1.0.0" +pytest-asyncio = "0.18.3" +astroid = "^3.2.3" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/queue_services/account-mailer/requirements.txt b/queue_services/account-mailer/requirements.txt deleted file mode 100644 index 
707d8ed319..0000000000 --- a/queue_services/account-mailer/requirements.txt +++ /dev/null @@ -1,39 +0,0 @@ -CacheControl==0.14.0 -Flask==1.1.2 -Jinja2==3.0.3 -MarkupSafe==2.0.1 -Werkzeug==1.0.1 -attrs==23.2.0 -blinker==1.8.2 -certifi==2024.7.4 -charset-normalizer==3.3.2 -click==8.1.7 -expiringdict==1.2.2 -idna==3.7 -importlib-resources==5.13.0 -itsdangerous==2.0.1 -jaeger-client==4.8.0 -jsonschema==4.17.3 -launchdarkly-server-sdk==8.2.1 -msgpack==1.0.8 -opentracing==2.4.0 -pkgutil_resolve_name==1.3.10 -protobuf==3.19.6 -pyRFC3339==1.1 -pycountry==23.12.11 -pyrsistent==0.20.0 -python-dotenv==1.0.1 -pytz==2024.1 -requests==2.32.2 -semver==3.0.2 -sentry-sdk==2.3.1 -six==1.16.0 -threadloop==1.0.2 -thrift==0.20.0 -tornado==6.4.1 -urllib3==2.2.2 -zipp==3.19.1 --e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python --e git+https://github.com/seeker25/sbc-auth.git@20087#egg=auth-api&subdirectory=auth-api --e git+https://github.com/seeker25/sbc-connect-common.git@small_tweaks#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/daxiom/simple-cloudevent.py.git diff --git a/queue_services/account-mailer/requirements/dev.txt b/queue_services/account-mailer/requirements/dev.txt deleted file mode 100755 index 4859b2ba13..0000000000 --- a/queue_services/account-mailer/requirements/dev.txt +++ /dev/null @@ -1,32 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest -pytest-mock -requests -pyhamcrest -pytest-cov -FreezeGun - -# Lint and code style -flake8==5.0.4 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-print -flake8-quotes -pep8-naming -autopep8 -coverage -pylint -pylint-flask -pydocstyle -isort - - -# docker -lovely-pytest-docker -pytest-asyncio==0.18.3 - diff --git a/queue_services/account-mailer/requirements/prod.txt b/queue_services/account-mailer/requirements/prod.txt deleted file mode 100644 index 
213050366a..0000000000 --- a/queue_services/account-mailer/requirements/prod.txt +++ /dev/null @@ -1,14 +0,0 @@ -Flask -jsonschema==4.17.3 -python-dotenv -sentry-sdk[flask] -pycountry -Werkzeug<2 -jaeger-client -attrs -itsdangerous==2.0.1 -Jinja2==3.0.3 -markupsafe==2.0.1 -protobuf~=3.19.5 -launchdarkly-server-sdk==8.2.1 -cachecontrol diff --git a/queue_services/account-mailer/requirements/repo-libraries.txt b/queue_services/account-mailer/requirements/repo-libraries.txt deleted file mode 100644 index ed38ad555e..0000000000 --- a/queue_services/account-mailer/requirements/repo-libraries.txt +++ /dev/null @@ -1,4 +0,0 @@ --e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python --e git+https://github.com/seeker25/sbc-auth.git@refactor_queues#egg=auth-api&subdirectory=auth-api --e git+https://github.com/seeker25/sbc-connect-common.git@small_tweaks#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/daxiom/simple-cloudevent.py.git diff --git a/queue_services/account-mailer/setup.py b/queue_services/account-mailer/setup.py deleted file mode 100644 index b43021e229..0000000000 --- a/queue_services/account-mailer/setup.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright © 2019 Province of British Columbia. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Installer and setup for this module -""" -import ast -from glob import glob -from os.path import basename, splitext -import re - -from setuptools import setup, find_packages - -_version_re = re.compile(r'__version__\s+=\s+(.*)') # pylint: disable=invalid-name - -with open('src/account_mailer/version.py', 'rb') as f: - version = str(ast.literal_eval(_version_re.search( # pylint: disable=invalid-name - f.read().decode('utf-8')).group(1))) - - -def read_requirements(filename): - """ - Get application requirements from - the requirements.txt file. - :return: Python requirements - """ - with open(filename, 'r') as req: - requirements = req.readlines() - install_requires = [r.strip() for r in requirements if (r.find('git+') != 0 and r.find('-e git+') != 0)] - return install_requires - - -def read(filepath): - """ - Read the contents from a file. - :param str filepath: path to the file to be read - :return: file contents - """ - with open(filepath, 'r') as file_handle: - content = file_handle.read() - return content - - -REQUIREMENTS = read_requirements('requirements.txt') - -setup( - name="account_mailer", - version=version, - author_email='', - packages=find_packages('src'), - package_dir={'': 'src'}, - py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')], - include_package_data=True, - license=read('LICENSE'), - long_description=read('README.md'), - zip_safe=False, - install_requires=REQUIREMENTS, - setup_requires=["pytest-runner", ], - tests_require=["pytest", ], -) diff --git a/queue_services/account-mailer/src/account_mailer/__init__.py b/queue_services/account-mailer/src/account_mailer/__init__.py index 9b5d148e24..e576ce42b5 100644 --- a/queue_services/account-mailer/src/account_mailer/__init__.py +++ b/queue_services/account-mailer/src/account_mailer/__init__.py @@ -19,23 +19,18 @@ import os -import sentry_sdk +from auth_api.exceptions import ExceptionHandler from auth_api.models import db from auth_api.resources.ops import bp as ops_bp from 
auth_api.services.flags import flags from auth_api.services.gcp_queue import queue from auth_api.utils.cache import cache -from auth_api.utils.util_logging import setup_logging from flask import Flask -from sentry_sdk.integrations.flask import FlaskIntegration from account_mailer import config from account_mailer.resources.worker import bp as worker_endpoint -setup_logging(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.conf')) # important to do this first - - def register_endpoints(app: Flask): """Register endpoints with the flask application.""" # Allow base route to match with, and without a trailing slash @@ -52,13 +47,7 @@ def create_app(run_mode=os.getenv('DEPLOYMENT_ENV', 'production')) -> Flask: """Return a configured Flask App using the Factory method.""" app = Flask(__name__) app.config.from_object(config.get_named_config(run_mode)) - - if str(app.config.get('SENTRY_ENABLE')).lower() == 'true': - if app.config.get('SENTRY_DSN', None): - sentry_sdk.init( # pylint: disable=abstract-class-instantiated - dsn=app.config.get('SENTRY_DSN'), - integrations=[FlaskIntegration()] - ) + app.config['ENV'] = run_mode db.init_app(app) flags.init_app(app) @@ -66,5 +55,6 @@ def create_app(run_mode=os.getenv('DEPLOYMENT_ENV', 'production')) -> Flask: queue.init_app(app) register_endpoints(app) + ExceptionHandler(app) return app diff --git a/queue_services/account-mailer/src/account_mailer/config.py b/queue_services/account-mailer/src/account_mailer/config.py index d04eaa2879..59b94da2fb 100644 --- a/queue_services/account-mailer/src/account_mailer/config.py +++ b/queue_services/account-mailer/src/account_mailer/config.py @@ -62,9 +62,6 @@ class _Config(): # pylint: disable=too-few-public-methods TESTING = False DEBUG = False - SENTRY_ENABLE = os.getenv('SENTRY_ENABLE', 'False') - SENTRY_DSN = os.getenv('SENTRY_DSN', None) - SQLALCHEMY_TRACK_MODIFICATIONS = False AUTH_LD_SDK_KEY = os.getenv('AUTH_LD_SDK_KEY', None) @@ -75,7 +72,10 @@ class _Config(): # 
pylint: disable=too-few-public-methods DB_NAME = os.getenv('DATABASE_NAME', '') DB_HOST = os.getenv('DATABASE_HOST', '') DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}' + if DB_UNIX_SOCKET := os.getenv('DATABASE_UNIX_SOCKET', None): + SQLALCHEMY_DATABASE_URI = f'postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?host={DB_UNIX_SOCKET}' # noqa: E231, E501 + else: + SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' # noqa: E231, E501 # Keycloak & Jwt JWT_OIDC_ISSUER = os.getenv('JWT_OIDC_ISSUER') @@ -91,9 +91,16 @@ class _Config(): # pylint: disable=too-few-public-methods KEYCLOAK_SERVICE_ACCOUNT_SECRET = os.getenv('SBC_AUTH_ADMIN_CLIENT_SECRET') # API endpoints - PAY_API_URL = os.getenv('PAY_API_URL') - NOTIFY_API_URL = os.getenv('NOTIFY_API_URL') - REPORT_API_BASE_URL = f'{os.getenv("REPORT_API_URL")}/reports' + PAY_API_URL = os.getenv('PAY_API_URL', '') + PAY_API_VERSION = os.getenv('PAY_API_VERSION', '') + PAY_API_URL = PAY_API_URL + PAY_API_VERSION + NOTIFY_API_URL = os.getenv('NOTIFY_API_URL', '') + NOTIFY_API_VERSION = os.getenv('NOTIFY_API_VERSION', '') + NOTIFY_API_URL = NOTIFY_API_URL + NOTIFY_API_VERSION + REPORT_API_URL = os.getenv('REPORT_API_URL', '') + REPORT_API_VERSION = os.getenv('REPORT_API_VERSION', '') + REPORT_API_URL = REPORT_API_URL + REPORT_API_VERSION + REPORT_API_BASE_URL = f'{REPORT_API_URL}/reports' # PUB/SUB - SUB: account-mailer-dev # If blank in PUB/SUB, this should match the https endpoint the subscription is pushing to. 
@@ -161,7 +168,7 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432') SQLALCHEMY_DATABASE_URI = os.getenv( 'DATABASE_TEST_URL', - default=f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}', + default=f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}', # noqa: E231 ) JWT_OIDC_ISSUER = os.getenv('JWT_OIDC_TEST_ISSUER') diff --git a/queue_services/account-mailer/src/account_mailer/email_processors/account_unlock.py b/queue_services/account-mailer/src/account_mailer/email_processors/account_unlock.py index c81e9d8aee..62889fa789 100644 --- a/queue_services/account-mailer/src/account_mailer/email_processors/account_unlock.py +++ b/queue_services/account-mailer/src/account_mailer/email_processors/account_unlock.py @@ -19,13 +19,17 @@ from auth_api.utils.enums import AuthHeaderType, ContentType from flask import current_app from jinja2 import Template +from structured_logging import StructuredLogging from account_mailer.email_processors import generate_template +logger = StructuredLogging.get_logger() + + def process(data: dict, token: str) -> dict: """Build the email for Account Unlocked notification.""" - current_app.logger.debug('email_msg notification: %s', data) + logger.debug('email_msg notification: %s', data) pdf_attachment = _get_account_unlock_pdf(data, token) html_body = _get_account_unlock_email(data) return { @@ -72,7 +76,7 @@ def _get_account_unlock_pdf(data, token): additional_headers={'Accept': 'application/pdf'}) pdf_attachment = None if report_response.status_code != 200: - current_app.logger.error('Failed to get pdf') + logger.error('Failed to get pdf') else: pdf_attachment = base64.b64encode(report_response.content) diff --git a/queue_services/account-mailer/src/account_mailer/email_processors/common_mailer.py b/queue_services/account-mailer/src/account_mailer/email_processors/common_mailer.py index 3b446ce0cb..95d459342a 100644 --- 
a/queue_services/account-mailer/src/account_mailer/email_processors/common_mailer.py +++ b/queue_services/account-mailer/src/account_mailer/email_processors/common_mailer.py @@ -15,14 +15,18 @@ from auth_api.models import Org as OrgModel from flask import current_app from jinja2 import Template +from structured_logging import StructuredLogging from account_mailer.auth_utils import get_dashboard_url, get_login_url, get_payment_statements_url from account_mailer.email_processors import generate_template +logger = StructuredLogging.get_logger() + + def process(org_id, recipients, template_name, subject, logo_url, **kwargs) -> dict: """Build the email for Account notification.""" - current_app.logger.debug('account notification: %s', org_id) + logger.debug('account notification: %s', org_id) account_name: str = None account_name_with_branch: str = None @@ -47,7 +51,7 @@ def process(org_id, recipients, template_name, subject, logo_url, **kwargs) -> d 'payment_statement_url': get_payment_statements_url(org_id), **kwargs } - current_app.logger.debug('notification args: %s', jinja_kwargs) + logger.debug('notification args: %s', jinja_kwargs) html_out = jnja_template.render(jinja_kwargs) diff --git a/queue_services/account-mailer/src/account_mailer/email_processors/ejv_failures.py b/queue_services/account-mailer/src/account_mailer/email_processors/ejv_failures.py index b57d097590..7eef70d91a 100644 --- a/queue_services/account-mailer/src/account_mailer/email_processors/ejv_failures.py +++ b/queue_services/account-mailer/src/account_mailer/email_processors/ejv_failures.py @@ -17,15 +17,19 @@ from flask import current_app from jinja2 import Template +from structured_logging import StructuredLogging from account_mailer.email_processors import generate_template from account_mailer.enums import SubjectType, TemplateType from account_mailer.services import minio_service +logger = StructuredLogging.get_logger() + + def process(email_msg: dict) -> dict: """Build the email for JV 
failures.""" - current_app.logger.debug('ejv_failures: %s', email_msg) + logger.debug('ejv_failures: %s', email_msg) # fill in template failed_jv_file_name = email_msg.get('fileName') file_location = email_msg.get('minioLocation') diff --git a/queue_services/account-mailer/src/account_mailer/email_processors/pad_confirmation.py b/queue_services/account-mailer/src/account_mailer/email_processors/pad_confirmation.py index f1b5234b33..6168305a05 100644 --- a/queue_services/account-mailer/src/account_mailer/email_processors/pad_confirmation.py +++ b/queue_services/account-mailer/src/account_mailer/email_processors/pad_confirmation.py @@ -22,14 +22,18 @@ from auth_api.utils.enums import AuthHeaderType, ContentType from flask import current_app from jinja2 import Template +from structured_logging import StructuredLogging from account_mailer.email_processors import generate_template from account_mailer.services import minio_service +logger = StructuredLogging.get_logger() + + def process(email_msg: dict, token: str) -> dict: """Build the email for PAD Confirmation notification.""" - current_app.logger.debug('email_msg notification: %s', email_msg) + logger.debug('email_msg notification: %s', email_msg) # fill in template username = email_msg.get('padTosAcceptedBy') @@ -120,7 +124,7 @@ def _get_pad_confirmation_report_pdf(email_msg, token): additional_headers={'Accept': 'application/pdf'}) pdf_attachment = None if report_response.status_code != 200: - current_app.logger.error('Failed to get pdf') + logger.error('Failed to get pdf') else: pdf_attachment = base64.b64encode(report_response.content) diff --git a/queue_services/account-mailer/src/account_mailer/email_processors/refund_requested.py b/queue_services/account-mailer/src/account_mailer/email_processors/refund_requested.py index 85bdca4c40..b0ea94d6c6 100644 --- a/queue_services/account-mailer/src/account_mailer/email_processors/refund_requested.py +++ 
b/queue_services/account-mailer/src/account_mailer/email_processors/refund_requested.py @@ -17,13 +17,17 @@ from flask import current_app from jinja2 import Template +from structured_logging import StructuredLogging from account_mailer.email_processors import generate_template +logger = StructuredLogging.get_logger() + + def process(email_msg: dict) -> dict: """Build the email for Payment Completed notification.""" - current_app.logger.debug('refund_request notification: %s', email_msg) + logger.debug('refund_request notification: %s', email_msg) template_name = 'bcol_refund_request_email' recepients = current_app.config.get('REFUND_REQUEST').get('bcol').get('recipients') refund_date = datetime.strptime(email_msg.get('refundDate'), '%Y%m%d').strftime('%Y-%m-%d') diff --git a/queue_services/account-mailer/src/account_mailer/logging.conf b/queue_services/account-mailer/src/account_mailer/logging.conf deleted file mode 100644 index ded5cb81c1..0000000000 --- a/queue_services/account-mailer/src/account_mailer/logging.conf +++ /dev/null @@ -1,34 +0,0 @@ -[loggers] -keys=root,api,asyncio - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_asyncio] -level=DEBUG -handlers=console -qualname=asyncio -propagate=0 - -[logger_api] -level=DEBUG -handlers=console -qualname=api -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= diff --git a/queue_services/account-mailer/src/account_mailer/resources/worker.py b/queue_services/account-mailer/src/account_mailer/resources/worker.py index 61c532fe74..b944c1d5fb 100644 --- a/queue_services/account-mailer/src/account_mailer/resources/worker.py +++ b/queue_services/account-mailer/src/account_mailer/resources/worker.py @@ -23,8 +23,9 @@ from auth_api.services.gcp_queue.gcp_auth 
import ensure_authorized_queue_user from auth_api.services.rest_service import RestService from auth_api.utils.roles import ADMIN, COORDINATOR -from flask import Blueprint, current_app, request +from flask import Blueprint, request from sbc_common_components.utils.enums import QueueMessageTypes +from structured_logging import StructuredLogging from account_mailer.auth_utils import get_login_url, get_member_emails from account_mailer.email_processors import ( @@ -37,6 +38,9 @@ bp = Blueprint('worker', __name__) +logger = StructuredLogging.get_logger() + + @bp.route('/', methods=('POST',)) @ensure_authorized_queue_user def worker(): @@ -46,9 +50,9 @@ def worker(): return {}, HTTPStatus.OK try: - current_app.logger.info('Event message received: %s', json.dumps(dataclasses.asdict(event_message))) + logger.info('Event message received: %s', json.dumps(dataclasses.asdict(event_message))) if is_message_processed(event_message): - current_app.logger.info('Event message already processed, skipping.') + logger.info('Event message already processed, skipping.') return {}, HTTPStatus.OK message_type, email_msg = event_message.type, event_message.data email_msg['logo_url'] = minio_service.MinioService.get_minio_public_url('bc_logo_for_email.png') @@ -73,7 +77,7 @@ def worker(): # Note if you're extending above, make sure to include the new type in handle_other_messages below. 
handle_other_messages(message_type, email_msg) except Exception: # NOQA # pylint: disable=broad-except - current_app.logger.error('Error processing event:', exc_info=True) + logger.error('Error processing event:', exc_info=True) return {}, HTTPStatus.OK @@ -126,7 +130,7 @@ def handle_eft_available_notification(message_type, email_msg): def handle_nsf_lock_unlock_account(message_type, email_msg): """Handle the NSF lock/unlock account message.""" if message_type == QueueMessageTypes.NSF_LOCK_ACCOUNT.value: - current_app.logger.debug('Lock account message received') + logger.debug('Lock account message received') template_name = TemplateType.NSF_LOCK_ACCOUNT_TEMPLATE_NAME.value org_id = email_msg.get('accountId') emails = get_member_emails(org_id, (ADMIN, COORDINATOR)) @@ -138,7 +142,7 @@ def handle_nsf_lock_unlock_account(message_type, email_msg): subject, logo_url=logo_url) process_email(email_dict) elif message_type == QueueMessageTypes.NSF_UNLOCK_ACCOUNT.value: - current_app.logger.debug('Unlock account message received') + logger.debug('Unlock account message received') template_name = TemplateType.NSF_UNLOCK_ACCOUNT_TEMPLATE_NAME.value org_id = email_msg.get('accountId') admin_coordinator_emails = get_member_emails(org_id, (ADMIN, COORDINATOR)) @@ -178,7 +182,7 @@ def handle_account_confirmation_period_over(message_type, email_msg): def handle_team_actions(message_type, email_msg): """Handle the team actions messages.""" if message_type in (QueueMessageTypes.TEAM_MODIFIED.value, QueueMessageTypes.TEAM_MEMBER_INVITED.value): - current_app.logger.debug('Team Modified message received') + logger.debug('Team Modified message received') template_name = TemplateType.TEAM_MODIFIED_TEMPLATE_NAME.value org_id = email_msg.get('accountId') admin_coordinator_emails = get_member_emails(org_id, (ADMIN,)) @@ -188,7 +192,7 @@ def handle_team_actions(message_type, email_msg): subject, logo_url=logo_url) process_email(email_dict) elif message_type == 
QueueMessageTypes.ADMIN_REMOVED.value: - current_app.logger.debug('ADMIN_REMOVED message received') + logger.debug('ADMIN_REMOVED message received') template_name = TemplateType.ADMIN_REMOVED_TEMPLATE_NAME.value org_id = email_msg.get('accountId') recipient_email = email_msg.get('recipientEmail') @@ -466,7 +470,7 @@ def handle_other_messages(message_type, email_msg): ) template_name = TemplateType[f'{QueueMessageTypes(message_type).name}_TEMPLATE_NAME'].value else: - current_app.logger.error('Unknown message type: %s', message_type) + logger.error('Unknown message type: %s', message_type) return kwargs = { @@ -495,10 +499,10 @@ def handle_other_messages(message_type, email_msg): def process_email(email_dict: dict, token: str = None): # pylint: disable=too-many-branches """Process the email contained in the message.""" - current_app.logger.debug('Attempting to process email: %s', email_dict.get('recipients', '')) + logger.debug('Attempting to process email: %s', email_dict.get('recipients', '')) if email_dict: if not token: token = RestService.get_service_account_token() notification_service.send_email(email_dict, token=token) else: - current_app.logger.error('No email content generated') + logger.error('No email content generated') diff --git a/queue_services/account-mailer/src/account_mailer/services/minio_service.py b/queue_services/account-mailer/src/account_mailer/services/minio_service.py index badec4248c..e627ce0368 100644 --- a/queue_services/account-mailer/src/account_mailer/services/minio_service.py +++ b/queue_services/account-mailer/src/account_mailer/services/minio_service.py @@ -18,6 +18,10 @@ from flask import current_app from minio import Minio +from structured_logging import StructuredLogging + + +logger = StructuredLogging.get_logger() class MinioService: @@ -27,7 +31,7 @@ class MinioService: def get_minio_file(bucket_name: str, file_name: str): """Return the file from Minio.""" minio_client: Minio = MinioService._get_client() - 
current_app.logger.debug(f'Get Minio file {bucket_name}/{file_name}') + logger.debug(f'Get Minio file {bucket_name}/{file_name}') return minio_client.get_object(bucket_name, file_name) @@ -35,7 +39,7 @@ def get_minio_file(bucket_name: str, file_name: str): def put_minio_file(bucket_name: str, file_name: str, value_as_bytes: bytearray): """Return the file from Minio.""" minio_client: Minio = MinioService._get_client() - current_app.logger.debug(f'Put Minio file {bucket_name}/{file_name}') + logger.debug(f'Put Minio file {bucket_name}/{file_name}') value_as_stream = io.BytesIO(value_as_bytes) minio_client.put_object(current_app.config['MINIO_BUCKET_NAME'], file_name, value_as_stream, @@ -57,7 +61,7 @@ def _get_client() -> Minio: @staticmethod def get_minio_public_url(key: str) -> str: """Return a URL for uploaded document.""" - current_app.logger.debug(f'GET URL for {key}') + logger.debug(f'GET URL for {key}') minio_endpoint = current_app.config['MINIO_ENDPOINT'] - return f'https://{minio_endpoint}/public/{key}' + return f'https://{minio_endpoint}/public/{key}' # noqa: E231 diff --git a/queue_services/account-mailer/src/account_mailer/services/notification_service.py b/queue_services/account-mailer/src/account_mailer/services/notification_service.py index 805a8acd95..bd7596b2c5 100644 --- a/queue_services/account-mailer/src/account_mailer/services/notification_service.py +++ b/queue_services/account-mailer/src/account_mailer/services/notification_service.py @@ -14,11 +14,15 @@ """Service for managing Invitation data.""" from auth_api.services.rest_service import RestService from flask import current_app +from structured_logging import StructuredLogging + + +logger = StructuredLogging.get_logger() def send_email(notify_body: dict, token: str): # pylint:disable=unused-argument """Send the email asynchronously, using the given details.""" - current_app.logger.info(f'send_email to {notify_body.get("recipients")}') + logger.info(f'send_email to 
{notify_body.get("recipients")}') notify_url = current_app.config.get('NOTIFY_API_URL') + '/notify/' RestService.post(notify_url, token=token, data=notify_body) - current_app.logger.info(f'Email sent to {notify_body.get("recipients")}') + logger.info(f'Email sent to {notify_body.get("recipients")}') diff --git a/queue_services/account-mailer/src/account_mailer/utils.py b/queue_services/account-mailer/src/account_mailer/utils.py index 5f0e1a6d96..a551db74f4 100644 --- a/queue_services/account-mailer/src/account_mailer/utils.py +++ b/queue_services/account-mailer/src/account_mailer/utils.py @@ -51,7 +51,7 @@ def get_local_formatted_date(date_val: datetime, dt_format: str = '%Y-%m-%d'): def format_currency(amount: str): """Format currency to two decimal places.""" - return f'{float(amount):0,.2f}' + return f'{float(amount):0,.2f}' # noqa: E231 def format_day_with_suffix(day: int) -> str: diff --git a/queue_services/account-mailer/src/account_mailer/version.py b/queue_services/account-mailer/src/account_mailer/version.py index dc116fa97a..ed2ae0453e 100644 --- a/queue_services/account-mailer/src/account_mailer/version.py +++ b/queue_services/account-mailer/src/account_mailer/version.py @@ -23,3 +23,4 @@ """ __version__ = '2.18.4' # pylint: disable=invalid-name +__version__ = '2.18.4' # pylint: disable=invalid-name diff --git a/queue_services/account-mailer/tests/conftest.py b/queue_services/account-mailer/tests/conftest.py index 12a8f51f07..9eee5e19bd 100644 --- a/queue_services/account-mailer/tests/conftest.py +++ b/queue_services/account-mailer/tests/conftest.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. """Common setup and fixtures for the pytest suite used by this service.""" -import logging -import os import random import time from concurrent.futures import CancelledError @@ -28,15 +26,6 @@ from account_mailer import create_app -def setup_logging(conf): - """Create the services logger. 
- - TODO should be reworked to load in the proper loggers and remove others - """ - if conf and os.path.isfile(conf): - logging.config.fileConfig(conf) - - @contextmanager def not_raises(exception): """Corallary to the pytest raises builtin. @@ -69,11 +58,12 @@ def db(app): # pylint: disable=redefined-outer-name, invalid-name Drops all existing tables - Meta follows Postgres FKs """ with app.app_context(): - drop_schema_sql = """DROP SCHEMA public CASCADE; - CREATE SCHEMA public; - GRANT ALL ON SCHEMA public TO postgres; - GRANT ALL ON SCHEMA public TO public; - """ + drop_schema_sql = text(""" + DROP SCHEMA public CASCADE; + CREATE SCHEMA public; + GRANT ALL ON SCHEMA public TO postgres; + GRANT ALL ON SCHEMA public TO public; + """) sess = _db.session() sess.execute(drop_schema_sql) @@ -88,15 +78,25 @@ def db(app): # pylint: disable=redefined-outer-name, invalid-name # This is the path we'll use in legal_api!! # even though this isn't referenced directly, it sets up the internal configs that upgrade + import os import sys + + venv_src_path = os.path.abspath( + os.path.join( + os.path.dirname(__file__), + os.pardir, + '.venv/src/sbc-auth/auth-api' + ) + ) + if venv_src_path not in sys.path: + sys.path.insert(0, venv_src_path) + auth_api_folder = [folder for folder in sys.path if 'auth-api' in folder][0] - migration_path = auth_api_folder.replace('/auth-api/src', '/auth-api/migrations') + migration_path = auth_api_folder.replace('/auth-api', '/auth-api/migrations') Migrate(app, _db, directory=migration_path) upgrade() - # Restore the logging, alembic and sqlalchemy have their own logging from alembic.ini. 
- setup_logging(os.path.abspath('logging.conf')) return _db @@ -136,7 +136,7 @@ def session(app, db): # pylint: disable=redefined-outer-name, invalid-name txn = conn.begin() options = dict(bind=conn, binds={}) - sess = db.create_scoped_session(options=options) + sess = db._make_scoped_session(options=options) # establish a SAVEPOINT just before beginning the test # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint) diff --git a/queue_services/auth-queue/Dockerfile b/queue_services/auth-queue/Dockerfile index 80dba6df31..da8b2ea7dc 100644 --- a/queue_services/auth-queue/Dockerfile +++ b/queue_services/auth-queue/Dockerfile @@ -1,35 +1,80 @@ -FROM python:3.8.5-buster +FROM python:3.12.5-bullseye as development_build +USER root ARG VCS_REF="missing" ARG BUILD_DATE="missing" ENV VCS_REF=${VCS_REF} ENV BUILD_DATE=${BUILD_DATE} +ENV PORT=8080 LABEL org.label-schema.vcs-ref=${VCS_REF} \ org.label-schema.build-date=${BUILD_DATE} -USER root +LABEL vendor="BCROS" + +ARG APP_ENV \ + # Needed for fixing permissions of files created by Docker: + UID=1000 \ + GID=1000 + +ENV APP_ENV=${APP_ENV} \ + # python: + PYTHONFAULTHANDLER=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONHASHSEED=random \ + PYTHONDONTWRITEBYTECODE=1 \ + # pip: + PIP_NO_CACHE_DIR=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + PIP_DEFAULT_TIMEOUT=100 \ + PIP_ROOT_USER_ACTION=ignore \ + # poetry: + POETRY_VERSION=1.8.3 \ + POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_CREATE=false \ + POETRY_CACHE_DIR='/var/cache/pypoetry' \ + POETRY_HOME='/usr/local' + +SHELL ["/bin/bash", "-eo", "pipefail", "-c"] -# Create working directory -RUN mkdir /opt/app-root && chmod 755 /opt/app-root -WORKDIR /opt/app-root +RUN apt-get update && apt-get upgrade -y \ + && apt-get install --no-install-recommends -y \ + bash \ + build-essential \ + curl \ + git \ + libpq-dev \ + && curl -sSL 'https://install.python-poetry.org' | python3 - \ + && poetry --version \ + && poetry config installer.max-workers 1 \ + # Cleaning 
cache: + && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ + && apt-get clean -y && rm -rf /var/lib/apt/lists/* -# Install the requirements -COPY ./requirements.txt . +WORKDIR /code -RUN pip install --upgrade pip -RUN pip install --no-cache-dir -r requirements.txt +RUN groupadd -g "${GID}" -r web \ + && useradd -d '/code' -g web -l -r -u "${UID}" web \ + && chown web:web -R '/code' -COPY . . +COPY --chown=web:web ./poetry.lock ./pyproject.toml /code/ -RUN pip install . +COPY --chown=web:web ./src /code/src +COPY --chown=web:web ./README.md /code -USER 1001 +RUN --mount=type=cache,target="$POETRY_CACHE_DIR" \ + echo "$APP_ENV" \ + && poetry version \ + && poetry run pip install -U pip \ + && poetry install \ + $(if [ -z ${APP_ENV+x} ] || [ "$APP_ENV" = 'production' ]; then echo '--only main'; fi) \ + --no-interaction --no-ansi -# Set Python path -ENV PYTHONPATH=/opt/app-root/src +# Running as non-root user: +USER web -#EXPOSE 8080 +FROM development_build AS production_build +COPY --chown=web:web . 
/code -CMD ["gunicorn", "-b 0.0.0.0:8080", "app:app"] +CMD gunicorn --bind 0.0.0.0:${PORT} --config /code/gunicorn_config.py app:app diff --git a/queue_services/auth-queue/MANIFEST.in b/queue_services/auth-queue/MANIFEST.in deleted file mode 100644 index 1a342bdebf..0000000000 --- a/queue_services/auth-queue/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include requirements.txt -include config.py -include logging.conf -include LICENSE -include README.md \ No newline at end of file diff --git a/queue_services/auth-queue/Makefile b/queue_services/auth-queue/Makefile index 0c3fa31ca0..db23350c2f 100644 --- a/queue_services/auth-queue/Makefile +++ b/queue_services/auth-queue/Makefile @@ -15,7 +15,8 @@ DOCKER_NAME:=auth-queue setup: install install-dev ## Setup the project clean: clean-build clean-pyc clean-test ## Clean the project - rm -rf venv/ + rm -rf .venv/ + rm -rf poetry.lock clean-build: ## Clean build files rm -fr build/ @@ -36,25 +37,18 @@ clean-test: ## clean test files rm -f .coverage rm -fr htmlcov/ -build-req: clean ## Upgrade requirements - test -f venv/bin/activate || python3 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . venv/bin/activate ;\ - pip install --upgrade pip ;\ - pip install -Ur requirements/prod.txt ;\ - pip freeze | sort > requirements.txt ;\ - cat requirements/repo-libraries.txt >> requirements.txt ;\ - pip install -Ur requirements/repo-libraries.txt +update: ## Upgrade lock + poetry update install: clean ## Install python virtrual environment - test -f venv/bin/activate || python3 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . venv/bin/activate ;\ - pip install --upgrade pip ;\ - pip install -Ur requirements.txt + unset HOME ## unset HOME because it's in the DEV .env file, will cause permissions issues + pip install poetry ;\ + poetry config virtualenvs.in-project true ;\ + poetry install -install-dev: ## Install local application - . venv/bin/activate ; \ - pip install -Ur requirements/dev.txt; \ - pip install -e . 
+install-dev: ## Instal development dependencies + poetry add --dev pylint astroid + poetry install --with dev ################################################################################# # COMMANDS - CI # @@ -62,15 +56,15 @@ install-dev: ## Install local application ci: lint flake8 test ## CI flow pylint: ## Linting with pylint - . venv/bin/activate && pylint --rcfile=setup.cfg src/$(PROJECT_NAME) + poetry run pylint --rcfile=setup.cfg src/$(PROJECT_NAME) flake8: ## Linting with flake8 - . venv/bin/activate && flake8 src/$(PROJECT_NAME) tests + poetry run flake8 src/$(PROJECT_NAME) tests lint: pylint flake8 ## run all lint type scripts test: ## Unit testing - . venv/bin/activate && pytest + poetry run pytest mac-cov: test ## Run the coverage report and display in a browser window (mac) @open -a "Google Chrome" htmlcov/index.html @@ -131,7 +125,7 @@ tag: push ## tag image ################################################################################# run: ## Run the project in local - . 
venv/bin/activate && python -m flask run -p 5001 + poetry run flask run -p 5000 ################################################################################# # Self Documenting Commands # diff --git a/queue_services/auth-queue/app.py b/queue_services/auth-queue/app.py index 7cfb892b00..93a39d4d56 100755 --- a/queue_services/auth-queue/app.py +++ b/queue_services/auth-queue/app.py @@ -21,5 +21,5 @@ app = create_app() if __name__ == '__main__': - server_port = os.environ.get('PORT', '5001') + server_port = os.environ.get('PORT', '8080') app.run(debug=False, port=server_port, host='0.0.0.0') diff --git a/queue_services/auth-queue/devops/gcp/clouddeploy.yaml b/queue_services/auth-queue/devops/gcp/clouddeploy.yaml new file mode 100644 index 0000000000..96136739d0 --- /dev/null +++ b/queue_services/auth-queue/devops/gcp/clouddeploy.yaml @@ -0,0 +1,75 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: deploy.cloud.google.com/v1 +kind: DeliveryPipeline +metadata: + name: auth-queue-pipeline +description: Deployment pipeline +serialPipeline: + stages: + - targetId: gtksf3-dev + profiles: [dev] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "development" + deploy-project-id: "gtksf3-dev" + service-name: "auth-queue-dev" + container-name: "auth-queue-dev" + cloudsql-instances: "gtksf3-dev:northamerica-northeast1:auth-db-dev" + service-account: "sa-api@gtksf3-dev.iam.gserviceaccount.com" + - targetId: gtksf3-test + profiles: [test] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "development" + deploy-project-id: "gtksf3-test" + service-name: "auth-queue-test" + container-name: "auth-queue-test" + cloudsql-instances: "gtksf3-test:northamerica-northeast1:auth-db-test" + service-account: "sa-api@gtksf3-test.iam.gserviceaccount.com" + - targetId: gtksf3-sandbox + profiles: [sandbox] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "production" + deploy-project-id: "gtksf3-tools" + service-name: "auth-queue-sandbox" + container-name: "auth-queue-sandbox" + cloudsql-instances: "gtksf3-tools:northamerica-northeast1:auth-db-sandbox" + service-account: "sa-api@gtksf3-tools.iam.gserviceaccount.com" + - targetId: gtksf3-prod + profiles: [prod] + strategy: + standard: + verify: false + deployParameters: + - values: + deploy-env: "production" + deploy-project-id: "gtksf3-prod" + service-name: "auth-queue-prod" + container-name: "auth-queue-prod" + cloudsql-instances: "gtksf3-prod:northamerica-northeast1:auth-db-prod" + service-account: "sa-api@gtksf3-prod.iam.gserviceaccount.com" + max-scale: "10" + container-concurrency: "20" diff --git a/queue_services/auth-queue/devops/vaults.gcp.env b/queue_services/auth-queue/devops/vaults.gcp.env new file mode 100644 index 0000000000..ec6777ca3a --- /dev/null +++ b/queue_services/auth-queue/devops/vaults.gcp.env 
@@ -0,0 +1,17 @@ +AUTH_LD_SDK_KEY="op://launchdarkly/$APP_ENV/business-api/BUSINESS_API_LD_SDK_KEY" +DATABASE_USERNAME="op://database/$APP_ENV/auth-db-gcp/DATABASE_USERNAME" +DATABASE_PASSWORD="op://database/$APP_ENV/auth-db-gcp/DATABASE_PASSWORD" +DATABASE_PORT="op://database/$APP_ENV/auth-db-gcp/DATABASE_PORT" +DATABASE_NAME="op://database/$APP_ENV/auth-db-gcp/DATABASE_NAME" +DATABASE_UNIX_SOCKET="op://database/$APP_ENV/auth-db-gcp/DATABASE_UNIX_SOCKET" +ACCOUNT_MAILER_TOPIC="op://gcp-queue/$APP_ENV/topics/ACCOUNT_MAILER_TOPIC" +GCP_AUTH_KEY="op://gcp-queue/$APP_ENV/gtksf3/AUTHPAY_GCP_AUTH_KEY" +PAY_API_URL="op://API/$APP_ENV/pay-api/PAY_API_URL" +PAY_API_VERSION="op://API/$APP_ENV/pay-api/PAY_API_VERSION" +JWT_OIDC_ISSUER="op://keycloak/$APP_ENV/jwt-base/JWT_OIDC_ISSUER" +SBC_AUTH_ADMIN_CLIENT_ID="op://keycloak/$APP_ENV/sbc-auth-admin/SBC_AUTH_ADMIN_CLIENT_ID" +SBC_AUTH_ADMIN_CLIENT_SECRET="op://keycloak/$APP_ENV/sbc-auth-admin/SBC_AUTH_ADMIN_CLIENT_SECRET" +BUSINESS_SERVICE_ACCOUNT="op://gcp-queue/$APP_ENV/a083gt/BUSINESS_SERVICE_ACCOUNT" +AUTHPAY_SERVICE_ACCOUNT="op://gcp-queue/$APP_ENV/gtksf3/AUTHPAY_SERVICE_ACCOUNT" +VPC_CONNECTOR="op://CD/$APP_ENV/auth-queue/VPC_CONNECTOR" +AUTH_QUEUE_AUDIENCE_SUB="op://gcp-queue/$APP_ENV/authpay/AUTH_QUEUE_AUDIENCE_SUB" diff --git a/queue_services/auth-queue/devops/vaults.json b/queue_services/auth-queue/devops/vaults.json deleted file mode 100644 index 761e47e6d9..0000000000 --- a/queue_services/auth-queue/devops/vaults.json +++ /dev/null @@ -1,42 +0,0 @@ -[ - { - "vault": "relationship", - "application": [ - "postgres-auth" - ] - }, - { - "vault": "sentry", - "application": [ - "relationship-api" - ] - }, - { - "vault": "API", - "application": [ - "pay-api" - ] - }, - { - "vault": "keycloak", - "application": [ - "jwt-base", - "sbc-auth-admin" - ] - }, - { - "vault": "launchdarkly", - "application": [ - "auth" - ] - }, - { - "vault": "gcp-queue", - "application": [ - "a083gt", - "authpay", - "topics", - "gtksf3" - ] - } 
-] diff --git a/auth-api/tests/unit/conf/test_version.py b/queue_services/auth-queue/gunicorn_config.py similarity index 53% rename from auth-api/tests/unit/conf/test_version.py rename to queue_services/auth-queue/gunicorn_config.py index d24fc12ddd..4a32d91c3d 100644 --- a/auth-api/tests/unit/conf/test_version.py +++ b/queue_services/auth-queue/gunicorn_config.py @@ -1,4 +1,4 @@ -# Copyright © 2019 Province of British Columbia +# Copyright © 2024 Province of British Columbia # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,18 +11,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +"""The configuration for gunicorn, which picks up the + runtime options from environment variables +""" -"""Tests to assure the version utilities. +import os -Test-Suite to ensure that the version utilities are working as expected. -""" -from auth_api import utils -from auth_api.version import __version__ -from tests import skip_in_pod +workers = int(os.environ.get('GUNICORN_PROCESSES', '1')) # pylint: disable=invalid-name +threads = int(os.environ.get('GUNICORN_THREADS', '1')) # pylint: disable=invalid-name -@skip_in_pod -def test_get_version(): - """Assert thatThe version is returned correctly.""" - rv = utils.run_version.get_run_version() - assert rv == __version__ +forwarded_allow_ips = '*' # pylint: disable=invalid-name +secure_scheme_headers = {'X-Forwarded-Proto': 'https'} # pylint: disable=invalid-name \ No newline at end of file diff --git a/queue_services/auth-queue/poetry.lock b/queue_services/auth-queue/poetry.lock new file mode 100644 index 0000000000..284127c431 --- /dev/null +++ b/queue_services/auth-queue/poetry.lock @@ -0,0 +1,3171 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "aiohappyeyeballs" +version = "2.4.3" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, +] + +[[package]] +name = "aiohttp" +version = "3.10.8" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.10.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a1ba7bc139592339ddeb62c06486d0fa0f4ca61216e14137a40d626c81faf10c"}, + {file = "aiohttp-3.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85e4d7bd05d18e4b348441e7584c681eff646e3bf38f68b2626807f3add21aa2"}, + {file = "aiohttp-3.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69de056022e7abf69cb9fec795515973cc3eeaff51e3ea8d72a77aa933a91c52"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3587506898d4a404b33bd19689286ccf226c3d44d7a73670c8498cd688e42c"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe285a697c851734285369614443451462ce78aac2b77db23567507484b1dc6f"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10c7932337285a6bfa3a5fe1fd4da90b66ebfd9d0cbd1544402e1202eb9a8c3e"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd9716ef0224fe0d0336997eb242f40619f9f8c5c57e66b525a1ebf9f1d8cebe"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ceacea31f8a55cdba02bc72c93eb2e1b77160e91f8abd605969c168502fd71eb"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:9721554bfa9e15f6e462da304374c2f1baede3cb06008c36c47fa37ea32f1dc4"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:22cdeb684d8552490dd2697a5138c4ecb46f844892df437aaf94f7eea99af879"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e56bb7e31c4bc79956b866163170bc89fd619e0581ce813330d4ea46921a4881"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3a95d2686bc4794d66bd8de654e41b5339fab542b2bca9238aa63ed5f4f2ce82"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d82404a0e7b10e0d7f022cf44031b78af8a4f99bd01561ac68f7c24772fed021"}, + {file = "aiohttp-3.10.8-cp310-cp310-win32.whl", hash = "sha256:4e10b04542d27e21538e670156e88766543692a0a883f243ba8fad9ddea82e53"}, + {file = "aiohttp-3.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:680dbcff5adc7f696ccf8bf671d38366a1f620b5616a1d333d0cb33956065395"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:33a68011a38020ed4ff41ae0dbf4a96a202562ecf2024bdd8f65385f1d07f6ef"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c7efa6616a95e3bd73b8a69691012d2ef1f95f9ea0189e42f338fae080c2fc6"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb9b9764cfb4459acf01c02d2a59d3e5066b06a846a364fd1749aa168efa2be"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7f270f4ca92760f98a42c45a58674fff488e23b144ec80b1cc6fa2effed377"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6984dda9d79064361ab58d03f6c1e793ea845c6cfa89ffe1a7b9bb400dfd56bd"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f6d47e392c27206701565c8df4cac6ebed28fdf6dcaea5b1eea7a4631d8e6db"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a72f89aea712c619b2ca32c6f4335c77125ede27530ad9705f4f349357833695"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36074b26f3263879ba8e4dbd33db2b79874a3392f403a70b772701363148b9f"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e32148b4a745e70a255a1d44b5664de1f2e24fcefb98a75b60c83b9e260ddb5b"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5aa1a073514cf59c81ad49a4ed9b5d72b2433638cd53160fd2f3a9cfa94718db"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d3a79200a9d5e621c4623081ddb25380b713c8cf5233cd11c1aabad990bb9381"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e45fdfcb2d5bcad83373e4808825b7512953146d147488114575780640665027"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f78e2a78432c537ae876a93013b7bc0027ba5b93ad7b3463624c4b6906489332"}, + {file = "aiohttp-3.10.8-cp311-cp311-win32.whl", hash = "sha256:f8179855a4e4f3b931cb1764ec87673d3fbdcca2af496c8d30567d7b034a13db"}, + {file = "aiohttp-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:ef9b484604af05ca745b6108ca1aaa22ae1919037ae4f93aaf9a37ba42e0b835"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ab2d6523575fc98896c80f49ac99e849c0b0e69cc80bf864eed6af2ae728a52b"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f5d5d5401744dda50b943d8764508d0e60cc2d3305ac1e6420935861a9d544bc"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de23085cf90911600ace512e909114385026b16324fa203cc74c81f21fd3276a"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4618f0d2bf523043866a9ff8458900d8eb0a6d4018f251dae98e5f1fb699f3a8"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:21c1925541ca84f7b5e0df361c0a813a7d6a56d3b0030ebd4b220b8d232015f9"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:497a7d20caea8855c5429db3cdb829385467217d7feb86952a6107e033e031b9"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c887019dbcb4af58a091a45ccf376fffe800b5531b45c1efccda4bedf87747ea"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40d2d719c3c36a7a65ed26400e2b45b2d9ed7edf498f4df38b2ae130f25a0d01"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57359785f27394a8bcab0da6dcd46706d087dfebf59a8d0ad2e64a4bc2f6f94f"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a961ee6f2cdd1a2be4735333ab284691180d40bad48f97bb598841bfcbfb94ec"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:fe3d79d6af839ffa46fdc5d2cf34295390894471e9875050eafa584cb781508d"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a281cba03bdaa341c70b7551b2256a88d45eead149f48b75a96d41128c240b3"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c6769d71bfb1ed60321363a9bc05e94dcf05e38295ef41d46ac08919e5b00d19"}, + {file = "aiohttp-3.10.8-cp312-cp312-win32.whl", hash = "sha256:a3081246bab4d419697ee45e555cef5cd1def7ac193dff6f50be761d2e44f194"}, + {file = "aiohttp-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:ab1546fc8e00676febc81c548a876c7bde32f881b8334b77f84719ab2c7d28dc"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b1a012677b8e0a39e181e218de47d6741c5922202e3b0b65e412e2ce47c39337"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2df786c96c57cd6b87156ba4c5f166af7b88f3fc05f9d592252fdc83d8615a3c"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:8885ca09d3a9317219c0831276bfe26984b17b2c37b7bf70dd478d17092a4772"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dbf252ac19860e0ab56cd480d2805498f47c5a2d04f5995d8d8a6effd04b48c"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2036479b6b94afaaca7d07b8a68dc0e67b0caf5f6293bb6a5a1825f5923000"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:365783e1b7c40b59ed4ce2b5a7491bae48f41cd2c30d52647a5b1ee8604c68ad"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:270e653b5a4b557476a1ed40e6b6ce82f331aab669620d7c95c658ef976c9c5e"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8960fabc20bfe4fafb941067cda8e23c8c17c98c121aa31c7bf0cdab11b07842"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f21e8f2abed9a44afc3d15bba22e0dfc71e5fa859bea916e42354c16102b036f"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fecd55e7418fabd297fd836e65cbd6371aa4035a264998a091bbf13f94d9c44d"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:badb51d851358cd7535b647bb67af4854b64f3c85f0d089c737f75504d5910ec"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e860985f30f3a015979e63e7ba1a391526cdac1b22b7b332579df7867848e255"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:71462f8eeca477cbc0c9700a9464e3f75f59068aed5e9d4a521a103692da72dc"}, + {file = "aiohttp-3.10.8-cp313-cp313-win32.whl", hash = "sha256:177126e971782769b34933e94fddd1089cef0fe6b82fee8a885e539f5b0f0c6a"}, + {file = "aiohttp-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:98a4eb60e27033dee9593814ca320ee8c199489fbc6b2699d0f710584db7feb7"}, + {file = 
"aiohttp-3.10.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ffef3d763e4c8fc97e740da5b4d0f080b78630a3914f4e772a122bbfa608c1db"}, + {file = "aiohttp-3.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:597128cb7bc5f068181b49a732961f46cb89f85686206289d6ccb5e27cb5fbe2"}, + {file = "aiohttp-3.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f23a6c1d09de5de89a33c9e9b229106cb70dcfdd55e81a3a3580eaadaa32bc92"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da57af0c54a302b7c655fa1ccd5b1817a53739afa39924ef1816e7b7c8a07ccb"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7a6af57091056a79a35104d6ec29d98ec7f1fb7270ad9c6fff871b678d1ff8"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32710d6b3b6c09c60c794d84ca887a3a2890131c0b02b3cefdcc6709a2260a7c"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b91f4f62ad39a8a42d511d66269b46cb2fb7dea9564c21ab6c56a642d28bff5"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:471a8c47344b9cc309558b3fcc469bd2c12b49322b4b31eb386c4a2b2d44e44a"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc0e7f91705445d79beafba9bb3057dd50830e40fe5417017a76a214af54e122"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:85431c9131a9a0f65260dc7a65c800ca5eae78c4c9931618f18c8e0933a0e0c1"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:b91557ee0893da52794b25660d4f57bb519bcad8b7df301acd3898f7197c5d81"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:4954e6b06dd0be97e1a5751fc606be1f9edbdc553c5d9b57d72406a8fbd17f9d"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:a087c84b4992160ffef7afd98ef24177c8bd4ad61c53607145a8377457385100"}, + {file = "aiohttp-3.10.8-cp38-cp38-win32.whl", hash = "sha256:e1f0f7b27171b2956a27bd8f899751d0866ddabdd05cbddf3520f945130a908c"}, + {file = "aiohttp-3.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:c4916070e12ae140110aa598031876c1bf8676a36a750716ea0aa5bd694aa2e7"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5284997e3d88d0dfb874c43e51ae8f4a6f4ca5b90dcf22995035187253d430db"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9443d9ebc5167ce1fbb552faf2d666fb22ef5716a8750be67efd140a7733738c"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b667e2a03407d79a76c618dc30cedebd48f082d85880d0c9c4ec2faa3e10f43e"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98fae99d5c2146f254b7806001498e6f9ffb0e330de55a35e72feb7cb2fa399b"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8296edd99d0dd9d0eb8b9e25b3b3506eef55c1854e9cc230f0b3f885f680410b"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ce46dfb49cfbf9e92818be4b761d4042230b1f0e05ffec0aad15b3eb162b905"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c38cfd355fd86c39b2d54651bd6ed7d63d4fe3b5553f364bae3306e2445f847"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:713dff3f87ceec3bde4f3f484861464e722cf7533f9fa6b824ec82bb5a9010a7"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21a72f4a9c69a8567a0aca12042f12bba25d3139fd5dd8eeb9931f4d9e8599cd"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6d1ad868624f6cea77341ef2877ad4e71f7116834a6cd7ec36ec5c32f94ee6ae"}, + {file = 
"aiohttp-3.10.8-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a78ba86d5a08207d1d1ad10b97aed6ea48b374b3f6831d02d0b06545ac0f181e"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:aff048793d05e1ce05b62e49dccf81fe52719a13f4861530706619506224992b"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d088ca05381fd409793571d8e34eca06daf41c8c50a05aeed358d2d340c7af81"}, + {file = "aiohttp-3.10.8-cp39-cp39-win32.whl", hash = "sha256:ee97c4e54f457c366e1f76fbbf3e8effee9de57dae671084a161c00f481106ce"}, + {file = "aiohttp-3.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:d95ae4420669c871667aad92ba8cce6251d61d79c1a38504621094143f94a8b4"}, + {file = "aiohttp-3.10.8.tar.gz", hash = "sha256:21f8225f7dc187018e8433c9326be01477fb2810721e048b33ac49091b19fb4a"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.12.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "alembic" +version = "1.13.3" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"}, + {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "asn1crypto" +version = "1.5.1" +description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" +optional = false +python-versions = "*" +files = [ + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, +] + +[[package]] +name = "astroid" +version = "3.2.4" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, +] + +[[package]] +name = "attrs" +version = "24.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + +[[package]] +name = "auth-api" +version = "3.0.5" +description = "" +optional = false +python-versions = "^3.12" +files = [] +develop = false + +[package.dependencies] +aiohttp = "^3.10.2" +bcrypt = "^4.2.0" +build-deps = {git = "https://github.com/bcgov/sbc-auth.git", rev = "feature-gcp-migration", subdirectory = "build-deps"} +cachelib = "0.9.0" +cattrs = "^23.2.3" +flask-caching = "2.3.0" +flask-cors = "^5.0.0" +flask-jwt-oidc = {git = 
"https://github.com/seeker25/flask-jwt-oidc.git", branch = "main"} +flask-mail = "^0.10.0" +flask-marshmallow = "^1.2.1" +flask-migrate = "^4.0.7" +flask-moment = "^1.0.6" +flask-sqlalchemy = "^3.1.1" +gunicorn = "^22.0.0" +marshmallow-sqlalchemy = "^1.0.0" +minio = "^7.2.7" +orjson = "^3.10.7" +pg8000 = "^1.31.2" +psycopg2 = "^2.9.9" +pyhumps = "^3.8.0" +sql-versioning = {git = "https://github.com/bcgov/sbc-connect-common.git", branch = "main", subdirectory = "python/sql-versioning"} +sqlalchemy-utils = "^0.41.2" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-auth.git" +reference = "feature-gcp-migration" +resolved_reference = "450c8ee6f42aec444fc06e2b9fd39f60fa053d91" +subdirectory = "auth-api" + +[[package]] +name = "autopep8" +version = "1.7.0" +description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" +optional = false +python-versions = "*" +files = [ + {file = "autopep8-1.7.0-py2.py3-none-any.whl", hash = "sha256:6f09e90a2be784317e84dc1add17ebfc7abe3924239957a37e5040e27d812087"}, + {file = "autopep8-1.7.0.tar.gz", hash = "sha256:ca9b1a83e53a7fad65d731dc7a2a2d50aa48f43850407c59f6a1a306c4201142"}, +] + +[package.dependencies] +pycodestyle = ">=2.9.1" +toml = "*" + +[[package]] +name = "bcrypt" +version = "4.2.0" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, + {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, + {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, + {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, + {file = 
"bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, + {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, + {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, + {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "build-deps" +version = "1.0.0" +description = 
"common dependencies for all auth tools" +optional = false +python-versions = "^3.12" +files = [] +develop = false + +[package.dependencies] +attrs = "24.2.0" +CacheControl = "0.14.0" +cachetools = "5.5.0" +certifi = "2024.8.30" +flask = "3.0.2" +gcp-queue = {git = "https://github.com/bcgov/sbc-connect-common.git", branch = "main", subdirectory = "python/gcp-queue"} +itsdangerous = "2.1.2" +jinja2 = "3.1.3" +jsonschema = "4.17.3" +launchdarkly-server-sdk = "9.5.0" +MarkupSafe = "2.1.1" +python-dotenv = "^1.0.1" +requests = "2.32.3" +sbc-common-components = {git = "https://github.com/bolyachevets/sbc-common-components.git", rev = "camel_case_empty_dict", subdirectory = "python"} +structured-logging = {git = "https://github.com/bcgov/sbc-connect-common.git", branch = "main", subdirectory = "python/structured-logging"} +Werkzeug = "3.0.0" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-auth.git" +reference = "feature-gcp-migration" +resolved_reference = "450c8ee6f42aec444fc06e2b9fd39f60fa053d91" +subdirectory = "build-deps" + +[[package]] +name = "cachecontrol" +version = "0.14.0" +description = "httplib2 caching for requests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, + {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, +] + +[package.dependencies] +msgpack = ">=0.5.2,<2.0.0" +requests = ">=2.16.0" + +[package.extras] +dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] +filecache = ["filelock (>=3.8.0)"] +redis = ["redis (>=2.10.5)"] + +[[package]] +name = "cachelib" +version = "0.9.0" +description = "A collection of cache libraries in the same API interface." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, + {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", 
hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = 
"sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "5.5" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +files = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = 
"coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = 
"coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", 
hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] + +[package.extras] +toml = ["toml"] + +[[package]] +name = "dill" +version = "0.3.9" +description = "serialize all of Python" +optional = false +python-versions = 
">=3.8" +files = [ + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "ecdsa" +version = "0.19.0" +description = "ECDSA cryptographic signature library (pure python)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6" +files = [ + {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"}, + {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"}, +] + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + +[[package]] +name = "expiringdict" +version = "1.2.2" +description = "Dictionary with auto-expiring values for caching purposes" +optional = false +python-versions = "*" +files = [ + {file = "expiringdict-1.2.2-py3-none-any.whl", hash = "sha256:09a5d20bc361163e6432a874edd3179676e935eb81b925eccef48d409a8a45e8"}, + {file = "expiringdict-1.2.2.tar.gz", hash = "sha256:300fb92a7e98f15b05cf9a856c1415b3bc4f2e132be07daa326da6414c23ee09"}, +] + +[package.extras] +tests = ["coverage", "coveralls", "dill", "mock", "nose"] + +[[package]] +name = "faker" +version = "8.16.0" +description = "Faker is a Python package that generates fake data for you." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "Faker-8.16.0-py3-none-any.whl", hash = "sha256:bb10913b9d3ac2aa37180f816c82040e81f9e0c32cb08445533f293cec8930bf"}, + {file = "Faker-8.16.0.tar.gz", hash = "sha256:d70b375d0af0e4c3abd594003691a1055a96281a414884e623d27bccc7d781da"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" +text-unidecode = "1.3" + +[[package]] +name = "flake8" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" + +[[package]] +name = "flake8-blind-except" +version = "0.1.1" +description = "A flake8 extension that checks for blind except: statements" +optional = false +python-versions = "*" +files = [ + {file = "flake8-blind-except-0.1.1.tar.gz", hash = "sha256:aca3356633825544cec51997260fe31a8f24a1a2795ce8e81696b9916745e599"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "flake8-docstrings" +version = "1.7.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, +] + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-isort" +version = "4.2.0" +description = "flake8 plugin that integrates isort ." 
+optional = false +python-versions = "*" +files = [ + {file = "flake8-isort-4.2.0.tar.gz", hash = "sha256:26571500cd54976bbc0cf1006ffbcd1a68dd102f816b7a1051b219616ba9fee0"}, + {file = "flake8_isort-4.2.0-py3-none-any.whl", hash = "sha256:5b87630fb3719bf4c1833fd11e0d9534f43efdeba524863e15d8f14a7ef6adbf"}, +] + +[package.dependencies] +flake8 = ">=3.2.1,<6" +isort = ">=4.3.5,<6" + +[package.extras] +test = ["pytest-cov"] + +[[package]] +name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +optional = false +python-versions = "*" +files = [ + {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, + {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, +] + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-quotes" +version = "3.4.0" +description = "Flake8 lint for quotes." +optional = false +python-versions = "*" +files = [ + {file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"}, +] + +[package.dependencies] +flake8 = "*" +setuptools = "*" + +[[package]] +name = "flask" +version = "3.0.2" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"}, + {file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-caching" +version = "2.3.0" +description = "Adds caching support to Flask applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Flask_Caching-2.3.0-py3-none-any.whl", hash = "sha256:51771c75682e5abc1483b78b96d9131d7941dc669b073852edfa319dd4e29b6e"}, + {file = "flask_caching-2.3.0.tar.gz", hash = "sha256:d7e4ca64a33b49feb339fcdd17e6ba25f5e01168cf885e53790e885f83a4d2cf"}, +] + +[package.dependencies] +cachelib = ">=0.9.0,<0.10.0" +Flask = "*" + +[[package]] +name = "flask-cors" +version = "5.0.0" +description = "A Flask extension adding a decorator for CORS support" +optional = false +python-versions = "*" +files = [ + {file = "Flask_Cors-5.0.0-py2.py3-none-any.whl", hash = "sha256:b9e307d082a9261c100d8fb0ba909eec6a228ed1b60a8315fd85f783d61910bc"}, + {file = "flask_cors-5.0.0.tar.gz", hash = "sha256:5aadb4b950c4e93745034594d9f3ea6591f734bb3662e16e255ffbf5e89c88ef"}, +] + +[package.dependencies] +Flask = ">=0.9" + +[[package]] +name = "flask-jwt-oidc" +version = "0.7.0" +description = "Opinionated flask oidc client" +optional = false +python-versions = "^3.9" +files = [] +develop = false + +[package.dependencies] +cachelib = "0.*" +Flask = ">=2" +python-jose = "^3.3.0" +six = "^1.16.0" + +[package.source] +type = "git" +url = "https://github.com/seeker25/flask-jwt-oidc.git" +reference = "main" +resolved_reference = "d208d4643e3b17358f7295bee0f955e67ba6ac88" + +[[package]] +name = "flask-mail" +version = "0.10.0" +description = "Flask extension for sending email" +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask_mail-0.10.0-py3-none-any.whl", hash = "sha256:a451e490931bb3441d9b11ebab6812a16bfa81855792ae1bf9c1e1e22c4e51e7"}, + {file = "flask_mail-0.10.0.tar.gz", hash = "sha256:44083e7b02bbcce792209c06252f8569dd5a325a7aaa76afe7330422bd97881d"}, +] + +[package.dependencies] +blinker = "*" +flask = "*" + +[[package]] +name = "flask-marshmallow" +version = "1.2.1" +description = "Flask + marshmallow for beautiful APIs" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"flask_marshmallow-1.2.1-py3-none-any.whl", hash = "sha256:10b5048ecfaa26f7c8d0aed7d81083164450e6be8e81c04b3d4a586b3f7b6678"}, + {file = "flask_marshmallow-1.2.1.tar.gz", hash = "sha256:00ee96399ed664963afff3b5d6ee518640b0f91dbc2aace2b5abcf32f40ef23a"}, +] + +[package.dependencies] +Flask = ">=2.2" +marshmallow = ">=3.0.0" + +[package.extras] +dev = ["flask-marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["Sphinx (==7.2.6)", "marshmallow-sqlalchemy (>=0.19.0)", "sphinx-issues (==4.0.0)"] +sqlalchemy = ["flask-sqlalchemy (>=3.0.0)", "marshmallow-sqlalchemy (>=0.29.0)"] +tests = ["flask-marshmallow[sqlalchemy]", "pytest"] + +[[package]] +name = "flask-migrate" +version = "4.0.7" +description = "SQLAlchemy database migrations for Flask applications using Alembic." +optional = false +python-versions = ">=3.6" +files = [ + {file = "Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622"}, + {file = "Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617"}, +] + +[package.dependencies] +alembic = ">=1.9.0" +Flask = ">=0.9" +Flask-SQLAlchemy = ">=1.0" + +[[package]] +name = "flask-moment" +version = "1.0.6" +description = "Formatting of dates and times in Flask templates using moment.js." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "Flask_Moment-1.0.6-py3-none-any.whl", hash = "sha256:3ae8baea20a41e99f457b9710ecd1368911dd5133f09a27583eb0dcb3491e31d"}, + {file = "flask_moment-1.0.6.tar.gz", hash = "sha256:2f8969907cbacde4a88319792e8f920ba5c9dd9d99ced2346cad563795302b88"}, +] + +[package.dependencies] +Flask = "*" +packaging = ">=14.1" + +[package.extras] +docs = ["sphinx"] + +[[package]] +name = "flask-opentracing" +version = "1.1.0" +description = "OpenTracing support for Flask applications" +optional = false +python-versions = "*" +files = [ + {file = "Flask-OpenTracing-1.1.0.tar.gz", hash = "sha256:a9a39d367fbe7e9ed9c77b90ac48159c1a3e82982a5abf84d3f4d710d24580ac"}, +] + +[package.dependencies] +Flask = "*" +opentracing = ">=2.0,<3" + +[package.extras] +tests = ["flake8", "flake8-quotes", "mock", "pytest", "pytest-cov"] + +[[package]] +name = "flask-sqlalchemy" +version = "3.1.1" +description = "Add SQLAlchemy support to your Flask application." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, + {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, +] + +[package.dependencies] +flask = ">=2.2.5" +sqlalchemy = ">=2.0.16" + +[[package]] +name = "freezegun" +version = "1.5.1" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, + {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements 
collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = 
"frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = 
"frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = 
"sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "gcp-queue" +version = "0.3.0" +description = "" +optional = false +python-versions = "^3.8" +files = [] +develop = false + +[package.dependencies] +flask = ">=1" +google-auth = "^2.28.2" +google-cloud-pubsub = "^2.20.2" +simple-cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py.git"} + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/gcp-queue" + +[[package]] +name = "google-api-core" +version = "1.34.1" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-1.34.1.tar.gz", hash = "sha256:3399c92887a97d33038baa4bfd3bf07acc05d474b0171f333e1f641c1364e552"}, + {file = "google_api_core-1.34.1-py3-none-any.whl", hash = "sha256:52bcc9d9937735f8a3986fa0bbf9135ae9cf5393a722387e5eced520e39c774a"}, +] + +[package.dependencies] +google-auth = ">=1.25.0,<3.0dev" +googleapis-common-protos = ">=1.56.2,<2.0dev" +grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.0.0dev" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] +grpcio-gcp = ["grpcio-gcp 
(>=0.2.2,<1.0dev)"] + +[[package]] +name = "google-auth" +version = "2.28.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.28.2.tar.gz", hash = "sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30"}, + {file = "google_auth-2.28.2-py2.py3-none-any.whl", hash = "sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-cloud-pubsub" +version = "2.21.4" +description = "Google Cloud Pub/Sub API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-pubsub-2.21.4.tar.gz", hash = "sha256:2cb8c7698adbfea0448db6c89b78aa7217fce07c1e37b46d3e535fa1633f3ae6"}, + {file = "google_cloud_pubsub-2.21.4-py2.py3-none-any.whl", hash = "sha256:e80a4c37c3cd45b68ee3c75eea45af228beeebc3f03915e8ee0b2fccd72e9926"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +grpcio = ">=1.51.3,<2.0dev" +grpcio-status = ">=1.33.2" +proto-plus = {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[package.extras] +libcst = ["libcst (>=0.3.10)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs 
used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "greenlet" +version = "3.1.1" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = 
"greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = 
"greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = 
"greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.0" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, + {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "grpcio" +version = "1.64.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio-1.64.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:3b09c3d9de95461214a11d82cc0e6a46a6f4e1f91834b50782f932895215e5db"}, + {file = "grpcio-1.64.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7e013428ab472892830287dd082b7d129f4d8afef49227a28223a77337555eaa"}, + {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:02cc9cc3f816d30f7993d0d408043b4a7d6a02346d251694d8ab1f78cc723e7e"}, + {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f5de082d936e0208ce8db9095821361dfa97af8767a6607ae71425ac8ace15c"}, + {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7b7bf346391dffa182fba42506adf3a84f4a718a05e445b37824136047686a1"}, + {file = "grpcio-1.64.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b2cbdfba18408389a1371f8c2af1659119e1831e5ed24c240cae9e27b4abc38d"}, + {file = "grpcio-1.64.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aca4f15427d2df592e0c8f3d38847e25135e4092d7f70f02452c0e90d6a02d6d"}, + {file = "grpcio-1.64.0-cp310-cp310-win32.whl", hash = "sha256:7c1f5b2298244472bcda49b599be04579f26425af0fd80d3f2eb5fd8bc84d106"}, + {file = "grpcio-1.64.0-cp310-cp310-win_amd64.whl", hash = "sha256:73f84f9e5985a532e47880b3924867de16fa1aa513fff9b26106220c253c70c5"}, + {file = "grpcio-1.64.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2a18090371d138a57714ee9bffd6c9c9cb2e02ce42c681aac093ae1e7189ed21"}, + {file = "grpcio-1.64.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:59c68df3a934a586c3473d15956d23a618b8f05b5e7a3a904d40300e9c69cbf0"}, + {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b52e1ec7185512103dd47d41cf34ea78e7a7361ba460187ddd2416b480e0938c"}, + {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d598b5d5e2c9115d7fb7e2cb5508d14286af506a75950762aa1372d60e41851"}, + {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01615bbcae6875eee8091e6b9414072f4e4b00d8b7e141f89635bdae7cf784e5"}, + {file = "grpcio-1.64.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0b2dfe6dcace264807d9123d483d4c43274e3f8c39f90ff51de538245d7a4145"}, + {file = "grpcio-1.64.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:7f17572dc9acd5e6dfd3014d10c0b533e9f79cd9517fc10b0225746f4c24b58e"}, + {file = "grpcio-1.64.0-cp311-cp311-win32.whl", hash = "sha256:6ec5ed15b4ffe56e2c6bc76af45e6b591c9be0224b3fb090adfb205c9012367d"}, + {file = "grpcio-1.64.0-cp311-cp311-win_amd64.whl", hash = "sha256:597191370951b477b7a1441e1aaa5cacebeb46a3b0bd240ec3bb2f28298c7553"}, + {file = "grpcio-1.64.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:1ce4cd5a61d4532651079e7aae0fedf9a80e613eed895d5b9743e66b52d15812"}, + {file = "grpcio-1.64.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:650a8150a9b288f40d5b7c1d5400cc11724eae50bd1f501a66e1ea949173649b"}, + {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:8de0399b983f8676a7ccfdd45e5b2caec74a7e3cc576c6b1eecf3b3680deda5e"}, + {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46b8b43ba6a2a8f3103f103f97996cad507bcfd72359af6516363c48793d5a7b"}, + {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a54362f03d4dcfae63be455d0a7d4c1403673498b92c6bfe22157d935b57c7a9"}, + {file = "grpcio-1.64.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1f8ea18b928e539046bb5f9c124d717fbf00cc4b2d960ae0b8468562846f5aa1"}, + {file = "grpcio-1.64.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c56c91bd2923ddb6e7ed28ebb66d15633b03e0df22206f22dfcdde08047e0a48"}, + {file = "grpcio-1.64.0-cp312-cp312-win32.whl", hash = "sha256:874c741c8a66f0834f653a69e7e64b4e67fcd4a8d40296919b93bab2ccc780ba"}, + {file = "grpcio-1.64.0-cp312-cp312-win_amd64.whl", hash = "sha256:0da1d921f8e4bcee307aeef6c7095eb26e617c471f8cb1c454fd389c5c296d1e"}, + {file = "grpcio-1.64.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:c46fb6bfca17bfc49f011eb53416e61472fa96caa0979b4329176bdd38cbbf2a"}, + {file = "grpcio-1.64.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3d2004e85cf5213995d09408501f82c8534700d2babeb81dfdba2a3bff0bb396"}, + {file = 
"grpcio-1.64.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6d5541eb460d73a07418524fb64dcfe0adfbcd32e2dac0f8f90ce5b9dd6c046c"}, + {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f279ad72dd7d64412e10f2443f9f34872a938c67387863c4cd2fb837f53e7d2"}, + {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85fda90b81da25993aa47fae66cae747b921f8f6777550895fb62375b776a231"}, + {file = "grpcio-1.64.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a053584079b793a54bece4a7d1d1b5c0645bdbee729215cd433703dc2532f72b"}, + {file = "grpcio-1.64.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:579dd9fb11bc73f0de061cab5f8b2def21480fd99eb3743ed041ad6a1913ee2f"}, + {file = "grpcio-1.64.0-cp38-cp38-win32.whl", hash = "sha256:23b6887bb21d77649d022fa1859e05853fdc2e60682fd86c3db652a555a282e0"}, + {file = "grpcio-1.64.0-cp38-cp38-win_amd64.whl", hash = "sha256:753cb58683ba0c545306f4e17dabf468d29cb6f6b11832e1e432160bb3f8403c"}, + {file = "grpcio-1.64.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:2186d76a7e383e1466e0ea2b0febc343ffeae13928c63c6ec6826533c2d69590"}, + {file = "grpcio-1.64.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0f30596cdcbed3c98024fb4f1d91745146385b3f9fd10c9f2270cbfe2ed7ed91"}, + {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:d9171f025a196f5bcfec7e8e7ffb7c3535f7d60aecd3503f9e250296c7cfc150"}, + {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf4c8daed18ae2be2f1fc7d613a76ee2a2e28fdf2412d5c128be23144d28283d"}, + {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3550493ac1d23198d46dc9c9b24b411cef613798dc31160c7138568ec26bc9b4"}, + {file = "grpcio-1.64.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3161a8f8bb38077a6470508c1a7301cd54301c53b8a34bb83e3c9764874ecabd"}, + {file = "grpcio-1.64.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:2e8fabe2cc57a369638ab1ad8e6043721014fdf9a13baa7c0e35995d3a4a7618"}, + {file = "grpcio-1.64.0-cp39-cp39-win32.whl", hash = "sha256:31890b24d47b62cc27da49a462efe3d02f3c120edb0e6c46dcc0025506acf004"}, + {file = "grpcio-1.64.0-cp39-cp39-win_amd64.whl", hash = "sha256:5a56797dea8c02e7d3a85dfea879f286175cf4d14fbd9ab3ef2477277b927baa"}, + {file = "grpcio-1.64.0.tar.gz", hash = "sha256:257baf07f53a571c215eebe9679c3058a313fd1d1f7c4eede5a8660108c52d9c"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.64.0)"] + +[[package]] +name = "grpcio-status" +version = "1.48.2" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.48.2.tar.gz", hash = "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"}, + {file = "grpcio_status-1.48.2-py3-none-any.whl", hash = "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.48.2" +protobuf = ">=3.12.0" + +[[package]] +name = "gunicorn" +version = "22.0.0" +description = "WSGI HTTP Server for UNIX" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, + {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = 
"sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-resources" +version = "5.13.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, + {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + +[[package]] +name = "jaeger-client" +version = "4.8.0" +description = "Jaeger Python OpenTracing Tracer implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, +] + +[package.dependencies] +opentracing = ">=2.1,<3.0" +threadloop = ">=1,<2" +thrift = "*" +tornado = ">=4.3" + +[package.extras] +tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-imports", "mock", "mypy", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "pycurl", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-localserver", "pytest-timeout", "pytest-tornado", "tchannel (==2.1.0)"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "launchdarkly-eventsource" +version = "1.2.0" +description = "LaunchDarkly SSE Client" +optional = false +python-versions = ">=3.8" +files = [ + {file = "launchdarkly_eventsource-1.2.0-py3-none-any.whl", hash = "sha256:9b5ec7149e2ad9995be22ad5361deb480c229701e6b0cc799e94aa14f067b77b"}, + {file = "launchdarkly_eventsource-1.2.0.tar.gz", hash = "sha256:8cb3301ec0daeb5e17eaa37b3b65f6660fab851b317e69271185ef2fb42c2fde"}, +] + +[package.dependencies] +urllib3 = ">=1.26.0,<3" + +[[package]] +name = "launchdarkly-server-sdk" +version = "9.5.0" +description = "LaunchDarkly SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"launchdarkly_server_sdk-9.5.0-py3-none-any.whl", hash = "sha256:bf2cf213f9eb71cd43d5f20f2ac9ec9235c693036459e5038a69015a6648c035"}, + {file = "launchdarkly_server_sdk-9.5.0.tar.gz", hash = "sha256:af64d985621a03257107210266c563c5e268ca8320d1d71b5c18d9592d14fef7"}, +] + +[package.dependencies] +certifi = ">=2018.4.16" +expiringdict = ">=1.1.4" +launchdarkly-eventsource = ">=1.1.0,<2.0.0" +pyRFC3339 = ">=1.0" +semver = ">=2.10.2" +urllib3 = ">=1.26.0,<3" + +[package.extras] +consul = ["python-consul (>=1.0.1)"] +dynamodb = ["boto3 (>=1.9.71)"] +redis = ["redis (>=2.10.5)"] +test-filesource = ["pyyaml (>=5.3.1)", "watchdog (>=3.0.0)"] + +[[package]] +name = "lovely-pytest-docker" +version = "0.3.1" +description = "Pytest testing utilities with docker containers." +optional = false +python-versions = "*" +files = [ + {file = "lovely-pytest-docker-0.3.1.tar.gz", hash = "sha256:4326a180bfd4dd4ad69c2ef3e3643c41075d965f40068488b40204602e6df85e"}, +] + +[package.dependencies] +pytest = "*" +six = "*" + +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = 
"MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = 
"sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] + +[[package]] +name = "marshmallow" +version = "3.22.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "marshmallow-sqlalchemy" +version = "1.1.0" +description = "SQLAlchemy integration with the marshmallow (de)serialization library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow_sqlalchemy-1.1.0-py3-none-any.whl", hash = "sha256:cce261148e4c6ec4ee275f3d29352933380a1afa2fd3933f5e9ecd02fdc16ade"}, + {file = "marshmallow_sqlalchemy-1.1.0.tar.gz", hash = "sha256:2ab092da269dafa8a05d51a58409af71a8d2183958ba47143127dd239e0359d8"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0" +SQLAlchemy = ">=1.4.40,<3.0" + +[package.extras] +dev = ["marshmallow-sqlalchemy[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==1.0.0)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)"] +tests = ["pytest (<9)", "pytest-lazy-fixtures"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe 
checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "minio" +version = "7.2.9" +description = "MinIO Python SDK for Amazon S3 Compatible Cloud Storage" +optional = false +python-versions = ">3.8" +files = [ + {file = "minio-7.2.9-py3-none-any.whl", hash = "sha256:fe5523d9c4a4d6cfc07e96905852841bccdb22b22770e1efca4bf5ae8b65774b"}, + {file = "minio-7.2.9.tar.gz", hash = "sha256:a83c2fcd981944602a8dc11e8e07543ed9cda0a9462264e3f46a13171c56bccb"}, +] + +[package.dependencies] +argon2-cffi = "*" +certifi = "*" +pycryptodome = "*" +typing-extensions = "*" +urllib3 = "*" + +[[package]] +name = "mock" +version = "4.0.3" +description = "Rolling backport of unittest.mock for all Pythons" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, + {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, +] + +[package.extras] +build = ["blurb", "twine", "wheel"] +docs = ["sphinx"] +test = ["pytest (<5.4)", "pytest-cov"] + +[[package]] +name = "msgpack" +version = "1.0.8" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, + {file = 
"msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, + {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, + {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, + {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, + {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, + {file = 
"msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, + {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, + {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, + {file = 
"msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, + {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, + {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, + {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, + {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = 
"sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, +] + +[[package]] +name = "multidict" +version = "6.1.0" +description = "multidict implementation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = 
"multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = 
"multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = 
"multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, +] + +[[package]] +name = "opentracing" +version = "2.4.0" +description = "OpenTracing API for Python. See documentation at http://opentracing.io" +optional = false +python-versions = "*" +files = [ + {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, +] + +[package.extras] +tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pytest", "pytest-cov", "pytest-mock", "six (>=1.10.0,<2.0)", "sphinx_rtd_theme", "tornado"] + +[[package]] +name = "orjson" +version = "3.10.7" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = 
"orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = 
"orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = 
"orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pep8-naming" +version = "0.11.1" +description = "Check PEP-8 naming conventions, plugin for flake8" +optional = false +python-versions = "*" +files = [ + {file = "pep8-naming-0.11.1.tar.gz", hash = "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724"}, + {file = "pep8_naming-0.11.1-py2.py3-none-any.whl", hash = "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"}, +] + +[package.dependencies] +flake8-polyfill = ">=1.0.2,<2" + +[[package]] +name = "pg8000" +version = "1.31.2" +description = "PostgreSQL interface library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pg8000-1.31.2-py3-none-any.whl", hash = "sha256:436c771ede71af4d4c22ba867a30add0bc5c942d7ab27fadbb6934a487ecc8f6"}, + {file = "pg8000-1.31.2.tar.gz", hash = 
"sha256:1ea46cf09d8eca07fe7eaadefd7951e37bee7fabe675df164f1a572ffb300876"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.2" +scramp = ">=1.4.5" + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "3.19.6" +description = "Protocol Buffers" +optional = false +python-versions = ">=3.5" +files = [ + {file = "protobuf-3.19.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:010be24d5a44be7b0613750ab40bc8b8cedc796db468eae6c779b395f50d1fa1"}, + {file = "protobuf-3.19.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11478547958c2dfea921920617eb457bc26867b0d1aa065ab05f35080c5d9eb6"}, + {file = "protobuf-3.19.6-cp310-cp310-win32.whl", hash = "sha256:559670e006e3173308c9254d63facb2c03865818f22204037ab76f7a0ff70b5f"}, + {file = "protobuf-3.19.6-cp310-cp310-win_amd64.whl", hash = "sha256:347b393d4dd06fb93a77620781e11c058b3b0a5289262f094379ada2920a3730"}, + {file = "protobuf-3.19.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a8ce5ae0de28b51dff886fb922012dad885e66176663950cb2344c0439ecb473"}, + {file = "protobuf-3.19.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90b0d02163c4e67279ddb6dc25e063db0130fc299aefabb5d481053509fae5c8"}, + {file = "protobuf-3.19.6-cp36-cp36m-win32.whl", hash = "sha256:30f5370d50295b246eaa0296533403961f7e64b03ea12265d6dfce3a391d8992"}, + {file = "protobuf-3.19.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0c0714b025ec057b5a7600cb66ce7c693815f897cfda6d6efb58201c472e3437"}, + {file = "protobuf-3.19.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5057c64052a1f1dd7d4450e9aac25af6bf36cfbfb3a1cd89d16393a036c49157"}, + {file = "protobuf-3.19.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:bb6776bd18f01ffe9920e78e03a8676530a5d6c5911934c6a1ac6eb78973ecb6"}, + {file = "protobuf-3.19.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a04134866861b11556a82dd91ea6daf1f4925746b992f277b84013a7cc1229"}, + {file = "protobuf-3.19.6-cp37-cp37m-win32.whl", hash = "sha256:4bc98de3cdccfb5cd769620d5785b92c662b6bfad03a202b83799b6ed3fa1fa7"}, + {file = "protobuf-3.19.6-cp37-cp37m-win_amd64.whl", hash = "sha256:aa3b82ca1f24ab5326dcf4ea00fcbda703e986b22f3d27541654f749564d778b"}, + {file = "protobuf-3.19.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2b2d2913bcda0e0ec9a784d194bc490f5dc3d9d71d322d070b11a0ade32ff6ba"}, + {file = "protobuf-3.19.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d0b635cefebd7a8a0f92020562dead912f81f401af7e71f16bf9506ff3bdbb38"}, + {file = "protobuf-3.19.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a552af4dc34793803f4e735aabe97ffc45962dfd3a237bdde242bff5a3de684"}, + {file = "protobuf-3.19.6-cp38-cp38-win32.whl", hash = "sha256:0469bc66160180165e4e29de7f445e57a34ab68f49357392c5b2f54c656ab25e"}, + {file = "protobuf-3.19.6-cp38-cp38-win_amd64.whl", hash = "sha256:91d5f1e139ff92c37e0ff07f391101df77e55ebb97f46bbc1535298d72019462"}, + {file = "protobuf-3.19.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c0ccd3f940fe7f3b35a261b1dd1b4fc850c8fde9f74207015431f174be5976b3"}, + {file = "protobuf-3.19.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:30a15015d86b9c3b8d6bf78d5b8c7749f2512c29f168ca259c9d7727604d0e39"}, + {file = "protobuf-3.19.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:878b4cd080a21ddda6ac6d1e163403ec6eea2e206cf225982ae04567d39be7b0"}, + {file = "protobuf-3.19.6-cp39-cp39-win32.whl", hash = "sha256:5a0d7539a1b1fb7e76bf5faa0b44b30f812758e989e59c40f77a7dab320e79b9"}, + {file = "protobuf-3.19.6-cp39-cp39-win_amd64.whl", hash = "sha256:bbf5cea5048272e1c60d235c7bd12ce1b14b8a16e76917f371c718bd3005f045"}, + {file = 
"protobuf-3.19.6-py2.py3-none-any.whl", hash = "sha256:14082457dc02be946f60b15aad35e9f5c69e738f80ebbc0900a19bc83734a5a4"}, + {file = "protobuf-3.19.6.tar.gz", hash = "sha256:5f5540d57a43042389e87661c6eaa50f47c19c6176e8cf1c4f287aeefeccb5c4"}, +] + +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = 
"sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + +[[package]] +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pycodestyle" +version = "2.9.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] + +[[package]] +name = "pycountry" +version = "23.12.11" +description = "ISO country, subdivision, language, currency and script definitions and their translations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycountry-23.12.11-py3-none-any.whl", hash = "sha256:2ff91cff4f40ff61086e773d61e72005fe95de4a57bfc765509db05695dc50ab"}, + {file = "pycountry-23.12.11.tar.gz", hash = 
"sha256:00569d82eaefbc6a490a311bfa84a9c571cff9ddbf8b0a4f4e7b4f868b4ad925"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pycryptodome" +version = "3.21.0" +description = "Cryptographic library for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = "sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53"}, + {file = 
"pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c"}, + {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"}, +] + +[[package]] +name = "pydocstyle" +version = "5.1.1" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.5" +files = [ + {file = 
"pydocstyle-5.1.1-py3-none-any.whl", hash = "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678"}, + {file = "pydocstyle-5.1.1.tar.gz", hash = "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325"}, +] + +[package.dependencies] +snowballstemmer = "*" + +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] + +[[package]] +name = "pyhumps" +version = "3.8.0" +description = "🐫 Convert strings (and dictionary keys) between snake case, camel case and pascal case in Python. Inspired by Humps for Node" +optional = false +python-versions = "*" +files = [ + {file = "pyhumps-3.8.0-py3-none-any.whl", hash = "sha256:060e1954d9069f428232a1adda165db0b9d8dfdce1d265d36df7fbff540acfd6"}, + {file = "pyhumps-3.8.0.tar.gz", hash = "sha256:498026258f7ee1a8e447c2e28526c0bea9407f9a59c03260aee4bd6c04d681a3"}, +] + +[[package]] +name = "pylint" +version = "3.2.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"}, + {file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"}, +] + +[package.dependencies] +astroid = ">=3.2.2,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""} +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython 
(>3)"] + +[[package]] +name = "pylint-flask" +version = "0.6" +description = "pylint-flask is a Pylint plugin to aid Pylint in recognizing and understanding errors caused when using Flask" +optional = false +python-versions = "*" +files = [ + {file = "pylint-flask-0.6.tar.gz", hash = "sha256:f4d97de2216bf7bfce07c9c08b166e978fe9f2725de2a50a9845a97de7e31517"}, +] + +[package.dependencies] +pylint-plugin-utils = ">=0.2.1" + +[[package]] +name = "pylint-plugin-utils" +version = "0.8.2" +description = "Utilities and helpers for writing Pylint plugins" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "pylint_plugin_utils-0.8.2-py3-none-any.whl", hash = "sha256:ae11664737aa2effbf26f973a9e0b6779ab7106ec0adc5fe104b0907ca04e507"}, + {file = "pylint_plugin_utils-0.8.2.tar.gz", hash = "sha256:d3cebf68a38ba3fba23a873809155562571386d4c1b03e5b4c4cc26c3eee93e4"}, +] + +[package.dependencies] +pylint = ">=1.7" + +[[package]] +name = "pyrfc3339" +version = "1.1" +description = "Generate and parse RFC 3339 timestamps" +optional = false +python-versions = "*" +files = [ + {file = "pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4"}, + {file = "pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"}, +] + +[package.dependencies] +pytz = "*" + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, + {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + 
+[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-cov" +version = "2.12.1" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, +] + +[package.dependencies] +coverage = ">=5.2.1" +pytest = ">=4.6" +toml = "*" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-dotenv" +version = "0.5.2" +description = "A py.test plugin that parses environment files before running tests" +optional = false +python-versions = "*" +files = [ + {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, + {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, +] + +[package.dependencies] +pytest = ">=5.0.0" +python-dotenv = ">=0.9.1" + +[[package]] +name = "pytest-env" +version = "0.6.2" +description = "py.test plugin that allows you to add environment variables." 
+optional = false +python-versions = "*" +files = [ + {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, +] + +[package.dependencies] +pytest = ">=2.6.0" + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +optional = false +python-versions = "*" +files = [ + {file = 
"python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[package.dependencies] +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "sbc_common_components" +version = "0.0.0" +description = "" +optional = false +python-versions = "*" +files = [] +develop = false + +[package.dependencies] +flask = "*" +flask-jwt-oidc = ">=0.1.5" +Flask-OpenTracing = "1.1.0" +Flask-SQLAlchemy = "*" +jaeger-client = "*" + +[package.source] +type = "git" +url = "https://github.com/bolyachevets/sbc-common-components.git" +reference = "camel_case_empty_dict" +resolved_reference = "20ce13be6d59946583385c857a5aca1c4c517ad0" +subdirectory = "python" + +[[package]] +name = "scramp" +version = "1.4.5" +description = "An implementation of the SCRAM protocol." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "scramp-1.4.5-py3-none-any.whl", hash = "sha256:50e37c464fc67f37994e35bee4151e3d8f9320e9c204fca83a5d313c121bbbe7"}, + {file = "scramp-1.4.5.tar.gz", hash = "sha256:be3fbe774ca577a7a658117dca014e5d254d158cecae3dd60332dfe33ce6d78e"}, +] + +[package.dependencies] +asn1crypto = ">=1.5.1" + +[[package]] +name = "semver" +version = "3.0.2" +description = "Python helper for Semantic Versioning (https://semver.org)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, + {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, +] + +[[package]] +name = "setuptools" +version = "75.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler 
(>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] + +[[package]] +name = "simple-cloudevent" +version = "0.0.2" +description = "A short description of the project" +optional = false +python-versions = ">=3.8" +files = [] +develop = false + +[package.dependencies] +strict-rfc3339 = "*" + +[package.source] +type = "git" +url = "https://github.com/daxiom/simple-cloudevent.py.git" +reference = "HEAD" +resolved_reference = "447cabb988202206ac69e71177d7cd11b6c0b002" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sql-versioning" +version = "0.1.0" +description = "" +optional = false +python-versions = "^3.10" +files = [] +develop = false + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/sql-versioning" + +[[package]] +name = "sqlalchemy" +version = "2.0.35" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = 
"sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = 
"SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = 
"SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = 
"SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlalchemy-utils" +version = "0.41.2" +description = "Various utility functions for SQLAlchemy." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990"}, + {file = "SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e"}, +] + +[package.dependencies] +SQLAlchemy = ">=1.3" + +[package.extras] +arrow = ["arrow (>=0.3.4)"] +babel = ["Babel (>=1.3)"] +color = ["colour (>=0.0.4)"] +encrypted = ["cryptography (>=0.6)"] +intervals = ["intervals (>=0.7.1)"] +password = ["passlib (>=1.6,<2.0)"] +pendulum = ["pendulum (>=2.0.5)"] +phone = ["phonenumbers (>=5.9.2)"] +test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg (>=3.1.8)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (==7.4.4)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"] +timezone = ["python-dateutil"] +url = ["furl (>=0.4.1)"] + +[[package]] +name = "strict-rfc3339" +version = "0.7" +description = "Strict, simple, lightweight RFC3339 functions" +optional = false +python-versions = "*" +files = [ + {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, +] + +[[package]] +name = "structlog" +version = "24.4.0" +description = "Structured Logging for Python" +optional 
= false +python-versions = ">=3.8" +files = [ + {file = "structlog-24.4.0-py3-none-any.whl", hash = "sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610"}, + {file = "structlog-24.4.0.tar.gz", hash = "sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4"}, +] + +[package.extras] +dev = ["freezegun (>=0.2.8)", "mypy (>=1.4)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "rich", "simplejson", "twisted"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "sphinxext-opengraph", "twisted"] +tests = ["freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy (>=1.4)", "rich", "twisted"] + +[[package]] +name = "structured-logging" +version = "0.4.0" +description = "" +optional = false +python-versions = "^3.9" +files = [] +develop = false + +[package.dependencies] +flask = ">= 1" +structlog = "^24.1.0" + +[package.source] +type = "git" +url = "https://github.com/bcgov/sbc-connect-common.git" +reference = "main" +resolved_reference = "43411ed428c4c4b89bea1ac6acdb10077f247d2b" +subdirectory = "python/structured-logging" + +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +optional = false +python-versions = "*" +files = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] + +[[package]] +name = "threadloop" +version = "1.0.2" +description = "Tornado IOLoop Backed Concurrent Futures" +optional = false +python-versions = "*" +files = [ + {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, + {file = "threadloop-1.0.2.tar.gz", hash = 
"sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, +] + +[package.dependencies] +tornado = "*" + +[[package]] +name = "thrift" +version = "0.20.0" +description = "Python bindings for the Apache Thrift RPC system" +optional = false +python-versions = "*" +files = [ + {file = "thrift-0.20.0.tar.gz", hash = "sha256:4dd662eadf6b8aebe8a41729527bd69adf6ceaa2a8681cbef64d1273b3e8feba"}, +] + +[package.dependencies] +six = ">=1.7.2" + +[package.extras] +all = ["tornado (>=4.0)", "twisted"] +tornado = ["tornado (>=4.0)"] +twisted = ["twisted"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "werkzeug" +version = "3.0.0" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.0-py3-none-any.whl", hash = "sha256:cbb2600f7eabe51dbc0502f58be0b3e1b96b893b05695ea2b35b43d4de2d9962"}, + {file = "werkzeug-3.0.0.tar.gz", hash = "sha256:3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "yarl" +version = "1.13.1" +description = "Yet another URL library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82e692fb325013a18a5b73a4fed5a1edaa7c58144dc67ad9ef3d604eccd451ad"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df4e82e68f43a07735ae70a2d84c0353e58e20add20ec0af611f32cd5ba43fb4"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec9dd328016d8d25702a24ee274932aebf6be9787ed1c28d021945d264235b3c"}, + {file = 
"yarl-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5820bd4178e6a639b3ef1db8b18500a82ceab6d8b89309e121a6859f56585b05"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86c438ce920e089c8c2388c7dcc8ab30dfe13c09b8af3d306bcabb46a053d6f7"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3de86547c820e4f4da4606d1c8ab5765dd633189791f15247706a2eeabc783ae"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca53632007c69ddcdefe1e8cbc3920dd88825e618153795b57e6ebcc92e752a"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4ee1d240b84e2f213565f0ec08caef27a0e657d4c42859809155cf3a29d1735"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c49f3e379177f4477f929097f7ed4b0622a586b0aa40c07ac8c0f8e40659a1ac"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5c5e32fef09ce101fe14acd0f498232b5710effe13abac14cd95de9c274e689e"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab9524e45ee809a083338a749af3b53cc7efec458c3ad084361c1dbf7aaf82a2"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b1481c048fe787f65e34cb06f7d6824376d5d99f1231eae4778bbe5c3831076d"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:31497aefd68036d8e31bfbacef915826ca2e741dbb97a8d6c7eac66deda3b606"}, + {file = "yarl-1.13.1-cp310-cp310-win32.whl", hash = "sha256:1fa56f34b2236f5192cb5fceba7bbb09620e5337e0b6dfe2ea0ddbd19dd5b154"}, + {file = "yarl-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:1bbb418f46c7f7355084833051701b2301092e4611d9e392360c3ba2e3e69f88"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:216a6785f296169ed52cd7dcdc2612f82c20f8c9634bf7446327f50398732a51"}, + 
{file = "yarl-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40c6e73c03a6befb85b72da213638b8aaa80fe4136ec8691560cf98b11b8ae6e"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2430cf996113abe5aee387d39ee19529327205cda975d2b82c0e7e96e5fdabdc"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fb4134cc6e005b99fa29dbc86f1ea0a298440ab6b07c6b3ee09232a3b48f495"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309c104ecf67626c033845b860d31594a41343766a46fa58c3309c538a1e22b2"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f90575e9fe3aae2c1e686393a9689c724cd00045275407f71771ae5d690ccf38"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d2e1626be8712333a9f71270366f4a132f476ffbe83b689dd6dc0d114796c74"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b66c87da3c6da8f8e8b648878903ca54589038a0b1e08dde2c86d9cd92d4ac9"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf1ad338620249f8dd6d4b6a91a69d1f265387df3697ad5dc996305cf6c26fb2"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9915300fe5a0aa663c01363db37e4ae8e7c15996ebe2c6cce995e7033ff6457f"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:703b0f584fcf157ef87816a3c0ff868e8c9f3c370009a8b23b56255885528f10"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1d8e3ca29f643dd121f264a7c89f329f0fcb2e4461833f02de6e39fef80f89da"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7055bbade838d68af73aea13f8c86588e4bcc00c2235b4b6d6edb0dbd174e246"}, + {file = "yarl-1.13.1-cp311-cp311-win32.whl", hash = 
"sha256:a3442c31c11088e462d44a644a454d48110f0588de830921fd201060ff19612a"}, + {file = "yarl-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:81bad32c8f8b5897c909bf3468bf601f1b855d12f53b6af0271963ee67fff0d2"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f452cc1436151387d3d50533523291d5f77c6bc7913c116eb985304abdbd9ec9"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9cec42a20eae8bebf81e9ce23fb0d0c729fc54cf00643eb251ce7c0215ad49fe"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d959fe96e5c2712c1876d69af0507d98f0b0e8d81bee14cfb3f6737470205419"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c837ab90c455f3ea8e68bee143472ee87828bff19ba19776e16ff961425b57"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94a993f976cdcb2dc1b855d8b89b792893220db8862d1a619efa7451817c836b"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2442a415a5f4c55ced0fade7b72123210d579f7d950e0b5527fc598866e62c"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fdbf0418489525231723cdb6c79e7738b3cbacbaed2b750cb033e4ea208f220"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b7f6e699304717fdc265a7e1922561b02a93ceffdaefdc877acaf9b9f3080b8"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bcd5bf4132e6a8d3eb54b8d56885f3d3a38ecd7ecae8426ecf7d9673b270de43"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2a93a4557f7fc74a38ca5a404abb443a242217b91cd0c4840b1ebedaad8919d4"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:22b739f99c7e4787922903f27a892744189482125cc7b95b747f04dd5c83aa9f"}, + {file = 
"yarl-1.13.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2db874dd1d22d4c2c657807562411ffdfabec38ce4c5ce48b4c654be552759dc"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4feaaa4742517eaceafcbe74595ed335a494c84634d33961214b278126ec1485"}, + {file = "yarl-1.13.1-cp312-cp312-win32.whl", hash = "sha256:bbf9c2a589be7414ac4a534d54e4517d03f1cbb142c0041191b729c2fa23f320"}, + {file = "yarl-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:d07b52c8c450f9366c34aa205754355e933922c79135125541daae6cbf31c799"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:95c6737f28069153c399d875317f226bbdea939fd48a6349a3b03da6829fb550"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cd66152561632ed4b2a9192e7f8e5a1d41e28f58120b4761622e0355f0fe034c"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6a2acde25be0cf9be23a8f6cbd31734536a264723fca860af3ae5e89d771cd71"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18595e6a2ee0826bf7dfdee823b6ab55c9b70e8f80f8b77c37e694288f5de1"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a31d21089894942f7d9a8df166b495101b7258ff11ae0abec58e32daf8088813"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45f209fb4bbfe8630e3d2e2052535ca5b53d4ce2d2026bed4d0637b0416830da"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f722f30366474a99745533cc4015b1781ee54b08de73260b2bbe13316079851"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3bf60444269345d712838bb11cc4eadaf51ff1a364ae39ce87a5ca8ad3bb2c8"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:942c80a832a79c3707cca46bd12ab8aa58fddb34b1626d42b05aa8f0bcefc206"}, + {file 
= "yarl-1.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:44b07e1690f010c3c01d353b5790ec73b2f59b4eae5b0000593199766b3f7a5c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:396e59b8de7e4d59ff5507fb4322d2329865b909f29a7ed7ca37e63ade7f835c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3bb83a0f12701c0b91112a11148b5217617982e1e466069d0555be9b372f2734"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c92b89bffc660f1274779cb6fbb290ec1f90d6dfe14492523a0667f10170de26"}, + {file = "yarl-1.13.1-cp313-cp313-win32.whl", hash = "sha256:269c201bbc01d2cbba5b86997a1e0f73ba5e2f471cfa6e226bcaa7fd664b598d"}, + {file = "yarl-1.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:1d0828e17fa701b557c6eaed5edbd9098eb62d8838344486248489ff233998b8"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8be8cdfe20787e6a5fcbd010f8066227e2bb9058331a4eccddec6c0db2bb85b2"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08d7148ff11cb8e886d86dadbfd2e466a76d5dd38c7ea8ebd9b0e07946e76e4b"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4afdf84610ca44dcffe8b6c22c68f309aff96be55f5ea2fa31c0c225d6b83e23"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0d12fe78dcf60efa205e9a63f395b5d343e801cf31e5e1dda0d2c1fb618073d"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298c1eecfd3257aa16c0cb0bdffb54411e3e831351cd69e6b0739be16b1bdaa8"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c14c16831b565707149c742d87a6203eb5597f4329278446d5c0ae7a1a43928e"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9bacedbb99685a75ad033fd4de37129449e69808e50e08034034c0bf063f99"}, + {file = 
"yarl-1.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658e8449b84b92a4373f99305de042b6bd0d19bf2080c093881e0516557474a5"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:373f16f38721c680316a6a00ae21cc178e3a8ef43c0227f88356a24c5193abd6"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:45d23c4668d4925688e2ea251b53f36a498e9ea860913ce43b52d9605d3d8177"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f7917697bcaa3bc3e83db91aa3a0e448bf5cde43c84b7fc1ae2427d2417c0224"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5989a38ba1281e43e4663931a53fbf356f78a0325251fd6af09dd03b1d676a09"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11b3ca8b42a024513adce810385fcabdd682772411d95bbbda3b9ed1a4257644"}, + {file = "yarl-1.13.1-cp38-cp38-win32.whl", hash = "sha256:dcaef817e13eafa547cdfdc5284fe77970b891f731266545aae08d6cce52161e"}, + {file = "yarl-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:7addd26594e588503bdef03908fc207206adac5bd90b6d4bc3e3cf33a829f57d"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a0ae6637b173d0c40b9c1462e12a7a2000a71a3258fa88756a34c7d38926911c"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:576365c9f7469e1f6124d67b001639b77113cfd05e85ce0310f5f318fd02fe85"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78f271722423b2d4851cf1f4fa1a1c4833a128d020062721ba35e1a87154a049"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d74f3c335cfe9c21ea78988e67f18eb9822f5d31f88b41aec3a1ec5ecd32da5"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1891d69a6ba16e89473909665cd355d783a8a31bc84720902c5911dbb6373465"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:fb382fd7b4377363cc9f13ba7c819c3c78ed97c36a82f16f3f92f108c787cbbf"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8854b9f80693d20cec797d8e48a848c2fb273eb6f2587b57763ccba3f3bd4b"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbf2c3f04ff50f16404ce70f822cdc59760e5e2d7965905f0e700270feb2bbfc"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb9f59f3848edf186a76446eb8bcf4c900fe147cb756fbbd730ef43b2e67c6a7"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ef9b85fa1bc91c4db24407e7c4da93a5822a73dd4513d67b454ca7064e8dc6a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:098b870c18f1341786f290b4d699504e18f1cd050ed179af8123fd8232513424"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8c723c91c94a3bc8033dd2696a0f53e5d5f8496186013167bddc3fb5d9df46a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44a4c40a6f84e4d5955b63462a0e2a988f8982fba245cf885ce3be7618f6aa7d"}, + {file = "yarl-1.13.1-cp39-cp39-win32.whl", hash = "sha256:84bbcdcf393139f0abc9f642bf03f00cac31010f3034faa03224a9ef0bb74323"}, + {file = "yarl-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:fc2931ac9ce9c61c9968989ec831d3a5e6fcaaff9474e7cfa8de80b7aff5a093"}, + {file = "yarl-1.13.1-py3-none-any.whl", hash = "sha256:6a5185ad722ab4dd52d5fb1f30dcc73282eb1ed494906a92d1a228d3f89607b0"}, + {file = "yarl-1.13.1.tar.gz", hash = "sha256:ec8cfe2295f3e5e44c51f57272afbd69414ae629ec7c6b27f5a410efc78b70a0"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.19.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.1-py3-none-any.whl", hash = 
"sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, + {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "439ed3bb73f5502bf60a6187eb629d062fe93469cbf5b0091430239d53506ff1" diff --git a/queue_services/auth-queue/pyproject.toml b/queue_services/auth-queue/pyproject.toml new file mode 100644 index 0000000000..c986fe2c4d --- /dev/null +++ b/queue_services/auth-queue/pyproject.toml @@ -0,0 +1,78 @@ +[tool.poetry] +name = "auth-queue" +version = "1.1.0" +description = "" +authors = ["\"BC Registries and Online Services\""] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.12" +blinker = "1.8.2" +charset-normalizer = "3.3.2" +click = "8.1.7" +expiringdict = "1.2.2" +google-api-core = "1.34.1" +google-auth = "2.28.2" +googleapis-common-protos = "1.63.0" +grpc-google-iam-v1 = "0.13.0" +grpcio = "1.64.0" +grpcio-status = "1.48.2" +idna = "3.7" +importlib-resources = "5.13.0" +jaeger-client = "4.8.0" +msgpack = "1.0.8" +opentracing = "2.4.0" +pkgutil_resolve_name = "1.3.10" +proto-plus = "1.23.0" +protobuf = "3.19.6" +pyRFC3339 = "1.1" +pyasn1 = "0.6.0" +pyasn1_modules = "0.4.0" +pycountry = "23.12.11" +pyrsistent = "0.20.0" +python-dateutil = "2.9.0.post0" +pytz = "2024.1" +rsa = "4.9" +semver = "3.0.2" +six = "1.16.0" +threadloop = "1.0.2" +thrift = "0.20.0" +tornado = "6.4.1" +urllib3 = "2.2.2" +zipp = "3.19.1" + + +# VCS dependencies +auth-api = { git = "https://github.com/bcgov/sbc-auth.git", rev = 
"feature-gcp-migration", subdirectory = "auth-api" } +simple-cloudevent = { git = "https://github.com/daxiom/simple-cloudevent.py.git" } +build-deps = { git = "https://github.com/bcgov/sbc-auth.git", rev = "feature-gcp-migration", subdirectory = "build-deps" } + +[tool.poetry.group.dev.dependencies] +psycopg2 = "^2.9.9" +pylint = "3.2.3" +coverage = "^5.5" +pylint-flask = "^0.6.0" +pytest = "8.1.1" +pytest-cov = "^2.11.1" +pytest-env = "^0.6.2" +pytest-dotenv = "^0.5.2" +pytest-mock = "^3.5.1" +requests = "^2.25.1" +flake8 = "5.0.4" +flake8-blind-except = "^0.1.1" +flake8-docstrings = "^1.6.0" +flake8-isort = "^4.0.0" +flake8-quotes = "^3.3.0" +pep8-naming = "^0.11.1" +autopep8 = "^1.5.6" +pydocstyle = "^5.1.1" +freezegun = "^1.1.0" +faker = "^8.1.1" +pytest-asyncio = "0.21.0" +mock = "^4.0.3" +lovely-pytest-docker = "^0.3.1" +astroid = "^3.2.3" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/queue_services/auth-queue/requirements.txt b/queue_services/auth-queue/requirements.txt deleted file mode 100644 index 1920fdf0fc..0000000000 --- a/queue_services/auth-queue/requirements.txt +++ /dev/null @@ -1,52 +0,0 @@ -CacheControl==0.14.0 -Flask==1.1.2 -Jinja2==3.0.3 -MarkupSafe==2.0.1 -Werkzeug==1.0.1 -attrs==23.2.0 -blinker==1.8.2 -cachetools==5.3.3 -certifi==2024.7.4 -charset-normalizer==3.3.2 -click==8.1.7 -expiringdict==1.2.2 -google-api-core==2.19.0 -google-auth==2.28.2 -google-cloud-pubsub==2.20.2 -googleapis-common-protos==1.63.0 -grpc-google-iam-v1==0.13.0 -grpcio-status==1.48.2 -grpcio==1.64.0 -idna==3.7 -importlib-resources==5.13.0 -itsdangerous==2.0.1 -jaeger-client==4.8.0 -jsonschema==4.17.3 -launchdarkly-server-sdk==8.2.1 -msgpack==1.0.8 -opentracing==2.4.0 -pkgutil_resolve_name==1.3.10 -proto-plus==1.23.0 -protobuf==3.19.6 -pyRFC3339==1.1 -pyasn1==0.6.0 -pyasn1_modules==0.4.0 -pycountry==23.12.11 -pyrsistent==0.20.0 -python-dateutil==2.9.0.post0 -python-dotenv==1.0.1 -pytz==2024.1 
-requests==2.32.2 -rsa==4.9 -semver==3.0.2 -sentry-sdk==2.3.1 -six==1.16.0 -threadloop==1.0.2 -thrift==0.20.0 -tornado==6.4.1 -urllib3==2.2.2 -zipp==3.19.1 --e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python --e git+https://github.com/seeker25/sbc-auth.git@20087#egg=auth-api&subdirectory=auth-api --e git+https://github.com/seeker25/sbc-connect-common.git@small_tweaks#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/daxiom/simple-cloudevent.py.git diff --git a/queue_services/auth-queue/requirements/dev.txt b/queue_services/auth-queue/requirements/dev.txt deleted file mode 100755 index 04624060f0..0000000000 --- a/queue_services/auth-queue/requirements/dev.txt +++ /dev/null @@ -1,31 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest -pytest-mock -requests -pyhamcrest -pytest-cov -FreezeGun - -# Lint and code style -flake8==5.0.4 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming -autopep8 -coverage -pylint -pylint-flask -pydocstyle -isort - - -# docker -lovely-pytest-docker -pytest-asyncio==0.18.3 - diff --git a/queue_services/auth-queue/requirements/prod.txt b/queue_services/auth-queue/requirements/prod.txt deleted file mode 100644 index f4ca56a697..0000000000 --- a/queue_services/auth-queue/requirements/prod.txt +++ /dev/null @@ -1,17 +0,0 @@ -Flask -jsonschema==4.17.3 -python-dotenv -sentry-sdk[flask] -pycountry -jaeger-client -Werkzeug<2 -attrs -python-dateutil -itsdangerous==2.0.1 -Jinja2==3.0.3 -markupsafe==2.0.1 -protobuf~=3.19.5 -launchdarkly-server-sdk==8.2.1 -google-auth==2.28.2 -google-cloud-pubsub==2.20.2 -cachecontrol diff --git a/queue_services/auth-queue/requirements/repo-libraries.txt b/queue_services/auth-queue/requirements/repo-libraries.txt deleted file mode 100644 index ed38ad555e..0000000000 --- a/queue_services/auth-queue/requirements/repo-libraries.txt +++ 
/dev/null @@ -1,4 +0,0 @@ --e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python --e git+https://github.com/seeker25/sbc-auth.git@refactor_queues#egg=auth-api&subdirectory=auth-api --e git+https://github.com/seeker25/sbc-connect-common.git@small_tweaks#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/daxiom/simple-cloudevent.py.git diff --git a/queue_services/auth-queue/setup.cfg b/queue_services/auth-queue/setup.cfg index a6a214a510..0bb5387022 100644 --- a/queue_services/auth-queue/setup.cfg +++ b/queue_services/auth-queue/setup.cfg @@ -9,7 +9,7 @@ classifiers = Topic :: Payments License :: OSI Approved :: Apache Software License Natural Language :: English - Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.12 license = Apache Software License Version 2.0 description = A short description of the project long_description = file: README.md diff --git a/queue_services/auth-queue/setup.py b/queue_services/auth-queue/setup.py deleted file mode 100644 index 4ca80d2b75..0000000000 --- a/queue_services/auth-queue/setup.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright © 2019 Province of British Columbia. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Installer and setup for this module -""" -import ast -import re -from glob import glob -from os.path import basename, splitext - -from setuptools import find_packages, setup - - -_version_re = re.compile(r'__version__\s+=\s+(.*)') # pylint: disable=invalid-name - -with open('src/auth_queue/version.py', 'rb') as f: - version = str(ast.literal_eval(_version_re.search( # pylint: disable=invalid-name - f.read().decode('utf-8')).group(1))) - - -def read_requirements(filename): - """ - Get application requirements from - the requirements.txt file. - :return: Python requirements - """ - with open(filename, 'r') as req: - requirements = req.readlines() - install_requires = [r.strip() for r in requirements if (r.find('git+') != 0 and r.find('-e git+') != 0)] - return install_requires - - -def read(filepath): - """ - Read the contents from a file. - :param str filepath: path to the file to be read - :return: file contents - """ - with open(filepath, 'r') as file_handle: - content = file_handle.read() - return content - - -REQUIREMENTS = read_requirements('requirements.txt') - -setup( - name="auth_queue", - version=version, - author_email='', - packages=find_packages('src'), - package_dir={'': 'src'}, - py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')], - include_package_data=True, - license=read('LICENSE'), - long_description=read('README.md'), - zip_safe=False, - install_requires=REQUIREMENTS, - setup_requires=["pytest-runner", ], - tests_require=["pytest", ], -) diff --git a/queue_services/auth-queue/src/auth_queue/__init__.py b/queue_services/auth-queue/src/auth_queue/__init__.py index 597218c2ea..3448ace0f5 100644 --- a/queue_services/auth-queue/src/auth_queue/__init__.py +++ b/queue_services/auth-queue/src/auth_queue/__init__.py @@ -14,23 +14,18 @@ """Resource package for the auth-queue service.""" import os -import sentry_sdk +from auth_api.exceptions import ExceptionHandler from auth_api.models import db from auth_api.resources.ops import bp as 
ops_bp from auth_api.services.flags import flags from auth_api.services.gcp_queue import queue from auth_api.utils.cache import cache -from auth_api.utils.util_logging import setup_logging from flask import Flask -from sentry_sdk.integrations.flask import FlaskIntegration from auth_queue import config from auth_queue.resources.worker import bp as worker_endpoint -setup_logging(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.conf')) # important to do this first - - def register_endpoints(app: Flask): """Register endpoints with the flask application.""" # Allow base route to match with, and without a trailing slash @@ -47,13 +42,7 @@ def create_app(run_mode=os.getenv('DEPLOYMENT_ENV', 'production')) -> Flask: """Return a configured Flask App using the Factory method.""" app = Flask(__name__) app.config.from_object(config.get_named_config(run_mode)) - - if str(app.config.get('SENTRY_ENABLE')).lower() == 'true': - if app.config.get('SENTRY_DSN', None): - sentry_sdk.init( # pylint: disable=abstract-class-instantiated - dsn=app.config.get('SENTRY_DSN'), - integrations=[FlaskIntegration()] - ) + app.config['ENV'] = run_mode db.init_app(app) flags.init_app(app) @@ -61,5 +50,6 @@ def create_app(run_mode=os.getenv('DEPLOYMENT_ENV', 'production')) -> Flask: queue.init_app(app) register_endpoints(app) + ExceptionHandler(app) return app diff --git a/queue_services/auth-queue/src/auth_queue/config.py b/queue_services/auth-queue/src/auth_queue/config.py index 3b34353b0f..f9fab2fa65 100644 --- a/queue_services/auth-queue/src/auth_queue/config.py +++ b/queue_services/auth-queue/src/auth_queue/config.py @@ -59,9 +59,6 @@ class _Config: # pylint: disable=too-few-public-methods PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - SENTRY_ENABLE = os.getenv('SENTRY_ENABLE', 'False') - SENTRY_DSN = os.getenv('SENTRY_DSN', None) - SQLALCHEMY_TRACK_MODIFICATIONS = False AUTH_LD_SDK_KEY = os.getenv('AUTH_LD_SDK_KEY', None) @@ -72,14 +69,19 @@ class _Config: # 
pylint: disable=too-few-public-methods DB_NAME = os.getenv('DATABASE_NAME', '') DB_HOST = os.getenv('DATABASE_HOST', '') DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' + SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' # noqa: E231 + + if DB_UNIX_SOCKET := os.getenv('DATABASE_UNIX_SOCKET', None): + SQLALCHEMY_DATABASE_URI = f'postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}@/{DB_NAME}?host={DB_UNIX_SOCKET}' # noqa: E231, E501 + else: + SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' # noqa: E231, E501 # PUB/SUB - PUB: account-mailer-dev, SUB: auth-event-dev and namex-nr-state-dev ACCOUNT_MAILER_TOPIC = os.getenv('ACCOUNT_MAILER_TOPIC', 'account-mailer-dev') # If blank in PUB/SUB, this should match the https endpoint the subscription is pushing to. AUTH_AUDIENCE_SUB = os.getenv('AUTH_QUEUE_AUDIENCE_SUB') GCP_AUTH_KEY = os.getenv('AUTHPAY_GCP_AUTH_KEY', None) - VERIFY_PUBSUB_EMAILS = f'{os.getenv("AUTHPAY_SERVICE_ACCOUNT")},{os.getenv("BUSINESS_SERVICE_ACCOUNT")}'.split(',') + VERIFY_PUBSUB_EMAILS = f'{os.getenv("AUTHPAY_SERVICE_ACCOUNT")},{os.getenv("BUSINESS_SERVICE_ACCOUNT")}'.split(',') # noqa: E231, E501 PAY_API_URL = os.getenv('PAY_API_URL') + os.getenv('PAY_API_VERSION') @@ -112,7 +114,7 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432') SQLALCHEMY_DATABASE_URI = os.getenv( 'DATABASE_TEST_URL', - default=f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' + default=f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' # noqa: E231 ) PAY_API_URL = os.getenv('PAY_API_URL') + os.getenv('PAY_API_VERSION') diff --git a/queue_services/auth-queue/src/auth_queue/logging.conf b/queue_services/auth-queue/src/auth_queue/logging.conf deleted file mode 100644 
index ffc1a01e36..0000000000 --- a/queue_services/auth-queue/src/auth_queue/logging.conf +++ /dev/null @@ -1,28 +0,0 @@ -[loggers] -keys=root,api - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=api -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= \ No newline at end of file diff --git a/queue_services/auth-queue/src/auth_queue/resources/worker.py b/queue_services/auth-queue/src/auth_queue/resources/worker.py index 648f2f115f..fea9b37caf 100644 --- a/queue_services/auth-queue/src/auth_queue/resources/worker.py +++ b/queue_services/auth-queue/src/auth_queue/resources/worker.py @@ -32,10 +32,13 @@ from flask import Blueprint, current_app, request from sbc_common_components.utils.enums import QueueMessageTypes from simple_cloudevent import SimpleCloudEvent +from structured_logging import StructuredLogging bp = Blueprint('worker', __name__) +logger = StructuredLogging.get_logger() + @bp.route('/', methods=('POST',)) @ensure_authorized_queue_user @@ -46,9 +49,9 @@ def worker(): return {}, HTTPStatus.OK try: - current_app.logger.info('Event message received: %s', json.dumps(dataclasses.asdict(event_message))) + logger.info('Event message received: %s', json.dumps(dataclasses.asdict(event_message))) if is_message_processed(event_message): - current_app.logger.info('Event message already processed, skipping.') + logger.info('Event message already processed, skipping.') return {}, HTTPStatus.OK if event_message.type == QueueMessageTypes.NAMES_EVENT.value: process_name_events(event_message) @@ -58,7 +61,7 @@ def worker(): QueueMessageTypes.NSF_LOCK_ACCOUNT.value]: process_pay_lock_unlock_event(event_message) except Exception: # NOQA # pylint: disable=broad-except - 
current_app.logger.error('Error processing event:', exc_info=True) + logger.error('Error processing event:', exc_info=True) # Return a 200, so the event is removed from the Queue return {}, HTTPStatus.OK @@ -78,7 +81,7 @@ def is_message_processed(event_message): def process_activity_log(data): """Process activity log events.""" - current_app.logger.debug('>>>>>>>process_activity_log>>>>>') + logger.debug('>>>>>>>process_activity_log>>>>>') activity_model = ActivityLogModel(actor_id=data.get('actorId'), action=data.get('action'), item_type=data.get('itemType'), @@ -92,21 +95,21 @@ def process_activity_log(data): try: activity_model.save() except Exception as e: # NOQA # pylint: disable=broad-except - current_app.logger.error('DB Error: %s', e) + logger.error('DB Error: %s', e) db.session.rollback() - current_app.logger.debug('<<<<<<>>>>>>process_pay_lock_unlock_event>>>>>') + logger.debug('>>>>>>>process_pay_lock_unlock_event>>>>>') message_type = event_message.type queue_data = event_message.data skip_notification = queue_data.get('skipNotification', False) org_id = queue_data.get('accountId') org: OrgModel = OrgModel.find_by_org_id(org_id) if org is None: - current_app.logger.error('Unknown org for orgid %s', org_id) + logger.error('Unknown org for orgid %s', org_id) return data = { @@ -128,7 +131,7 @@ def process_pay_lock_unlock_event(event_message: SimpleCloudEvent): org.flush() db.session.commit() - current_app.logger.debug('<<<<<<>>>>>>process_name_events>>>>>') + logger.debug('>>>>>>>process_name_events>>>>>') request_data = event_message.data.get('request') or event_message.data.get('name') nr_number = request_data['nrNum'] nr_status = request_data['newState'] nr_entity = EntityModel.find_by_business_identifier(nr_number) if nr_entity is None: - current_app.logger.info("Entity doesn't exist, creating a new entity.") + logger.info("Entity doesn't exist, creating a new entity.") nr_entity = EntityModel( business_identifier=nr_number, 
corp_type_code=CorpType.NR.value @@ -171,7 +174,7 @@ def process_name_events(event_message: SimpleCloudEvent): nr_entity.last_modified = parser.parse(event_message.time) # Future - None needs to be replaced with whatever we decide to fill the data with. if nr_status == 'DRAFT' and not AffiliationModel.find_affiliations_by_business_identifier(nr_number, None): - current_app.logger.info('Status is DRAFT, getting invoices for account') + logger.info('Status is DRAFT, getting invoices for account') token = None # Find account details for the NR. with current_app.test_request_context('service_token'): @@ -185,13 +188,13 @@ def process_name_events(event_message: SimpleCloudEvent): if invoices and invoices['invoices'] \ and (auth_account_id := invoices['invoices'][0].get('paymentAccount').get('accountId')) \ and str(auth_account_id).isnumeric(): - current_app.logger.info('Account ID received : %s', auth_account_id) + logger.info('Account ID received : %s', auth_account_id) # Auth account id can be service account value too, so doing a query lookup than find_by_id org: OrgModel = db.session.query(OrgModel).filter(OrgModel.id == auth_account_id).one_or_none() # If account is present and is not a gov account, then affiliate. if org and org.access_type != AccessType.GOVM.value: nr_entity.pass_code_claimed = True - current_app.logger.info('Creating affiliation between Entity : %s and Org : %s', nr_entity, org) + logger.info('Creating affiliation between Entity : %s and Org : %s', nr_entity, org) affiliation: AffiliationModel = AffiliationModel(entity=nr_entity, org=org) affiliation.flush() activity: ActivityLogModel = ActivityLogModel(org_id=org.id, @@ -203,4 +206,4 @@ def process_name_events(event_message: SimpleCloudEvent): activity.flush() nr_entity.save() - current_app.logger.debug('<<<<<<