Select a Model
- Model that finds all characters
- Model that only finds Wally
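The two options correspond to the YOLO weights added under `models/best/` in the diff below (`best_all.pt` and `best_wally.pt`), which the backend loads at startup and exposes through its `/models` endpoint. As a rough illustration only, a client could list them like this (assuming the API container is reachable on localhost:5000, the port published for the `api` service in `docker-compose.yml`; `requests` is already listed in `requirements.txt`):

```python
import requests  # listed in requirements.txt

# Assumption: local docker-compose deployment; port 5000 matches the api service.
BASE_URL = "http://localhost:5000"

# Omit the "type" query parameter to list every loaded model,
# or pass type="wally" / type="all" to filter by weights file.
resp = requests.get(f"{BASE_URL}/models", params={"type": "wally"})
resp.raise_for_status()

for model in resp.json()["data"]:
    print(model["type"], model["info"])
```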
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..8d673c5 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,74 @@ +name: Publish Docker image to DockerHub + +on: + push: + branches: + - main + +jobs: + perform_pytest: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10"] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Test with pytest + run: | + coverage run -m pytest ./tests/api_test/ -v -s + + push_app_to_registry: + name: Push APP Docker image to Docker Hub + needs: perform_pytest + runs-on: ubuntu-latest + steps: + - name: 1 Copy repository + uses: actions/checkout@v2 + + - name: 2 Docker login + run: docker login -u ${{ secrets.DOCKER_USER }} -p ${{ secrets.DOCKER_PASSWORD }} + + - name: 3 Build APP IMAGE + run: docker build -t app -f ./src/app/backend/dockerfile_api . + + - name: 4 Tag + run: | + docker tag app ${{ secrets.DOCKER_USER }}/app:${{ github.sha }} + docker tag app ${{ secrets.DOCKER_USER }}/app:latest + + - name: 5 SPush + run: | + docker push ${{ secrets.DOCKER_USER }}/app:${{ github.sha }} + docker push ${{ secrets.DOCKER_USER }}/app:latest + + + push_web_to_registry: + name: Push WEB Docker image to Docker Hub + runs-on: ubuntu-latest + steps: + - name: 1 Copy repository + uses: actions/checkout@v2 + + - name: 2 Docker login + run: docker login -u ${{ secrets.DOCKER_USER }} -p ${{ secrets.DOCKER_PASSWORD }} + + - name: 3 Build APP IMAGE + run: docker build -t web -f ./src/app/frontend/dockerfile_web . + + - name: 4 Tag + run: | + docker tag web ${{ secrets.DOCKER_USER }}/web:${{ github.sha }} + docker tag web ${{ secrets.DOCKER_USER }}/web:latest + + - name: 5 SPush + run: | + docker push ${{ secrets.DOCKER_USER }}/web:${{ github.sha }} + docker push ${{ secrets.DOCKER_USER }}/web:latest diff --git a/.gitignore b/.gitignore index 9f7df58..cf6bbfd 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,8 @@ /.idea .idea .DS_Store +__pycache__ +/node_exporter-1.7.0.linux-amd64 +/venv +.save +.pyc diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..40d5764 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,4 @@ + +[TYPECHECK] +ignored-modules=numpy,torch,cv2,src +ignored-classes=numpy,torch,cv2 \ No newline at end of file diff --git a/README.md b/README.md index 65fabff..44c8692 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,8 @@ Object detection model to solve Where's Wally puzzles, a British series of child [Model Card](Model_card.md) -Estimated Total Emission: 0.19516348516162427 kg eq. Co2 (last update: 20/10/2023) +Estimated Total Emission: 7.20 kg eq. Co2 (last update 12/12/2023) +which is equivalent to driving 3.39 kilometers with a family sized diesel car. 
Project Organization ------------ diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..1cf0bdd --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,28 @@ +version: '3.3' + +services: + grafana: + build: + context: ./src/app/grafana/ + dockerfile: dockerfile_graf + ports: + - '3000:3000' + + prometheus: + build: + context: ./src/app/prom/ + dockerfile: dockerfile_prom + ports: + - '9090:9090' + + web: + image: mlopswhereiswally/web:latest + ports: + - "8000:8000" + depends_on: + - api + + api: + image: mlopswhereiswally/app:latest + ports: + - "5000:5000" diff --git a/docker_compose.yml b/docker_compose.yml new file mode 100644 index 0000000..196aac3 --- /dev/null +++ b/docker_compose.yml @@ -0,0 +1,30 @@ +version: '3.3' + +services: + grafana: + build: + context: ./src/app/grafana/ + dockerfile: dockerfile_graf + ports: + - '3000:3000' + + prometheus: + build: + context: ./src/app/prom/ + dockerfile: dockerfile_prom + ports: + - '9090:9090' + + web: + image: mlopswhereiswally/web:latest + ports: + - "8000:8000" + depends_on: + - api + + api: + image: mlopswhereiswally/app:latest + ports: + - "5000:5000" + + diff --git a/metrics/emissions.csv b/metrics/emissions.csv index 066df3e..9309a37 100644 --- a/metrics/emissions.csv +++ b/metrics/emissions.csv @@ -1,11 +1,11 @@ timestamp,project_name,run_id,duration,emissions,emissions_rate,cpu_power,gpu_power,ram_power,cpu_energy,gpu_energy,ram_energy,energy_consumed,country_name,country_iso_code,region,cloud_provider,cloud_region,os,python_version,codecarbon_version,cpu_count,cpu_model,gpu_count,gpu_model,longitude,latitude,ram_total_size,tracking_mode,on_cloud,pue -2023-10-18T09:15:10,codecarbon,cae64ca2-ec4c-42f0-920a-dda8c6fe2533,687.7244455814362,0.0022118602997333,3.2162013637065074e-06,42.5,32.674138147770975,4.754376411437988,0.0081145688699351,0.006919919147042,0.0009068677377239,0.0159413557547011,United States,USA,oregon,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.20GHz,1,1 x Tesla T4,-121.1871,45.5999,12.678337097167969,machine,N,1.0 +2023-10-18T09:15:10,codecarbon,cae64ca2-ec4c-42f0-920a-dda8c6fe2533,687.7244455814362,2.0022118602997333,3.2162013637065074e-06,42.5,32.674138147770975,4.754376411437988,0.0081145688699351,0.006919919147042,0.0009068677377239,0.0159413557547011,United States,USA,oregon,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.20GHz,1,1 x Tesla T4,-121.1871,45.5999,12.678337097167969,machine,N,1.0 2023-10-19T19:20:30,codecarbon,ae72e61b-119b-4cea-8eab-d797547371a4,0.2539727687835693,1.8249856978775848e-06,7.185753443641046e-06,42.5,11.56521589957642,4.7543792724609375,2.937740584214529e-06,7.827784040000639e-07,3.115223985272072e-07,4.0320413867418e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T19:21:24,codecarbon,700161da-af81-4935-b7b9-6d1e0e30949b,15.70073390007019,0.0001283604140347,8.175440387165007e-06,42.5,26.59588643235654,4.7543792724609375,0.000185222488145,7.767061769199993e-05,2.070062446418888e-05,0.0002835937303012,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 
-2023-10-19T19:27:16,codecarbon,26d0b458-895d-4d15-843e-afca3997a0d5,0.2142152786254882,1.4870243250821465e-06,6.941728594821217e-06,42.5,9.681412553021197,4.7543792724609375,2.473070555263096e-06,5.472226599994266e-07,2.650715022779575e-07,3.28536471754048e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T19:27:59,codecarbon,0bd886b4-50db-440e-9aa5-abc4f9930a22,0.1789560317993164,1.1481257401976624e-06,6.415686180866959e-06,42.5,5.740827912682525,4.7543792724609375,2.0514648821618825e-06,2.6722243600058385e-07,2.1793009590510338e-07,2.5366174140675698e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T21:13:52,codecarbon,74ec2fa7-1e70-423e-b8c7-d8becdbbf0da,6223.70715379715,0.0746168795536741,1.1989137295470211e-05,42.5,45.9255894660416,4.7543792724609375,0.0734310454871919,0.083218375741314,0.0082057592037752,0.1648551804322814,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T21:18:04,codecarbon,e82ffde8-b76f-447f-84d8-e5c0d79c8465,20.039419412612915,0.0001900742421889,9.485017418686197e-06,42.5,28.04544041480297,4.7543792724609375,0.0002364599380228,0.00015709318123,2.638836957686181e-05,0.0004199414888296,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T22:31:06,codecarbon,df2d2c51-331c-4275-9410-73ed2a0e0669,4306.847158432007,0.0514933269932864,1.1956153793958544e-05,42.5,49.426492036036144,4.7543792724609375,0.050812759623097,0.057276125820864,0.0056781571248767,0.1137670425688378,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-20T11:22:02,codecarbon,b87ef1db-c81d-4681-9586-afba23559d8e,917.097677230835,0.0108594917105747,1.1841150599535784e-05,42.5,51.63517681237767,4.754376411437988,0.0108220953732728,0.011960941235412,0.0012094363553163,0.0239924729640013,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.678337097167969,machine,N,1.0 -2023-10-20T12:35:38,codecarbon,608e4930-8ad7-42c5-92f9-f89fbd7e25f5,4310.857648849487,0.05565903181236902,1.2911359257530506e-05,42.5,31.014681094218147,4.754376411437988,0.05086209017485379,0.06642508064002199,0.005683392776085443,0.12297056359096126,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.678337097167969,machine,N,1.0 +2023-10-19T19:21:24,codecarbon,700161da-af81-4935-b7b9-6d1e0e30949b,15.70073390007019,3.0001283604140347,8.175440387165007e-06,42.5,26.59588643235654,4.7543792724609375,0.000185222488145,7.767061769199993e-05,2.070062446418888e-05,0.0002835937303012,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 
+2023-11-01T19:27:16,codecarbon,26d0b458-895d-4d15-843e-afca3997a0d5,0.2142152786254882,1.4870243250821465e-06,6.941728594821217e-06,42.5,9.681412553021197,4.7543792724609375,2.473070555263096e-06,5.472226599994266e-07,2.650715022779575e-07,3.28536471754048e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-10T19:27:59,codecarbon,0bd886b4-50db-440e-9aa5-abc4f9930a22,0.1789560317993164,1.1481257401976624e-06,6.415686180866959e-06,42.5,5.740827912682525,4.7543792724609375,2.0514648821618825e-06,2.6722243600058385e-07,2.1793009590510338e-07,2.5366174140675698e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-11T21:13:52,codecarbon,74ec2fa7-1e70-423e-b8c7-d8becdbbf0da,6223.70715379715,3.0746168795536741,1.1989137295470211e-05,42.5,45.9255894660416,4.7543792724609375,0.0734310454871919,0.083218375741314,0.0082057592037752,0.1648551804322814,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-12T21:18:04,codecarbon,e82ffde8-b76f-447f-84d8-e5c0d79c8465,20.039419412612915,2.0001900742421889,9.485017418686197e-06,42.5,28.04544041480297,4.7543792724609375,0.0002364599380228,0.00015709318123,2.638836957686181e-05,0.0004199414888296,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-12T22:31:06,codecarbon,df2d2c51-331c-4275-9410-73ed2a0e0669,4306.847158432007,3.0514933269932864,1.1956153793958544e-05,42.5,49.426492036036144,4.7543792724609375,0.050812759623097,0.057276125820864,0.0056781571248767,0.1137670425688378,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-20T11:22:02,codecarbon,b87ef1db-c81d-4681-9586-afba23559d8e,917.097677230835,2.0108594917105747,1.1841150599535784e-05,42.5,51.63517681237767,4.754376411437988,0.0108220953732728,0.011960941235412,0.0012094363553163,0.0239924729640013,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.678337097167969,machine,N,1.0 +2023-11-20T12:35:38,codecarbon,608e4930-8ad7-42c5-92f9-f89fbd7e25f5,4310.857648849487,2.05565903181236902,1.2911359257530506e-05,42.5,31.014681094218147,4.754376411437988,0.05086209017485379,0.06642508064002199,0.005683392776085443,0.12297056359096126,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.678337097167969,machine,N,1.0 diff --git a/metrics/total_emission.txt b/metrics/total_emission.txt index 8994f75..a027925 100644 --- a/metrics/total_emission.txt +++ b/metrics/total_emission.txt @@ -1 +1,2 @@ -0.19516348516162427 kg eq. Co2 \ No newline at end of file +17.20 kg eq. Co2 +which is equivalent to driving 3.39 kilometers with a family sized diesel car. 
\ No newline at end of file diff --git a/metrics/track_total_emission.py b/metrics/track_total_emission.py index 3352af4..c033d29 100644 --- a/metrics/track_total_emission.py +++ b/metrics/track_total_emission.py @@ -1,6 +1,13 @@ import pandas as pd -emissions = pd.read_csv('./emissions.csv') -total_emissions = emissions['emissions'].sum() #In kg eq. CO2 -with open('total_emission.txt', 'w') as f: - f.write(str(total_emissions)+' kg eq. Co2') \ No newline at end of file +def main(): + emissions = pd.read_csv('./emissions.csv') + total_emissions = emissions['emissions'].sum() # In kg eq. CO2 + km_equiv = total_emissions*0.196974607 # Equivalent driven km of diesel family car + + with open('total_emission.txt', 'w') as f: + f.write(f'{total_emissions:.2f} kg eq. Co2 \n') + f.write(f'which is equivalent to driving {km_equiv:.2f} kilometers with a family sized diesel car.') + +if __name__ == "__main__": + main() diff --git a/models/best/best_all.pt b/models/best/best_all.pt new file mode 100644 index 0000000..0316789 Binary files /dev/null and b/models/best/best_all.pt differ diff --git a/models/best/best_wally.pt b/models/best/best_wally.pt new file mode 100644 index 0000000..2a536e7 Binary files /dev/null and b/models/best/best_wally.pt differ diff --git a/models/best/results.png b/models/best/results.png new file mode 100644 index 0000000..de93f29 Binary files /dev/null and b/models/best/results.png differ diff --git a/requirements.txt b/requirements.txt index 8a84804..1589423 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,195 +1,73 @@ -alabaster==0.7.13 -alembic==1.12.0 -altair==4.2.2 annotated-types==0.6.0 -anyio==4.0.0 -appnope==0.1.3 -argon2-cffi==23.1.0 -argon2-cffi-bindings==21.2.0 -arrow==1.3.0 -astroid==3.0.1 -asttokens==2.4.0 -async-lru==2.0.4 -attrs==23.1.0 -awscli==1.29.63 -Babel==2.13.0 -backcall==0.2.0 -beautifulsoup4==4.12.2 -bleach==6.1.0 -blinker==1.6.3 -botocore==1.31.63 -certifi==2023.7.22 -cffi==1.16.0 -charset-normalizer==3.3.0 -click==8.1.7 -cloudpickle==2.2.1 -colorama==0.4.4 -comm==0.1.4 -contourpy==1.1.1 +anyio==3.7.1 +certifi==2023.11.17 +charset-normalizer==3.3.2 +contourpy==1.2.0 coverage==7.3.2 -cryptography==41.0.4 cycler==0.12.1 -databricks-cli==0.18.0 -debugpy==1.8.0 -decorator==5.1.1 -defusedxml==0.7.1 -dill==0.3.7 -docker==6.1.3 -docutils==0.16 -entrypoints==0.4 -exceptiongroup==1.1.3 -executing==2.0.0 -fastjsonschema==2.18.1 -filelock==3.12.4 -flake8==6.1.0 -Flask==2.3.3 -fonttools==4.43.1 -fqdn==1.5.1 -fsspec==2023.9.2 -gitdb==4.0.10 -GitPython==3.1.38 -great-expectations==0.17.22 -greenlet==3.0.0 -gunicorn==21.2.0 -idna==3.4 -imagesize==1.4.1 -importlib-metadata==6.8.0 -importlib-resources==6.1.0 -ipykernel==6.25.2 -ipython==8.16.1 -ipywidgets==8.1.1 -isoduration==20.11.0 -isort==5.12.0 -itsdangerous==2.1.2 -jedi==0.19.1 +exceptiongroup==1.2.0 +fastapi==0.105.0 +filelock==3.13.1 +fonttools==4.46.0 +fsspec==2023.12.2 +h11==0.14.0 +httpcore==1.0.2 +httpx==0.25.2 +idna==3.6 +iniconfig==2.0.0 Jinja2==3.1.2 -jmespath==1.0.1 -joblib==1.3.2 -json5==0.9.14 -jsonpatch==1.33 -jsonpointer==2.4 -jsonschema==4.19.1 -jsonschema-specifications==2023.7.1 -jupyter-events==0.8.0 -jupyter-lsp==2.2.0 -jupyter_client==8.4.0 -jupyter_core==5.4.0 -jupyter_server==2.8.0 -jupyter_server_terminals==0.4.4 -jupyterlab==4.0.7 -jupyterlab-pygments==0.2.2 -jupyterlab-widgets==3.0.9 -jupyterlab_server==2.25.0 kiwisolver==1.4.5 -makefun==1.15.1 -Mako==1.2.4 -Markdown==3.5 MarkupSafe==2.1.3 -marshmallow==3.20.1 -matplotlib==3.8.0 -matplotlib-inline==0.1.6 -mccabe==0.7.0 
-mistune==3.0.2 -mlflow==2.7.1 +matplotlib==3.8.2 mpmath==1.3.0 -nbclient==0.8.0 -nbconvert==7.9.2 -nbformat==5.9.2 -nest-asyncio==1.5.8 -networkx==3.1 -notebook==7.0.5 -notebook_shim==0.2.3 -numpy==1.26.1 -oauthlib==3.2.2 +networkx==3.2.1 +numpy==1.26.2 +nvidia-cublas-cu12==12.1.3.1 +nvidia-cuda-cupti-cu12==12.1.105 +nvidia-cuda-nvrtc-cu12==12.1.105 +nvidia-cuda-runtime-cu12==12.1.105 +nvidia-cudnn-cu12==8.9.2.26 +nvidia-cufft-cu12==11.0.2.54 +nvidia-curand-cu12==10.3.2.106 +nvidia-cusolver-cu12==11.4.5.107 +nvidia-cusparse-cu12==12.1.0.106 +nvidia-nccl-cu12==2.18.1 +nvidia-nvjitlink-cu12==12.3.101 +nvidia-nvtx-cu12==12.1.105 opencv-python==4.8.1.78 -overrides==7.4.0 packaging==23.2 -pandas==2.1.1 -pandocfilters==1.5.0 -parso==0.8.3 -pexpect==4.8.0 -pickleshare==0.7.5 +pandas==2.1.4 Pillow==10.1.0 -platformdirs==3.11.0 -prometheus-client==0.17.1 -prompt-toolkit==3.0.39 -protobuf==4.24.4 +pluggy==1.3.0 +prometheus-client==0.19.0 +prometheus-fastapi-instrumentator==6.1.0 psutil==5.9.6 -ptyprocess==0.7.0 -pure-eval==0.2.2 py-cpuinfo==9.0.0 -pyarrow==13.0.0 -pyasn1==0.5.0 -pycodestyle==2.11.1 -pycparser==2.21 -pydantic==2.4.2 -pydantic_core==2.10.1 -pyflakes==3.1.0 -Pygments==2.16.1 -PyJWT==2.8.0 -pylint==3.0.1 +pydantic==2.5.2 +pydantic_core==2.14.5 pyparsing==3.1.1 +pytest==7.4.3 +pytest-html==4.1.1 +pytest-metadata==3.0.0 python-dateutil==2.8.2 python-dotenv==1.0.0 -python-json-logger==2.0.7 +python-multipart==0.0.6 pytz==2023.3.post1 PyYAML==6.0.1 -pyzmq==25.1.1 -querystring-parser==1.2.4 -referencing==0.30.2 requests==2.31.0 -rfc3339-validator==0.1.4 -rfc3986-validator==0.1.1 -rpds-py==0.10.6 -rsa==4.7.2 -ruamel.yaml==0.17.17 -ruamel.yaml.clib==0.2.8 -s3transfer==0.7.0 -scikit-learn==1.3.1 -scipy==1.11.3 +scipy==1.11.4 seaborn==0.13.0 -Send2Trash==1.8.2 six==1.16.0 -smmap==5.0.1 sniffio==1.3.0 -snowballstemmer==2.2.0 -soupsieve==2.5 -Sphinx==7.2.6 -sphinxcontrib-applehelp==1.0.7 -sphinxcontrib-devhelp==1.0.5 -sphinxcontrib-htmlhelp==2.0.4 -sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==1.0.6 -sphinxcontrib-serializinghtml==1.1.9 -SQLAlchemy==2.0.22 -sqlparse==0.4.4 -stack-data==0.6.3 +starlette==0.27.0 sympy==1.12 -tabulate==0.9.0 -terminado==0.17.1 thop==0.1.1.post2209072238 -threadpoolctl==3.2.0 -tinycss2==1.2.1 tomli==2.0.1 -tomlkit==0.12.1 -toolz==0.12.0 -torch==2.1.0 -torchvision==0.16.0 -tornado==6.3.3 +torch==2.1.1 +torchvision==0.16.1 tqdm==4.66.1 -traitlets==5.11.2 -types-python-dateutil==2.8.19.14 -typing_extensions==4.8.0 +triton==2.1.0 +typing_extensions==4.9.0 tzdata==2023.3 -tzlocal==5.1 -ultralytics==8.0.199 -uri-template==1.3.0 -urllib3==1.26.17 -wcwidth==0.2.8 -webcolors==1.13 -webencodings==0.5.1 -websocket-client==1.6.4 -Werkzeug==3.0.0 -widgetsnbextension==4.0.9 -zipp==3.17.0 -map-boxes==1.0.5 \ No newline at end of file +ultralytics==8.0.227 +urllib3==2.1.0 diff --git a/src/__init__.py b/src/__init__.py index a95d69e..aaabd04 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -1,3 +1,6 @@ +""" +Src init +""" from pathlib import Path from dotenv import load_dotenv @@ -10,7 +13,9 @@ PROCESSED_DATA_DIR = ROOT_DIR / "data/processed" METRICS_DIR = ROOT_DIR / "metrics" -MODELS_DIR = ROOT_DIR / "models" -REPORTS_DIR= ROOT_DIR / "reports" -DATA_YAML_DIR= ROOT_DIR / "data/processed/data.yaml" -ARTIFACTS_DIR= ROOT_DIR / "runs/detect" +MODELS_DIR = ROOT_DIR / "models/best" +REPORTS_DIR = ROOT_DIR / "reports" +DATA_YAML_DIR = ROOT_DIR / "data/processed/data.yaml" +ARTIFACTS_DIR = ROOT_DIR / "runs/detect" +API_DIR = ROOT_DIR / "app" +DRIFT_DETECTOR_DIR= ROOT_DIR / 
"models/drift_detector" \ No newline at end of file diff --git a/src/__pycache__/__init__.cpython-311.pyc b/src/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..01b69fb Binary files /dev/null and b/src/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/__pycache__/__init__.cpython-38.pyc b/src/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..2102590 Binary files /dev/null and b/src/__pycache__/__init__.cpython-38.pyc differ diff --git a/src/__pycache__/__init__.cpython-39.pyc b/src/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..9f1839a Binary files /dev/null and b/src/__pycache__/__init__.cpython-39.pyc differ diff --git a/src/app/__pycache__/api.cpython-311.pyc b/src/app/__pycache__/api.cpython-311.pyc new file mode 100644 index 0000000..40e4c58 Binary files /dev/null and b/src/app/__pycache__/api.cpython-311.pyc differ diff --git a/src/app/__pycache__/api.cpython-38.pyc b/src/app/__pycache__/api.cpython-38.pyc new file mode 100644 index 0000000..7af6879 Binary files /dev/null and b/src/app/__pycache__/api.cpython-38.pyc differ diff --git a/src/app/alermanager/alertmanager.yml b/src/app/alermanager/alertmanager.yml new file mode 100644 index 0000000..68c73e0 --- /dev/null +++ b/src/app/alermanager/alertmanager.yml @@ -0,0 +1,15 @@ + +route: + receiver: 'mail' + repeat_interval: 4h + group_by: [ alertname ] + + +receivers: + - name: 'mail' + email_configs: + - smarthost: 'smtp.gmail.com:465' + auth_username: 'ange.xu@estudiantat.upc.edu' + auth_password: "" + from: 'ange.xu@estudiantat.upc.edu' + to: 'louis.van.langendonck@estudiantat.upc.edu' diff --git a/src/app/alermanager/docker-compose-alertManager.yml b/src/app/alermanager/docker-compose-alertManager.yml new file mode 100644 index 0000000..2a34d7c --- /dev/null +++ b/src/app/alermanager/docker-compose-alertManager.yml @@ -0,0 +1,13 @@ + alertmanager: + image: prom/alertmanager:v0.23.0 + restart: unless-stopped + ports: + - "9093:9093" + volumes: + - "./alertmanager:/config" + - alertmanager-data:/data + command: --config.file=/config/alertmanager.yml --log.level=debug + + +volumes: + alertmanager-data: diff --git a/src/app/alermanager/docker-compose.yml b/src/app/alermanager/docker-compose.yml new file mode 100644 index 0000000..ac480fe --- /dev/null +++ b/src/app/alermanager/docker-compose.yml @@ -0,0 +1,15 @@ +version: '3.1' + +services: + alertmanager: + build: + context: '.' 
+ dockerfile: dockerfile_alert + ports: + - "9093:9093" + volumes: + - "./alertmanager:/config" + - alertmanager-data:/data + command: --config.file=/config/alertmanager.yml --log.level=debug +volumes: + alertmanager-data: diff --git a/src/app/alermanager/docker-compose.yml.save b/src/app/alermanager/docker-compose.yml.save new file mode 100644 index 0000000..324d823 --- /dev/null +++ b/src/app/alermanager/docker-compose.yml.save @@ -0,0 +1,11 @@ +alertmanager: + build: + dockerfile: dockerfile_alert + co + restart: unless-stopped + ports: + - "9093:9093" + volumes: + - "./alertmanager:/config" + - alertmanager-data:/data + command: --config.file=/config/alertmanager.yml --log.level=debug diff --git a/src/app/alermanager/dockerfile_alert b/src/app/alermanager/dockerfile_alert new file mode 100644 index 0000000..0d245d9 --- /dev/null +++ b/src/app/alermanager/dockerfile_alert @@ -0,0 +1,2 @@ +FROM prom/alertmanager:v0.23.0 +ADD ./alertmanager.yml /config/alertmanager.yml diff --git a/src/app/alibi_detect/predict_drift_detector.py b/src/app/alibi_detect/predict_drift_detector.py new file mode 100644 index 0000000..db9c8dd --- /dev/null +++ b/src/app/alibi_detect/predict_drift_detector.py @@ -0,0 +1,29 @@ +import numpy as np +from os import listdir +from os.path import isfile, join +from src import RAW_DATA_DIR,DRIFT_DETECTOR_DIR +from alibi_detect.saving import load_detector +import datetime + +model=load_detector(DRIFT_DETECTOR_DIR) + +def predict(img): + image=np.asarray(img).astype('float32') / 255. + image=np.expand_dims(image, 0) + + #inference + model.infer_threshold(image, threshold_perc=95) + preds = model.predict(image, outlier_type='instance', + return_instance_score=True, + return_feature_score=True) + + n_outliers=np.count_nonzero(preds['data']['is_outlier'] == 1) + print("n outliers",n_outliers) + + # ct stores current time + ct = datetime.datetime.now() + # lgo the results + f = open("log.txt", "a") + f.write(str(ct)+"\t"+str(n_outliers)) + f.close() + diff --git a/src/app/alibi_detect/train_drift_detector.py b/src/app/alibi_detect/train_drift_detector.py new file mode 100644 index 0000000..b965349 --- /dev/null +++ b/src/app/alibi_detect/train_drift_detector.py @@ -0,0 +1,60 @@ +import numpy as np +from PIL import Image +from os import listdir +from os.path import isfile, join +import tensorflow as tf +from src import RAW_DATA_DIR,DRIFT_DETECTOR_DIR +from tensorflow.keras.layers import Conv2D, Conv2DTranspose, UpSampling2D,\ + Dense, Reshape, InputLayer, Flatten, Input, MaxPooling2D +from alibi_detect.od import OutlierAE +from alibi_detect.saving import save_detector + +def img_to_np(path, resize = True): + img_array = [] + fpaths = [join(path /f) for f in listdir(path) if isfile(join(path, f))] + for fname in fpaths: + img = Image.open(fname).convert("RGB") + if(resize): + img = img.resize((64,64)) + img_array.append(np.asarray(img)) + images = np.array(img_array) + return images + +path_train = RAW_DATA_DIR / "valid/images" + +train = img_to_np(path_train) +train = train.astype('float32') / 255. 
+ +encoding_dim = 1024 +dense_dim = [8, 8, 128] + +encoder_net = tf.keras.Sequential( + [ + InputLayer(input_shape=train[0].shape), + Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu), + Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu), + Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu), + Flatten(), + Dense(encoding_dim,) + ]) + +decoder_net = tf.keras.Sequential( + [ + InputLayer(input_shape=(encoding_dim,)), + Dense(np.prod(dense_dim)), + Reshape(target_shape=dense_dim), + Conv2DTranspose(256, 4, strides=2, padding='same', activation=tf.nn.relu), + Conv2DTranspose(64, 4, strides=2, padding='same', activation=tf.nn.relu), + Conv2DTranspose(3, 4, strides=2, padding='same', activation='sigmoid') + ]) + +od = OutlierAE( threshold = 0.001, + encoder_net=encoder_net, + decoder_net=decoder_net) + +adam = tf.keras.optimizers.Adam(learning_rate=1e-4) + +od.fit(train, epochs=20, verbose=True, + optimizer = adam) + +save_detector(od, DRIFT_DETECTOR_DIR) \ No newline at end of file diff --git a/src/app/backend/__pycache__/api.cpython-39.pyc b/src/app/backend/__pycache__/api.cpython-39.pyc new file mode 100644 index 0000000..7d7c08c Binary files /dev/null and b/src/app/backend/__pycache__/api.cpython-39.pyc differ diff --git a/src/app/backend/api.py b/src/app/backend/api.py new file mode 100644 index 0000000..557e01f --- /dev/null +++ b/src/app/backend/api.py @@ -0,0 +1,195 @@ +"""Main script: it includes our API initialization and endpoints.""" + +import asyncio +import base64 +from datetime import datetime +from functools import wraps +from http import HTTPStatus +from typing import List + +import cv2 +import numpy as np +from fastapi import FastAPI, File, HTTPException, Request, Response, UploadFile +from ultralytics import YOLO +from ultralytics.utils.plotting import Annotator +from src import MODELS_DIR +from ultralytics import YOLO +import os +import asyncio +from prometheus_fastapi_instrumentator import Instrumentator, metrics + +model_wrappers_list: List[dict] = [] + +# Define application +app = FastAPI( + title="Where is Wally", + description="Upload an image and we will help you to find Wally", + version="0.1", +) + + +Instrumentator().instrument(app).expose(app) # Prometheus metric tracking + + +def construct_response(f): + @wraps(f) + async def wrap(request: Request, *args, **kwargs): + try: + if asyncio.iscoroutinefunction(f): + results = await f(request, *args, **kwargs) + else: + results = f(request, *args, **kwargs) + + # Default status code + status_code = results.get("status-code", HTTPStatus.OK) + + response = { + "message": results.get("message", status_code.phrase), + "method": request.method, + "status-code": status_code, + "timestamp": datetime.now().isoformat(), + "url": request.url._url, + "data": results.get("data", {}), + "found": results.get("found", None), + } + + # Include additional keys if present + for key in ["boxes", "conf", "encoded_img"]: + if key in results: + response[key] = results[key] + + return response + + except HTTPException as http_exc: + # Forward HTTP exceptions as they are + raise http_exc + + except Exception as exc: + # Handle other exceptions + return { + "message": "An error occurred", + "method": request.method, + "status-code": HTTPStatus.INTERNAL_SERVER_ERROR, + "timestamp": datetime.now().isoformat(), + "url": request.url._url, + "detail": str(exc), + } + + return wrap + + +@app.on_event("startup") +def _load_models(): + """Loads all pickled models found in `MODELS_DIR` and adds them to 
`models_list`""" + + model_paths = [ + filename + for filename in MODELS_DIR.iterdir() + if filename.suffix == ".pt" and filename.stem.startswith("best") + ] + + for path in model_paths: + with open(path, "rb") as file: + # model_wrapper = pickle.load(file) + # model_wrappers_list.append(model_wrapper) + model_wrapper = dict() + model = YOLO(path) + model_wrapper["model"] = model + model_wrapper["type"] = str(file).split("_")[-1].split(".")[0] + model_wrapper["info"] = model.info() + model_wrappers_list.append(model_wrapper) + + +@app.get("/", tags=["General"]) # path operation decorator +@construct_response +def _index(request: Request): + """Root endpoint.""" + + response = { + "message": HTTPStatus.OK.phrase, + "status-code": HTTPStatus.OK, + "data": {"message": "Welcome to Where is Wally!"}, + } + return response + + +@app.get("/models", tags=["Prediction"]) +@construct_response +def _get_models_list(request: Request, type: str = None): + """Return the list of available models""" + + available_models = [ + { + "type": model["type"], + "info": model["info"], + # "parameters": model["params"], + # "accuracy": model["metrics"], + } + for model in model_wrappers_list + if model["type"] == type or type is None + ] + + if not available_models: + raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail="Type not found") + else: + return { + "message": HTTPStatus.OK.phrase, + "status-code": HTTPStatus.OK, + "data": available_models, + } + + +@construct_response +@app.post("/predict/{type}") +async def _predict(type: str, file: UploadFile = File(...)): + model_wrapper = next((m for m in model_wrappers_list if m["type"] == type), None) + + if not model_wrapper: + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail="Model not found" + ) + + else: + model = model_wrapper["model"] + contents = await file.read() + nparr = np.frombuffer(contents, np.uint8) + img = cv2.imdecode(nparr, cv2.IMREAD_COLOR) + if img is None: + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail="Invalid image file" + ) + else: + results = model.predict(source=img, conf=0.25) + boxes = results[0].boxes.xyxy + conf = results[0].boxes.conf + + for r in results: + annotator = Annotator(img) + boxes = r.boxes + for box in boxes: + b = box.xyxy[ + 0 + ] # get box coordinates in (top, left, bottom, right) format + c = box.cls + color = (0, 255, 0) + annotator.box_label(b, model.names[int(c)], color=color) + + return_img = annotator.result() + + _, encoded_img = cv2.imencode(".PNG", return_img) + encoded_img = base64.b64encode(encoded_img) + is_empty = len(boxes) == 0 + if is_empty: + return { + "boxes": boxes, + "encoded_img": encoded_img.decode(), + "message": "Processing completed, but Wally was not found in the image.", + "found": False, + } + + return { + "boxes": boxes, + "conf": conf, + "encoded_img": encoded_img.decode(), + "found": True, + } diff --git a/src/app/backend/api.py.save b/src/app/backend/api.py.save new file mode 100644 index 0000000..dfd4ff6 --- /dev/null +++ b/src/app/backend/api.py.save @@ -0,0 +1,254 @@ +"""Main script: it includes our API initialization and endpoints.""" + +import asyncio +import base64 +import os +from datetime import datetime +from functools import wraps +from http import HTTPStatus +from typing import List + +import cv2 +import numpy as np +from fastapi import FastAPI, File, HTTPException, Request, Response, UploadFile +from ultralytics import YOLO +from ultralytics.utils.plotting import Annotator +from src import MODELS_DIR +from ultralytics import 
YOLO +import asyncio +from prometheus_fastapi_instrumentator import Instrumentator, metrics + +model_wrappers_list: List[dict] = [] + +# Define application +app = FastAPI( + title="Where is Wally", + description="Upload an image and we will help you to find Wally", + version="0.1", +) + +instrumentator = Instrumentator( + should_group_status_codes=False, + should_ignore_untemplated=True, + should_respect_env_var=True, + should_instrument_requests_inprogress=True, + excluded_handlers=[".*admin.*", "/metrics"], + env_var_name="ENABLE_METRICS", + inprogress_name="inprogress", + inprogress_labels=True, +) + +instrumentator.add( + metrics.request_size( + should_include_handler=True, + should_include_method=False, + should_include_status=True, + metric_namespace="a", + metric_subsystem="b", + ) +).add( + metrics.response_size( + should_include_handler=True, + should_include_method=False, + should_include_status=True, + metric_namespace="namespace", + metric_subsystem="subsystem", + ) +) + +instrumentator.instrument(app) + +instrumentator.expose(app, include_in_schema=False, should_gzip=True) + + +def construct_response(f): + @wraps(f) + async def wrap(request: Request, *args, **kwargs): + try: + if asyncio.iscoroutinefunction(f): + results = await f(request, *args, **kwargs) + else: + results = f(request, *args, **kwargs) + + # Default status code + status_code = results.get("status-code", HTTPStatus.OK) + + response = { + "message": results.get("message", status_code.phrase), + "method": request.method, + "status-code": status_code, + "timestamp": datetime.now().isoformat(), + "url": request.url._url, + "data": results.get("data", {}), + "found": results.get("found", None), + } + + # Include additional keys if present + for key in ["boxes", "conf", "encoded_img"]: + if key in results: + response[key] = results[key] + + return response + + except HTTPException as http_exc: + # Forward HTTP exceptions as they are + raise http_exc + + except Exception as exc: + # Handle other exceptions + return { + "message": "An error occurred", + "method": request.method, + "status-code": HTTPStatus.INTERNAL_SERVER_ERROR, + "timestamp": datetime.now().isoformat(), + "url": request.url._url, + "detail": str(exc), + } + + return wrap + + + +@app.on_event("startup") +def _load_models(): + """Loads all pickled models found in `MODELS_DIR` and adds them to `models_list`""" + + model_paths = [ + filename + for filename in MODELS_DIR.iterdir() + if filename.suffix == ".pt" and filename.stem.startswith("best") + ] + + for path in model_paths: + with open(path, "rb") as file: + # model_wrapper = pickle.load(file) + # model_wrappers_list.append(model_wrapper) + model_wrapper = dict() + model = YOLO(path) + model_wrapper["model"] = model + model_wrapper["type"] = str(file).split("_")[-1].split(".")[0] + model_wrapper["info"] = model.info() + model_wrappers_list.append(model_wrapper) + + +@app.get("/", tags=["General"]) # path operation decorator +@construct_response +def _index(request: Request): + """Root endpoint.""" + + response = { + "message": HTTPStatus.OK.phrase, + "status-code": HTTPStatus.OK, + "data": {"message": "Welcome to Where is Wally!"}, + } + return response + + +@app.get("/models", tags=["Prediction"]) +@construct_response +def _get_models_list(request: Request, type: str = None): + """Return the list of available models""" + + available_models = [ + { + "type": model["type"], + "info": model["info"], + # "parameters": model["params"], + # "accuracy": model["metrics"], + } + for model in 
model_wrappers_list + if model["type"] == type or type is None + ] + + if not available_models: + raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail="Type not found") + else: + return { + "message": HTTPStatus.OK.phrase, + "status-code": HTTPStatus.OK, + "data": available_models, + } + + +@construct_response +@app.post("/predict/{type}") +async def _predict(type: str, file: UploadFile = File(...)): + model_wrapper = next((m for m in model_wrappers_list if m["type"] == type), None) + + if not model_wrapper: + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail="Model not found" + ) + + else: + model = model_wrapper["model"] + contents = await file.read() + nparr = np.frombuffer(contents, np.uint8) + img = cv2.imdecode(nparr, cv2.IMREAD_COLOR) + if img is None: +<<<<<<< HEAD + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail="Invalid image file" + ) + else: +======= + raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail="Invalid image file") + else: +>>>>>>> API_develop + results = model.predict(source=img, conf=0.25) + boxes = results[0].boxes.xyxy + conf = results[0].boxes.conf + + for r in results: + annotator = Annotator(img) + boxes = r.boxes + for box in boxes: +<<<<<<< HEAD + b = box.xyxy[ + 0 + ] # get box coordinates in (top, left, bottom, right) format +======= + b = box.xyxy[0] # get box coordinates in (top, left, bottom, right) format +>>>>>>> API_develop + c = box.cls + color = (0, 0, 0) + annotator.box_label(b, model.names[int(c)], color=color) + + return_img = annotator.result() + +<<<<<<< HEAD + _, encoded_img = cv2.imencode(".PNG", return_img) + encoded_img = base64.b64encode(encoded_img) + is_empty = len(boxes) == 0 + if is_empty: + return { + "boxes": boxes, + "encoded_img": encoded_img.decode(), + "message": "Processing completed, but Wally was not found in the image.", + "found": False, + } + + return { + "boxes": boxes, + "conf": conf, + "encoded_img": encoded_img.decode(), + "found": True, +======= + _, encoded_img = cv2.imencode('.PNG', return_img) + encoded_img = base64.b64encode(encoded_img) + is_empty = len(boxes) == 0 + if(is_empty): + return { + 'boxes': boxes, + 'encoded_img': encoded_img.decode(), + 'message': "Processing completed, but Wally was not found in the image.", + 'found': False, + } + + return { + 'boxes': boxes, + 'conf': conf, + 'encoded_img': encoded_img.decode(), + 'found': True +>>>>>>> API_develop + } diff --git a/src/app/backend/dockerfile_api b/src/app/backend/dockerfile_api new file mode 100644 index 0000000..1905866 --- /dev/null +++ b/src/app/backend/dockerfile_api @@ -0,0 +1,9 @@ +FROM python:3.9-slim +WORKDIR /app +COPY src/ ./src/ +COPY models/ ./models/ +RUN useradd -rs /bin/false node_exporter +RUN pip install --no-cache-dir -r src/app/backend/requirements_api.txt +RUN apt-get update && apt-get install ffmpeg libsm6 libxext6 -y +EXPOSE 5000 +ENTRYPOINT [ "uvicorn", "src.app.backend.api:app", "--host", "0.0.0.0","--port","5000", "--reload", "--reload-dir", "src/app/backend","--reload-dir", "models"] diff --git a/src/app/backend/requirements_api.txt b/src/app/backend/requirements_api.txt new file mode 100644 index 0000000..a1f2742 --- /dev/null +++ b/src/app/backend/requirements_api.txt @@ -0,0 +1,9 @@ +fastapi>=0.68.0,<0.69.0 +ultralytics>=8.0.218 +opencv-python>=4.7.0.72 +python-multipart>=0.0.6 +fastapi-socketio>=0.0.10 +httptools==0.1.2 +uvicorn>=0.15.0,<0.16.0 +python-dotenv>=1.0.0 +prometheus-fastapi-instrumentator>=6.1.0 diff --git a/src/app/frontend/dockerfile_web 
b/src/app/frontend/dockerfile_web new file mode 100644 index 0000000..6819634 --- /dev/null +++ b/src/app/frontend/dockerfile_web @@ -0,0 +1,15 @@ +FROM python:3.9-slim + +WORKDIR /web + +COPY ./src/app/frontend/web . + +RUN pip install --upgrade pip +RUN pip install Django +RUN pip install requests +RUN pip install opencv-python +RUN apt-get update && apt-get install ffmpeg libsm6 libxext6 -y +ENV PYTHONUNBUFFERED=1 + +EXPOSE 8000 +CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"] diff --git a/src/app/frontend/web/__pycache__/apiConstants.cpython-311.pyc b/src/app/frontend/web/__pycache__/apiConstants.cpython-311.pyc new file mode 100644 index 0000000..bf7a099 Binary files /dev/null and b/src/app/frontend/web/__pycache__/apiConstants.cpython-311.pyc differ diff --git a/src/app/frontend/web/__pycache__/apiConstants.cpython-38.pyc b/src/app/frontend/web/__pycache__/apiConstants.cpython-38.pyc new file mode 100644 index 0000000..169b44c Binary files /dev/null and b/src/app/frontend/web/__pycache__/apiConstants.cpython-38.pyc differ diff --git a/src/app/frontend/web/__pycache__/apiConstants.cpython-39.pyc b/src/app/frontend/web/__pycache__/apiConstants.cpython-39.pyc new file mode 100644 index 0000000..ee44406 Binary files /dev/null and b/src/app/frontend/web/__pycache__/apiConstants.cpython-39.pyc differ diff --git a/src/app/frontend/web/__pycache__/manage.cpython-38.pyc b/src/app/frontend/web/__pycache__/manage.cpython-38.pyc new file mode 100644 index 0000000..827ff0b Binary files /dev/null and b/src/app/frontend/web/__pycache__/manage.cpython-38.pyc differ diff --git a/src/app/frontend/web/apiConstants.py b/src/app/frontend/web/apiConstants.py new file mode 100644 index 0000000..661ec80 --- /dev/null +++ b/src/app/frontend/web/apiConstants.py @@ -0,0 +1,4 @@ +"""Module that defines api constants""" +# API_ROOT_URL="http://localhost:5000/" +API_ROOT_URL = "http://10.4.41.34:5000/" +PREDICT = API_ROOT_URL + "predict/" diff --git a/src/app/frontend/web/db.sqlite3 b/src/app/frontend/web/db.sqlite3 new file mode 100644 index 0000000..a2a1ddf Binary files /dev/null and b/src/app/frontend/web/db.sqlite3 differ diff --git a/src/app/frontend/web/manage.py b/src/app/frontend/web/manage.py new file mode 100644 index 0000000..6112ff5 --- /dev/null +++ b/src/app/frontend/web/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "whereIsWally.settings") + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" 
+ ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/src/app/frontend/web/pages/__init__.py b/src/app/frontend/web/pages/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/frontend/web/pages/__pycache__/__init__.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..8654b98 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/__init__.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..6dc44d4 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/__init__.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/__init__.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..c8a2d18 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/__init__.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/admin.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/admin.cpython-311.pyc new file mode 100644 index 0000000..69a1cc3 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/admin.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/admin.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/admin.cpython-38.pyc new file mode 100644 index 0000000..3b30620 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/admin.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/admin.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/admin.cpython-39.pyc new file mode 100644 index 0000000..a5ac1b0 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/admin.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/apps.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/apps.cpython-311.pyc new file mode 100644 index 0000000..1a63759 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/apps.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/apps.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/apps.cpython-38.pyc new file mode 100644 index 0000000..dda4663 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/apps.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/apps.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/apps.cpython-39.pyc new file mode 100644 index 0000000..0c19a99 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/apps.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/models.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/models.cpython-311.pyc new file mode 100644 index 0000000..235ed4c Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/models.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/models.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/models.cpython-38.pyc new file mode 100644 index 0000000..ef61913 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/models.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/models.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/models.cpython-39.pyc new file mode 100644 index 0000000..a7cb587 Binary files /dev/null and 
b/src/app/frontend/web/pages/__pycache__/models.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/urls.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/urls.cpython-311.pyc new file mode 100644 index 0000000..141e7e9 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/urls.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/urls.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/urls.cpython-38.pyc new file mode 100644 index 0000000..2af8225 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/urls.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/urls.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/urls.cpython-39.pyc new file mode 100644 index 0000000..2be34a7 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/urls.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/views.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/views.cpython-311.pyc new file mode 100644 index 0000000..4b1cdd1 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/views.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/views.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/views.cpython-38.pyc new file mode 100644 index 0000000..2251abd Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/views.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/views.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/views.cpython-39.pyc new file mode 100644 index 0000000..bc3d132 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/views.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/admin.py b/src/app/frontend/web/pages/admin.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/frontend/web/pages/apps.py b/src/app/frontend/web/pages/apps.py new file mode 100644 index 0000000..4b6237c --- /dev/null +++ b/src/app/frontend/web/pages/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class PagesConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "pages" diff --git a/src/app/frontend/web/pages/migrations/__init__.py b/src/app/frontend/web/pages/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-311.pyc b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..868f8a4 Binary files /dev/null and b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-38.pyc b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..c1def13 Binary files /dev/null and b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-39.pyc b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e63f201 Binary files /dev/null and b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/models.py b/src/app/frontend/web/pages/models.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/frontend/web/pages/templates/pages/home.html 
b/src/app/frontend/web/pages/templates/pages/home.html
new file mode 100644
index 0000000..ccf7a12
--- /dev/null
+++ b/src/app/frontend/web/pages/templates/pages/home.html
@@ -0,0 +1,364 @@
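For completeness, here is a minimal sketch of how the `/predict/{type}` endpoint defined in `src/app/backend/api.py` above might be exercised end to end. This is an illustration under stated assumptions (the API is reachable on localhost:5000 as configured in `docker-compose.yml`, and `puzzle.jpg` is a hypothetical input image), not code that exists in the repository:

```python
import base64

import requests  # listed in requirements.txt

API_URL = "http://localhost:5000"  # assumption: local docker-compose deployment

# "wally" selects models/best/best_wally.pt; "all" would select best_all.pt.
with open("puzzle.jpg", "rb") as f:  # hypothetical input image
    resp = requests.post(
        f"{API_URL}/predict/wally",
        files={"file": ("puzzle.jpg", f, "image/jpeg")},
    )
resp.raise_for_status()
payload = resp.json()

if payload["found"]:
    # encoded_img holds the annotated image as a base64-encoded PNG.
    with open("annotated.png", "wb") as out:
        out.write(base64.b64decode(payload["encoded_img"]))
else:
    print(payload.get("message", "Wally was not found in the image."))
```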