diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 0000000..8d673c5
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,74 @@
+name: Publish Docker image to DockerHub
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  perform_pytest:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["3.10"]
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+      - name: Test with pytest
+        run: |
+          coverage run -m pytest ./tests/api_test/ -v -s
+
+  push_app_to_registry:
+    name: Push APP Docker image to Docker Hub
+    needs: perform_pytest
+    runs-on: ubuntu-latest
+    steps:
+      - name: 1 Copy repository
+        uses: actions/checkout@v2
+
+      - name: 2 Docker login
+        run: docker login -u ${{ secrets.DOCKER_USER }} -p ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: 3 Build APP IMAGE
+        run: docker build -t app -f ./src/app/backend/dockerfile_api .
+
+      - name: 4 Tag
+        run: |
+          docker tag app ${{ secrets.DOCKER_USER }}/app:${{ github.sha }}
+          docker tag app ${{ secrets.DOCKER_USER }}/app:latest
+
+      - name: 5 Push
+        run: |
+          docker push ${{ secrets.DOCKER_USER }}/app:${{ github.sha }}
+          docker push ${{ secrets.DOCKER_USER }}/app:latest
+
+
+  push_web_to_registry:
+    name: Push WEB Docker image to Docker Hub
+    runs-on: ubuntu-latest
+    steps:
+      - name: 1 Copy repository
+        uses: actions/checkout@v2
+
+      - name: 2 Docker login
+        run: docker login -u ${{ secrets.DOCKER_USER }} -p ${{ secrets.DOCKER_PASSWORD }}
+
+      - name: 3 Build WEB IMAGE
+        run: docker build -t web -f ./src/app/frontend/dockerfile_web .
+
+      - name: 4 Tag
+        run: |
+          docker tag web ${{ secrets.DOCKER_USER }}/web:${{ github.sha }}
+          docker tag web ${{ secrets.DOCKER_USER }}/web:latest
+
+      - name: 5 Push
+        run: |
+          docker push ${{ secrets.DOCKER_USER }}/web:${{ github.sha }}
+          docker push ${{ secrets.DOCKER_USER }}/web:latest
diff --git a/.gitignore b/.gitignore
index 9f7df58..cf6bbfd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,8 @@
 /.idea
 .idea
 .DS_Store
+__pycache__
+/node_exporter-1.7.0.linux-amd64
+/venv
+.save
+*.pyc
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..40d5764
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,4 @@
+
+[TYPECHECK]
+ignored-modules=numpy,torch,cv2,src
+ignored-classes=numpy,torch,cv2
\ No newline at end of file
diff --git a/README.md b/README.md
index 65fabff..44c8692 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,8 @@ Object detection model to solve Where's Wally puzzles, a British series of child
 [Model Card](Model_card.md)
 
-Estimated Total Emission: 0.19516348516162427 kg eq. Co2 (last update: 20/10/2023)
+Estimated Total Emission: 17.20 kg eq. Co2 (last update: 12/12/2023)
+which is equivalent to driving 3.39 kilometers with a family sized diesel car. 
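+(worked conversion, using the factor from metrics/track_total_emission.py: 17.20 kg eq. Co2 × 0.196974607 ≈ 3.39 km)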
Project Organization ------------ diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..1cf0bdd --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,28 @@ +version: '3.3' + +services: + grafana: + build: + context: ./src/app/grafana/ + dockerfile: dockerfile_graf + ports: + - '3000:3000' + + prometheus: + build: + context: ./src/app/prom/ + dockerfile: dockerfile_prom + ports: + - '9090:9090' + + web: + image: mlopswhereiswally/web:latest + ports: + - "8000:8000" + depends_on: + - api + + api: + image: mlopswhereiswally/app:latest + ports: + - "5000:5000" diff --git a/docker_compose.yml b/docker_compose.yml new file mode 100644 index 0000000..196aac3 --- /dev/null +++ b/docker_compose.yml @@ -0,0 +1,30 @@ +version: '3.3' + +services: + grafana: + build: + context: ./src/app/grafana/ + dockerfile: dockerfile_graf + ports: + - '3000:3000' + + prometheus: + build: + context: ./src/app/prom/ + dockerfile: dockerfile_prom + ports: + - '9090:9090' + + web: + image: mlopswhereiswally/web:latest + ports: + - "8000:8000" + depends_on: + - api + + api: + image: mlopswhereiswally/app:latest + ports: + - "5000:5000" + + diff --git a/metrics/emissions.csv b/metrics/emissions.csv index 066df3e..9309a37 100644 --- a/metrics/emissions.csv +++ b/metrics/emissions.csv @@ -1,11 +1,11 @@ timestamp,project_name,run_id,duration,emissions,emissions_rate,cpu_power,gpu_power,ram_power,cpu_energy,gpu_energy,ram_energy,energy_consumed,country_name,country_iso_code,region,cloud_provider,cloud_region,os,python_version,codecarbon_version,cpu_count,cpu_model,gpu_count,gpu_model,longitude,latitude,ram_total_size,tracking_mode,on_cloud,pue -2023-10-18T09:15:10,codecarbon,cae64ca2-ec4c-42f0-920a-dda8c6fe2533,687.7244455814362,0.0022118602997333,3.2162013637065074e-06,42.5,32.674138147770975,4.754376411437988,0.0081145688699351,0.006919919147042,0.0009068677377239,0.0159413557547011,United States,USA,oregon,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.20GHz,1,1 x Tesla T4,-121.1871,45.5999,12.678337097167969,machine,N,1.0 +2023-10-18T09:15:10,codecarbon,cae64ca2-ec4c-42f0-920a-dda8c6fe2533,687.7244455814362,2.0022118602997333,3.2162013637065074e-06,42.5,32.674138147770975,4.754376411437988,0.0081145688699351,0.006919919147042,0.0009068677377239,0.0159413557547011,United States,USA,oregon,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.20GHz,1,1 x Tesla T4,-121.1871,45.5999,12.678337097167969,machine,N,1.0 2023-10-19T19:20:30,codecarbon,ae72e61b-119b-4cea-8eab-d797547371a4,0.2539727687835693,1.8249856978775848e-06,7.185753443641046e-06,42.5,11.56521589957642,4.7543792724609375,2.937740584214529e-06,7.827784040000639e-07,3.115223985272072e-07,4.0320413867418e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T19:21:24,codecarbon,700161da-af81-4935-b7b9-6d1e0e30949b,15.70073390007019,0.0001283604140347,8.175440387165007e-06,42.5,26.59588643235654,4.7543792724609375,0.000185222488145,7.767061769199993e-05,2.070062446418888e-05,0.0002835937303012,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 
-2023-10-19T19:27:16,codecarbon,26d0b458-895d-4d15-843e-afca3997a0d5,0.2142152786254882,1.4870243250821465e-06,6.941728594821217e-06,42.5,9.681412553021197,4.7543792724609375,2.473070555263096e-06,5.472226599994266e-07,2.650715022779575e-07,3.28536471754048e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T19:27:59,codecarbon,0bd886b4-50db-440e-9aa5-abc4f9930a22,0.1789560317993164,1.1481257401976624e-06,6.415686180866959e-06,42.5,5.740827912682525,4.7543792724609375,2.0514648821618825e-06,2.6722243600058385e-07,2.1793009590510338e-07,2.5366174140675698e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T21:13:52,codecarbon,74ec2fa7-1e70-423e-b8c7-d8becdbbf0da,6223.70715379715,0.0746168795536741,1.1989137295470211e-05,42.5,45.9255894660416,4.7543792724609375,0.0734310454871919,0.083218375741314,0.0082057592037752,0.1648551804322814,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T21:18:04,codecarbon,e82ffde8-b76f-447f-84d8-e5c0d79c8465,20.039419412612915,0.0001900742421889,9.485017418686197e-06,42.5,28.04544041480297,4.7543792724609375,0.0002364599380228,0.00015709318123,2.638836957686181e-05,0.0004199414888296,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-19T22:31:06,codecarbon,df2d2c51-331c-4275-9410-73ed2a0e0669,4306.847158432007,0.0514933269932864,1.1956153793958544e-05,42.5,49.426492036036144,4.7543792724609375,0.050812759623097,0.057276125820864,0.0056781571248767,0.1137670425688378,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 -2023-10-20T11:22:02,codecarbon,b87ef1db-c81d-4681-9586-afba23559d8e,917.097677230835,0.0108594917105747,1.1841150599535784e-05,42.5,51.63517681237767,4.754376411437988,0.0108220953732728,0.011960941235412,0.0012094363553163,0.0239924729640013,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.678337097167969,machine,N,1.0 -2023-10-20T12:35:38,codecarbon,608e4930-8ad7-42c5-92f9-f89fbd7e25f5,4310.857648849487,0.05565903181236902,1.2911359257530506e-05,42.5,31.014681094218147,4.754376411437988,0.05086209017485379,0.06642508064002199,0.005683392776085443,0.12297056359096126,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.678337097167969,machine,N,1.0 +2023-10-19T19:21:24,codecarbon,700161da-af81-4935-b7b9-6d1e0e30949b,15.70073390007019,3.0001283604140347,8.175440387165007e-06,42.5,26.59588643235654,4.7543792724609375,0.000185222488145,7.767061769199993e-05,2.070062446418888e-05,0.0002835937303012,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 
+2023-11-01T19:27:16,codecarbon,26d0b458-895d-4d15-843e-afca3997a0d5,0.2142152786254882,1.4870243250821465e-06,6.941728594821217e-06,42.5,9.681412553021197,4.7543792724609375,2.473070555263096e-06,5.472226599994266e-07,2.650715022779575e-07,3.28536471754048e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-10T19:27:59,codecarbon,0bd886b4-50db-440e-9aa5-abc4f9930a22,0.1789560317993164,1.1481257401976624e-06,6.415686180866959e-06,42.5,5.740827912682525,4.7543792724609375,2.0514648821618825e-06,2.6722243600058385e-07,2.1793009590510338e-07,2.5366174140675698e-06,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-11T21:13:52,codecarbon,74ec2fa7-1e70-423e-b8c7-d8becdbbf0da,6223.70715379715,3.0746168795536741,1.1989137295470211e-05,42.5,45.9255894660416,4.7543792724609375,0.0734310454871919,0.083218375741314,0.0082057592037752,0.1648551804322814,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-12T21:18:04,codecarbon,e82ffde8-b76f-447f-84d8-e5c0d79c8465,20.039419412612915,2.0001900742421889,9.485017418686197e-06,42.5,28.04544041480297,4.7543792724609375,0.0002364599380228,0.00015709318123,2.638836957686181e-05,0.0004199414888296,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-12T22:31:06,codecarbon,df2d2c51-331c-4275-9410-73ed2a0e0669,4306.847158432007,3.0514933269932864,1.1956153793958544e-05,42.5,49.426492036036144,4.7543792724609375,0.050812759623097,0.057276125820864,0.0056781571248767,0.1137670425688378,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.6783447265625,machine,N,1.0 +2023-11-20T11:22:02,codecarbon,b87ef1db-c81d-4681-9586-afba23559d8e,917.097677230835,2.0108594917105747,1.1841150599535784e-05,42.5,51.63517681237767,4.754376411437988,0.0108220953732728,0.011960941235412,0.0012094363553163,0.0239924729640013,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.678337097167969,machine,N,1.0 +2023-11-20T12:35:38,codecarbon,608e4930-8ad7-42c5-92f9-f89fbd7e25f5,4310.857648849487,2.05565903181236902,1.2911359257530506e-05,42.5,31.014681094218147,4.754376411437988,0.05086209017485379,0.06642508064002199,0.005683392776085443,0.12297056359096126,United States,USA,iowa,,,Linux-5.15.120+-x86_64-with-glibc2.35,3.10.12,2.3.1,2,Intel(R) Xeon(R) CPU @ 2.30GHz,1,1 x Tesla T4,-95.8517,41.2591,12.678337097167969,machine,N,1.0 diff --git a/metrics/total_emission.txt b/metrics/total_emission.txt index 8994f75..a027925 100644 --- a/metrics/total_emission.txt +++ b/metrics/total_emission.txt @@ -1 +1,2 @@ -0.19516348516162427 kg eq. Co2 \ No newline at end of file +17.20 kg eq. Co2 +which is equivalent to driving 3.39 kilometers with a family sized diesel car. 
\ No newline at end of file diff --git a/metrics/track_total_emission.py b/metrics/track_total_emission.py index 3352af4..c033d29 100644 --- a/metrics/track_total_emission.py +++ b/metrics/track_total_emission.py @@ -1,6 +1,13 @@ import pandas as pd -emissions = pd.read_csv('./emissions.csv') -total_emissions = emissions['emissions'].sum() #In kg eq. CO2 -with open('total_emission.txt', 'w') as f: - f.write(str(total_emissions)+' kg eq. Co2') \ No newline at end of file +def main(): + emissions = pd.read_csv('./emissions.csv') + total_emissions = emissions['emissions'].sum() # In kg eq. CO2 + km_equiv = total_emissions*0.196974607 # Equivalent driven km of diesel family car + + with open('total_emission.txt', 'w') as f: + f.write(f'{total_emissions:.2f} kg eq. Co2 \n') + f.write(f'which is equivalent to driving {km_equiv:.2f} kilometers with a family sized diesel car.') + +if __name__ == "__main__": + main() diff --git a/models/best/best_all.pt b/models/best/best_all.pt new file mode 100644 index 0000000..0316789 Binary files /dev/null and b/models/best/best_all.pt differ diff --git a/models/best/best_wally.pt b/models/best/best_wally.pt new file mode 100644 index 0000000..2a536e7 Binary files /dev/null and b/models/best/best_wally.pt differ diff --git a/models/best/results.png b/models/best/results.png new file mode 100644 index 0000000..de93f29 Binary files /dev/null and b/models/best/results.png differ diff --git a/requirements.txt b/requirements.txt index 8a84804..1589423 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,195 +1,73 @@ -alabaster==0.7.13 -alembic==1.12.0 -altair==4.2.2 annotated-types==0.6.0 -anyio==4.0.0 -appnope==0.1.3 -argon2-cffi==23.1.0 -argon2-cffi-bindings==21.2.0 -arrow==1.3.0 -astroid==3.0.1 -asttokens==2.4.0 -async-lru==2.0.4 -attrs==23.1.0 -awscli==1.29.63 -Babel==2.13.0 -backcall==0.2.0 -beautifulsoup4==4.12.2 -bleach==6.1.0 -blinker==1.6.3 -botocore==1.31.63 -certifi==2023.7.22 -cffi==1.16.0 -charset-normalizer==3.3.0 -click==8.1.7 -cloudpickle==2.2.1 -colorama==0.4.4 -comm==0.1.4 -contourpy==1.1.1 +anyio==3.7.1 +certifi==2023.11.17 +charset-normalizer==3.3.2 +contourpy==1.2.0 coverage==7.3.2 -cryptography==41.0.4 cycler==0.12.1 -databricks-cli==0.18.0 -debugpy==1.8.0 -decorator==5.1.1 -defusedxml==0.7.1 -dill==0.3.7 -docker==6.1.3 -docutils==0.16 -entrypoints==0.4 -exceptiongroup==1.1.3 -executing==2.0.0 -fastjsonschema==2.18.1 -filelock==3.12.4 -flake8==6.1.0 -Flask==2.3.3 -fonttools==4.43.1 -fqdn==1.5.1 -fsspec==2023.9.2 -gitdb==4.0.10 -GitPython==3.1.38 -great-expectations==0.17.22 -greenlet==3.0.0 -gunicorn==21.2.0 -idna==3.4 -imagesize==1.4.1 -importlib-metadata==6.8.0 -importlib-resources==6.1.0 -ipykernel==6.25.2 -ipython==8.16.1 -ipywidgets==8.1.1 -isoduration==20.11.0 -isort==5.12.0 -itsdangerous==2.1.2 -jedi==0.19.1 +exceptiongroup==1.2.0 +fastapi==0.105.0 +filelock==3.13.1 +fonttools==4.46.0 +fsspec==2023.12.2 +h11==0.14.0 +httpcore==1.0.2 +httpx==0.25.2 +idna==3.6 +iniconfig==2.0.0 Jinja2==3.1.2 -jmespath==1.0.1 -joblib==1.3.2 -json5==0.9.14 -jsonpatch==1.33 -jsonpointer==2.4 -jsonschema==4.19.1 -jsonschema-specifications==2023.7.1 -jupyter-events==0.8.0 -jupyter-lsp==2.2.0 -jupyter_client==8.4.0 -jupyter_core==5.4.0 -jupyter_server==2.8.0 -jupyter_server_terminals==0.4.4 -jupyterlab==4.0.7 -jupyterlab-pygments==0.2.2 -jupyterlab-widgets==3.0.9 -jupyterlab_server==2.25.0 kiwisolver==1.4.5 -makefun==1.15.1 -Mako==1.2.4 -Markdown==3.5 MarkupSafe==2.1.3 -marshmallow==3.20.1 -matplotlib==3.8.0 -matplotlib-inline==0.1.6 -mccabe==0.7.0 
-mistune==3.0.2 -mlflow==2.7.1 +matplotlib==3.8.2 mpmath==1.3.0 -nbclient==0.8.0 -nbconvert==7.9.2 -nbformat==5.9.2 -nest-asyncio==1.5.8 -networkx==3.1 -notebook==7.0.5 -notebook_shim==0.2.3 -numpy==1.26.1 -oauthlib==3.2.2 +networkx==3.2.1 +numpy==1.26.2 +nvidia-cublas-cu12==12.1.3.1 +nvidia-cuda-cupti-cu12==12.1.105 +nvidia-cuda-nvrtc-cu12==12.1.105 +nvidia-cuda-runtime-cu12==12.1.105 +nvidia-cudnn-cu12==8.9.2.26 +nvidia-cufft-cu12==11.0.2.54 +nvidia-curand-cu12==10.3.2.106 +nvidia-cusolver-cu12==11.4.5.107 +nvidia-cusparse-cu12==12.1.0.106 +nvidia-nccl-cu12==2.18.1 +nvidia-nvjitlink-cu12==12.3.101 +nvidia-nvtx-cu12==12.1.105 opencv-python==4.8.1.78 -overrides==7.4.0 packaging==23.2 -pandas==2.1.1 -pandocfilters==1.5.0 -parso==0.8.3 -pexpect==4.8.0 -pickleshare==0.7.5 +pandas==2.1.4 Pillow==10.1.0 -platformdirs==3.11.0 -prometheus-client==0.17.1 -prompt-toolkit==3.0.39 -protobuf==4.24.4 +pluggy==1.3.0 +prometheus-client==0.19.0 +prometheus-fastapi-instrumentator==6.1.0 psutil==5.9.6 -ptyprocess==0.7.0 -pure-eval==0.2.2 py-cpuinfo==9.0.0 -pyarrow==13.0.0 -pyasn1==0.5.0 -pycodestyle==2.11.1 -pycparser==2.21 -pydantic==2.4.2 -pydantic_core==2.10.1 -pyflakes==3.1.0 -Pygments==2.16.1 -PyJWT==2.8.0 -pylint==3.0.1 +pydantic==2.5.2 +pydantic_core==2.14.5 pyparsing==3.1.1 +pytest==7.4.3 +pytest-html==4.1.1 +pytest-metadata==3.0.0 python-dateutil==2.8.2 python-dotenv==1.0.0 -python-json-logger==2.0.7 +python-multipart==0.0.6 pytz==2023.3.post1 PyYAML==6.0.1 -pyzmq==25.1.1 -querystring-parser==1.2.4 -referencing==0.30.2 requests==2.31.0 -rfc3339-validator==0.1.4 -rfc3986-validator==0.1.1 -rpds-py==0.10.6 -rsa==4.7.2 -ruamel.yaml==0.17.17 -ruamel.yaml.clib==0.2.8 -s3transfer==0.7.0 -scikit-learn==1.3.1 -scipy==1.11.3 +scipy==1.11.4 seaborn==0.13.0 -Send2Trash==1.8.2 six==1.16.0 -smmap==5.0.1 sniffio==1.3.0 -snowballstemmer==2.2.0 -soupsieve==2.5 -Sphinx==7.2.6 -sphinxcontrib-applehelp==1.0.7 -sphinxcontrib-devhelp==1.0.5 -sphinxcontrib-htmlhelp==2.0.4 -sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==1.0.6 -sphinxcontrib-serializinghtml==1.1.9 -SQLAlchemy==2.0.22 -sqlparse==0.4.4 -stack-data==0.6.3 +starlette==0.27.0 sympy==1.12 -tabulate==0.9.0 -terminado==0.17.1 thop==0.1.1.post2209072238 -threadpoolctl==3.2.0 -tinycss2==1.2.1 tomli==2.0.1 -tomlkit==0.12.1 -toolz==0.12.0 -torch==2.1.0 -torchvision==0.16.0 -tornado==6.3.3 +torch==2.1.1 +torchvision==0.16.1 tqdm==4.66.1 -traitlets==5.11.2 -types-python-dateutil==2.8.19.14 -typing_extensions==4.8.0 +triton==2.1.0 +typing_extensions==4.9.0 tzdata==2023.3 -tzlocal==5.1 -ultralytics==8.0.199 -uri-template==1.3.0 -urllib3==1.26.17 -wcwidth==0.2.8 -webcolors==1.13 -webencodings==0.5.1 -websocket-client==1.6.4 -Werkzeug==3.0.0 -widgetsnbextension==4.0.9 -zipp==3.17.0 -map-boxes==1.0.5 \ No newline at end of file +ultralytics==8.0.227 +urllib3==2.1.0 diff --git a/src/__init__.py b/src/__init__.py index a95d69e..aaabd04 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -1,3 +1,6 @@ +""" +Src init +""" from pathlib import Path from dotenv import load_dotenv @@ -10,7 +13,9 @@ PROCESSED_DATA_DIR = ROOT_DIR / "data/processed" METRICS_DIR = ROOT_DIR / "metrics" -MODELS_DIR = ROOT_DIR / "models" -REPORTS_DIR= ROOT_DIR / "reports" -DATA_YAML_DIR= ROOT_DIR / "data/processed/data.yaml" -ARTIFACTS_DIR= ROOT_DIR / "runs/detect" +MODELS_DIR = ROOT_DIR / "models/best" +REPORTS_DIR = ROOT_DIR / "reports" +DATA_YAML_DIR = ROOT_DIR / "data/processed/data.yaml" +ARTIFACTS_DIR = ROOT_DIR / "runs/detect" +API_DIR = ROOT_DIR / "app" +DRIFT_DETECTOR_DIR= ROOT_DIR / 
"models/drift_detector" \ No newline at end of file diff --git a/src/__pycache__/__init__.cpython-311.pyc b/src/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..01b69fb Binary files /dev/null and b/src/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/__pycache__/__init__.cpython-38.pyc b/src/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..2102590 Binary files /dev/null and b/src/__pycache__/__init__.cpython-38.pyc differ diff --git a/src/__pycache__/__init__.cpython-39.pyc b/src/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..9f1839a Binary files /dev/null and b/src/__pycache__/__init__.cpython-39.pyc differ diff --git a/src/app/__pycache__/api.cpython-311.pyc b/src/app/__pycache__/api.cpython-311.pyc new file mode 100644 index 0000000..40e4c58 Binary files /dev/null and b/src/app/__pycache__/api.cpython-311.pyc differ diff --git a/src/app/__pycache__/api.cpython-38.pyc b/src/app/__pycache__/api.cpython-38.pyc new file mode 100644 index 0000000..7af6879 Binary files /dev/null and b/src/app/__pycache__/api.cpython-38.pyc differ diff --git a/src/app/alermanager/alertmanager.yml b/src/app/alermanager/alertmanager.yml new file mode 100644 index 0000000..68c73e0 --- /dev/null +++ b/src/app/alermanager/alertmanager.yml @@ -0,0 +1,15 @@ + +route: + receiver: 'mail' + repeat_interval: 4h + group_by: [ alertname ] + + +receivers: + - name: 'mail' + email_configs: + - smarthost: 'smtp.gmail.com:465' + auth_username: 'ange.xu@estudiantat.upc.edu' + auth_password: "" + from: 'ange.xu@estudiantat.upc.edu' + to: 'louis.van.langendonck@estudiantat.upc.edu' diff --git a/src/app/alermanager/docker-compose-alertManager.yml b/src/app/alermanager/docker-compose-alertManager.yml new file mode 100644 index 0000000..2a34d7c --- /dev/null +++ b/src/app/alermanager/docker-compose-alertManager.yml @@ -0,0 +1,13 @@ + alertmanager: + image: prom/alertmanager:v0.23.0 + restart: unless-stopped + ports: + - "9093:9093" + volumes: + - "./alertmanager:/config" + - alertmanager-data:/data + command: --config.file=/config/alertmanager.yml --log.level=debug + + +volumes: + alertmanager-data: diff --git a/src/app/alermanager/docker-compose.yml b/src/app/alermanager/docker-compose.yml new file mode 100644 index 0000000..ac480fe --- /dev/null +++ b/src/app/alermanager/docker-compose.yml @@ -0,0 +1,15 @@ +version: '3.1' + +services: + alertmanager: + build: + context: '.' 
+      dockerfile: dockerfile_alert
+    ports:
+      - "9093:9093"
+    volumes:
+      - "./alertmanager:/config"
+      - alertmanager-data:/data
+    command: --config.file=/config/alertmanager.yml --log.level=debug
+volumes:
+  alertmanager-data:
diff --git a/src/app/alermanager/docker-compose.yml.save b/src/app/alermanager/docker-compose.yml.save
new file mode 100644
index 0000000..324d823
--- /dev/null
+++ b/src/app/alermanager/docker-compose.yml.save
@@ -0,0 +1,11 @@
+alertmanager:
+  build:
+    dockerfile: dockerfile_alert
+    co
+  restart: unless-stopped
+  ports:
+    - "9093:9093"
+  volumes:
+    - "./alertmanager:/config"
+    - alertmanager-data:/data
+  command: --config.file=/config/alertmanager.yml --log.level=debug
diff --git a/src/app/alermanager/dockerfile_alert b/src/app/alermanager/dockerfile_alert
new file mode 100644
index 0000000..0d245d9
--- /dev/null
+++ b/src/app/alermanager/dockerfile_alert
@@ -0,0 +1,2 @@
+FROM prom/alertmanager:v0.23.0
+ADD ./alertmanager.yml /config/alertmanager.yml
diff --git a/src/app/alibi_detect/predict_drift_detector.py b/src/app/alibi_detect/predict_drift_detector.py
new file mode 100644
index 0000000..db9c8dd
--- /dev/null
+++ b/src/app/alibi_detect/predict_drift_detector.py
@@ -0,0 +1,29 @@
+import numpy as np
+from src import DRIFT_DETECTOR_DIR
+from alibi_detect.saving import load_detector
+import datetime
+
+model = load_detector(DRIFT_DETECTOR_DIR)
+
+def predict(img):
+    image = np.asarray(img).astype('float32') / 255.
+    image = np.expand_dims(image, 0)
+
+    # inference
+    model.infer_threshold(image, threshold_perc=95)
+    preds = model.predict(image, outlier_type='instance',
+                          return_instance_score=True,
+                          return_feature_score=True)
+
+    n_outliers = np.count_nonzero(preds['data']['is_outlier'] == 1)
+    print("n outliers", n_outliers)
+
+    # ct stores current time
+    ct = datetime.datetime.now()
+    # log the results
+    with open("log.txt", "a") as f:
+        f.write(str(ct) + "\t" + str(n_outliers) + "\n")
+
diff --git a/src/app/alibi_detect/train_drift_detector.py b/src/app/alibi_detect/train_drift_detector.py
new file mode 100644
index 0000000..b965349
--- /dev/null
+++ b/src/app/alibi_detect/train_drift_detector.py
@@ -0,0 +1,60 @@
+import numpy as np
+from PIL import Image
+from os import listdir
+from os.path import isfile, join
+import tensorflow as tf
+from src import RAW_DATA_DIR, DRIFT_DETECTOR_DIR
+from tensorflow.keras.layers import Conv2D, Conv2DTranspose, \
+    Dense, Reshape, InputLayer, Flatten
+from alibi_detect.od import OutlierAE
+from alibi_detect.saving import save_detector
+
+def img_to_np(path, resize=True):
+    img_array = []
+    fpaths = [join(path, f) for f in listdir(path) if isfile(join(path, f))]
+    for fname in fpaths:
+        img = Image.open(fname).convert("RGB")
+        if resize:
+            img = img.resize((64, 64))
+        img_array.append(np.asarray(img))
+    images = np.array(img_array)
+    return images
+
+path_train = RAW_DATA_DIR / "valid/images"
+
+train = img_to_np(path_train)
+train = train.astype('float32') / 255.
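+# Shape note (illustrative, added for clarity): img_to_np() returns RGB images
+# resized to 64x64, so `train` has shape (n_images, 64, 64, 3) with values in
+# [0, 1] -- the input the convolutional autoencoder below expects.
+# assert train.ndim == 4 and train.shape[1:] == (64, 64, 3)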
+
+encoding_dim = 1024
+dense_dim = [8, 8, 128]
+
+encoder_net = tf.keras.Sequential(
+    [
+        InputLayer(input_shape=train[0].shape),
+        Conv2D(64, 4, strides=2, padding='same', activation=tf.nn.relu),
+        Conv2D(128, 4, strides=2, padding='same', activation=tf.nn.relu),
+        Conv2D(512, 4, strides=2, padding='same', activation=tf.nn.relu),
+        Flatten(),
+        Dense(encoding_dim)
+    ])
+
+decoder_net = tf.keras.Sequential(
+    [
+        InputLayer(input_shape=(encoding_dim,)),
+        Dense(np.prod(dense_dim)),
+        Reshape(target_shape=dense_dim),
+        Conv2DTranspose(256, 4, strides=2, padding='same', activation=tf.nn.relu),
+        Conv2DTranspose(64, 4, strides=2, padding='same', activation=tf.nn.relu),
+        Conv2DTranspose(3, 4, strides=2, padding='same', activation='sigmoid')
+    ])
+
+od = OutlierAE(threshold=0.001,
+               encoder_net=encoder_net,
+               decoder_net=decoder_net)
+
+adam = tf.keras.optimizers.Adam(learning_rate=1e-4)
+
+od.fit(train, epochs=20, verbose=True,
+       optimizer=adam)
+
+save_detector(od, DRIFT_DETECTOR_DIR)
\ No newline at end of file
diff --git a/src/app/backend/__pycache__/api.cpython-39.pyc b/src/app/backend/__pycache__/api.cpython-39.pyc
new file mode 100644
index 0000000..7d7c08c
Binary files /dev/null and b/src/app/backend/__pycache__/api.cpython-39.pyc differ
diff --git a/src/app/backend/api.py b/src/app/backend/api.py
new file mode 100644
index 0000000..557e01f
--- /dev/null
+++ b/src/app/backend/api.py
@@ -0,0 +1,195 @@
+"""Main script: it includes our API initialization and endpoints."""
+
+import asyncio
+import base64
+from datetime import datetime
+from functools import wraps
+from http import HTTPStatus
+from typing import List
+
+import cv2
+import numpy as np
+from fastapi import FastAPI, File, HTTPException, Request, UploadFile
+from prometheus_fastapi_instrumentator import Instrumentator
+from ultralytics import YOLO
+from ultralytics.utils.plotting import Annotator
+
+from src import MODELS_DIR
+
+model_wrappers_list: List[dict] = []
+
+# Define application
+app = FastAPI(
+    title="Where is Wally",
+    description="Upload an image and we will help you to find Wally",
+    version="0.1",
+)
+
+
+Instrumentator().instrument(app).expose(app)  # Prometheus metric tracking
+
+
+def construct_response(f):
+    @wraps(f)
+    async def wrap(request: Request, *args, **kwargs):
+        try:
+            if asyncio.iscoroutinefunction(f):
+                results = await f(request, *args, **kwargs)
+            else:
+                results = f(request, *args, **kwargs)
+
+            # Default status code
+            status_code = results.get("status-code", HTTPStatus.OK)
+
+            response = {
+                "message": results.get("message", status_code.phrase),
+                "method": request.method,
+                "status-code": status_code,
+                "timestamp": datetime.now().isoformat(),
+                "url": request.url._url,
+                "data": results.get("data", {}),
+                "found": results.get("found", None),
+            }
+
+            # Include additional keys if present
+            for key in ["boxes", "conf", "encoded_img"]:
+                if key in results:
+                    response[key] = results[key]
+
+            return response
+
+        except HTTPException as http_exc:
+            # Forward HTTP exceptions as they are
+            raise http_exc
+
+        except Exception as exc:
+            # Handle other exceptions
+            return {
+                "message": "An error occurred",
+                "method": request.method,
+                "status-code": HTTPStatus.INTERNAL_SERVER_ERROR,
+                "timestamp": datetime.now().isoformat(),
+                "url": request.url._url,
+                "detail": str(exc),
+            }
+
+    return wrap
+
+
+@app.on_event("startup")
+def _load_models():
+    """Loads all pickled models found in `MODELS_DIR` and adds them to 
`models_list`""" + + model_paths = [ + filename + for filename in MODELS_DIR.iterdir() + if filename.suffix == ".pt" and filename.stem.startswith("best") + ] + + for path in model_paths: + with open(path, "rb") as file: + # model_wrapper = pickle.load(file) + # model_wrappers_list.append(model_wrapper) + model_wrapper = dict() + model = YOLO(path) + model_wrapper["model"] = model + model_wrapper["type"] = str(file).split("_")[-1].split(".")[0] + model_wrapper["info"] = model.info() + model_wrappers_list.append(model_wrapper) + + +@app.get("/", tags=["General"]) # path operation decorator +@construct_response +def _index(request: Request): + """Root endpoint.""" + + response = { + "message": HTTPStatus.OK.phrase, + "status-code": HTTPStatus.OK, + "data": {"message": "Welcome to Where is Wally!"}, + } + return response + + +@app.get("/models", tags=["Prediction"]) +@construct_response +def _get_models_list(request: Request, type: str = None): + """Return the list of available models""" + + available_models = [ + { + "type": model["type"], + "info": model["info"], + # "parameters": model["params"], + # "accuracy": model["metrics"], + } + for model in model_wrappers_list + if model["type"] == type or type is None + ] + + if not available_models: + raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail="Type not found") + else: + return { + "message": HTTPStatus.OK.phrase, + "status-code": HTTPStatus.OK, + "data": available_models, + } + + +@construct_response +@app.post("/predict/{type}") +async def _predict(type: str, file: UploadFile = File(...)): + model_wrapper = next((m for m in model_wrappers_list if m["type"] == type), None) + + if not model_wrapper: + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail="Model not found" + ) + + else: + model = model_wrapper["model"] + contents = await file.read() + nparr = np.frombuffer(contents, np.uint8) + img = cv2.imdecode(nparr, cv2.IMREAD_COLOR) + if img is None: + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail="Invalid image file" + ) + else: + results = model.predict(source=img, conf=0.25) + boxes = results[0].boxes.xyxy + conf = results[0].boxes.conf + + for r in results: + annotator = Annotator(img) + boxes = r.boxes + for box in boxes: + b = box.xyxy[ + 0 + ] # get box coordinates in (top, left, bottom, right) format + c = box.cls + color = (0, 255, 0) + annotator.box_label(b, model.names[int(c)], color=color) + + return_img = annotator.result() + + _, encoded_img = cv2.imencode(".PNG", return_img) + encoded_img = base64.b64encode(encoded_img) + is_empty = len(boxes) == 0 + if is_empty: + return { + "boxes": boxes, + "encoded_img": encoded_img.decode(), + "message": "Processing completed, but Wally was not found in the image.", + "found": False, + } + + return { + "boxes": boxes, + "conf": conf, + "encoded_img": encoded_img.decode(), + "found": True, + } diff --git a/src/app/backend/api.py.save b/src/app/backend/api.py.save new file mode 100644 index 0000000..dfd4ff6 --- /dev/null +++ b/src/app/backend/api.py.save @@ -0,0 +1,254 @@ +"""Main script: it includes our API initialization and endpoints.""" + +import asyncio +import base64 +import os +from datetime import datetime +from functools import wraps +from http import HTTPStatus +from typing import List + +import cv2 +import numpy as np +from fastapi import FastAPI, File, HTTPException, Request, Response, UploadFile +from ultralytics import YOLO +from ultralytics.utils.plotting import Annotator +from src import MODELS_DIR +from ultralytics import 
YOLO +import asyncio +from prometheus_fastapi_instrumentator import Instrumentator, metrics + +model_wrappers_list: List[dict] = [] + +# Define application +app = FastAPI( + title="Where is Wally", + description="Upload an image and we will help you to find Wally", + version="0.1", +) + +instrumentator = Instrumentator( + should_group_status_codes=False, + should_ignore_untemplated=True, + should_respect_env_var=True, + should_instrument_requests_inprogress=True, + excluded_handlers=[".*admin.*", "/metrics"], + env_var_name="ENABLE_METRICS", + inprogress_name="inprogress", + inprogress_labels=True, +) + +instrumentator.add( + metrics.request_size( + should_include_handler=True, + should_include_method=False, + should_include_status=True, + metric_namespace="a", + metric_subsystem="b", + ) +).add( + metrics.response_size( + should_include_handler=True, + should_include_method=False, + should_include_status=True, + metric_namespace="namespace", + metric_subsystem="subsystem", + ) +) + +instrumentator.instrument(app) + +instrumentator.expose(app, include_in_schema=False, should_gzip=True) + + +def construct_response(f): + @wraps(f) + async def wrap(request: Request, *args, **kwargs): + try: + if asyncio.iscoroutinefunction(f): + results = await f(request, *args, **kwargs) + else: + results = f(request, *args, **kwargs) + + # Default status code + status_code = results.get("status-code", HTTPStatus.OK) + + response = { + "message": results.get("message", status_code.phrase), + "method": request.method, + "status-code": status_code, + "timestamp": datetime.now().isoformat(), + "url": request.url._url, + "data": results.get("data", {}), + "found": results.get("found", None), + } + + # Include additional keys if present + for key in ["boxes", "conf", "encoded_img"]: + if key in results: + response[key] = results[key] + + return response + + except HTTPException as http_exc: + # Forward HTTP exceptions as they are + raise http_exc + + except Exception as exc: + # Handle other exceptions + return { + "message": "An error occurred", + "method": request.method, + "status-code": HTTPStatus.INTERNAL_SERVER_ERROR, + "timestamp": datetime.now().isoformat(), + "url": request.url._url, + "detail": str(exc), + } + + return wrap + + + +@app.on_event("startup") +def _load_models(): + """Loads all pickled models found in `MODELS_DIR` and adds them to `models_list`""" + + model_paths = [ + filename + for filename in MODELS_DIR.iterdir() + if filename.suffix == ".pt" and filename.stem.startswith("best") + ] + + for path in model_paths: + with open(path, "rb") as file: + # model_wrapper = pickle.load(file) + # model_wrappers_list.append(model_wrapper) + model_wrapper = dict() + model = YOLO(path) + model_wrapper["model"] = model + model_wrapper["type"] = str(file).split("_")[-1].split(".")[0] + model_wrapper["info"] = model.info() + model_wrappers_list.append(model_wrapper) + + +@app.get("/", tags=["General"]) # path operation decorator +@construct_response +def _index(request: Request): + """Root endpoint.""" + + response = { + "message": HTTPStatus.OK.phrase, + "status-code": HTTPStatus.OK, + "data": {"message": "Welcome to Where is Wally!"}, + } + return response + + +@app.get("/models", tags=["Prediction"]) +@construct_response +def _get_models_list(request: Request, type: str = None): + """Return the list of available models""" + + available_models = [ + { + "type": model["type"], + "info": model["info"], + # "parameters": model["params"], + # "accuracy": model["metrics"], + } + for model in 
model_wrappers_list + if model["type"] == type or type is None + ] + + if not available_models: + raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail="Type not found") + else: + return { + "message": HTTPStatus.OK.phrase, + "status-code": HTTPStatus.OK, + "data": available_models, + } + + +@construct_response +@app.post("/predict/{type}") +async def _predict(type: str, file: UploadFile = File(...)): + model_wrapper = next((m for m in model_wrappers_list if m["type"] == type), None) + + if not model_wrapper: + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail="Model not found" + ) + + else: + model = model_wrapper["model"] + contents = await file.read() + nparr = np.frombuffer(contents, np.uint8) + img = cv2.imdecode(nparr, cv2.IMREAD_COLOR) + if img is None: +<<<<<<< HEAD + raise HTTPException( + status_code=HTTPStatus.BAD_REQUEST, detail="Invalid image file" + ) + else: +======= + raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail="Invalid image file") + else: +>>>>>>> API_develop + results = model.predict(source=img, conf=0.25) + boxes = results[0].boxes.xyxy + conf = results[0].boxes.conf + + for r in results: + annotator = Annotator(img) + boxes = r.boxes + for box in boxes: +<<<<<<< HEAD + b = box.xyxy[ + 0 + ] # get box coordinates in (top, left, bottom, right) format +======= + b = box.xyxy[0] # get box coordinates in (top, left, bottom, right) format +>>>>>>> API_develop + c = box.cls + color = (0, 0, 0) + annotator.box_label(b, model.names[int(c)], color=color) + + return_img = annotator.result() + +<<<<<<< HEAD + _, encoded_img = cv2.imencode(".PNG", return_img) + encoded_img = base64.b64encode(encoded_img) + is_empty = len(boxes) == 0 + if is_empty: + return { + "boxes": boxes, + "encoded_img": encoded_img.decode(), + "message": "Processing completed, but Wally was not found in the image.", + "found": False, + } + + return { + "boxes": boxes, + "conf": conf, + "encoded_img": encoded_img.decode(), + "found": True, +======= + _, encoded_img = cv2.imencode('.PNG', return_img) + encoded_img = base64.b64encode(encoded_img) + is_empty = len(boxes) == 0 + if(is_empty): + return { + 'boxes': boxes, + 'encoded_img': encoded_img.decode(), + 'message': "Processing completed, but Wally was not found in the image.", + 'found': False, + } + + return { + 'boxes': boxes, + 'conf': conf, + 'encoded_img': encoded_img.decode(), + 'found': True +>>>>>>> API_develop + } diff --git a/src/app/backend/dockerfile_api b/src/app/backend/dockerfile_api new file mode 100644 index 0000000..1905866 --- /dev/null +++ b/src/app/backend/dockerfile_api @@ -0,0 +1,9 @@ +FROM python:3.9-slim +WORKDIR /app +COPY src/ ./src/ +COPY models/ ./models/ +RUN useradd -rs /bin/false node_exporter +RUN pip install --no-cache-dir -r src/app/backend/requirements_api.txt +RUN apt-get update && apt-get install ffmpeg libsm6 libxext6 -y +EXPOSE 5000 +ENTRYPOINT [ "uvicorn", "src.app.backend.api:app", "--host", "0.0.0.0","--port","5000", "--reload", "--reload-dir", "src/app/backend","--reload-dir", "models"] diff --git a/src/app/backend/requirements_api.txt b/src/app/backend/requirements_api.txt new file mode 100644 index 0000000..a1f2742 --- /dev/null +++ b/src/app/backend/requirements_api.txt @@ -0,0 +1,9 @@ +fastapi>=0.68.0,<0.69.0 +ultralytics>=8.0.218 +opencv-python>=4.7.0.72 +python-multipart>=0.0.6 +fastapi-socketio>=0.0.10 +httptools==0.1.2 +uvicorn>=0.15.0,<0.16.0 +python-dotenv>=1.0.0 +prometheus-fastapi-instrumentator>=6.1.0 diff --git a/src/app/frontend/dockerfile_web 
b/src/app/frontend/dockerfile_web new file mode 100644 index 0000000..6819634 --- /dev/null +++ b/src/app/frontend/dockerfile_web @@ -0,0 +1,15 @@ +FROM python:3.9-slim + +WORKDIR /web + +COPY ./src/app/frontend/web . + +RUN pip install --upgrade pip +RUN pip install Django +RUN pip install requests +RUN pip install opencv-python +RUN apt-get update && apt-get install ffmpeg libsm6 libxext6 -y +ENV PYTHONUNBUFFERED=1 + +EXPOSE 8000 +CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"] diff --git a/src/app/frontend/web/__pycache__/apiConstants.cpython-311.pyc b/src/app/frontend/web/__pycache__/apiConstants.cpython-311.pyc new file mode 100644 index 0000000..bf7a099 Binary files /dev/null and b/src/app/frontend/web/__pycache__/apiConstants.cpython-311.pyc differ diff --git a/src/app/frontend/web/__pycache__/apiConstants.cpython-38.pyc b/src/app/frontend/web/__pycache__/apiConstants.cpython-38.pyc new file mode 100644 index 0000000..169b44c Binary files /dev/null and b/src/app/frontend/web/__pycache__/apiConstants.cpython-38.pyc differ diff --git a/src/app/frontend/web/__pycache__/apiConstants.cpython-39.pyc b/src/app/frontend/web/__pycache__/apiConstants.cpython-39.pyc new file mode 100644 index 0000000..ee44406 Binary files /dev/null and b/src/app/frontend/web/__pycache__/apiConstants.cpython-39.pyc differ diff --git a/src/app/frontend/web/__pycache__/manage.cpython-38.pyc b/src/app/frontend/web/__pycache__/manage.cpython-38.pyc new file mode 100644 index 0000000..827ff0b Binary files /dev/null and b/src/app/frontend/web/__pycache__/manage.cpython-38.pyc differ diff --git a/src/app/frontend/web/apiConstants.py b/src/app/frontend/web/apiConstants.py new file mode 100644 index 0000000..661ec80 --- /dev/null +++ b/src/app/frontend/web/apiConstants.py @@ -0,0 +1,4 @@ +"""Module that defines api constants""" +# API_ROOT_URL="http://localhost:5000/" +API_ROOT_URL = "http://10.4.41.34:5000/" +PREDICT = API_ROOT_URL + "predict/" diff --git a/src/app/frontend/web/db.sqlite3 b/src/app/frontend/web/db.sqlite3 new file mode 100644 index 0000000..a2a1ddf Binary files /dev/null and b/src/app/frontend/web/db.sqlite3 differ diff --git a/src/app/frontend/web/manage.py b/src/app/frontend/web/manage.py new file mode 100644 index 0000000..6112ff5 --- /dev/null +++ b/src/app/frontend/web/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "whereIsWally.settings") + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" 
+ ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/src/app/frontend/web/pages/__init__.py b/src/app/frontend/web/pages/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/frontend/web/pages/__pycache__/__init__.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..8654b98 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/__init__.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..6dc44d4 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/__init__.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/__init__.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..c8a2d18 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/__init__.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/admin.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/admin.cpython-311.pyc new file mode 100644 index 0000000..69a1cc3 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/admin.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/admin.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/admin.cpython-38.pyc new file mode 100644 index 0000000..3b30620 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/admin.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/admin.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/admin.cpython-39.pyc new file mode 100644 index 0000000..a5ac1b0 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/admin.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/apps.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/apps.cpython-311.pyc new file mode 100644 index 0000000..1a63759 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/apps.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/apps.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/apps.cpython-38.pyc new file mode 100644 index 0000000..dda4663 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/apps.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/apps.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/apps.cpython-39.pyc new file mode 100644 index 0000000..0c19a99 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/apps.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/models.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/models.cpython-311.pyc new file mode 100644 index 0000000..235ed4c Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/models.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/models.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/models.cpython-38.pyc new file mode 100644 index 0000000..ef61913 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/models.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/models.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/models.cpython-39.pyc new file mode 100644 index 0000000..a7cb587 Binary files /dev/null and 
b/src/app/frontend/web/pages/__pycache__/models.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/urls.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/urls.cpython-311.pyc new file mode 100644 index 0000000..141e7e9 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/urls.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/urls.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/urls.cpython-38.pyc new file mode 100644 index 0000000..2af8225 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/urls.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/urls.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/urls.cpython-39.pyc new file mode 100644 index 0000000..2be34a7 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/urls.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/views.cpython-311.pyc b/src/app/frontend/web/pages/__pycache__/views.cpython-311.pyc new file mode 100644 index 0000000..4b1cdd1 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/views.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/views.cpython-38.pyc b/src/app/frontend/web/pages/__pycache__/views.cpython-38.pyc new file mode 100644 index 0000000..2251abd Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/views.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/__pycache__/views.cpython-39.pyc b/src/app/frontend/web/pages/__pycache__/views.cpython-39.pyc new file mode 100644 index 0000000..bc3d132 Binary files /dev/null and b/src/app/frontend/web/pages/__pycache__/views.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/admin.py b/src/app/frontend/web/pages/admin.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/frontend/web/pages/apps.py b/src/app/frontend/web/pages/apps.py new file mode 100644 index 0000000..4b6237c --- /dev/null +++ b/src/app/frontend/web/pages/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class PagesConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "pages" diff --git a/src/app/frontend/web/pages/migrations/__init__.py b/src/app/frontend/web/pages/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-311.pyc b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..868f8a4 Binary files /dev/null and b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-38.pyc b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..c1def13 Binary files /dev/null and b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-38.pyc differ diff --git a/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-39.pyc b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000..e63f201 Binary files /dev/null and b/src/app/frontend/web/pages/migrations/__pycache__/__init__.cpython-39.pyc differ diff --git a/src/app/frontend/web/pages/models.py b/src/app/frontend/web/pages/models.py new file mode 100644 index 0000000..e69de29 diff --git a/src/app/frontend/web/pages/templates/pages/home.html 
b/src/app/frontend/web/pages/templates/pages/home.html
new file mode 100644
index 0000000..ccf7a12
--- /dev/null
+++ b/src/app/frontend/web/pages/templates/pages/home.html
@@ -0,0 +1,364 @@
+[364-line HTML template for the "Where Is Wally?" home page; the markup was lost in extraction. Recoverable structure: a "Select a Model" selector, an image-upload form with {% csrf_token %}, and an "Uploaded Image" preview panel.]
diff --git a/src/app/frontend/web/pages/tests.py b/src/app/frontend/web/pages/tests.py
new file mode 100644
index 0000000..7ce503c
--- /dev/null
+++ b/src/app/frontend/web/pages/tests.py
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/src/app/frontend/web/pages/urls.py b/src/app/frontend/web/pages/urls.py
new file mode 100644
index 0000000..318517a
--- /dev/null
+++ b/src/app/frontend/web/pages/urls.py
@@ -0,0 +1,7 @@
+from django.urls import path
+from pages import views
+
+urlpatterns = [
+    path("", views.home, name="home"),
+    path("upload_image", views.upload_image, name="upload_image"),
+]
diff --git a/src/app/frontend/web/pages/views.py b/src/app/frontend/web/pages/views.py
new file mode 100644
index 0000000..bdc45eb
--- /dev/null
+++ b/src/app/frontend/web/pages/views.py
@@ -0,0 +1,44 @@
+import apiConstants as api
+import requests
+from django.http import JsonResponse
+from django.shortcuts import render
+from django.views.decorators.csrf import csrf_exempt
+
+# Create your views here.
+# pages/views.py
+
+
+@csrf_exempt
+def upload_image(request):
+    if request.method == "POST" and request.FILES.get("image"):
+        try:
+            model_request = request.GET.get("model", "all")
+            image = request.FILES["image"].read()
+            data = {"file": image}
+            url = api.PREDICT + model_request
+            response = requests.post(url, files=data)
+
+            if response.status_code != 200:
+                return JsonResponse(
+                    {
+                        "status": "error",
+                        "message": f"Error from server: {response.status_code}",
+                        "detail": response.json(),
+                    },
+                    status=response.status_code,
+                )
+
+            response_data = response.json()
+            return JsonResponse(response_data)
+
+        except requests.exceptions.RequestException as e:
+            return JsonResponse({"status": "error", "message": str(e)})
+
+    return JsonResponse({"status": "error"})
+
+
+def home(request):
+    return render(request, "pages/home.html", {})
diff --git a/src/app/frontend/web/whereIsWally/__init__.py b/src/app/frontend/web/whereIsWally/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-311.pyc b/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..ab0dfcb
Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-38.pyc b/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000..9122755
Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-38.pyc differ
diff --git a/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-39.pyc b/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..33d4ca3
Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/__init__.cpython-39.pyc differ
diff --git a/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-311.pyc b/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-311.pyc
new file mode 100644
index 0000000..ee0088c
Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-311.pyc differ
diff --git a/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-38.pyc b/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-38.pyc
new file mode 100644
index 0000000..28799eb
Binary files 
/dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-38.pyc differ diff --git a/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-39.pyc b/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-39.pyc new file mode 100644 index 0000000..ce7e7cc Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/settings.cpython-39.pyc differ diff --git a/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-311.pyc b/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-311.pyc new file mode 100644 index 0000000..6c987f3 Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-311.pyc differ diff --git a/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-38.pyc b/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-38.pyc new file mode 100644 index 0000000..b1724b5 Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-38.pyc differ diff --git a/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-39.pyc b/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-39.pyc new file mode 100644 index 0000000..2c451f2 Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/urls.cpython-39.pyc differ diff --git a/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-311.pyc b/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-311.pyc new file mode 100644 index 0000000..b6c6314 Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-311.pyc differ diff --git a/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-38.pyc b/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-38.pyc new file mode 100644 index 0000000..0015819 Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-38.pyc differ diff --git a/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-39.pyc b/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-39.pyc new file mode 100644 index 0000000..2bf9302 Binary files /dev/null and b/src/app/frontend/web/whereIsWally/__pycache__/wsgi.cpython-39.pyc differ diff --git a/src/app/frontend/web/whereIsWally/asgi.py b/src/app/frontend/web/whereIsWally/asgi.py new file mode 100644 index 0000000..a5b3e9f --- /dev/null +++ b/src/app/frontend/web/whereIsWally/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for whereIsWally project. + +It exposes the ASGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/4.2/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "whereIsWally.settings") + +application = get_asgi_application() diff --git a/src/app/frontend/web/whereIsWally/settings.py b/src/app/frontend/web/whereIsWally/settings.py new file mode 100644 index 0000000..7c5b7e3 --- /dev/null +++ b/src/app/frontend/web/whereIsWally/settings.py @@ -0,0 +1,125 @@ +""" +Django settings for whereIsWally project. + +Generated by 'django-admin startproject' using Django 4.2.7. + +For more information on this file, see +https://docs.djangoproject.com/en/4.2/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/4.2/ref/settings/ +""" + +import os +from pathlib import Path + +# Build paths inside the project like this: BASE_DIR / 'subdir'. 
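+# (illustrative note: settings.py sits in web/whereIsWally/, so BASE_DIR resolves to
+# src/app/frontend/web and the DATABASES entry below, BASE_DIR / "db.sqlite3",
+# points at the db.sqlite3 committed in this change)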
+BASE_DIR = Path(__file__).resolve().parent.parent
+
+
+# Quick-start development settings - unsuitable for production
+# See https://docs.djangoproject.com/en/4.2/howto/deployment/checklist/
+
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = "django-insecure-*a10)u=yaeh(hahiz96i)3hff*+p+vc4a)lt)_6=of@+n!3d*l"
+
+# SECURITY WARNING: don't run with debug turned on in production!
+DEBUG = True
+
+ALLOWED_HOSTS = ["10.4.41.34", "127.0.0.1", "localhost"]
+
+
+# Application definition
+
+INSTALLED_APPS = [
+    "pages.apps.PagesConfig",
+    "django.contrib.admin",
+    "django.contrib.auth",
+    "django.contrib.contenttypes",
+    "django.contrib.sessions",
+    "django.contrib.messages",
+    "django.contrib.staticfiles",
+]
+
+MIDDLEWARE = [
+    "django.middleware.security.SecurityMiddleware",
+    "django.contrib.sessions.middleware.SessionMiddleware",
+    "django.middleware.common.CommonMiddleware",
+    "django.middleware.csrf.CsrfViewMiddleware",
+    "django.contrib.auth.middleware.AuthenticationMiddleware",
+    "django.contrib.messages.middleware.MessageMiddleware",
+    "django.middleware.clickjacking.XFrameOptionsMiddleware",
+]
+
+ROOT_URLCONF = "whereIsWally.urls"
+
+TEMPLATES = [
+    {
+        "BACKEND": "django.template.backends.django.DjangoTemplates",
+        "DIRS": [],
+        "APP_DIRS": True,
+        "OPTIONS": {
+            "context_processors": [
+                "django.template.context_processors.debug",
+                "django.template.context_processors.request",
+                "django.contrib.auth.context_processors.auth",
+                "django.contrib.messages.context_processors.messages",
+            ],
+        },
+    },
+]
+
+WSGI_APPLICATION = "whereIsWally.wsgi.application"
+
+
+# Database
+# https://docs.djangoproject.com/en/4.2/ref/settings/#databases
+
+DATABASES = {
+    "default": {
+        "ENGINE": "django.db.backends.sqlite3",
+        "NAME": BASE_DIR / "db.sqlite3",
+    }
+}
+
+
+# Password validation
+# https://docs.djangoproject.com/en/4.2/ref/settings/#auth-password-validators
+
+AUTH_PASSWORD_VALIDATORS = [
+    {
+        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
+    },
+    {
+        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
+    },
+    {
+        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
+    },
+    {
+        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
+    },
+]
+
+
+# Internationalization
+# https://docs.djangoproject.com/en/4.2/topics/i18n/
+
+LANGUAGE_CODE = "en-us"
+
+TIME_ZONE = "UTC"
+
+USE_I18N = True
+
+USE_TZ = True
+
+
+# Static files (CSS, JavaScript, Images)
+# https://docs.djangoproject.com/en/4.2/howto/static-files/
+
+STATIC_URL = "static/"
+
+# Default primary key field type
+# https://docs.djangoproject.com/en/4.2/ref/settings/#default-auto-field
+
+DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
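The settings above hard-code SECRET_KEY and leave DEBUG = True, which the generated comments themselves warn against for production. A minimal sketch of environment-based overrides, assuming the deployment provides the DJANGO_SECRET_KEY and DJANGO_DEBUG variables (illustrative names, not existing repo config):

    import os

    # Fall back to the generated dev key only when the env var is absent.
    SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "dev-only-insecure-key")

    # Anything other than "1" counts as False, so production defaults to DEBUG off.
    DEBUG = os.environ.get("DJANGO_DEBUG", "0") == "1"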
diff --git a/src/app/frontend/web/whereIsWally/urls.py b/src/app/frontend/web/whereIsWally/urls.py
new file mode 100644
index 0000000..0e9712d
--- /dev/null
+++ b/src/app/frontend/web/whereIsWally/urls.py
@@ -0,0 +1,23 @@
+"""
+URL configuration for whereIsWally project.
+
+The `urlpatterns` list routes URLs to views. For more information please see:
+    https://docs.djangoproject.com/en/4.2/topics/http/urls/
+Examples:
+Function views
+    1. Add an import: from my_app import views
+    2. Add a URL to urlpatterns: path('', views.home, name='home')
+Class-based views
+    1. Add an import: from other_app.views import Home
+    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
+Including another URLconf
+    1. Import the include() function: from django.urls import include, path
+    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
+"""
+from django.contrib import admin
+from django.urls import include, path
+
+urlpatterns = [
+    path("admin/", admin.site.urls),
+    path("", include("pages.urls")),
+]
diff --git a/src/app/frontend/web/whereIsWally/wsgi.py b/src/app/frontend/web/whereIsWally/wsgi.py
new file mode 100644
index 0000000..b21780f
--- /dev/null
+++ b/src/app/frontend/web/whereIsWally/wsgi.py
@@ -0,0 +1,16 @@
+"""
+WSGI config for whereIsWally project.
+
+It exposes the WSGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/4.2/howto/deployment/wsgi/
+"""
+
+import os
+
+from django.core.wsgi import get_wsgi_application
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "whereIsWally.settings")
+
+application = get_wsgi_application()
diff --git a/src/app/grafana/all.yml b/src/app/grafana/all.yml
new file mode 100644
index 0000000..84f3961
--- /dev/null
+++ b/src/app/grafana/all.yml
@@ -0,0 +1,8 @@
+apiVersion: 1
+
+# tells grafana where to find the prom connection
+datasources:
+  - name: 'prometheus'
+    type: 'prometheus'
+    access: 'proxy'
+    url: 'http://10.4.41.34:9090'
diff --git a/src/app/grafana/config.ini b/src/app/grafana/config.ini
new file mode 100644
index 0000000..420638d
--- /dev/null
+++ b/src/app/grafana/config.ini
@@ -0,0 +1,6 @@
+# place to find startup config
+[paths]
+provisioning = /etc/grafana/provisioning
+
+[server]
+enable_gzip = true
diff --git a/src/app/grafana/dockerfile_graf b/src/app/grafana/dockerfile_graf
new file mode 100644
index 0000000..6604a09
--- /dev/null
+++ b/src/app/grafana/dockerfile_graf
@@ -0,0 +1,3 @@
+FROM grafana/grafana:7.5.7
+ADD src/app/grafana /etc/grafana/provisioning/datasources
+ADD src/app/grafana/config.ini /etc/grafana/config.ini
diff --git a/src/app/grafana/dockerfile_graf.save b/src/app/grafana/dockerfile_graf.save
new file mode 100644
index 0000000..12b6351
--- /dev/null
+++ b/src/app/grafana/dockerfile_graf.save
@@ -0,0 +1,2 @@
+FROM grafana/grafana:7.5.7 ./grafana/provisioning/datasources:/etc/grafana/provisioning/datasources
+  - ./grafana/config.ini:/etc/grafana/config.ini
diff --git a/src/app/node_exporter.service b/src/app/node_exporter.service
new file mode 100644
index 0000000..d711176
--- /dev/null
+++ b/src/app/node_exporter.service
@@ -0,0 +1,10 @@
+[Unit]
+Description=Node Exporter
+After=network.target
+[Service]
+User=node_exporter
+Group=node_exporter
+Type=simple
+ExecStart=/usr/local/bin/node_exporter
+[Install]
+WantedBy=multi-user.target
diff --git a/src/app/prom/alert.rules.yml b/src/app/prom/alert.rules.yml
new file mode 100644
index 0000000..581c996
--- /dev/null
+++ b/src/app/prom/alert.rules.yml
@@ -0,0 +1,20 @@
+groups:
+- name: alert.rules
+  rules:
+  - alert: HostOutOfMemory
+    expr: node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes*100 < 5
+    for: 5m
+    labels:
+      severity: warning
+    annotations:
+      summary: "Host out of memory!"
+      description: "Node memory is filling up (< 5% left)"
+
+  - alert: HighCPUTimeConsumption
+    expr: rate(node_cpu_seconds_total{mode="system"}[1m]) > 0.02
+    for: 30s
+    labels:
+      severity: warning
+    annotations:
+      summary: "High CPU Time consumption!"
+      description: "Node CPU Time is high!"
\ No newline at end of file
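Alert expressions are easy to get subtly wrong, so one way to sanity-check them before they ever fire is to run the same PromQL through Prometheus's standard query API. A small sketch, assuming the Prometheus instance from these configs is reachable at 10.4.41.34:9090:

    import requests

    # Evaluate the memory expression from alert.rules.yml right now.
    QUERY = "node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes*100"
    resp = requests.get(
        "http://10.4.41.34:9090/api/v1/query", params={"query": QUERY}, timeout=5
    )
    resp.raise_for_status()
    for result in resp.json()["data"]["result"]:
        # Each result carries [timestamp, value]; the alert fires below 5.
        print(result["metric"].get("instance"), result["value"][1])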
diff --git a/src/app/prom/dockerfile_prom b/src/app/prom/dockerfile_prom
new file mode 100644
index 0000000..b9d6116
--- /dev/null
+++ b/src/app/prom/dockerfile_prom
@@ -0,0 +1,3 @@
+FROM prom/prometheus
+ADD src/app/prom/prometheus.yml /etc/prometheus/
+ADD src/app/prom/alert.rules.yml /etc/prometheus/
diff --git a/src/app/prom/prometheus.yml b/src/app/prom/prometheus.yml
new file mode 100644
index 0000000..e7cc8a2
--- /dev/null
+++ b/src/app/prom/prometheus.yml
@@ -0,0 +1,51 @@
+# my global config
+global:
+  scrape_interval: 15s # Set the scrape interval to every 15 seconds. Default is every 1 minute.
+  evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute.
+  # scrape_timeout is set to the global default (10s).
+
+  # Attach these labels to any time series or alerts when communicating with
+  # external systems (federation, remote storage, Alertmanager).
+  external_labels:
+    monitor: "codelab-monitor"
+
+alerting:
+  alertmanagers:
+  - scheme: http
+    static_configs:
+    - targets: [ 'alertmanager:9093' ]
+
+
+# Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
+rule_files:
+  - alert.rules.yml
+
+  # - "first.rules"
+  # - "second.rules"
+
+# A scrape configuration containing exactly one endpoint to scrape:
+# Here it's Prometheus itself.
+scrape_configs:
+  # The job name is added as a label `job=` to any timeseries scraped from this config.
+  - job_name: prometheus
+
+    # metrics_path defaults to '/metrics'
+    # scheme defaults to 'http'.
+
+    static_configs:
+      - targets: ["10.4.41.34:9090"]
+
+  - job_name: docker
+    # metrics_path defaults to '/metrics'
+    # scheme defaults to 'http'.
+
+    static_configs:
+      - targets: ["10.4.41.34:9323"]
+
+  - job_name: node_exporter
+    static_configs:
+      - targets: ["10.4.41.34:9100"]
+
+  - job_name: fastApi
+    static_configs:
+      - targets: ["10.4.41.34:5000"]
diff --git a/src/features/__init__.py b/src/features/__init__.py
index e69de29..296aa21 100644
--- a/src/features/__init__.py
+++ b/src/features/__init__.py
@@ -0,0 +1,3 @@
+"""
+Features init
+"""
diff --git a/src/features/process_data.py b/src/features/process_data.py
index b14127d..4f7dccd 100644
--- a/src/features/process_data.py
+++ b/src/features/process_data.py
@@ -1,62 +1,83 @@
-# importing libraries
-import numpy as np
-import cv2
-from matplotlib import pyplot as plt
+"""Module process data"""
+# importing libraries
+import os
+import shutil
 from os import listdir
 from os.path import isfile, join
-from src import PROCESSED_DATA_DIR,RAW_DATA_DIR
 from pathlib import Path
-import shutil
-import os
+
+import cv2
+import numpy as np
+
+from src import PROCESSED_DATA_DIR, RAW_DATA_DIR
+
 
 def noise_removal(img_path):
-    # Reading image from folder where it is stored
-    img = cv2.imread(img_path)
-    # denoising of image saving it into dst image
-    denoised = cv2.fastNlMeansDenoisingColored(img, None, 10, 10, 7, 15)
+    """Function for noise removal."""
+    # Reading image from folder where it is stored
+    img = cv2.imread(img_path)
+    # denoising of image saving it into dst image
+    denoised = cv2.fastNlMeansDenoisingColored(img, None, 10, 10, 7, 15)
     # cv2.imwrite(dst_path,denoised)
     return denoised
 
+
 def rgb2gray(img_path):
+    """Function rgb2gray."""
    img = cv2.imread(img_path)
    gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
    # blurred = cv2.GaussianBlur(gray, (5, 5), 0)
    return gray
 
 def get_images_path(directory_name):
+    """Function that gets images path."""
     imgs_path={}
-    for f in listdir(directory_name):
-        if isfile(join(directory_name, f)):
-            imgs_path[f] = join(directory_name /f)
+    for file in listdir(directory_name):
+        if isfile(join(directory_name, file)):
+            imgs_path[file] = join(directory_name /file)
     return imgs_path
 
-
 def auto_canny_edge_detection(path, sigma=0.33):
+    """
+    Perform automatic Canny edge detection on an image.
+    This function reads an image from the given file path, then applies the Canny edge
+    detection algorithm with threshold values determined automatically. The thresholds are
+    set based on the median pixel intensity of the image, adjusted by a factor defined by
+    the 'sigma' parameter.
+    """
     image = cv2.imread(path)
-    md = np.median(image)
-    lower_value = int(max(0, (1.0-sigma) * md))
-    upper_value = int(min(255, (1.0+sigma) * md))
+    median = np.median(image)
+    lower_value = int(max(0, (1.0-sigma) * median))
+    upper_value = int(min(255, (1.0+sigma) * median))
     return cv2.Canny(image, lower_value, upper_value)
 
-
 def image_processing(src_imgs_dir,dst_imgs_dir):
+    """Function that processes images"""
     imgs_path=get_images_path(src_imgs_dir)
-    
+
     for image_name,path in imgs_path.items():
         denoised=noise_removal(path)
         gray=cv2.cvtColor(denoised,cv2.COLOR_BGR2GRAY)
         cv2.imwrite(str(dst_imgs_dir / image_name),gray)
 
-
 def copy_labels(src,dest):
+    """
+    Copy all files from a source directory to a destination directory.
+    This function lists all files in the source directory specified by 'src',
+    and then copies each file to the destination directory specified by 'dest'.
+    It does not copy subdirectories or their contents, only files directly
+    within the source directory.
+    """
     src_files = os.listdir(src)
     for file_name in src_files:
         full_file_name = os.path.join(src, file_name)
         if os.path.isfile(full_file_name):
-            shutil.copy(full_file_name, dest) 
-
+            shutil.copy(full_file_name, dest)
 
 def main():
+    """
+    Main function
+    """
     train_imgs_dir=RAW_DATA_DIR / "train/images"
     valid_imgs_dir=RAW_DATA_DIR / "valid/images"
     test_imgs_dir=RAW_DATA_DIR / "test/images"
@@ -79,7 +100,7 @@ def main():
     copy_labels(RAW_DATA_DIR / "train/labels",train_labels)
     copy_labels(RAW_DATA_DIR / "test/labels",test_labels)
     copy_labels(RAW_DATA_DIR / "valid/labels",valid_labels)
-    
+
     image_processing(train_imgs_dir,processed_train_imgs_dir)
     image_processing(test_imgs_dir,processed_test_imgs_dir)
     image_processing(valid_imgs_dir,processed_valid_imgs_dir)
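The docstring added to auto_canny_edge_detection describes the median-based thresholding; a concrete worked example of the arithmetic makes it easier to see what sigma does (the median of 120 is illustrative, not from this repo's data):

    # With sigma = 0.33 and a median intensity of 120:
    #   lower = int(max(0, (1 - 0.33) * 120)) = int(80.4)  -> 80
    #   upper = int(min(255, (1 + 0.33) * 120)) = int(159.6) -> 159
    # so cv2.Canny(image, 80, 159) adapts its thresholds to the image's
    # typical brightness instead of using fixed magic numbers.
    lower = int(max(0, 0.67 * 120))    # 80
    upper = int(min(255, 1.33 * 120))  # 159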
diff --git a/src/models/__init__.py b/src/models/__init__.py
index bf6bd6c..e471a26 100644
--- a/src/models/__init__.py
+++ b/src/models/__init__.py
@@ -1,3 +1,6 @@
+"""
+Init models
+"""
 from dotenv import load_dotenv
 
 load_dotenv()
diff --git a/src/models/__pycache__/__init__.cpython-39.pyc b/src/models/__pycache__/__init__.cpython-39.pyc
new file mode 100644
index 0000000..ad7b41b
Binary files /dev/null and b/src/models/__pycache__/__init__.cpython-39.pyc differ
diff --git a/src/models/__pycache__/evaluate.cpython-39.pyc b/src/models/__pycache__/evaluate.cpython-39.pyc
new file mode 100644
index 0000000..c598711
Binary files /dev/null and b/src/models/__pycache__/evaluate.cpython-39.pyc differ
diff --git a/src/models/evaluate.py b/src/models/evaluate.py
index 08b57ae..b305804 100644
--- a/src/models/evaluate.py
+++ b/src/models/evaluate.py
@@ -1,15 +1,16 @@
+# pylint: skip-file
 import json
 import pickle
-
+from os import listdir
+from os.path import isfile, join
 from pathlib import Path
 
 import mlflow
-from src import METRICS_DIR, PROCESSED_DATA_DIR,MODELS_DIR
-from os import listdir
-from os.path import isfile, join
 from torch import tensor
 from torchmetrics.detection import MeanAveragePrecision
 
+from src import METRICS_DIR, MODELS_DIR, PROCESSED_DATA_DIR
+
 # Path to the models folder
 MODELS_FOLDER_PATH = Path("models")
@@ -22,22 +23,34 @@ def load_validation_data(input_folder_path: Path):
     Returns:
         Tuple[pd.DataFrame, pd.DataFrame]: Tuple containing the validation features and target.
     """
-    images_path=input_folder_path / "valid/images"
-    labels_path=input_folder_path / "valid/labels"
-    X_valid = [join(images_path /f) for f in listdir(images_path) if isfile(join(images_path, f))]
-    y_valid=get_validation_labels(labels_path)
-
+    images_path = input_folder_path / "valid/images"
+    labels_path = input_folder_path / "valid/labels"
+    X_valid = [
+        join(images_path / f)
+        for f in listdir(images_path)
+        if isfile(join(images_path, f))
+    ]
+    y_valid = get_validation_labels(labels_path)
+
     return X_valid, y_valid
 
+
 def get_validation_labels(labels_path: Path):
-    y_valid=None
+    y_valid = None
     if labels_path:
-        y_path = [join(labels_path /f) for f in listdir(labels_path) if isfile(join(labels_path, f))]
-        y_valid={}
+        y_path = [
+            join(labels_path / f)
+            for f in listdir(labels_path)
+            if isfile(join(labels_path, f))
+        ]
+        y_valid = {}
         for path in y_path:
-            f=open(path, 'r')
-            lines=[[float(value) for value in line.strip().split(" ")] for line in f.readlines()]
-            y_valid[path]=lines
+            with open(path, "r", encoding="utf-8") as f:
+                lines = [
+                    [float(value) for value in line.strip().split(" ")]
+                    for line in f.readlines()
+                ]
+            y_valid[path] = lines
     return y_valid
@@ -56,17 +69,26 @@ def evaluate_model(model_file_name, x, y):
     with open(MODELS_FOLDER_PATH / model_file_name, "rb") as pickled_model:
         yolo_model = pickle.load(pickled_model)
 
-
-    # TO DO
+    # TO DO
     val_predictions = yolo_model.predict(x)
-    preds = [dict(boxes=tensor([[258.0, 41.0, 606.0, 285.0]]),scores=tensor([0.536]),labels=tensor([0]),)]
-    target = [dict(boxes=tensor([[214.0, 41.0, 562.0, 285.0]]),labels=tensor([0]),)]
-    metric = MeanAveragePrecision(iou_type="bbox",box_format='xywh')
+    preds = [
+        dict(
+            boxes=tensor([[258.0, 41.0, 606.0, 285.0]]),
+            scores=tensor([0.536]),
+            labels=tensor([0]),
+        )
+    ]
+    target = [
+        dict(
+            boxes=tensor([[214.0, 41.0, 562.0, 285.0]]),
+            labels=tensor([0]),
+        )
+    ]
+    metric = MeanAveragePrecision(iou_type="bbox", box_format="xywh")
     metric.update(preds, target)
-
-    map=metric.compute()['map']
-    mar_10= metric.compute()['mar_10']
-    return float(map),float(mar_10)
+
+    results = metric.compute()
+    return float(results["map"]), float(results["mar_10"])
 
 
 if __name__ == "__main__":
@@ -80,12 +102,10 @@
 
     with mlflow.start_run():
         # Load the model
-        map, mar_10 = evaluate_model(
-            "yolov8_model.pkl", X_valid, y_valid
-        )
+        map_value, mar_10 = evaluate_model("yolov8_model.pkl", X_valid, y_valid)
 
         # Save the evaluation metrics to a dictionary to be reused later
-        metrics_dict = {"map": map,"mar_10":mar_10}
+        metrics_dict = {"map": map_value, "mar_10": mar_10}
 
         # Log the evaluation metrics to MLflow
         mlflow.log_metrics(metrics_dict)
@@ -98,4 +118,4 @@
             indent=4,
         )
 
-    print("Evaluation completed.")
\ No newline at end of file
+    print("Evaluation completed.")
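evaluate_model still carries a "# TO DO": val_predictions is computed but the preds/target fed to the metric are hard-coded boxes. A sketch of how real predictions could be wired in, assuming the ultralytics Results accessors (boxes.xywh, boxes.conf, boxes.cls); the helper name is illustrative:

    def results_to_preds(results):
        """Map ultralytics Results objects onto the per-image dicts that
        MeanAveragePrecision.update() expects on the prediction side."""
        return [
            dict(
                boxes=r.boxes.xywh,        # (N, 4) predicted boxes, xywh format
                scores=r.boxes.conf,       # (N,) confidence per box
                labels=r.boxes.cls.int(),  # (N,) predicted class ids
            )
            for r in results
        ]

    # e.g. metric.update(results_to_preds(val_predictions), target)

The ground-truth side would need an analogous conversion from the YOLO label files that get_validation_labels() parses.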
diff --git a/src/models/train.py b/src/models/train.py
index 1371b3f..258af20 100644
--- a/src/models/train.py
+++ b/src/models/train.py
@@ -1,45 +1,55 @@
 """Module that trains the model
 """
-import os, glob
-import pickle
-from getpass import getpass
+import glob
+import os
+import shutil
 from pathlib import Path
+
 import mlflow
-from ultralytics import YOLO
+import pandas as pd
 import yaml
 from codecarbon import EmissionsTracker
-import pandas as pd
-from src import METRICS_DIR,MODELS_DIR,ROOT_DIR,DATA_YAML_DIR,ARTIFACTS_DIR,REPORTS_DIR
-import shutil
+from ultralytics import YOLO
+
+from src import (
+    ARTIFACTS_DIR,
+    DATA_YAML_DIR,
+    METRICS_DIR,
+    MODELS_DIR,
+    REPORTS_DIR,
+    ROOT_DIR,
+)
 
-os.environ['MLFLOW_TRACKING_USERNAME'] = input('Enter your DAGsHub username: ')
-os.environ['MLFLOW_TRACKING_PASSWORD'] = input('Enter your DAGsHub access token: ')
-os.environ['MLFLOW_TRACKING_URI'] = "https://dagshub.com/Sebastianpaglia/MLOps_WhereIsWally.mlflow"
+os.environ["MLFLOW_TRACKING_USERNAME"] = input("Enter your DAGsHub username: ")
+os.environ["MLFLOW_TRACKING_PASSWORD"] = input("Enter your DAGsHub access token: ")
+os.environ[
+    "MLFLOW_TRACKING_URI"
+] = "https://dagshub.com/Sebastianpaglia/MLOps_WhereIsWally.mlflow"
 
-mlflow.set_tracking_uri(os.environ['MLFLOW_TRACKING_URI'])
+mlflow.set_tracking_uri(os.environ["MLFLOW_TRACKING_URI"])
 
 EMISSIONS_OUTPUT_FOLDER = METRICS_DIR
 
-with open(ROOT_DIR / "params.yaml", encoding='utf-8') as f:
+with open(ROOT_DIR / "params.yaml", encoding="utf-8") as f:
     params = yaml.safe_load(f)
 
 # Load the model.
-model = YOLO(params['model_type'])
+model = YOLO(params["model_type"])
 
 # Training.
-mlflow.set_experiment(params['name'])
+mlflow.set_experiment(params["name"])
 with mlflow.start_run():
     with EmissionsTracker(
-            output_dir=EMISSIONS_OUTPUT_FOLDER,
-            output_file="emissions.csv",
-            on_csv_write="update",
+        output_dir=EMISSIONS_OUTPUT_FOLDER,
+        output_file="emissions.csv",
+        on_csv_write="update",
     ):
-        results = model.train(
-            data=DATA_YAML_DIR,
-            imgsz=params['imgsz'],
-            epochs=params['epochs'],
-            batch=params['batch'],
-            name=params['name']
+        results = model.train(
+            data=DATA_YAML_DIR,
+            imgsz=params["imgsz"],
+            epochs=params["epochs"],
+            batch=params["batch"],
+            name=params["name"],
         )
 
 # Log the CO2 emissions to MLflow
 emissions = pd.read_csv(EMISSIONS_OUTPUT_FOLDER + "/emissions.csv")
@@ -47,18 +57,19 @@
 emissions_params = emissions.iloc[-1, 13:].to_dict()
 mlflow.log_params(emissions_params)
 mlflow.log_metrics(emissions_metrics)
-
+
 # Save the model as a pickle file
 Path("models").mkdir(exist_ok=True)
-
-last_run_path=max(glob.glob(os.path.join(ARTIFACTS_DIR, '*/')), key=os.path.getmtime)
+
+last_run_path = max(
+    glob.glob(os.path.join(ARTIFACTS_DIR, "*/")), key=os.path.getmtime
+)
 best_weight_path = ARTIFACTS_DIR / last_run_path / "weights/best.pt"
-train_params_file= ARTIFACTS_DIR / last_run_path / "args.yaml"
-train_metrics_file= ARTIFACTS_DIR / last_run_path / "results.csv"
+train_params_file = ARTIFACTS_DIR / last_run_path / "args.yaml"
+train_metrics_file = ARTIFACTS_DIR / last_run_path / "results.csv"
 shutil.copy(best_weight_path, MODELS_DIR / "model.pt")
 shutil.copy(train_params_file, REPORTS_DIR / "train_params.yaml")
-shutil.copy(train_params_file, REPORTS_DIR / "train_metrics.csv")
-
+shutil.copy(train_metrics_file, REPORTS_DIR / "train_metrics.csv")
+
 # with open(MODELS_DIR / "yolov8_model.pkl", "wb") as pickle_file:
 #     pickle.dump(results, pickle_file)
-
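train.py prompts for the DagsHub credentials with input(), which blocks any non-interactive run such as the CI workflow above. A small sketch of an environment-first fallback; the DAGSHUB_USER and DAGSHUB_TOKEN variable names are illustrative assumptions, not existing repo config:

    import os

    def mlflow_credential(env_var, prompt):
        """Prefer an environment variable; only prompt when it is unset."""
        value = os.environ.get(env_var)
        if value is None:
            value = input(prompt)
        return value

    os.environ["MLFLOW_TRACKING_USERNAME"] = mlflow_credential(
        "DAGSHUB_USER", "Enter your DAGsHub username: "
    )
    os.environ["MLFLOW_TRACKING_PASSWORD"] = mlflow_credential(
        "DAGSHUB_TOKEN", "Enter your DAGsHub access token: "
    )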
diff --git a/tests/api_test/pytest.ini b/tests/api_test/pytest.ini
new file mode 100644
index 0000000..b0e5a94
--- /dev/null
+++ b/tests/api_test/pytest.ini
@@ -0,0 +1,3 @@
+[pytest]
+filterwarnings =
+    ignore::DeprecationWarning
\ No newline at end of file
diff --git a/tests/api_test/requirements.txt b/tests/api_test/requirements.txt
new file mode 100644
index 0000000..2707812
--- /dev/null
+++ b/tests/api_test/requirements.txt
@@ -0,0 +1,3 @@
+auto_mix_prep==0.2.0
+fastapi==0.104.1
+pytest==7.4.3
diff --git a/tests/api_test/test_api.py b/tests/api_test/test_api.py
new file mode 100644
index 0000000..3f85690
--- /dev/null
+++ b/tests/api_test/test_api.py
@@ -0,0 +1,82 @@
+"""Module for testing the api"""
+# pylint: disable=redefined-outer-name
+import os
+from http import HTTPStatus
+
+import pytest
+from fastapi.testclient import TestClient
+
+from src.app.backend.api import app
+
+
+@pytest.fixture(scope="module", autouse=True)
+def client():
+    """Fixture that yields a test client bound to the app."""
+    with TestClient(app) as client:
+        # yield (rather than return) keeps the client's startup/shutdown
+        # context open for the whole test module
+        yield client
+
+def test_get_main(client):
+    """Function to test a get to the main page"""
+    response = client.get("/")
+    assert response.status_code == HTTPStatus.OK.value
+    response_body = response.json()
+    assert "message" in response_body
+    assert "data" in response_body
+    assert "message" in response_body["data"]
+    assert response_body["message"] == HTTPStatus.OK.phrase
+    assert response_body["data"]["message"] == "Welcome to Where is Wally!"
+
+
+def test_predict_with_invalid_file(client):
+    """Function to test a prediction with an invalid file"""
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    file_path = os.path.join(current_dir, 'testing_file.txt')
+    with open(file_path, 'rb') as file:
+        response = client.post("/predict/all", files={"file": file})
+    assert response.status_code == 400
+    assert "Invalid image file" in response.text
+
+
+def test_predict_with_valid_image_wally_found_model_all(client):
+    """Function to test a prediction with a valid image using the model that finds all characters"""
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    file_path = os.path.join(current_dir, 'testing_img.png')
+    with open(file_path, 'rb') as file:
+        response = client.post("/predict/all", files={"file": file})
+    assert response.status_code == 200
+    response_body = response.json()
+    assert response_body['found'] is True
+    assert "boxes" in response_body
+    assert 'orig_shape' in response_body['boxes']
+    assert len(response_body['boxes']['orig_shape']) > 0
+
+
+def test_predict_with_valid_image_wally_not_found(client):
+    """Function to test a prediction with a valid image that does not find wally"""
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    file_path = os.path.join(current_dir, 'testing_not_found_img.jpeg')
+    with open(file_path, 'rb') as file:
+        response = client.post("/predict/all", files={"file": file})
+    assert response.status_code == 200
+    response_body = response.json()
+    assert "boxes" in response_body
+    assert "found" in response_body
+    assert response_body['found'] is False
+    assert "Processing completed, but Wally was not found in the image." in response.text
+    assert len(response_body['boxes']) == 0
+
+
+def test_predict_with_valid_image_model_wally(client):
+    """Function to test a prediction with a valid image using the model that only finds wally"""
+    current_dir = os.path.dirname(os.path.abspath(__file__))
+    file_path = os.path.join(current_dir, 'testing_img.png')
+    with open(file_path, 'rb') as file:
+        response = client.post("/predict/wally", files={"file": file})
+    assert response.status_code == 200
+    response_body = response.json()
+    assert response_body['found'] is True
+    assert "boxes" in response_body
+    assert 'orig_shape' in response_body['boxes']
+    assert len(response_body['boxes']['orig_shape']) > 0
diff --git a/tests/api_test/testing_file.txt b/tests/api_test/testing_file.txt
new file mode 100644
index 0000000..baa72e8
--- /dev/null
+++ b/tests/api_test/testing_file.txt
@@ -0,0 +1 @@
+Testing .txt file
\ No newline at end of file
diff --git a/tests/api_test/testing_img.png b/tests/api_test/testing_img.png
new file mode 100644
index 0000000..c8e9517
Binary files /dev/null and b/tests/api_test/testing_img.png differ
diff --git a/tests/api_test/testing_not_found_img.jpeg b/tests/api_test/testing_not_found_img.jpeg
new file mode 100644
index 0000000..070c10f
Binary files /dev/null and b/tests/api_test/testing_not_found_img.jpeg differ
diff --git a/tests/code_test/integration_tests/__pycache__/test_data_processing.cpython-39-pytest-7.1.2.pyc b/tests/code_test/integration_tests/__pycache__/test_data_processing.cpython-39-pytest-7.1.2.pyc
new file mode 100644
index 0000000..96cfb98
Binary files /dev/null and b/tests/code_test/integration_tests/__pycache__/test_data_processing.cpython-39-pytest-7.1.2.pyc differ
diff --git a/tests/code_test/integration_tests/test_data_processing.py b/tests/code_test/integration_tests/test_data_processing.py
index 503fa1d..e69de29 100644
--- a/tests/code_test/integration_tests/test_data_processing.py
+++ b/tests/code_test/integration_tests/test_data_processing.py
@@ -1 +0,0 @@
-#TODO
\ No newline at end of file
diff --git a/tests/code_test/unit_tests/__pycache__/test_labels_data.cpython-39-pytest-7.1.2.pyc b/tests/code_test/unit_tests/__pycache__/test_labels_data.cpython-39-pytest-7.1.2.pyc
new file mode 100644
index 0000000..3b27403
Binary files /dev/null and b/tests/code_test/unit_tests/__pycache__/test_labels_data.cpython-39-pytest-7.1.2.pyc differ
diff --git a/tests/code_test/unit_tests/__pycache__/test_noise_removal.cpython-39-pytest-7.1.2.pyc b/tests/code_test/unit_tests/__pycache__/test_noise_removal.cpython-39-pytest-7.1.2.pyc
new file mode 100644
index 0000000..ad5fc5d
Binary files /dev/null and b/tests/code_test/unit_tests/__pycache__/test_noise_removal.cpython-39-pytest-7.1.2.pyc differ
diff --git a/tests/code_test/unit_tests/test_labels_data.py b/tests/code_test/unit_tests/test_labels_data.py
index d7c443d..36e41e4 100644
--- a/tests/code_test/unit_tests/test_labels_data.py
+++ b/tests/code_test/unit_tests/test_labels_data.py
@@ -1,50 +1,58 @@
-import pickle
-import pytest
-from src import MODELS_DIR, PROCESSED_DATA_DIR,RAW_DATA_DIR
-from src.models.evaluate import load_validation_data,get_validation_labels
-from src.features.process_data import noise_removal
-from torch import tensor
+"""Module for unit testing data labels."""
+# pylint: disable=redefined-outer-name
 from os import listdir
 from os.path import isfile, join
-import cv2
-import numpy as np
+
+import pytest
+
+from src import PROCESSED_DATA_DIR, RAW_DATA_DIR
+from src.models.evaluate import get_validation_labels, load_validation_data
+
 
 @pytest.fixture
 def validation_data_path():
+    """Fixture that returns the processed data path."""
     return PROCESSED_DATA_DIR
-    
+
 @pytest.fixture
 def labels_path():
+    """Fixture that returns the validation labels path."""
     return PROCESSED_DATA_DIR / "valid/labels"
 
 @pytest.fixture
 def raw_images_path():
+    """Fixture that returns the raw training image paths."""
     images_path=RAW_DATA_DIR / "train/images"
     data_path = [join(images_path /f) for f in listdir(images_path) if isfile(join(images_path, f))]
     return data_path
 
 #validation_labels function's tests
-def test_validation_labels_ObjnotNone(labels_path):
-    assert get_validation_labels(labels_path) is not None
-    
+def test_validation_labels_obj_not_none(labels_path):
+    """Function that tests that the labels object is not None."""
+    assert get_validation_labels(labels_path) is not None
 
-def test_validation_labels_isRightDatatype(labels_path):
-    assert isinstance(get_validation_labels(labels_path), dict)
-    
-def test_validation_labels_ObjIsNone(labels_path):
-    assert get_validation_labels(None) is None
+def test_validation_labels_is_right_data_type(labels_path):
+    """Function that tests that the labels are returned as a dict."""
+    assert isinstance(get_validation_labels(labels_path), dict)
+
+
+def test_validation_labels_obj_is_none():
+    """Function that tests that a missing labels path yields None."""
+    assert get_validation_labels(None) is None
+
 
-
 def test_validation_labels_path_fails(labels_path):
+    """Function that tests that a nonexistent labels path raises."""
     with pytest.raises(FileNotFoundError):
        assert isinstance(get_validation_labels(labels_path / "poo"), dict)
-    
+
 #load_validation_data function's tests
-def test_validation_data_isAllObjectsReturned(validation_data_path):
+def test_validation_data_is_all_objects_returned(validation_data_path):
+    """Function that tests validation data returns both features and labels."""
     assert len(load_validation_data(validation_data_path))==2
-    
-def test_validation_data_isRightDatatype(validation_data_path):
+
+def test_validation_data_is_right_data_type(validation_data_path):
+    """Function that tests that validation data has the right data type."""
     assert isinstance(load_validation_data(validation_data_path)[0], list)
-    
\ No newline at end of file
diff --git a/tests/code_test/unit_tests/test_noise_removal.py b/tests/code_test/unit_tests/test_noise_removal.py
index 354b56c..633a5d3 100644
--- a/tests/code_test/unit_tests/test_noise_removal.py
+++ b/tests/code_test/unit_tests/test_noise_removal.py
@@ -1,38 +1,43 @@
-import pickle
-import pytest
-from src import MODELS_DIR, PROCESSED_DATA_DIR,RAW_DATA_DIR
-from src.models.evaluate import load_validation_data,get_validation_labels
-from src.features.process_data import noise_removal
-from torch import tensor
+"""Module for unit testing noise removal."""
+# pylint: disable=redefined-outer-name
 from os import listdir
 from os.path import isfile, join
+
 import cv2
 import numpy as np
+import pytest
+
+from src import RAW_DATA_DIR
+from src.features.process_data import noise_removal
 
 
 @pytest.fixture
 def raw_images_path():
+    """Fixture that returns the raw training image paths."""
     images_path=RAW_DATA_DIR / "train/images"
     data_path = [join(images_path /f) for f in listdir(images_path) if isfile(join(images_path, f))]
     return data_path
-    
+
 def is_similar(image1, image2):
-    return image1.shape == image2.shape and not(np.bitwise_xor(image1,image2).any())
+    """Function that returns whether two images match in both their
+    shape and their pixel values."""
+    return image1.shape == image2.shape and not np.bitwise_xor(image1,image2).any()
 
-def test_noise_removal_isObjNotNone(raw_images_path):
+def test_noise_removal_is_obj_not_none(raw_images_path):
+    """Function that tests that noise removal returns a result."""
     denoised=noise_removal(raw_images_path[0])
     assert denoised is not None
-    
-
-def test_noise_removal_isEqualShape(raw_images_path):
-    img = cv2.imread(raw_images_path[0])
+
+
+def test_noise_removal_is_equal_shape(raw_images_path):
+    """Function that tests that noise removal preserves the image shape."""
+    img = cv2.imread(raw_images_path[0])
     denoised=noise_removal(raw_images_path[0])
-    assert (img.shape == denoised.shape)
-    
-
-def test_noise_removal_isNotSameImage(raw_images_path):
-    img = cv2.imread(raw_images_path[0])
+    assert img.shape == denoised.shape
+
+def test_noise_removal_is_not_same_image(raw_images_path):
+    """Function that tests that noise removal actually changes the image."""
+    img = cv2.imread(raw_images_path[0])
     denoised=noise_removal(raw_images_path[0])
     assert (np.bitwise_xor(img,denoised).any())
-
diff --git a/tests/data_test/validate.py b/tests/data_test/validate.py
index 2afba87..6b309b4 100644
--- a/tests/data_test/validate.py
+++ b/tests/data_test/validate.py
@@ -1,5 +1,6 @@
 """Module for great expectations """
 import os
+
 import great_expectations as gx
 
 context = gx.get_context()
diff --git a/tests/model_test/__pycache__/model_test.cpython-39-pytest-7.1.2.pyc b/tests/model_test/__pycache__/model_test.cpython-39-pytest-7.1.2.pyc
new file mode 100644
index 0000000..f869168
Binary files /dev/null and b/tests/model_test/__pycache__/model_test.cpython-39-pytest-7.1.2.pyc differ
diff --git a/tests/model_test/model_test.py b/tests/model_test/model_test.py
index 46fd86d..a673004 100644
--- a/tests/model_test/model_test.py
+++ b/tests/model_test/model_test.py
@@ -1,54 +1,39 @@
+"""Module for model testing."""
+# pylint: disable=redefined-outer-name
 import pickle
+
 import pytest
-from src import MODELS_DIR, PROCESSED_DATA_DIR
-from src.models.evaluate import load_validation_data
 from torch import tensor
 from torchmetrics.detection import MeanAveragePrecision
-import torch
-from map_boxes import mean_average_precision_for_boxes
+
+from src import MODELS_DIR, PROCESSED_DATA_DIR
+from src.models.evaluate import load_validation_data
+
 
 @pytest.fixture
 def yov8_model():
-    with open(MODELS_DIR / "yolov8_model.pkl", "rb") as f:
-        return pickle.load(f)
+    """Fixture that loads the pickled YOLOv8 model."""
+    with open(MODELS_DIR / "yolov8_model.pkl", "rb") as file:
+        return pickle.load(file)
 
 @pytest.fixture
 def get_validation_data():
+    """Fixture that returns the validation data."""
     return load_validation_data(PROCESSED_DATA_DIR)
 
-
 def test_model_expected_value(yov8_model, get_validation_data):
-    x, y = get_validation_data
-
-    val_predictions = yov8_model.predict(x,imgsz=640, conf=0.0033)
-    predictions_lst=[]
-    labels_lst=[]
-
-    # for img,prediction in zip(x,val_predictions):
-    #     img_name=img.split("/")[0]
-    #     masks = prediction.masks  # Masks object for segmentation masks outputs
-    #     probs = prediction.probs  # Class probabilities for classification outputs
-    #     if probs:
-    #         label=int(max(probs))
-    #     else:
-    #         label=1
-    #     print(y)
-    #     predictions_lst.append([img_name,label]+[v for v in prediction.boxes.xywh])
-    #     print(y[img])
-    #     labels_lst.append([img_name],int(y[img][0]),0.1,y[img][1],y[img][2],y[img][3],y[img][4])
-
-    # mean_ap, average_precisions = mean_average_precision_for_boxes(labels_lst, predictions_lst)
-
-    # preds = [dict(boxes=torch.FloatTensor(next(iter(y.values()))[0]),scores=tensor([0.536]),labels=tensor([[1],[2],[2]]),)]
-    # target = [dict(boxes=boxes.xywh,scores=tensor([0.536]),labels=tensor([[1],[1],[1]]),)]
-    preds = [dict(boxes=tensor([[258.0, 41.0, 606.0, 285.0]]),scores=tensor([0.536]),labels=tensor([0]),)]
+    """Function that tests model expected value."""
+    x, _ = get_validation_data  # pylint: disable=invalid-name
+
+    val_predictions = yov8_model.predict(x,imgsz=640, conf=0.0033)  # pylint: disable=unused-variable
+    preds = [dict(boxes=tensor([[258.0, 41.0, 606.0, 285.0]]),
+                  scores=tensor([0.536]),labels=tensor([0]),)]
     target = [dict(boxes=tensor([[214.0, 41.0, 562.0, 285.0]]),labels=tensor([0]),)]
     metric = MeanAveragePrecision(iou_type="bbox",box_format='xywh')
     metric.update(preds, target)
-
-    # Compute the map(global mean average precision) and mar_10(mean average recall for 10 detections per image)
+
+    # Compute the map (global mean average precision) and
+    # mar_10 (mean average recall for 10 detections per image)
     # values for the model
     assert metric.compute()['map'] == pytest.approx(0.6, rel=0.1)
    assert metric.compute()['mar_10'] == pytest.approx(0.6, rel=0.1)
-
-
\ No newline at end of file
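A note on the tolerance used in the assertions above: pytest.approx(0.6, rel=0.1) accepts any value within 10% of 0.6. A tiny illustration (the probe values are illustrative only):

    import pytest

    # rel=0.1 means |value - 0.6| <= 0.1 * 0.6, i.e. roughly 0.54..0.66
    assert 0.55 == pytest.approx(0.6, rel=0.1)       # inside the band: passes
    assert not 0.50 == pytest.approx(0.6, rel=0.1)   # 0.50 falls outside the band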