diff --git a/.github/workflows/find-secrets.yml b/.github/workflows/find-secrets.yml index b39eb8cdc52..0eb7bbd6feb 100644 --- a/.github/workflows/find-secrets.yml +++ b/.github/workflows/find-secrets.yml @@ -11,7 +11,7 @@ jobs: with: fetch-depth: 0 - name: TruffleHog OSS - uses: trufflesecurity/trufflehog@v3.88.2 + uses: trufflesecurity/trufflehog@v3.88.4 with: path: ./ base: ${{ github.event.repository.default_branch }} diff --git a/api/pyproject.toml b/api/pyproject.toml index 3c7338eb53a..2cccb9ddb3c 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -8,7 +8,7 @@ description = "Prowler's API (Django/DRF)" license = "Apache-2.0" name = "prowler-api" package-mode = false -version = "1.3.2" +version = "1.4.0" [tool.poetry.dependencies] celery = {extras = ["pytest"], version = "^5.4.0"} diff --git a/api/src/backend/api/migrations/0008_daily_scheduled_tasks_update.py b/api/src/backend/api/migrations/0008_daily_scheduled_tasks_update.py new file mode 100644 index 00000000000..7f059ea2b8a --- /dev/null +++ b/api/src/backend/api/migrations/0008_daily_scheduled_tasks_update.py @@ -0,0 +1,64 @@ +import json +from datetime import datetime, timedelta, timezone + +import django.db.models.deletion +from django.db import migrations, models +from django_celery_beat.models import PeriodicTask + +from api.db_utils import rls_transaction +from api.models import Scan, StateChoices + + +def migrate_daily_scheduled_scan_tasks(apps, schema_editor): + for daily_scheduled_scan_task in PeriodicTask.objects.filter( + task="scan-perform-scheduled" + ): + task_kwargs = json.loads(daily_scheduled_scan_task.kwargs) + tenant_id = task_kwargs["tenant_id"] + provider_id = task_kwargs["provider_id"] + + current_time = datetime.now(timezone.utc) + scheduled_time_today = datetime.combine( + current_time.date(), + daily_scheduled_scan_task.start_time.time(), + tzinfo=timezone.utc, + ) + + if current_time < scheduled_time_today: + next_scan_date = scheduled_time_today + else: + 
next_scan_date = scheduled_time_today + timedelta(days=1) + + with rls_transaction(tenant_id): + Scan.objects.create( + tenant_id=tenant_id, + name="Daily scheduled scan", + provider_id=provider_id, + trigger=Scan.TriggerChoices.SCHEDULED, + state=StateChoices.SCHEDULED, + scheduled_at=next_scan_date, + scheduler_task_id=daily_scheduled_scan_task.id, + ) + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [ + ("api", "0007_scan_and_scan_summaries_indexes"), + ("django_celery_beat", "0019_alter_periodictasks_options"), + ] + + operations = [ + migrations.AddField( + model_name="scan", + name="scheduler_task", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="django_celery_beat.periodictask", + ), + ), + migrations.RunPython(migrate_daily_scheduled_scan_tasks), + ] diff --git a/api/src/backend/api/models.py b/api/src/backend/api/models.py index 09014321cb4..ef982be8c7a 100644 --- a/api/src/backend/api/models.py +++ b/api/src/backend/api/models.py @@ -11,6 +11,7 @@ from django.db import models from django.db.models import Q from django.utils.translation import gettext_lazy as _ +from django_celery_beat.models import PeriodicTask from django_celery_results.models import TaskResult from psqlextra.manager import PostgresManager from psqlextra.models import PostgresPartitionedModel @@ -410,6 +411,9 @@ class TriggerChoices(models.TextChoices): started_at = models.DateTimeField(null=True, blank=True) completed_at = models.DateTimeField(null=True, blank=True) next_scan_at = models.DateTimeField(null=True, blank=True) + scheduler_task = models.ForeignKey( + PeriodicTask, on_delete=models.CASCADE, null=True, blank=True + ) # TODO: mutelist foreign key class Meta(RowLevelSecurityProtectedModel.Meta): diff --git a/api/src/backend/api/specs/v1.yaml b/api/src/backend/api/specs/v1.yaml index 7448843b54d..ab25ef0e273 100644 --- a/api/src/backend/api/specs/v1.yaml +++ 
b/api/src/backend/api/specs/v1.yaml @@ -1,7 +1,7 @@ openapi: 3.0.3 info: title: Prowler API - version: 1.3.2 + version: 1.4.0 description: |- Prowler API specification. diff --git a/api/src/backend/api/v1/urls.py b/api/src/backend/api/v1/urls.py index 6b230960e1d..4ff587d9137 100644 --- a/api/src/backend/api/v1/urls.py +++ b/api/src/backend/api/v1/urls.py @@ -1,4 +1,3 @@ -from django.conf import settings from django.urls import include, path from drf_spectacular.views import SpectacularRedocView from rest_framework_nested import routers @@ -113,6 +112,3 @@ path("schema", SchemaView.as_view(), name="schema"), path("docs", SpectacularRedocView.as_view(url_name="schema"), name="docs"), ] - -if settings.DEBUG: - urlpatterns += [path("silk/", include("silk.urls", namespace="silk"))] diff --git a/api/src/backend/api/v1/views.py b/api/src/backend/api/v1/views.py index 3ecb6474fb3..b11626c8296 100644 --- a/api/src/backend/api/v1/views.py +++ b/api/src/backend/api/v1/views.py @@ -193,7 +193,7 @@ class SchemaView(SpectacularAPIView): def get(self, request, *args, **kwargs): spectacular_settings.TITLE = "Prowler API" - spectacular_settings.VERSION = "1.3.2" + spectacular_settings.VERSION = "1.4.0" spectacular_settings.DESCRIPTION = ( "Prowler API specification.\n\nThis file is auto-generated." 
) diff --git a/api/src/backend/config/django/devel.py b/api/src/backend/config/django/devel.py index 6ee92a6ecb8..825e1ce36a9 100644 --- a/api/src/backend/config/django/devel.py +++ b/api/src/backend/config/django/devel.py @@ -37,9 +37,3 @@ ) + ("api.filters.CustomDjangoFilterBackend",) SECRETS_ENCRYPTION_KEY = "ZMiYVo7m4Fbe2eXXPyrwxdJss2WSalXSv3xHBcJkPl0=" - -MIDDLEWARE += [ # noqa: F405 - "silk.middleware.SilkyMiddleware", -] - -INSTALLED_APPS += ["silk"] # noqa: F405 diff --git a/api/src/backend/tasks/beat.py b/api/src/backend/tasks/beat.py index 81dc04abba4..6cd8d7a9cea 100644 --- a/api/src/backend/tasks/beat.py +++ b/api/src/backend/tasks/beat.py @@ -5,10 +5,14 @@ from rest_framework_json_api.serializers import ValidationError from tasks.tasks import perform_scheduled_scan_task -from api.models import Provider +from api.db_utils import rls_transaction +from api.models import Provider, Scan, StateChoices def schedule_provider_scan(provider_instance: Provider): + tenant_id = str(provider_instance.tenant_id) + provider_id = str(provider_instance.id) + schedule, _ = IntervalSchedule.objects.get_or_create( every=24, period=IntervalSchedule.HOURS, @@ -17,23 +21,9 @@ def schedule_provider_scan(provider_instance: Provider): # Create a unique name for the periodic task task_name = f"scan-perform-scheduled-{provider_instance.id}" - # Schedule the task - _, created = PeriodicTask.objects.get_or_create( - interval=schedule, - name=task_name, - task="scan-perform-scheduled", - kwargs=json.dumps( - { - "tenant_id": str(provider_instance.tenant_id), - "provider_id": str(provider_instance.id), - } - ), - one_off=False, - defaults={ - "start_time": datetime.now(timezone.utc) + timedelta(hours=24), - }, - ) - if not created: + if PeriodicTask.objects.filter( + interval=schedule, name=task_name, task="scan-perform-scheduled" + ).exists(): raise ValidationError( [ { @@ -45,9 +35,36 @@ def schedule_provider_scan(provider_instance: Provider): ] ) + with rls_transaction(tenant_id): 
+ scheduled_scan = Scan.objects.create( + tenant_id=tenant_id, + name="Daily scheduled scan", + provider_id=provider_id, + trigger=Scan.TriggerChoices.SCHEDULED, + state=StateChoices.AVAILABLE, + scheduled_at=datetime.now(timezone.utc), + ) + + # Schedule the task + periodic_task_instance = PeriodicTask.objects.create( + interval=schedule, + name=task_name, + task="scan-perform-scheduled", + kwargs=json.dumps( + { + "tenant_id": tenant_id, + "provider_id": provider_id, + } + ), + one_off=False, + start_time=datetime.now(timezone.utc) + timedelta(hours=24), + ) + scheduled_scan.scheduler_task_id = periodic_task_instance.id + scheduled_scan.save() + return perform_scheduled_scan_task.apply_async( kwargs={ "tenant_id": str(provider_instance.tenant_id), - "provider_id": str(provider_instance.id), + "provider_id": provider_id, }, ) diff --git a/api/src/backend/tasks/jobs/scan.py b/api/src/backend/tasks/jobs/scan.py index b11366a859f..ad11079ef0a 100644 --- a/api/src/backend/tasks/jobs/scan.py +++ b/api/src/backend/tasks/jobs/scan.py @@ -245,8 +245,11 @@ def perform_prowler_scan( status = FindingStatus[finding.status] delta = _create_finding_delta(last_status, status) - # For the findings prior to the change, when a first finding is found with delta!="new" it will be assigned a current date as first_seen_at and the successive findings with the same UID will always get the date of the previous finding. - # For new findings, when a finding (delta="new") is found for the first time, the first_seen_at attribute will be assigned the current date, the following findings will get that date. + # For the findings prior to the change, when a first finding is found with delta!="new" it will be + # assigned a current date as first_seen_at and the successive findings with the same UID will + # always get the date of the previous finding. 
+ # For new findings, when a finding (delta="new") is found for the first time, the first_seen_at + # attribute will be assigned the current date, the following findings will get that date. if not last_first_seen_at: last_first_seen_at = datetime.now(tz=timezone.utc) diff --git a/api/src/backend/tasks/tasks.py b/api/src/backend/tasks/tasks.py index 1f87e0fdeb5..792900c4e74 100644 --- a/api/src/backend/tasks/tasks.py +++ b/api/src/backend/tasks/tasks.py @@ -1,15 +1,14 @@ -from datetime import datetime, timedelta, timezone - from celery import shared_task from config.celery import RLSTask from django_celery_beat.models import PeriodicTask from tasks.jobs.connection import check_provider_connection from tasks.jobs.deletion import delete_provider, delete_tenant from tasks.jobs.scan import aggregate_findings, perform_prowler_scan +from tasks.utils import get_next_execution_datetime from api.db_utils import rls_transaction from api.decorators import set_tenant -from api.models import Provider, Scan +from api.models import Scan, StateChoices @shared_task(base=RLSTask, name="provider-connection-check") @@ -100,28 +99,42 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str): task_id = self.request.id with rls_transaction(tenant_id): - provider_instance = Provider.objects.get(pk=provider_id) periodic_task_instance = PeriodicTask.objects.get( name=f"scan-perform-scheduled-{provider_id}" ) - next_scan_date = datetime.combine( - datetime.now(timezone.utc), periodic_task_instance.start_time.time() - ) + timedelta(hours=24) - - scan_instance = Scan.objects.create( + next_scan_datetime = get_next_execution_datetime(task_id, provider_id) + scan_instance, _ = Scan.objects.get_or_create( tenant_id=tenant_id, - name="Daily scheduled scan", - provider=provider_instance, + provider_id=provider_id, trigger=Scan.TriggerChoices.SCHEDULED, - next_scan_at=next_scan_date, - task_id=task_id, + state__in=(StateChoices.SCHEDULED, StateChoices.AVAILABLE), + 
scheduler_task_id=periodic_task_instance.id, + defaults={"state": StateChoices.SCHEDULED}, ) - result = perform_prowler_scan( - tenant_id=tenant_id, - scan_id=str(scan_instance.id), - provider_id=provider_id, - ) + scan_instance.task_id = task_id + scan_instance.save() + + try: + result = perform_prowler_scan( + tenant_id=tenant_id, + scan_id=str(scan_instance.id), + provider_id=provider_id, + ) + except Exception as e: + raise e + finally: + with rls_transaction(tenant_id): + Scan.objects.get_or_create( + tenant_id=tenant_id, + name="Daily scheduled scan", + provider_id=provider_id, + trigger=Scan.TriggerChoices.SCHEDULED, + state=StateChoices.SCHEDULED, + scheduled_at=next_scan_datetime, + scheduler_task_id=periodic_task_instance.id, + ) + perform_scan_summary_task.apply_async( kwargs={ "tenant_id": tenant_id, diff --git a/api/src/backend/tasks/tests/test_beat.py b/api/src/backend/tasks/tests/test_beat.py index 78b5acb0391..6e5c6fdf914 100644 --- a/api/src/backend/tasks/tests/test_beat.py +++ b/api/src/backend/tasks/tests/test_beat.py @@ -6,6 +6,8 @@ from rest_framework_json_api.serializers import ValidationError from tasks.beat import schedule_provider_scan +from api.models import Scan + @pytest.mark.django_db class TestScheduleProviderScan: @@ -15,9 +17,11 @@ def test_schedule_provider_scan_success(self, providers_fixture): with patch( "tasks.tasks.perform_scheduled_scan_task.apply_async" ) as mock_apply_async: + assert Scan.all_objects.count() == 0 result = schedule_provider_scan(provider_instance) assert result is not None + assert Scan.all_objects.count() == 1 mock_apply_async.assert_called_once_with( kwargs={ diff --git a/api/src/backend/tasks/tests/test_utils.py b/api/src/backend/tasks/tests/test_utils.py new file mode 100644 index 00000000000..619626af9e5 --- /dev/null +++ b/api/src/backend/tasks/tests/test_utils.py @@ -0,0 +1,76 @@ +from datetime import datetime, timedelta, timezone +from unittest.mock import patch + +import pytest +from 
django_celery_beat.models import IntervalSchedule, PeriodicTask +from django_celery_results.models import TaskResult +from tasks.utils import get_next_execution_datetime + + +@pytest.mark.django_db +class TestGetNextExecutionDatetime: + @pytest.fixture + def setup_periodic_task(self, db): + # Create a periodic task with an hourly interval + interval = IntervalSchedule.objects.create( + every=1, period=IntervalSchedule.HOURS + ) + periodic_task = PeriodicTask.objects.create( + name="scan-perform-scheduled-123", + task="scan-perform-scheduled", + interval=interval, + ) + return periodic_task + + @pytest.fixture + def setup_task_result(self, db): + # Create a task result record + task_result = TaskResult.objects.create( + task_id="abc123", + task_name="scan-perform-scheduled", + status="SUCCESS", + date_created=datetime.now(timezone.utc) - timedelta(hours=1), + result="Success", + ) + return task_result + + def test_get_next_execution_datetime_success( + self, setup_task_result, setup_periodic_task + ): + task_result = setup_task_result + periodic_task = setup_periodic_task + + # Mock periodic_task_name on TaskResult + with patch.object( + TaskResult, "periodic_task_name", return_value=periodic_task.name + ): + next_execution = get_next_execution_datetime( + task_id=task_result.task_id, provider_id="123" + ) + + expected_time = task_result.date_created + timedelta(hours=1) + assert next_execution == expected_time + + def test_get_next_execution_datetime_fallback_to_provider_id( + self, setup_task_result, setup_periodic_task + ): + task_result = setup_task_result + + # Simulate the case where `periodic_task_name` is missing + with patch.object(TaskResult, "periodic_task_name", return_value=None): + next_execution = get_next_execution_datetime( + task_id=task_result.task_id, provider_id="123" + ) + + expected_time = task_result.date_created + timedelta(hours=1) + assert next_execution == expected_time + + def 
test_get_next_execution_datetime_periodic_task_does_not_exist( + self, setup_task_result + ): + task_result = setup_task_result + + with pytest.raises(PeriodicTask.DoesNotExist): + get_next_execution_datetime( + task_id=task_result.task_id, provider_id="nonexistent" + ) diff --git a/api/src/backend/tasks/utils.py b/api/src/backend/tasks/utils.py new file mode 100644 index 00000000000..5ac78a03ab8 --- /dev/null +++ b/api/src/backend/tasks/utils.py @@ -0,0 +1,26 @@ +from datetime import datetime, timedelta, timezone + +from django_celery_beat.models import PeriodicTask +from django_celery_results.models import TaskResult + + +def get_next_execution_datetime(task_id: str, provider_id: str) -> datetime: + task_instance = TaskResult.objects.get(task_id=task_id) + try: + periodic_task_instance = PeriodicTask.objects.get( + name=task_instance.periodic_task_name + ) + except PeriodicTask.DoesNotExist: + periodic_task_instance = PeriodicTask.objects.get( + name=f"scan-perform-scheduled-{provider_id}" + ) + + interval = periodic_task_instance.interval + + current_scheduled_time = datetime.combine( + datetime.now(timezone.utc).date(), + task_instance.date_created.time(), + tzinfo=timezone.utc, + ) + + return current_scheduled_time + timedelta(**{interval.period: interval.every}) diff --git a/docs/tutorials/prowler-app.md b/docs/tutorials/prowler-app.md index 40a40ed096a..3184d9e9586 100644 --- a/docs/tutorials/prowler-app.md +++ b/docs/tutorials/prowler-app.md @@ -133,3 +133,5 @@ While the scan is running, start exploring the findings in these sections: Issues - **Browse All Findings**: Detailed list of findings detected, where you can filter by severity, service, and more. Findings + +To view all `new` findings that have not been seen prior to this scan, click the `Delta` filter and select `new`. To view all `changed` findings that have had a status change (from `PASS` to `FAIL` for example), click the `Delta` filter and select `changed`. 
diff --git a/poetry.lock b/poetry.lock index cbb2504f70b..0f53a98c432 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1692,13 +1692,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.159.0" +version = "2.160.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_python_client-2.159.0-py2.py3-none-any.whl", hash = "sha256:baef0bb631a60a0bd7c0bf12a5499e3a40cd4388484de7ee55c1950bf820a0cf"}, - {file = "google_api_python_client-2.159.0.tar.gz", hash = "sha256:55197f430f25c907394b44fa078545ffef89d33fd4dca501b7db9f0d8e224bd6"}, + {file = "google_api_python_client-2.160.0-py2.py3-none-any.whl", hash = "sha256:63d61fb3e4cf3fb31a70a87f45567c22f6dfe87bbfa27252317e3e2c42900db4"}, + {file = "google_api_python_client-2.160.0.tar.gz", hash = "sha256:a8ccafaecfa42d15d5b5c3134ced8de08380019717fc9fb1ed510ca58eca3b7e"}, ] [package.dependencies] @@ -2122,13 +2122,13 @@ referencing = ">=0.31.0" [[package]] name = "kubernetes" -version = "31.0.0" +version = "32.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" files = [ - {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, - {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, + {file = "kubernetes-32.0.0-py2.py3-none-any.whl", hash = "sha256:60fd8c29e8e43d9c553ca4811895a687426717deba9c0a66fb2dcc3f5ef96692"}, + {file = "kubernetes-32.0.0.tar.gz", hash = "sha256:319fa840345a482001ac5d6062222daeb66ec4d1bcb3087402aed685adf0aecb"}, ] [package.dependencies] @@ -2536,13 +2536,13 @@ dev = ["click", "codecov", "mkdocs-gen-files", "mkdocs-git-authors-plugin", "mkd [[package]] name = "mkdocs-material" -version = "9.5.50" +version = "9.6.2" description = "Documentation that simply works" optional = false python-versions = ">=3.8" 
files = [ - {file = "mkdocs_material-9.5.50-py3-none-any.whl", hash = "sha256:f24100f234741f4d423a9d672a909d859668a4f404796be3cf035f10d6050385"}, - {file = "mkdocs_material-9.5.50.tar.gz", hash = "sha256:ae5fe16f3d7c9ccd05bb6916a7da7420cf99a9ce5e33debd9d40403a090d5825"}, + {file = "mkdocs_material-9.6.2-py3-none-any.whl", hash = "sha256:71d90dbd63b393ad11a4d90151dfe3dcbfcd802c0f29ce80bebd9bbac6abc753"}, + {file = "mkdocs_material-9.6.2.tar.gz", hash = "sha256:a3de1c5d4c745f10afa78b1a02f917b9dce0808fb206adc0f5bb48b58c1ca21f"}, ] [package.dependencies] @@ -2592,13 +2592,13 @@ test = ["pytest", "pytest-cov"] [[package]] name = "moto" -version = "5.0.27" +version = "5.0.28" description = "A library that allows you to easily mock out tests based on AWS infrastructure" optional = false python-versions = ">=3.8" files = [ - {file = "moto-5.0.27-py3-none-any.whl", hash = "sha256:27042fd94c8def0166d9f2ae8d39d9488d4b3115542b5fca88566c0424549013"}, - {file = "moto-5.0.27.tar.gz", hash = "sha256:6c123de7e0e5e6508a10c399ba3ecf2d5143f263f8e804fd4a7091941c3f5207"}, + {file = "moto-5.0.28-py3-none-any.whl", hash = "sha256:2dfbea1afe3b593e13192059a1a7fc4b3cf7fdf92e432070c22346efa45aa0f0"}, + {file = "moto-5.0.28.tar.gz", hash = "sha256:4d3437693411ec943c13c77de5b0b520c4b0a9ac850fead4ba2a54709e086e8b"}, ] [package.dependencies] @@ -3454,13 +3454,13 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "py-ocsf-models" -version = "0.2.0" +version = "0.3.0" description = "This is a Python implementation of the OCSF models. The models are used to represent the data of the OCSF Schema defined in https://schema.ocsf.io/." 
optional = false python-versions = "<3.13,>=3.9" files = [ - {file = "py_ocsf_models-0.2.0-py3-none-any.whl", hash = "sha256:ac75fd21077694b343ebaad3479194db113c274879b114277560ff287d5cd7b5"}, - {file = "py_ocsf_models-0.2.0.tar.gz", hash = "sha256:3e12648d05329e6776a0e6b1ffea87a3eb60aa7d8cb2c4afd69e5724f443ce03"}, + {file = "py_ocsf_models-0.3.0-py3-none-any.whl", hash = "sha256:3d31e379be5e4271f7faf62dee9c36798559a1f7f98dff142c0e4cfdb35e291c"}, + {file = "py_ocsf_models-0.3.0.tar.gz", hash = "sha256:ad46b7d9761b74010f06a894df2d9541989252b7ff738cd5c7edbf4283df2279"}, ] [package.dependencies] @@ -3635,13 +3635,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "3.3.3" +version = "3.3.4" description = "python code static checker" optional = false python-versions = ">=3.9.0" files = [ - {file = "pylint-3.3.3-py3-none-any.whl", hash = "sha256:26e271a2bc8bce0fc23833805a9076dd9b4d5194e2a02164942cb3cdc37b4183"}, - {file = "pylint-3.3.3.tar.gz", hash = "sha256:07c607523b17e6d16e2ae0d7ef59602e332caa762af64203c24b41c27139f36a"}, + {file = "pylint-3.3.4-py3-none-any.whl", hash = "sha256:289e6a1eb27b453b08436478391a48cd53bb0efb824873f949e709350f3de018"}, + {file = "pylint-3.3.4.tar.gz", hash = "sha256:74ae7a38b177e69a9b525d0794bd8183820bfa7eb68cc1bee6e8ed22a42be4ce"}, ] [package.dependencies] @@ -3652,7 +3652,7 @@ dill = [ {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +isort = ">=4.2.5,<5.13.0 || >5.13.0,<7" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} @@ -3804,13 +3804,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2024.2" +version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = 
"pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] @@ -5090,4 +5090,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.13" -content-hash = "19e7c4654f280256e53757abd04cb63938d34ae4d8305a77f21e75c891b5376d" +content-hash = "7511c2a5abad01f351b1d222d73a6b2c5374f386e8c035cce79934daca7ea762" diff --git a/prowler/compliance/aws/aws_audit_manager_control_tower_guardrails_aws.json b/prowler/compliance/aws/aws_audit_manager_control_tower_guardrails_aws.json index 34a5856fd72..fa4d71c05b8 100644 --- a/prowler/compliance/aws/aws_audit_manager_control_tower_guardrails_aws.json +++ b/prowler/compliance/aws/aws_audit_manager_control_tower_guardrails_aws.json @@ -28,7 +28,9 @@ "Service": "ebs" } ], - "Checks": [] + "Checks": [ + "ec2_ebs_volume_snapshots_exists" + ] }, { "Id": "1.0.3", @@ -42,7 +44,8 @@ } ], "Checks": [ - "ec2_ebs_default_encryption" + "ec2_ebs_default_encryption", + "ec2_ebs_volume_encryption" ] }, { @@ -87,7 +90,9 @@ } ], "Checks": [ - "iam_user_mfa_enabled_console_access" + "iam_user_mfa_enabled_console_access", + "iam_user_hardware_mfa_enabled", + "iam_root_mfa_enabled" ] }, { @@ -102,7 +107,9 @@ } ], "Checks": [ - "iam_user_mfa_enabled_console_access" + "iam_user_mfa_enabled_console_access", + "iam_user_hardware_mfa_enabled", + "iam_root_mfa_enabled" ] }, { @@ -117,7 +124,9 @@ } ], "Checks": [ - "iam_root_mfa_enabled" + "iam_root_mfa_enabled", + "iam_root_hardware_mfa_enabled", + "iam_user_mfa_enabled_console_access" ] }, { @@ -162,7 +171,10 @@ } ], "Checks": [ - 
"rds_instance_no_public_access" + "rds_instance_no_public_access", + "s3_bucket_public_access", + "s3_bucket_public_list_acl", + "s3_account_level_public_access_blocks" ] }, { @@ -192,7 +204,8 @@ } ], "Checks": [ - "rds_instance_storage_encrypted" + "rds_instance_storage_encrypted", + "rds_instance_transport_encrypted" ] }, { diff --git a/prowler/compliance/aws/cis_1.4_aws.json b/prowler/compliance/aws/cis_1.4_aws.json index da77feb9910..584ca284c63 100644 --- a/prowler/compliance/aws/cis_1.4_aws.json +++ b/prowler/compliance/aws/cis_1.4_aws.json @@ -584,7 +584,8 @@ "Id": "2.3.1", "Description": "Ensure that encryption is enabled for RDS Instances", "Checks": [ - "rds_instance_storage_encrypted" + "rds_instance_storage_encrypted", + "rds_instance_transport_encrypted" ], "Attributes": [ { diff --git a/prowler/compliance/aws/cis_1.5_aws.json b/prowler/compliance/aws/cis_1.5_aws.json index 6d5283b81cd..90ca3742e4a 100644 --- a/prowler/compliance/aws/cis_1.5_aws.json +++ b/prowler/compliance/aws/cis_1.5_aws.json @@ -584,7 +584,8 @@ "Id": "2.3.1", "Description": "Ensure that encryption is enabled for RDS Instances", "Checks": [ - "rds_instance_storage_encrypted" + "rds_instance_storage_encrypted", + "rds_instance_transport_encrypted" ], "Attributes": [ { diff --git a/prowler/compliance/aws/cis_2.0_aws.json b/prowler/compliance/aws/cis_2.0_aws.json index 09f2ebe08fb..1d12e296d58 100644 --- a/prowler/compliance/aws/cis_2.0_aws.json +++ b/prowler/compliance/aws/cis_2.0_aws.json @@ -303,7 +303,9 @@ { "Id": "1.22", "Description": "Ensure access to AWSCloudShellFullAccess is restricted", - "Checks": [], + "Checks": [ + "iam_policy_cloudshell_admin_not_attached" + ], "Attributes": [ { "Section": "1. 
Identity and Access Management", @@ -492,7 +494,8 @@ "Id": "2.1.2", "Description": "Ensure MFA Delete is enabled on S3 buckets", "Checks": [ - "s3_bucket_no_mfa_delete" + "s3_bucket_no_mfa_delete", + "cloudtrail_bucket_requires_mfa_delete" ], "Attributes": [ { @@ -581,7 +584,8 @@ "Id": "2.3.1", "Description": "Ensure that encryption is enabled for RDS Instances", "Checks": [ - "rds_instance_storage_encrypted" + "rds_instance_storage_encrypted", + "rds_instance_transport_encrypted" ], "Attributes": [ { @@ -1347,7 +1351,8 @@ "Id": "5.6", "Description": "Ensure that EC2 Metadata Service only allows IMDSv2", "Checks": [ - "ec2_instance_imdsv2_enabled" + "ec2_instance_imdsv2_enabled", + "ec2_instance_account_imdsv2_enabled" ], "Attributes": [ { diff --git a/prowler/providers/aws/aws_regions_by_service.json b/prowler/providers/aws/aws_regions_by_service.json index a589f266df4..d9ed6b445c4 100644 --- a/prowler/providers/aws/aws_regions_by_service.json +++ b/prowler/providers/aws/aws_regions_by_service.json @@ -6782,6 +6782,7 @@ "mcs": { "regions": { "aws": [ + "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", diff --git a/prowler/providers/aws/services/awslambda/awslambda_function_not_publicly_accessible/awslambda_function_not_publicly_accessible.py b/prowler/providers/aws/services/awslambda/awslambda_function_not_publicly_accessible/awslambda_function_not_publicly_accessible.py index 4ab57e0de8b..86c3cbfec81 100644 --- a/prowler/providers/aws/services/awslambda/awslambda_function_not_publicly_accessible/awslambda_function_not_publicly_accessible.py +++ b/prowler/providers/aws/services/awslambda/awslambda_function_not_publicly_accessible/awslambda_function_not_publicly_accessible.py @@ -10,14 +10,14 @@ def execute(self): report = Check_Report_AWS(metadata=self.metadata(), resource=function) report.status = "PASS" - report.status_extended = f"Lambda function {function.name} has a policy resource-based policy not public." 
+ report.status_extended = f"Lambda function {function.name} has a resource-based policy without public access." if is_policy_public( function.policy, awslambda_client.audited_account, is_cross_account_allowed=True, ): report.status = "FAIL" - report.status_extended = f"Lambda function {function.name} has a policy resource-based policy with public access." + report.status_extended = f"Lambda function {function.name} has a resource-based policy with public access." findings.append(report) diff --git a/prowler/providers/aws/services/directoryservice/directoryservice_service.py b/prowler/providers/aws/services/directoryservice/directoryservice_service.py index 42f3ec34c08..0beea818e20 100644 --- a/prowler/providers/aws/services/directoryservice/directoryservice_service.py +++ b/prowler/providers/aws/services/directoryservice/directoryservice_service.py @@ -130,21 +130,22 @@ def _describe_event_topics(self, regional_client): ) self.directories[directory.id].event_topics = event_topics except ClientError as error: - if error.response["Error"]["Code"] == "ClientException": - error_message = error.response["Error"]["Message"] - if "is in Deleting state" in error_message: - logger.warning( - f"{directory.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) - else: - logger.error( - f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) + if ( + "is in Deleting state" + in error.response["Error"]["Message"] + ): + logger.warning( + f"{directory.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) else: logger.error( f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) - continue + except Exception as error: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + except Exception as error: logger.error( f"{regional_client.region} -- 
{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" @@ -226,21 +227,18 @@ def _get_snapshot_limits(self, regional_client): ]["ManualSnapshotsLimitReached"], ) except ClientError as error: - if error.response["Error"]["Code"] == "ClientException": - error_message = error.response["Error"]["Message"] - if "is in Deleting state" in error_message: - logger.warning( - f"{directory.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) - else: - logger.error( - f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) + if "is in Deleting state" in error.response["Error"]["Message"]: + logger.warning( + f"{directory.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) else: logger.error( f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) - continue + except Exception as error: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( diff --git a/prowler/providers/aws/services/elasticache/elasticache_service.py b/prowler/providers/aws/services/elasticache/elasticache_service.py index 5fc295630e8..6e11a85bc6a 100644 --- a/prowler/providers/aws/services/elasticache/elasticache_service.py +++ b/prowler/providers/aws/services/elasticache/elasticache_service.py @@ -147,6 +147,12 @@ def _list_tags_for_resource(self): logger.warning( f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) + except ( + regional_client.exceptions.InvalidReplicationGroupStateFault + ) as error: + logger.warning( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" @@ -163,6 +169,12 @@ 
def _list_tags_for_resource(self): logger.warning( f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) + except ( + regional_client.exceptions.InvalidReplicationGroupStateFault + ) as error: + logger.warning( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" diff --git a/prowler/providers/azure/services/entra/entra_security_defaults_enabled/entra_security_defaults_enabled.py b/prowler/providers/azure/services/entra/entra_security_defaults_enabled/entra_security_defaults_enabled.py index b7deed387b1..3a7e1f1bc54 100644 --- a/prowler/providers/azure/services/entra/entra_security_defaults_enabled/entra_security_defaults_enabled.py +++ b/prowler/providers/azure/services/entra/entra_security_defaults_enabled/entra_security_defaults_enabled.py @@ -15,7 +15,7 @@ def execute(self) -> Check_Report_Azure: ) report.subscription = f"Tenant: {tenant}" report.status = "FAIL" - report.status_extended = "Entra security defaults is diabled." + report.status_extended = "Entra security defaults is disabled." 
if getattr(security_default, "is_enabled", False): report.status = "PASS" diff --git a/prowler/providers/gcp/services/compute/compute_public_address_shodan/compute_public_address_shodan.metadata.json b/prowler/providers/gcp/services/compute/compute_public_address_shodan/compute_public_address_shodan.metadata.json index 5c19d5d1f72..d897b42a1fe 100644 --- a/prowler/providers/gcp/services/compute/compute_public_address_shodan/compute_public_address_shodan.metadata.json +++ b/prowler/providers/gcp/services/compute/compute_public_address_shodan/compute_public_address_shodan.metadata.json @@ -1,5 +1,5 @@ { - "Provider": "compute", + "Provider": "gcp", "CheckID": "compute_public_address_shodan", "CheckTitle": "Check if any of the Public Addresses are in Shodan (requires Shodan API KEY).", "CheckType": [ diff --git a/prowler/providers/microsoft365/lib/regions/microsoft365_regions.py b/prowler/providers/microsoft365/lib/regions/regions.py similarity index 100% rename from prowler/providers/microsoft365/lib/regions/microsoft365_regions.py rename to prowler/providers/microsoft365/lib/regions/regions.py diff --git a/prowler/providers/microsoft365/microsoft365_provider.py b/prowler/providers/microsoft365/microsoft365_provider.py index dff8da4d978..af7a723f679 100644 --- a/prowler/providers/microsoft365/microsoft365_provider.py +++ b/prowler/providers/microsoft365/microsoft365_provider.py @@ -50,9 +50,7 @@ Microsoft365TenantIdAndClientSecretNotBelongingToClientIdError, ) from prowler.providers.microsoft365.lib.mutelist.mutelist import Microsoft365Mutelist -from prowler.providers.microsoft365.lib.regions.microsoft365_regions import ( - get_regions_config, -) +from prowler.providers.microsoft365.lib.regions.regions import get_regions_config from prowler.providers.microsoft365.models import ( Microsoft365IdentityInfo, Microsoft365RegionConfig, diff --git a/prowler/providers/microsoft365/services/admincenter/admincenter_service.py 
b/prowler/providers/microsoft365/services/admincenter/admincenter_service.py index 2fc14d1ae6e..fed450ca82a 100644 --- a/prowler/providers/microsoft365/services/admincenter/admincenter_service.py +++ b/prowler/providers/microsoft365/services/admincenter/admincenter_service.py @@ -22,11 +22,13 @@ def __init__(self, provider: Microsoft365Provider): gather( self._get_directory_roles(), self._get_groups(), + self._get_domains(), ) ) self.directory_roles = attributes[0] self.groups = attributes[1] + self.domains = attributes[2] async def _get_users(self): logger.info("Microsoft365 - Getting users...") @@ -130,6 +132,28 @@ async def _get_groups(self): ) return groups + async def _get_domains(self): + logger.info("Microsoft365 - Getting domains...") + domains = {} + try: + domains_list = await self.client.domains.get() + domains.update({}) + for domain in domains_list.value: + domains.update( + { + domain.id: Domain( + id=domain.id, + password_validity_period=domain.password_validity_period_in_days, + ) + } + ) + + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + return domains + class User(BaseModel): id: str @@ -149,3 +173,8 @@ class Group(BaseModel): id: str name: str visibility: str + + +class Domain(BaseModel): + id: str + password_validity_period: int diff --git a/prowler/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/__init__.py b/prowler/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/prowler/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire.metadata.json b/prowler/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire.metadata.json new file mode 100644 index 00000000000..13528b00083 
--- /dev/null +++ b/prowler/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire.metadata.json @@ -0,0 +1,30 @@ +{ + "Provider": "microsoft365", + "CheckID": "admincenter_settings_password_never_expire", + "CheckTitle": "Ensure the 'Password expiration policy' is set to 'Set passwords to never expire (recommended)'", + "CheckType": [], + "ServiceName": "admincenter", + "SubServiceName": "", + "ResourceIdTemplate": "", + "Severity": "medium", + "ResourceType": "Microsoft365Domain", + "Description": "This control ensures that the password expiration policy is set to 'Set passwords to never expire (recommended)'. This aligns with modern recommendations to enhance security by avoiding arbitrary password changes and focusing on supplementary controls like MFA.", + "Risk": "Arbitrary password expiration policies can lead to weaker passwords due to frequent changes. Users may adopt insecure habits such as using simple, memorable passwords.", + "RelatedUrl": "https://www.cisecurity.org/insights/white-papers/cis-password-policy-guide", + "Remediation": { + "Code": { + "CLI": "Set-MsolUser -UserPrincipalName <user@domain.com> -PasswordNeverExpires $true", + "NativeIaC": "", + "Other": "", + "Terraform": "" + }, + "Recommendation": { + "Text": "Enable the 'Never Expire Passwords' option in Microsoft 365 Admin Center.", + "Url": "https://learn.microsoft.com/en-us/microsoft-365/admin/misc/password-policy-recommendations?view=o365-worldwide" + } + }, + "Categories": [], + "DependsOn": [], + "RelatedTo": [], + "Notes": "" +} diff --git a/prowler/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire.py b/prowler/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire.py new file mode 100644 index 00000000000..8de62dd8d51 --- /dev/null +++ 
b/prowler/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire.py @@ -0,0 +1,27 @@ +from prowler.lib.check.models import Check, Check_Report_Microsoft365 +from prowler.providers.microsoft365.services.admincenter.admincenter_client import ( + admincenter_client, +) + + +class admincenter_settings_password_never_expire(Check): + def execute(self) -> Check_Report_Microsoft365: + findings = [] + for domain in admincenter_client.domains.values(): + report = Check_Report_Microsoft365(self.metadata(), resource=domain) + report.resource_id = domain.id + report.resource_name = domain.id + report.status = "FAIL" + report.status_extended = ( + f"Domain {domain.id} does not have a Password never expires policy." + ) + + if domain.password_validity_period == 2147483647: + report.status = "PASS" + report.status_extended = ( + f"Domain {domain.id} Password policy is set to never expire." + ) + + findings.append(report) + + return findings diff --git a/prowler/providers/microsoft365/services/admincenter/admincenter_users_between_two_and_four_global_admins/admincenter_users_between_two_and_four_global_admins.py b/prowler/providers/microsoft365/services/admincenter/admincenter_users_between_two_and_four_global_admins/admincenter_users_between_two_and_four_global_admins.py index aa0fc978c03..6002e67e4c1 100644 --- a/prowler/providers/microsoft365/services/admincenter/admincenter_users_between_two_and_four_global_admins/admincenter_users_between_two_and_four_global_admins.py +++ b/prowler/providers/microsoft365/services/admincenter/admincenter_users_between_two_and_four_global_admins/admincenter_users_between_two_and_four_global_admins.py @@ -9,9 +9,7 @@ def execute(self) -> Check_Report_Microsoft365: findings = [] directory_roles = admincenter_client.directory_roles - report = Check_Report_Microsoft365( - metadata=self.metadata(), resource=admincenter_client.directory_roles - ) + report = 
Check_Report_Microsoft365(metadata=self.metadata(), resource={}) report.status = "FAIL" report.resource_name = "Global Administrator" diff --git a/prowler/providers/microsoft365/services/entra/__init__.py b/prowler/providers/microsoft365/services/entra/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/prowler/providers/microsoft365/services/entra/entra_client.py b/prowler/providers/microsoft365/services/entra/entra_client.py new file mode 100644 index 00000000000..1a3b921adf3 --- /dev/null +++ b/prowler/providers/microsoft365/services/entra/entra_client.py @@ -0,0 +1,4 @@ +from prowler.providers.common.provider import Provider +from prowler.providers.microsoft365.services.entra.entra_service import Entra + +entra_client = Entra(Provider.get_global_provider()) diff --git a/prowler/providers/microsoft365/services/entra/entra_service.py b/prowler/providers/microsoft365/services/entra/entra_service.py new file mode 100644 index 00000000000..8561a31eddc --- /dev/null +++ b/prowler/providers/microsoft365/services/entra/entra_service.py @@ -0,0 +1,105 @@ +from asyncio import gather, get_event_loop +from typing import List, Optional + +from pydantic import BaseModel + +from prowler.lib.logger import logger +from prowler.providers.microsoft365.lib.service.service import Microsoft365Service +from prowler.providers.microsoft365.microsoft365_provider import Microsoft365Provider + + +class Entra(Microsoft365Service): + def __init__(self, provider: Microsoft365Provider): + super().__init__(provider) + + loop = get_event_loop() + + attributes = loop.run_until_complete( + gather( + self._get_authorization_policy(), + ) + ) + + self.authorization_policy = attributes[0] + + async def _get_authorization_policy(self): + logger.info("Entra - Getting authorization policy...") + + authorization_policy = {} + try: + auth_policy = await self.client.policies.authorization_policy.get() + + default_user_role_permissions = getattr( + auth_policy, 
"default_user_role_permissions", None + ) + + authorization_policy.update( + { + auth_policy.id: AuthorizationPolicy( + id=auth_policy.id, + name=auth_policy.display_name, + description=auth_policy.description, + default_user_role_permissions=DefaultUserRolePermissions( + allowed_to_create_apps=getattr( + default_user_role_permissions, + "allowed_to_create_apps", + None, + ), + allowed_to_create_security_groups=getattr( + default_user_role_permissions, + "allowed_to_create_security_groups", + None, + ), + allowed_to_create_tenants=getattr( + default_user_role_permissions, + "allowed_to_create_tenants", + None, + ), + allowed_to_read_bitlocker_keys_for_owned_device=getattr( + default_user_role_permissions, + "allowed_to_read_bitlocker_keys_for_owned_device", + None, + ), + allowed_to_read_other_users=getattr( + default_user_role_permissions, + "allowed_to_read_other_users", + None, + ), + odata_type=getattr( + default_user_role_permissions, "odata_type", None + ), + permission_grant_policies_assigned=[ + policy_assigned + for policy_assigned in getattr( + default_user_role_permissions, + "permission_grant_policies_assigned", + [], + ) + ], + ), + ) + } + ) + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + return authorization_policy + + +class DefaultUserRolePermissions(BaseModel): + allowed_to_create_apps: Optional[bool] + allowed_to_create_security_groups: Optional[bool] + allowed_to_create_tenants: Optional[bool] + allowed_to_read_bitlocker_keys_for_owned_device: Optional[bool] + allowed_to_read_other_users: Optional[bool] + odata_type: Optional[str] + permission_grant_policies_assigned: Optional[List[str]] = None + + +class AuthorizationPolicy(BaseModel): + id: str + name: str + description: str + default_user_role_permissions: Optional[DefaultUserRolePermissions] diff --git a/pyproject.toml b/pyproject.toml index 9cb926c96ec..b9cb7f8b1f5 100644 --- a/pyproject.toml +++ b/pyproject.toml 
@@ -55,19 +55,19 @@ cryptography = "43.0.1" dash = "2.18.2" dash-bootstrap-components = "1.6.0" detect-secrets = "1.5.0" -google-api-python-client = "2.159.0" +google-api-python-client = "2.160.0" google-auth-httplib2 = ">=0.1,<0.3" jsonschema = "4.23.0" -kubernetes = "31.0.0" +kubernetes = "32.0.0" microsoft-kiota-abstractions = "1.9.1" msgraph-sdk = "1.18.0" numpy = "2.0.2" pandas = "2.2.3" -py-ocsf-models = "0.2.0" +py-ocsf-models = "0.3.0" pydantic = "1.10.18" python = ">=3.9,<3.13" python-dateutil = "^2.9.0.post0" -pytz = "2024.2" +pytz = "2025.1" schema = "0.7.7" shodan = "1.31.0" slack-sdk = "3.34.0" @@ -82,10 +82,10 @@ docker = "7.1.0" flake8 = "7.1.1" freezegun = "1.5.1" mock = "5.1.0" -moto = {extras = ["all"], version = "5.0.27"} +moto = {extras = ["all"], version = "5.0.28"} openapi-schema-validator = "0.6.3" openapi-spec-validator = "0.7.1" -pylint = "3.3.3" +pylint = "3.3.4" pytest = "8.3.4" pytest-cov = "6.0.0" pytest-env = "1.1.5" @@ -100,7 +100,7 @@ optional = true [tool.poetry.group.docs.dependencies] mkdocs = "1.6.1" mkdocs-git-revision-date-localized-plugin = "1.3.0" -mkdocs-material = "9.5.50" +mkdocs-material = "9.6.2" mkdocs-material-extensions = "1.3.1" [tool.poetry.scripts] diff --git a/tests/lib/outputs/ocsf/ocsf_test.py b/tests/lib/outputs/ocsf/ocsf_test.py index e7bd8b20590..d2728499057 100644 --- a/tests/lib/outputs/ocsf/ocsf_test.py +++ b/tests/lib/outputs/ocsf/ocsf_test.py @@ -175,7 +175,7 @@ def test_batch_write_data_to_file(self): "vendor_name": "Prowler", "version": prowler_version, }, - "version": "1.3.0", + "version": "1.4.0", "profiles": ["cloud", "datetime"], "tenant_uid": "test-organization-id", }, diff --git a/tests/providers/aws/services/awslambda/awslambda_function_not_publicly_accessible/awslambda_function_not_publicly_accessible_test.py b/tests/providers/aws/services/awslambda/awslambda_function_not_publicly_accessible/awslambda_function_not_publicly_accessible_test.py index d7f8d75a40f..99aaec831a6 100644 --- 
a/tests/providers/aws/services/awslambda/awslambda_function_not_publicly_accessible/awslambda_function_not_publicly_accessible_test.py +++ b/tests/providers/aws/services/awslambda/awslambda_function_not_publicly_accessible/awslambda_function_not_publicly_accessible_test.py @@ -107,7 +107,7 @@ def test_function_public(self): assert result[0].status == "FAIL" assert ( result[0].status_extended - == f"Lambda function {function_name} has a policy resource-based policy with public access." + == f"Lambda function {function_name} has a resource-based policy with public access." ) assert result[0].resource_tags == [{"tag1": "value1", "tag2": "value2"}] @@ -184,7 +184,7 @@ def test_function_public_with_source_account(self): assert result[0].status == "PASS" assert ( result[0].status_extended - == f"Lambda function {function_name} has a policy resource-based policy not public." + == f"Lambda function {function_name} has a resource-based policy without public access." ) assert result[0].resource_tags == [{"tag1": "value1", "tag2": "value2"}] @@ -260,7 +260,7 @@ def test_function_not_public(self): assert result[0].status == "PASS" assert ( result[0].status_extended - == f"Lambda function {function_name} has a policy resource-based policy not public." + == f"Lambda function {function_name} has a resource-based policy without public access." ) assert result[0].resource_tags == [{"tag1": "value1", "tag2": "value2"}] @@ -319,7 +319,7 @@ def test_function_public_with_canonical(self): assert result[0].status == "FAIL" assert ( result[0].status_extended - == f"Lambda function {function_name} has a policy resource-based policy with public access." + == f"Lambda function {function_name} has a resource-based policy with public access." 
) assert result[0].resource_tags == [] @@ -492,7 +492,7 @@ def test_function_public_with_alb(self): assert result[0].status == "FAIL" assert ( result[0].status_extended - == "Lambda function test-public-lambda has a policy resource-based policy with public access." + == "Lambda function test-public-lambda has a resource-based policy with public access." ) assert result[0].resource_tags == [{"tag1": "value1", "tag2": "value2"}] @@ -552,7 +552,7 @@ def test_function_could_be_invoked_by_specific_aws_account(self): assert result[0].status == "PASS" assert ( result[0].status_extended - == f"Lambda function {function_name} has a policy resource-based policy not public." + == f"Lambda function {function_name} has a resource-based policy without public access." ) assert result[0].resource_tags == [] @@ -612,7 +612,7 @@ def test_function_could_be_invoked_by_specific_other_aws_account(self): assert result[0].status == "PASS" assert ( result[0].status_extended - == f"Lambda function {function_name} has a policy resource-based policy not public." + == f"Lambda function {function_name} has a resource-based policy without public access." ) assert result[0].resource_tags == [] @@ -681,7 +681,7 @@ def test_function_public_policy_with_several_statements(self): assert result[0].status == "FAIL" assert ( result[0].status_extended - == f"Lambda function {function_name} has a policy resource-based policy with public access." + == f"Lambda function {function_name} has a resource-based policy with public access." 
) assert result[0].resource_id == function_name assert result[0].resource_arn == function_arn diff --git a/tests/providers/azure/services/entra/entra_security_defaults_enabled/entra_security_defaults_enabled_test.py b/tests/providers/azure/services/entra/entra_security_defaults_enabled/entra_security_defaults_enabled_test.py index b1887a7425f..33f3f5a8395 100644 --- a/tests/providers/azure/services/entra/entra_security_defaults_enabled/entra_security_defaults_enabled_test.py +++ b/tests/providers/azure/services/entra/entra_security_defaults_enabled/entra_security_defaults_enabled_test.py @@ -45,7 +45,7 @@ def test_entra_tenant_empty(self): result = check.execute() assert len(result) == 1 assert result[0].status == "FAIL" - assert result[0].status_extended == "Entra security defaults is diabled." + assert result[0].status_extended == "Entra security defaults is disabled." assert result[0].subscription == f"Tenant: {DOMAIN}" assert result[0].resource_name == "" assert result[0].resource_id == "" @@ -109,7 +109,7 @@ def test_entra_security_default_disabled(self): result = check.execute() assert len(result) == 1 assert result[0].status == "FAIL" - assert result[0].status_extended == "Entra security defaults is diabled." + assert result[0].status_extended == "Entra security defaults is disabled." 
assert result[0].subscription == f"Tenant: {DOMAIN}" assert result[0].resource_name == "Sec Default" assert result[0].resource_id == id diff --git a/tests/providers/microsoft365/lib/regions/microsoft365_regions_test.py b/tests/providers/microsoft365/lib/regions/microsoft365_regions_test.py index 3e358511b8b..49ec5a480dc 100644 --- a/tests/providers/microsoft365/lib/regions/microsoft365_regions_test.py +++ b/tests/providers/microsoft365/lib/regions/microsoft365_regions_test.py @@ -1,6 +1,6 @@ from azure.identity import AzureAuthorityHosts -from prowler.providers.microsoft365.lib.regions.microsoft365_regions import ( +from prowler.providers.microsoft365.lib.regions.regions import ( MICROSOFT365_CHINA_CLOUD, MICROSOFT365_GENERIC_CLOUD, MICROSOFT365_US_GOV_CLOUD, diff --git a/tests/providers/microsoft365/microsoft365_fixtures.py b/tests/providers/microsoft365/microsoft365_fixtures.py index 79e8ec118f1..1dbb09c6125 100644 --- a/tests/providers/microsoft365/microsoft365_fixtures.py +++ b/tests/providers/microsoft365/microsoft365_fixtures.py @@ -29,7 +29,6 @@ def set_mocked_microsoft365_provider( audit_config: dict = None, azure_region_config: Microsoft365RegionConfig = Microsoft365RegionConfig(), ) -> Microsoft365Provider: - provider = MagicMock() provider.type = "microsoft365" provider.session.credentials = credentials diff --git a/tests/providers/microsoft365/microsoft365_provider_test.py b/tests/providers/microsoft365/microsoft365_provider_test.py index b475a02ca66..7658301e442 100644 --- a/tests/providers/microsoft365/microsoft365_provider_test.py +++ b/tests/providers/microsoft365/microsoft365_provider_test.py @@ -212,7 +212,6 @@ def test_test_connection_browser_auth(self): "prowler.providers.microsoft365.microsoft365_provider.GraphServiceClient" ) as mock_graph_client, ): - # Mock the return value of DefaultAzureCredential mock_credentials = MagicMock() mock_credentials.get_token.return_value = AccessToken( @@ -271,7 +270,6 @@ def 
test_test_connection_with_httpresponseerror(self): with patch( "prowler.providers.microsoft365.microsoft365_provider.Microsoft365Provider.setup_session" ) as mock_setup_session: - mock_setup_session.side_effect = Microsoft365HTTPResponseError( file="test_file", original_exception="Simulated HttpResponseError" ) diff --git a/tests/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire_test.py b/tests/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire_test.py new file mode 100644 index 00000000000..b49bc980953 --- /dev/null +++ b/tests/providers/microsoft365/services/admincenter/admincenter_settings_password_never_expire/admincenter_settings_password_never_expire_test.py @@ -0,0 +1,112 @@ +from unittest import mock +from uuid import uuid4 + +from tests.providers.microsoft365.microsoft365_fixtures import ( + DOMAIN, + set_mocked_microsoft365_provider, +) + + +class Test_admincenter_settings_password_never_expire: + def test_admincenter_no_domains(self): + admincenter_client = mock.MagicMock + admincenter_client.audited_tenant = "audited_tenant" + admincenter_client.audited_domain = DOMAIN + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_microsoft365_provider(), + ), + mock.patch( + "prowler.providers.microsoft365.services.admincenter.admincenter_settings_password_never_expire.admincenter_settings_password_never_expire.admincenter_client", + new=admincenter_client, + ), + ): + from prowler.providers.microsoft365.services.admincenter.admincenter_settings_password_never_expire.admincenter_settings_password_never_expire import ( + admincenter_settings_password_never_expire, + ) + + admincenter_client.domains = {} + + check = admincenter_settings_password_never_expire() + result = check.execute() + assert len(result) == 0 + + def 
test_admincenter_domain_password_expire(self): + admincenter_client = mock.MagicMock + admincenter_client.audited_tenant = "audited_tenant" + admincenter_client.audited_domain = DOMAIN + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_microsoft365_provider(), + ), + mock.patch( + "prowler.providers.microsoft365.services.admincenter.admincenter_settings_password_never_expire.admincenter_settings_password_never_expire.admincenter_client", + new=admincenter_client, + ), + ): + from prowler.providers.microsoft365.services.admincenter.admincenter_service import ( + Domain, + ) + from prowler.providers.microsoft365.services.admincenter.admincenter_settings_password_never_expire.admincenter_settings_password_never_expire import ( + admincenter_settings_password_never_expire, + ) + + id_domain = str(uuid4()) + + admincenter_client.domains = { + id_domain: Domain(id=id_domain, password_validity_period=5), + } + + check = admincenter_settings_password_never_expire() + result = check.execute() + assert len(result) == 1 + assert result[0].status == "FAIL" + assert ( + result[0].status_extended + == f"Domain {id_domain} does not have a Password never expires policy." 
+ ) + assert result[0].resource_name == id_domain + assert result[0].resource_id == id_domain + + def test_admincenter_password_not_expire(self): + admincenter_client = mock.MagicMock + admincenter_client.audited_tenant = "audited_tenant" + admincenter_client.audited_domain = DOMAIN + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_microsoft365_provider(), + ), + mock.patch( + "prowler.providers.microsoft365.services.admincenter.admincenter_settings_password_never_expire.admincenter_settings_password_never_expire.admincenter_client", + new=admincenter_client, + ), + ): + from prowler.providers.microsoft365.services.admincenter.admincenter_service import ( + Domain, + ) + from prowler.providers.microsoft365.services.admincenter.admincenter_settings_password_never_expire.admincenter_settings_password_never_expire import ( + admincenter_settings_password_never_expire, + ) + + id_domain = str(uuid4()) + + admincenter_client.domains = { + id_domain: Domain(id=id_domain, password_validity_period=2147483647), + } + + check = admincenter_settings_password_never_expire() + result = check.execute() + assert len(result) == 1 + assert result[0].status == "PASS" + assert ( + result[0].status_extended + == f"Domain {id_domain} Password policy is set to never expire." 
+ ) + assert result[0].resource_name == id_domain + assert result[0].resource_id == id_domain diff --git a/tests/providers/microsoft365/services/entra/microsoft365_entra_service_test.py b/tests/providers/microsoft365/services/entra/microsoft365_entra_service_test.py new file mode 100644 index 00000000000..86109478e2a --- /dev/null +++ b/tests/providers/microsoft365/services/entra/microsoft365_entra_service_test.py @@ -0,0 +1,45 @@ +from unittest.mock import patch + +from prowler.providers.microsoft365.models import Microsoft365IdentityInfo +from prowler.providers.microsoft365.services.entra.entra_service import ( + AuthorizationPolicy, + Entra, +) +from tests.providers.microsoft365.microsoft365_fixtures import ( + DOMAIN, + set_mocked_microsoft365_provider, +) + + +async def mock_entra_get_authorization_policy(_): + return { + "id-1": AuthorizationPolicy( + id="id-1", + name="Name 1", + description="Description 1", + default_user_role_permissions=None, + ) + } + + +@patch( + "prowler.providers.microsoft365.services.entra.entra_service.Entra._get_authorization_policy", + new=mock_entra_get_authorization_policy, +) +class Test_Entra_Service: + def test_get_client(self): + admincenter_client = Entra( + set_mocked_microsoft365_provider( + identity=Microsoft365IdentityInfo(tenant_domain=DOMAIN) + ) + ) + assert admincenter_client.client.__class__.__name__ == "GraphServiceClient" + + def test_get_authorization_policy(self): + entra_client = Entra(set_mocked_microsoft365_provider()) + assert entra_client.authorization_policy["id-1"].id == "id-1" + assert entra_client.authorization_policy["id-1"].name == "Name 1" + assert entra_client.authorization_policy["id-1"].description == "Description 1" + assert not entra_client.authorization_policy[ + "id-1" + ].default_user_role_permissions diff --git a/ui/app/(prowler)/findings/page.tsx b/ui/app/(prowler)/findings/page.tsx index 0d35cac8c46..39d2ad78465 100644 --- a/ui/app/(prowler)/findings/page.tsx +++ 
b/ui/app/(prowler)/findings/page.tsx @@ -26,21 +26,19 @@ export default async function Findings({ searchParams: SearchParamsProps; }) { const searchParamsKey = JSON.stringify(searchParams || {}); - const defaultSort = "severity,status"; - const sort = searchParams.sort?.toString() || defaultSort; + const sort = searchParams.sort?.toString(); // Make sure the sort is correctly encoded - const encodedSort = sort.replace(/^\+/, ""); - - // Extract all filter parameters and combine with default filters - const defaultFilters = { - "filter[status__in]": "FAIL, PASS", - }; + const encodedSort = sort?.replace(/^\+/, ""); const filters: Record = { - ...defaultFilters, ...Object.fromEntries( - Object.entries(searchParams).filter(([key]) => key.startsWith("filter[")), + Object.entries(searchParams) + .filter(([key]) => key.startsWith("filter[")) + .map(([key, value]) => [ + key, + Array.isArray(value) ? value.join(",") : value?.toString() || "", + ]), ), }; @@ -137,21 +135,20 @@ const SSRDataTable = async ({ searchParams: SearchParamsProps; }) => { const page = parseInt(searchParams.page?.toString() || "1", 10); - const defaultSort = "severity,status"; + const defaultSort = "severity,status,-inserted_at"; const sort = searchParams.sort?.toString() || defaultSort; // Make sure the sort is correctly encoded const encodedSort = sort.replace(/^\+/, ""); - // Extract all filter parameters and combine with default filters - const defaultFilters = { - "filter[status__in]": "FAIL, PASS", - }; - const filters: Record = { - ...defaultFilters, ...Object.fromEntries( - Object.entries(searchParams).filter(([key]) => key.startsWith("filter[")), + Object.entries(searchParams) + .filter(([key]) => key.startsWith("filter[")) + .map(([key, value]) => [ + key, + Array.isArray(value) ? 
value.join(",") : value?.toString() || "", + ]), ), }; diff --git a/ui/app/(prowler)/page.tsx b/ui/app/(prowler)/page.tsx index ff2ab3d28cf..c6e07483623 100644 --- a/ui/app/(prowler)/page.tsx +++ b/ui/app/(prowler)/page.tsx @@ -129,7 +129,7 @@ const SSRFindingsBySeverity = async ({ const SSRDataNewFindingsTable = async () => { const page = 1; - const sort = "severity,updated_at"; + const sort = "severity,-inserted_at"; const defaultFilters = { "filter[status__in]": "FAIL", diff --git a/ui/app/(prowler)/scans/page.tsx b/ui/app/(prowler)/scans/page.tsx index 97816271188..db38bd4e97a 100644 --- a/ui/app/(prowler)/scans/page.tsx +++ b/ui/app/(prowler)/scans/page.tsx @@ -8,7 +8,6 @@ import { ButtonRefreshData, NoProvidersAdded, NoProvidersConnected, - ScanWarningBar, } from "@/components/scans"; import { LaunchScanWorkflow } from "@/components/scans/launch-workflow"; import { SkeletonTableScans } from "@/components/scans/table"; @@ -73,8 +72,6 @@ export default async function Scans({
- - )} diff --git a/ui/components/auth/oss/auth-form.tsx b/ui/components/auth/oss/auth-form.tsx index e215abad4f7..967bbdd3526 100644 --- a/ui/components/auth/oss/auth-form.tsx +++ b/ui/components/auth/oss/auth-form.tsx @@ -1,7 +1,7 @@ "use client"; import { zodResolver } from "@hookform/resolvers/zod"; -import { Link } from "@nextui-org/react"; +import { Checkbox, Link } from "@nextui-org/react"; import { useRouter } from "next/navigation"; import { useForm } from "react-hook-form"; import { z } from "zod"; @@ -11,7 +11,12 @@ import { NotificationIcon, ProwlerExtended } from "@/components/icons"; import { ThemeSwitch } from "@/components/ThemeSwitch"; import { useToast } from "@/components/ui"; import { CustomButton, CustomInput } from "@/components/ui/custom"; -import { Form } from "@/components/ui/form"; +import { + Form, + FormControl, + FormField, + FormMessage, +} from "@/components/ui/form"; import { ApiError, authFormSchema } from "@/types"; export const AuthForm = ({ @@ -143,7 +148,7 @@ export const AuthForm = ({
{type === "sign-up" && ( @@ -167,6 +172,7 @@ export const AuthForm = ({ /> )} + {/* {type === "sign-in" && ( @@ -213,13 +223,44 @@ export const AuthForm = ({ isDisabled={invitationToken !== null && true} /> )} + + {process.env.NEXT_PUBLIC_IS_CLOUD_ENV === "true" && ( + ( + <> + + field.onChange(e.target.checked)} + > + I agree with the  + + Terms of Service + +  of Prowler + + + + + )} + /> + )} )} - {form.formState.errors?.email && ( -
+ {type === "sign-in" && form.formState.errors?.email && ( +
-

No user found

+

Invalid email or password

)} diff --git a/ui/components/scans/index.ts b/ui/components/scans/index.ts index 98157857ab0..aa42edfe3c3 100644 --- a/ui/components/scans/index.ts +++ b/ui/components/scans/index.ts @@ -2,4 +2,3 @@ export * from "./button-refresh-data"; export * from "./link-to-findings-from-scan"; export * from "./no-providers-added"; export * from "./no-providers-connected"; -export * from "./scan-warning-bar"; diff --git a/ui/components/scans/scan-warning-bar.tsx b/ui/components/scans/scan-warning-bar.tsx deleted file mode 100644 index 4c9acbb9783..00000000000 --- a/ui/components/scans/scan-warning-bar.tsx +++ /dev/null @@ -1,18 +0,0 @@ -"use client"; - -import { InfoIcon } from "../icons"; - -export const ScanWarningBar = () => { - return ( -
- -
- Waiting for Your Scan to Show Up? -

- It may take a few minutes for the scan to appear on the table and be - displayed. -

-
-
- ); -}; diff --git a/ui/components/scans/table/scan-detail.tsx b/ui/components/scans/table/scan-detail.tsx index 6e9c3e288c2..7307ff53f02 100644 --- a/ui/components/scans/table/scan-detail.tsx +++ b/ui/components/scans/table/scan-detail.tsx @@ -117,11 +117,9 @@ export const ScanDetail = ({ - {scan.next_scan_at && ( - - - - )} + + +
diff --git a/ui/components/scans/table/scans/column-get-scans.tsx b/ui/components/scans/table/scans/column-get-scans.tsx index 55087a0e666..72a70f93119 100644 --- a/ui/components/scans/table/scans/column-get-scans.tsx +++ b/ui/components/scans/table/scans/column-get-scans.tsx @@ -123,13 +123,13 @@ export const ColumnGetScans: ColumnDef[] = [ }, }, { - accessorKey: "next_scan_at", - header: "Next scan", + accessorKey: "scheduled_at", + header: "Scheduled at", cell: ({ row }) => { const { - attributes: { next_scan_at }, + attributes: { scheduled_at }, } = getScanData(row); - return ; + return ; }, }, { diff --git a/ui/components/ui/custom/custom-input.tsx b/ui/components/ui/custom/custom-input.tsx index 1a4cdb5dfed..d01302e16f0 100644 --- a/ui/components/ui/custom/custom-input.tsx +++ b/ui/components/ui/custom/custom-input.tsx @@ -23,6 +23,7 @@ interface CustomInputProps { isRequired?: boolean; isInvalid?: boolean; isDisabled?: boolean; + showFormMessage?: boolean; } export const CustomInput = ({ @@ -41,6 +42,7 @@ export const CustomInput = ({ isRequired = true, isInvalid, isDisabled = false, + showFormMessage = true, }: CustomInputProps) => { const [isPasswordVisible, setIsPasswordVisible] = useState(false); const [isConfirmPasswordVisible, setIsConfirmPasswordVisible] = @@ -112,7 +114,9 @@ export const CustomInput = ({ {...field} /> - + {showFormMessage && ( + + )} )} /> diff --git a/ui/types/authFormSchema.ts b/ui/types/authFormSchema.ts index ac0fc46dcf5..58737a1f8aa 100644 --- a/ui/types/authFormSchema.ts +++ b/ui/types/authFormSchema.ts @@ -24,11 +24,21 @@ export const authFormSchema = (type: string) => invitationToken: type === "sign-in" ? z.string().optional() : z.string().optional(), + termsAndConditions: + type === "sign-in" || process.env.NEXT_PUBLIC_IS_CLOUD_ENV !== "true" + ? 
z.boolean().optional() + : z.boolean().refine((value) => value === true, { + message: "You must accept the terms and conditions.", + }), + // Fields for Sign In and Sign Up email: z.string().email(), - password: z.string().min(12, { - message: "It must contain at least 12 characters.", - }), + password: + type === "sign-in" + ? z.string() + : z.string().min(12, { + message: "It must contain at least 12 characters.", + }), }) .refine( (data) => type === "sign-in" || data.password === data.confirmPassword,