diff --git a/packages/common-library/src/common_library/basic_types.py b/packages/common-library/src/common_library/basic_types.py index c01d76ac328..dc92a3efac2 100644 --- a/packages/common-library/src/common_library/basic_types.py +++ b/packages/common-library/src/common_library/basic_types.py @@ -3,11 +3,12 @@ from pydantic_core import PydanticUndefined -# SEE https://github.com/fastapi/fastapi/blob/master/fastapi/_compat.py#L75-L78 Undefined = PydanticUndefined DEFAULT_FACTORY: Any = Undefined -# Use `UNSET` as default when default_factory +# Use `DEFAULT_FACTORY` as field default when using Field(default_factory=...) # SEE https://github.com/ITISFoundation/osparc-simcore/pull/6882 +# SEE https://github.com/ITISFoundation/osparc-simcore/pull/7112#discussion_r1933432238 +# SEE https://github.com/fastapi/fastapi/blob/master/fastapi/_compat.py#L75-L78 class LogLevel(StrEnum): diff --git a/packages/settings-library/setup.cfg b/packages/settings-library/setup.cfg index 9f06ddde50e..43f7aec5015 100644 --- a/packages/settings-library/setup.cfg +++ b/packages/settings-library/setup.cfg @@ -14,6 +14,7 @@ universal = 1 # Define setup.py command aliases here test = pytest -# NOTE: uncomment when pytest-asyncio is added in requirements -# [tool:pytest] -# asyncio_mode = auto +[tool:pytest] +# SEE https://docs.pytest.org/en/stable/how-to/capture-warnings.html +filterwarnings = + error diff --git a/packages/settings-library/src/settings_library/application.py b/packages/settings-library/src/settings_library/application.py index 1af5e142ba8..321e450e65b 100644 --- a/packages/settings-library/src/settings_library/application.py +++ b/packages/settings-library/src/settings_library/application.py @@ -1,3 +1,5 @@ +from typing import Annotated + from pydantic import Field, PositiveInt from .base import BaseCustomSettings @@ -18,11 +20,13 @@ class BaseApplicationSettings(BaseCustomSettings): # @Dockerfile SC_BOOT_MODE: BootModeEnum | None = None SC_BOOT_TARGET: BuildTargetEnum | None = None - SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( - default=None, - description="If a single run of the check takes longer than timeout seconds " - "then the check is considered to have failed." - "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", - ) + SC_HEALTHCHECK_TIMEOUT: Annotated[ + PositiveInt | None, + Field( + description="If a single run of the check takes longer than timeout seconds " + "then the check is considered to have failed." 
+ "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", + ), + ] = None SC_USER_ID: int | None = None SC_USER_NAME: str | None = None diff --git a/packages/settings-library/src/settings_library/director_v0.py b/packages/settings-library/src/settings_library/director_v0.py index 9e1e36d67a9..1d599d9b328 100644 --- a/packages/settings-library/src/settings_library/director_v0.py +++ b/packages/settings-library/src/settings_library/director_v0.py @@ -1,4 +1,5 @@ from functools import cached_property +from typing import Annotated from pydantic import AnyHttpUrl, Field, TypeAdapter from settings_library.base import BaseCustomSettings @@ -10,9 +11,9 @@ class DirectorV0Settings(BaseCustomSettings): DIRECTOR_HOST: str = "director" DIRECTOR_PORT: PortInt = TypeAdapter(PortInt).validate_python(8000) - DIRECTOR_VTAG: VersionTag = Field( - default="v0", description="Director-v0 service API's version tag" - ) + DIRECTOR_VTAG: Annotated[ + VersionTag, Field(description="Director-v0 service API's version tag") + ] = "v0" @cached_property def endpoint(self) -> str: diff --git a/packages/settings-library/src/settings_library/docker_registry.py b/packages/settings-library/src/settings_library/docker_registry.py index 32636f6b9df..312bd0a53d5 100644 --- a/packages/settings-library/src/settings_library/docker_registry.py +++ b/packages/settings-library/src/settings_library/docker_registry.py @@ -1,5 +1,5 @@ from functools import cached_property -from typing import Any, Self +from typing import Annotated, Any, Self from pydantic import ( AnyHttpUrl, @@ -15,29 +15,37 @@ class RegistrySettings(BaseCustomSettings): - REGISTRY_AUTH: bool = Field(..., description="do registry authentication") - REGISTRY_PATH: str | None = Field( - default=None, - # This is useful in case of a local registry, where the registry url (path) is relative to the host docker engine" - description="development mode only, in case a local registry is used - " - "this is the hostname to the docker registry as seen from the host running the containers (e.g. 127.0.0.1:5000)", - ) - # NOTE: name is missleading, http or https protocol are not included - REGISTRY_URL: str = Field( - ..., - description="hostname of docker registry (without protocol but with port if available)", - min_length=1, - ) + REGISTRY_AUTH: Annotated[bool, Field(description="do registry authentication")] + REGISTRY_PATH: Annotated[ + str | None, + Field( + # This is useful in case of a local registry, where the registry url (path) is relative to the host docker engine" + description="development mode only, in case a local registry is used - " + "this is the hostname to the docker registry as seen from the host running the containers (e.g. 
127.0.0.1:5000)", + ), + ] = None - REGISTRY_USER: str = Field( - ..., description="username to access the docker registry" - ) - REGISTRY_PW: SecretStr = Field( - ..., description="password to access the docker registry" - ) - REGISTRY_SSL: bool = Field( - ..., description="True if docker registry is using HTTPS protocol" - ) + REGISTRY_URL: Annotated[ + str, + Field( + # NOTE: name is misleading, http or https protocol are not included + description="hostname of docker registry (without protocol but with port if available)", + min_length=1, + ), + ] + + REGISTRY_USER: Annotated[ + str, + Field(description="username to access the docker registry"), + ] + REGISTRY_PW: Annotated[ + SecretStr, + Field(description="password to access the docker registry"), + ] + REGISTRY_SSL: Annotated[ + bool, + Field(description="True if docker registry is using HTTPS protocol"), + ] @field_validator("REGISTRY_PATH", mode="before") @classmethod @@ -45,7 +53,7 @@ def _escape_none_string(cls, v) -> Any | None: return None if v == "None" else v @model_validator(mode="after") - def check_registry_authentication(self: Self) -> Self: + def _check_registry_authentication(self: Self) -> Self: if self.REGISTRY_AUTH and any( not v for v in (self.REGISTRY_USER, self.REGISTRY_PW) ): diff --git a/packages/settings-library/src/settings_library/ec2.py b/packages/settings-library/src/settings_library/ec2.py index 22d2d9af9ee..dd78a70a138 100644 --- a/packages/settings-library/src/settings_library/ec2.py +++ b/packages/settings-library/src/settings_library/ec2.py @@ -1,18 +1,19 @@ from typing import Annotated -from pydantic import AnyHttpUrl, BeforeValidator, Field, TypeAdapter +from pydantic import BeforeValidator, Field from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings - -ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) +from .utils_validators import validate_nullable_url class EC2Settings(BaseCustomSettings): EC2_ACCESS_KEY_ID: str EC2_ENDPOINT: Annotated[ - str, BeforeValidator(lambda x: str(ANY_HTTP_URL_ADAPTER.validate_python(x))) - ] | None = Field(default=None, description="do not define if using standard AWS") + str | None, + BeforeValidator(validate_nullable_url), + Field(description="do not define if using standard AWS"), + ] = None EC2_REGION_NAME: str = "us-east-1" EC2_SECRET_ACCESS_KEY: str diff --git a/packages/settings-library/src/settings_library/efs.py b/packages/settings-library/src/settings_library/efs.py index d8ad2b7395d..c27d70b37c6 100644 --- a/packages/settings-library/src/settings_library/efs.py +++ b/packages/settings-library/src/settings_library/efs.py @@ -1,4 +1,5 @@ from pathlib import Path +from typing import Annotated from pydantic import Field @@ -6,14 +7,20 @@ class AwsEfsSettings(BaseCustomSettings): - EFS_DNS_NAME: str = Field( - description="AWS Elastic File System DNS name", - examples=["fs-xxx.efs.us-east-1.amazonaws.com"], - ) + EFS_DNS_NAME: Annotated[ + str, + Field( + description="AWS Elastic File System DNS name", + examples=["fs-xxx.efs.us-east-1.amazonaws.com"], + ), + ] EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: str - EFS_MOUNTED_PATH: Path = Field( - description="This is the path where EFS is mounted to the EC2 machine", - ) + EFS_MOUNTED_PATH: Annotated[ + Path, + Field( + description="This is the path where EFS is mounted to the EC2 machine", + ), + ] NFS_PROTOCOL = "4.1" diff --git a/packages/settings-library/src/settings_library/email.py b/packages/settings-library/src/settings_library/email.py index fe5f8448b34..65ca6d6b735 100644
--- a/packages/settings-library/src/settings_library/email.py +++ b/packages/settings-library/src/settings_library/email.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Self +from typing import Annotated, Self from pydantic import model_validator from pydantic.fields import Field @@ -25,12 +25,15 @@ class SMTPSettings(BaseCustomSettings): SMTP_HOST: str SMTP_PORT: PortInt - SMTP_PROTOCOL: EmailProtocol = Field( - EmailProtocol.UNENCRYPTED, - description="Select between TLS, STARTTLS Secure Mode or unencrypted communication", - ) - SMTP_USERNAME: str | None = Field(None, min_length=1) - SMTP_PASSWORD: SecretStr | None = Field(None, min_length=1) + SMTP_PROTOCOL: Annotated[ + EmailProtocol, + Field( + description="Select between TLS, STARTTLS Secure Mode or unencrypted communication", + ), + ] = EmailProtocol.UNENCRYPTED + + SMTP_USERNAME: Annotated[str | None, Field(min_length=1)] = None + SMTP_PASSWORD: Annotated[SecretStr | None, Field(min_length=1)] = None @model_validator(mode="after") def _both_credentials_must_be_set(self) -> Self: diff --git a/packages/settings-library/src/settings_library/http_client_request.py b/packages/settings-library/src/settings_library/http_client_request.py index fcf7f2ff97f..b73cbdea82e 100644 --- a/packages/settings-library/src/settings_library/http_client_request.py +++ b/packages/settings-library/src/settings_library/http_client_request.py @@ -1,3 +1,5 @@ +from typing import Annotated + from pydantic import Field from .base import BaseCustomSettings @@ -6,26 +8,32 @@ class ClientRequestSettings(BaseCustomSettings): # NOTE: These entries are used in some old services as well. These need to be updated if these # variable names or defaults are changed. - HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT: int | None = Field( - default=20, - description="timeout in seconds used for outgoing http requests", - ) + HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT: Annotated[ + int | None, + Field( + description="timeout in seconds used for outgoing http requests", + ), + ] = 20 - HTTP_CLIENT_REQUEST_AIOHTTP_CONNECT_TIMEOUT: int | None = Field( - default=None, - description=( - "Maximal number of seconds for acquiring a connection" - " from pool. The time consists connection establishment" - " for a new connection or waiting for a free connection" - " from a pool if pool connection limits are exceeded. " - "For pure socket connection establishment time use sock_connect." + HTTP_CLIENT_REQUEST_AIOHTTP_CONNECT_TIMEOUT: Annotated[ + int | None, + Field( + description=( + "Maximal number of seconds for acquiring a connection" + " from pool. The time consists of connection establishment" + " for a new connection or waiting for a free connection" + " from a pool if pool connection limits are exceeded. " + "For pure socket connection establishment time use sock_connect."
+ ), ), - ) + ] = None - HTTP_CLIENT_REQUEST_AIOHTTP_SOCK_CONNECT_TIMEOUT: int | None = Field( - default=5, - description=( - "aiohttp specific field used in ClientTimeout, timeout for connecting to a " - "peer for a new connection not given a pool" + HTTP_CLIENT_REQUEST_AIOHTTP_SOCK_CONNECT_TIMEOUT: Annotated[ + int | None, + Field( + description=( + "aiohttp specific field used in ClientTimeout, timeout for connecting to a " + "peer for a new connection not given a pool" + ), ), - ) + ] = 5 diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index e65f02e6edc..83aa960c92c 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -1,4 +1,5 @@ from functools import cached_property +from typing import Annotated from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse from pydantic import ( @@ -25,26 +26,28 @@ class PostgresSettings(BaseCustomSettings): POSTGRES_PASSWORD: SecretStr # database - POSTGRES_DB: str = Field(..., description="Database name") + POSTGRES_DB: Annotated[str, Field(description="Database name")] # pool connection limits - POSTGRES_MINSIZE: int = Field( - default=1, description="Minimum number of connections in the pool", ge=1 - ) - POSTGRES_MAXSIZE: int = Field( - default=50, description="Maximum number of connections in the pool", ge=1 - ) + POSTGRES_MINSIZE: Annotated[ + int, Field(description="Minimum number of connections in the pool", ge=1) + ] = 1 + POSTGRES_MAXSIZE: Annotated[ + int, Field(description="Maximum number of connections in the pool", ge=1) + ] = 50 - POSTGRES_CLIENT_NAME: str | None = Field( - default=None, - description="Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux)", - validation_alias=AliasChoices( - "POSTGRES_CLIENT_NAME", - # This is useful when running inside a docker container, then the hostname is set each client gets a different name - "HOST", - "HOSTNAME", + POSTGRES_CLIENT_NAME: Annotated[ + str | None, + Field( + description="Name of the application connecting to the postgres database, will default to use the host hostname (hostname on linux)", + validation_alias=AliasChoices( + "POSTGRES_CLIENT_NAME", + # This is useful when running inside a docker container, where the hostname is set so that each client gets a different name + "HOST", + "HOSTNAME", + ), ), - ) + ] = None @field_validator("POSTGRES_MAXSIZE") @classmethod diff --git a/packages/settings-library/src/settings_library/r_clone.py b/packages/settings-library/src/settings_library/r_clone.py index 062022be5aa..d1a6472e9c6 100644 --- a/packages/settings-library/src/settings_library/r_clone.py +++ b/packages/settings-library/src/settings_library/r_clone.py @@ -1,4 +1,5 @@ from enum import StrEnum +from typing import Annotated from pydantic import Field, NonNegativeInt @@ -13,19 +14,25 @@ class S3Provider(StrEnum): class RCloneSettings(BaseCustomSettings): - R_CLONE_S3: S3Settings = Field(json_schema_extra={"auto_default_from_env": True}) + R_CLONE_S3: Annotated[ + S3Settings, Field(json_schema_extra={"auto_default_from_env": True}) + ] R_CLONE_PROVIDER: S3Provider - # SEE https://rclone.org/docs/#transfers-n - R_CLONE_OPTION_TRANSFERS: NonNegativeInt = Field( - default=5, description="`--transfers X`: sets the amount of parallel transfers" - ) - # SEE https://rclone.org/docs/#retries-int - R_CLONE_OPTION_RETRIES: NonNegativeInt = Field( - default=3,
description="`--retries X`: times to retry each individual transfer" - ) - # SEE https://rclone.org/docs/#buffer-size-size - R_CLONE_OPTION_BUFFER_SIZE: str = Field( - default="16M", - description="`--buffer-size X`: sets the amount of RAM to use for each individual transfer", - ) + R_CLONE_OPTION_TRANSFERS: Annotated[ + # SEE https://rclone.org/docs/#transfers-n + NonNegativeInt, + Field(description="`--transfers X`: sets the amount of parallel transfers"), + ] = 5 + R_CLONE_OPTION_RETRIES: Annotated[ + # SEE https://rclone.org/docs/#retries-int + NonNegativeInt, + Field(description="`--retries X`: times to retry each individual transfer"), + ] = 3 + R_CLONE_OPTION_BUFFER_SIZE: Annotated[ + # SEE https://rclone.org/docs/#buffer-size-size + str, + Field( + description="`--buffer-size X`: sets the amount of RAM to use for each individual transfer", + ), + ] = "16M" diff --git a/packages/settings-library/src/settings_library/s3.py b/packages/settings-library/src/settings_library/s3.py index 18f23860658..348e1dcb39d 100644 --- a/packages/settings-library/src/settings_library/s3.py +++ b/packages/settings-library/src/settings_library/s3.py @@ -1,3 +1,5 @@ +from typing import Annotated + from pydantic import AnyHttpUrl, Field from pydantic_settings import SettingsConfigDict @@ -8,9 +10,9 @@ class S3Settings(BaseCustomSettings): S3_ACCESS_KEY: IDStr S3_BUCKET_NAME: IDStr - S3_ENDPOINT: AnyHttpUrl | None = Field( - default=None, description="do not define if using standard AWS" - ) + S3_ENDPOINT: Annotated[ + AnyHttpUrl | None, Field(description="do not define if using standard AWS") + ] = None S3_REGION: IDStr S3_SECRET_KEY: IDStr diff --git a/packages/settings-library/src/settings_library/ssm.py b/packages/settings-library/src/settings_library/ssm.py index 8bed0906bbe..44546e4fdd7 100644 --- a/packages/settings-library/src/settings_library/ssm.py +++ b/packages/settings-library/src/settings_library/ssm.py @@ -1,20 +1,19 @@ from typing import Annotated -from pydantic import AnyHttpUrl, BeforeValidator, Field, SecretStr, TypeAdapter +from pydantic import BeforeValidator, Field, SecretStr from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings +from .utils_validators import validate_nullable_url class SSMSettings(BaseCustomSettings): SSM_ACCESS_KEY_ID: SecretStr - SSM_ENDPOINT: ( - Annotated[ - str, - BeforeValidator(lambda x: str(TypeAdapter(AnyHttpUrl).validate_python(x))), - ] - | None - ) = Field(default=None, description="do not define if using standard AWS") + SSM_ENDPOINT: Annotated[ + str | None, + BeforeValidator(validate_nullable_url), + Field(description="do not define if using standard AWS"), + ] = None SSM_REGION_NAME: str = "us-east-1" SSM_SECRET_ACCESS_KEY: SecretStr diff --git a/packages/settings-library/src/settings_library/tracing.py b/packages/settings-library/src/settings_library/tracing.py index 2d977d47ee7..c8263dd8be2 100644 --- a/packages/settings-library/src/settings_library/tracing.py +++ b/packages/settings-library/src/settings_library/tracing.py @@ -1,3 +1,5 @@ +from typing import Annotated + from pydantic import AnyUrl, Field from settings_library.basic_types import RegisteredPortInt @@ -7,9 +9,9 @@ class TracingSettings(BaseCustomSettings): - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: AnyUrl = Field( - ..., description="Opentelemetry compatible collector endpoint" - ) - TRACING_OPENTELEMETRY_COLLECTOR_PORT: RegisteredPortInt = Field( - ..., description="Opentelemetry compatible collector port" - ) + 
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: Annotated[ + AnyUrl, Field(description="Opentelemetry compatible collector endpoint") + ] + TRACING_OPENTELEMETRY_COLLECTOR_PORT: Annotated[ + RegisteredPortInt, Field(description="Opentelemetry compatible collector port") + ] diff --git a/packages/settings-library/src/settings_library/twilio.py b/packages/settings-library/src/settings_library/twilio.py index 343cbda4732..93ebc753fec 100644 --- a/packages/settings-library/src/settings_library/twilio.py +++ b/packages/settings-library/src/settings_library/twilio.py @@ -11,25 +11,35 @@ from .base import BaseCustomSettings -# Based on https://countrycode.org/ CountryCodeStr: TypeAlias = Annotated[ str, BeforeValidator(str), + # Based on https://countrycode.org/ StringConstraints(strip_whitespace=True, pattern=r"^\d{1,4}"), ] class TwilioSettings(BaseCustomSettings): - TWILIO_ACCOUNT_SID: str = Field(..., description="Twilio account String Identifier") - TWILIO_AUTH_TOKEN: str = Field(..., description="API tokens") - TWILIO_COUNTRY_CODES_W_ALPHANUMERIC_SID_SUPPORT: list[CountryCodeStr] = Field( - default=TypeAdapter(list[CountryCodeStr]).validate_python( - [ - "41", - ], + TWILIO_ACCOUNT_SID: Annotated[ + str, + Field(description="Twilio account String Identifier"), + ] + + TWILIO_AUTH_TOKEN: Annotated[ + str, + Field(description="API tokens"), + ] + + TWILIO_COUNTRY_CODES_W_ALPHANUMERIC_SID_SUPPORT: Annotated[ + list[CountryCodeStr], + Field( + description="list of country-codes supporting/registered for alphanumeric sender ID" + "See https://support.twilio.com/hc/en-us/articles/223133767-International-support-for-Alphanumeric-Sender-ID", ), - description="list of country-codes supporting/registered for alphanumeric sender ID" - "See https://support.twilio.com/hc/en-us/articles/223133767-International-support-for-Alphanumeric-Sender-ID", + ] = TypeAdapter(list[CountryCodeStr]).validate_python( + [ + "41", + ], ) def is_alphanumeric_supported(self, phone_number: str) -> bool: diff --git a/packages/settings-library/src/settings_library/utils_logging.py b/packages/settings-library/src/settings_library/utils_logging.py index 9bfc1d47741..7ca3ee05b01 100644 --- a/packages/settings-library/src/settings_library/utils_logging.py +++ b/packages/settings-library/src/settings_library/utils_logging.py @@ -1,6 +1,8 @@ import logging from functools import cached_property +from common_library.basic_types import LogLevel + class MixinLoggingSettings: """ @@ -8,14 +10,14 @@ class MixinLoggingSettings: """ @classmethod - def validate_log_level(cls, value: str) -> str: + def validate_log_level(cls, value: str) -> LogLevel: """Standard implementation for @validator("LOG_LEVEL")""" try: getattr(logging, value.upper()) except AttributeError as err: msg = f"{value.upper()} is not a valid level" raise ValueError(msg) from err - return value.upper() + return LogLevel(value.upper()) @cached_property def log_level(self) -> int: diff --git a/packages/settings-library/src/settings_library/utils_validators.py b/packages/settings-library/src/settings_library/utils_validators.py new file mode 100644 index 00000000000..c26c09fa7c8 --- /dev/null +++ b/packages/settings-library/src/settings_library/utils_validators.py @@ -0,0 +1,9 @@ +from pydantic import AnyHttpUrl, TypeAdapter + +ANY_HTTP_URL_ADAPTER: TypeAdapter = TypeAdapter(AnyHttpUrl) + + +def validate_nullable_url(value: str | None) -> str | None: + if value is not None: + return str(ANY_HTTP_URL_ADAPTER.validate_python(value)) + return value diff --git 
a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index bc3da5494da..eb2989852cb 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -12,11 +12,16 @@ """ +from typing import Annotated + +import pytest +from common_library.basic_types import LogLevel from common_library.pydantic_fields_extension import is_nullable -from pydantic import ValidationInfo, field_validator +from pydantic import AliasChoices, Field, ValidationInfo, field_validator from pydantic_core import PydanticUndefined from pydantic_settings import BaseSettings from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from settings_library.application import BaseApplicationSettings def assert_field_specs( @@ -56,9 +61,10 @@ class Settings(BaseSettings): @classmethod def parse_none(cls, v, info: ValidationInfo): # WARNING: In nullable fields, envs equal to null or none are parsed as None !! + if ( info.field_name - and is_nullable(cls.model_fields[info.field_name]) + and is_nullable(dict(cls.model_fields)[info.field_name]) and isinstance(v, str) and v.lower() in ("null", "none") ): @@ -167,3 +173,28 @@ def test_construct(monkeypatch): assert settings_from_both == settings_from_init.model_copy( update={"VALUE_NULLABLE_REQUIRED": 3} ) + + +class _TestSettings(BaseApplicationSettings): + APP_LOGLEVEL: Annotated[ + LogLevel, + Field( + validation_alias=AliasChoices("APP_LOGLEVEL", "LOG_LEVEL"), + ), + ] = LogLevel.WARNING + + +@pytest.mark.filterwarnings("error") +def test_pydantic_serialization_user_warning(monkeypatch: pytest.MonkeyPatch): + # This test is exploring the reason for `UserWarning` + # + # /python3.11/site-packages/pydantic/main.py:477: UserWarning: Pydantic serializer warnings: + # Expected `enum` but got `str` with value `'WARNING'` - serialized value may not be as expected + # return self.__pydantic_serializer__.to_json( + # + # NOTE: it seems settings.model_dump_json(warnings='none') is not the cause of the `UserWarning` here + monkeypatch.setenv("LOG_LEVEL", "DEBUG") + + settings = _TestSettings.create_from_envs() + assert settings.APP_LOGLEVEL == LogLevel.DEBUG + assert settings.model_dump_json(indent=2) diff --git a/packages/settings-library/tests/test_ec2.py b/packages/settings-library/tests/test_ec2.py new file mode 100644 index 00000000000..6f78d72e446 --- /dev/null +++ b/packages/settings-library/tests/test_ec2.py @@ -0,0 +1,66 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +import pytest +from pydantic import ValidationError +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from settings_library.ec2 import EC2Settings + + +def test_ec2_endpoint_defaults_to_null(monkeypatch: pytest.MonkeyPatch): + setenvs_from_dict( + monkeypatch, + { + "EC2_ACCESS_KEY_ID": "my_access_key_id", + "EC2_REGION_NAME": "us-east-1", + "EC2_SECRET_ACCESS_KEY": "my_secret_access_key", + }, + ) + + settings = EC2Settings.create_from_envs() + assert settings.EC2_ENDPOINT is None + + +def test_ec2_endpoint_is_nullified(monkeypatch: pytest.MonkeyPatch): + setenvs_from_dict( + monkeypatch, + { + "EC2_ACCESS_KEY_ID": "my_access_key_id", + "EC2_ENDPOINT": "null", + "EC2_REGION_NAME": "us-east-1", + "EC2_SECRET_ACCESS_KEY": "my_secret_access_key", + }, + ) + + settings = EC2Settings.create_from_envs() + assert settings.EC2_ENDPOINT is None + +
+def test_ec2_endpoint_invalid(monkeypatch: pytest.MonkeyPatch): + setenvs_from_dict( + monkeypatch, + { + "EC2_ACCESS_KEY_ID": "my_access_key_id", + "EC2_ENDPOINT": "ftp://my_ec2_endpoint.com", + "EC2_REGION_NAME": "us-east-1", + "EC2_SECRET_ACCESS_KEY": "my_secret_access_key", + }, + ) + + with pytest.raises(ValidationError) as err_info: + EC2Settings.create_from_envs() + + assert err_info.value.error_count() == 1 + error = err_info.value.errors()[0] + + assert error["loc"] == ("EC2_ENDPOINT",) + assert error["type"] == "url_scheme" + + +def test_ec2_endpoint_description(): + model_fields = dict(EC2Settings.model_fields) + assert model_fields["EC2_ACCESS_KEY_ID"].description is None + assert model_fields["EC2_ENDPOINT"].description is not None diff --git a/packages/settings-library/tests/test_email.py b/packages/settings-library/tests/test_email.py index acb9d607c89..439c0c634e5 100644 --- a/packages/settings-library/tests/test_email.py +++ b/packages/settings-library/tests/test_email.py @@ -3,11 +3,12 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments +from enum import Enum from typing import Any import pytest from pydantic import ValidationError -from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.email import EmailProtocol, SMTPSettings @@ -35,7 +36,7 @@ def all_env_devel_undefined( { "SMTP_HOST": "test", "SMTP_PORT": 113, - "SMTP_PROTOCOL": EmailProtocol.UNENCRYPTED, + "SMTP_PROTOCOL": EmailProtocol.UNENCRYPTED.value, }, { "SMTP_HOST": "test", @@ -48,71 +49,114 @@ def all_env_devel_undefined( "SMTP_PORT": 113, "SMTP_USERNAME": "test", "SMTP_PASSWORD": "test", - "SMTP_PROTOCOL": EmailProtocol.UNENCRYPTED, + "SMTP_PROTOCOL": EmailProtocol.UNENCRYPTED.value, }, { "SMTP_HOST": "test", "SMTP_PORT": 113, "SMTP_USERNAME": "test", "SMTP_PASSWORD": "test", - "SMTP_PROTOCOL": EmailProtocol.TLS, + "SMTP_PROTOCOL": EmailProtocol.TLS.value, }, { "SMTP_HOST": "test", "SMTP_PORT": 113, "SMTP_USERNAME": "test", "SMTP_PASSWORD": "test", - "SMTP_PROTOCOL": EmailProtocol.STARTTLS, + "SMTP_PROTOCOL": EmailProtocol.STARTTLS.value, }, ], ) -def test_smtp_configuration_ok(cfg: dict[str, Any], all_env_devel_undefined: None): +def test_smtp_configuration_ok( + all_env_devel_undefined: None, + monkeypatch: pytest.MonkeyPatch, + cfg: dict[str, Any], +): assert SMTPSettings.model_validate(cfg) + setenvs_from_dict(monkeypatch, {k: f"{v}" for k, v in cfg.items()}) + assert SMTPSettings.create_from_envs() + @pytest.mark.parametrize( - "cfg", + "cfg,error_type", [ - { - "SMTP_HOST": "test", - "SMTP_PORT": 111, - "SMTP_USERNAME": "test", - # password required if username provided - }, - { - "SMTP_HOST": "test", - "SMTP_PORT": 112, - "SMTP_PASSWORD": "test", - # username required if password provided - }, - { - "SMTP_HOST": "test", - "SMTP_PORT": 113, - "SMTP_PROTOCOL": EmailProtocol.STARTTLS, - "SMTP_PASSWORD": "test", - }, - { - "SMTP_HOST": "test", - "SMTP_PORT": 114, - "SMTP_PROTOCOL": EmailProtocol.STARTTLS, - "SMTP_USERNAME": "test", - }, - { - "SMTP_HOST": "test", - "SMTP_PORT": 115, - "SMTP_USERNAME": "", - "SMTP_PASSWORD": "test", - "SMTP_PROTOCOL": EmailProtocol.STARTTLS, - }, - { - "SMTP_HOST": "test", - "SMTP_PORT": 116, - "SMTP_USERNAME": "", - "SMTP_PASSWORD": "test", - "SMTP_PROTOCOL": EmailProtocol.TLS, - }, + ( + { + "SMTP_HOST": "test", + "SMTP_PORT": 111, + "SMTP_USERNAME": "test", + # 
password required if username provided + }, + "value_error", + ), + ( + { + "SMTP_HOST": "test", + "SMTP_PORT": 112, + "SMTP_PASSWORD": "test", + # username required if password provided + }, + "value_error", + ), + ( + { + "SMTP_HOST": "test", + "SMTP_PORT": 113, + "SMTP_PROTOCOL": EmailProtocol.STARTTLS, + "SMTP_PASSWORD": "test", + }, + "value_error", + ), + ( + { + "SMTP_HOST": "test", + "SMTP_PORT": 114, + "SMTP_PROTOCOL": EmailProtocol.STARTTLS, + "SMTP_USERNAME": "test", + }, + "value_error", + ), + ( + { + "SMTP_HOST": "test", + "SMTP_PORT": 115, + "SMTP_USERNAME": "", + "SMTP_PASSWORD": "test", + "SMTP_PROTOCOL": EmailProtocol.STARTTLS, + }, + "string_too_short", + ), + ( + { + "SMTP_HOST": "test", + "SMTP_PORT": 116, + "SMTP_USERNAME": "", + "SMTP_PASSWORD": "test", + "SMTP_PROTOCOL": EmailProtocol.TLS, + }, + "string_too_short", + ), ], ) -def test_smtp_configuration_fails(cfg: dict[str, Any], all_env_devel_undefined: None): - with pytest.raises(ValidationError): +def test_smtp_configuration_fails( + all_env_devel_undefined: None, + monkeypatch: pytest.MonkeyPatch, + cfg: dict[str, Any], + error_type: str, +): + with pytest.raises(ValidationError) as err_info: SMTPSettings(**cfg) + + assert err_info.value.error_count() == 1 + assert err_info.value.errors()[0]["type"] == error_type + + setenvs_from_dict( + monkeypatch, + {k: str(v.value if isinstance(v, Enum) else v) for k, v in cfg.items()}, + ) + with pytest.raises(ValidationError) as err_info: + SMTPSettings.create_from_envs() + + assert err_info.value.error_count() == 1 + assert err_info.value.errors()[0]["type"] == error_type diff --git a/services/agent/src/simcore_service_agent/core/settings.py b/services/agent/src/simcore_service_agent/core/settings.py index 742d3bf02d1..d11b286f065 100644 --- a/services/agent/src/simcore_service_agent/core/settings.py +++ b/services/agent/src/simcore_service_agent/core/settings.py @@ -1,5 +1,7 @@ from datetime import timedelta +from typing import Annotated +from common_library.basic_types import DEFAULT_FACTORY from models_library.basic_types import BootModeEnum, LogLevel from models_library.docker import DockerNodeID from pydantic import AliasChoices, AnyHttpUrl, Field, field_validator @@ -12,87 +14,107 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): - LOG_LEVEL: LogLevel = Field( - LogLevel.WARNING, - validation_alias=AliasChoices( - "AGENT_LOGLEVEL", - "LOG_LEVEL", - "LOGLEVEL", + LOG_LEVEL: Annotated[ + LogLevel, + Field( + validation_alias=AliasChoices( + "AGENT_LOGLEVEL", + "LOG_LEVEL", + "LOGLEVEL", + ), ), - ) + ] = LogLevel.WARNING + SC_BOOT_MODE: BootModeEnum | None - AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED", - "LOG_FORMAT_LOCAL_DEV_ENABLED", - ), - description=( - "Enables local development log format. WARNING: make sure it is " - "disabled if you want to have structured logs!" + AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), + description=( + "Enables local development log format. WARNING: make sure it is " + "disabled if you want to have structured logs!" 
+ ), ), - ) - AGENT_VOLUMES_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "AGENT_VOLUMES_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] = False + + AGENT_VOLUMES_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "AGENT_VOLUMES_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) - AGENT_VOLUMES_CLEANUP_TARGET_SWARM_STACK_NAME: str = Field( - ..., description="Exactly the same as director-v2's `SWARM_STACK_NAME` env var" - ) + ] = DEFAULT_FACTORY + + AGENT_VOLUMES_CLEANUP_TARGET_SWARM_STACK_NAME: str AGENT_VOLUMES_CLEANUP_S3_ENDPOINT: AnyHttpUrl AGENT_VOLUMES_CLEANUP_S3_ACCESS_KEY: str AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY: str AGENT_VOLUMES_CLEANUP_S3_BUCKET: str AGENT_VOLUMES_CLEANUP_S3_PROVIDER: S3Provider AGENT_VOLUMES_CLEANUP_S3_REGION: str = "us-east-1" - AGENT_VOLUMES_CLEANUP_RETRIES: int = Field( - 3, description="upload retries in case of error" - ) - AGENT_VOLUMES_CLEANUP_PARALLELISM: int = Field( - 5, description="parallel transfers to s3" - ) - AGENT_VOLUMES_CLEANUP_EXCLUDE_FILES: list[str] = Field( - [".hidden_do_not_remove", "key_values.json"], - description="Files to ignore when syncing to s3", - ) - AGENT_VOLUMES_CLEANUP_INTERVAL: timedelta = Field( - timedelta(minutes=1), description="interval for running volumes removal" - ) - AGENT_VOLUMES_CLEANUP_BOOK_KEEPING_INTERVAL: timedelta = Field( - timedelta(minutes=1), - description=( - "interval at which to scan for unsued volumes and keep track since " - "they were detected as being unused + AGENT_VOLUMES_CLEANUP_RETRIES: Annotated[ + int, Field(description="upload retries in case of error") + ] = 3 + AGENT_VOLUMES_CLEANUP_PARALLELISM: Annotated[ + int, Field(description="parallel transfers to s3") + ] = 5 + AGENT_VOLUMES_CLEANUP_EXCLUDE_FILES: Annotated[ + list[str], + Field( + [".hidden_do_not_remove", "key_values.json"], + description="Files to ignore when syncing to s3", + ), + ] + AGENT_VOLUMES_CLEANUP_INTERVAL: Annotated[ + timedelta, Field(description="interval for running volumes removal") + ] = timedelta(minutes=1) + AGENT_VOLUMES_CLEANUP_BOOK_KEEPING_INTERVAL: Annotated[ + timedelta, + Field( + description=( + "interval at which to scan for unused volumes and keep track since " + "they were detected as being unused" + ), ), - ) + ] = timedelta(minutes=1) - AGENT_VOLUMES_CLEANUP_REMOVE_VOLUMES_INACTIVE_FOR: timedelta = Field( - timedelta(minutes=65), - description=( - "if a volume is unused for more than this interval it can be removed. " - "The default is set to a health 60+ miunutes since it might take upto " - "60 minutes for the dy-sidecar to properly save data form the volumes" + AGENT_VOLUMES_CLEANUP_REMOVE_VOLUMES_INACTIVE_FOR: Annotated[ + timedelta, + Field( + description=( + "if a volume is unused for more than this interval it can be removed. 
" + "The default is set to a healthy 60+ minutes since it might take up to " + "60 minutes for the dy-sidecar to properly save data from the volumes" + ), + ), + ] = timedelta(minutes=65) AGENT_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True + AGENT_DOCKER_NODE_ID: Annotated[ + DockerNodeID, Field(description="used by the rabbitmq module") + ] - AGENT_DOCKER_NODE_ID: DockerNodeID = Field( - ..., description="used by the rabbitmq module" - ) - - AGENT_RABBITMQ: RabbitSettings = Field( - description="settings for service/rabbitmq", - json_schema_extra={"auto_default_from_env": True}, - ) + AGENT_RABBITMQ: Annotated[ + RabbitSettings, + Field( + description="settings for service/rabbitmq", + json_schema_extra={"auto_default_from_env": True}, + ), + ] - AGENT_TRACING: TracingSettings | None = Field( - description="settings for opentelemetry tracing", - json_schema_extra={"auto_default_from_env": True}, - ) + AGENT_TRACING: Annotated[ + TracingSettings | None, + Field( + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, + ), + ] @field_validator("LOG_LEVEL") @classmethod diff --git a/services/api-server/src/simcore_service_api_server/core/settings.py b/services/api-server/src/simcore_service_api_server/core/settings.py index 6e33055084d..14c92c46775 100644 --- a/services/api-server/src/simcore_service_api_server/core/settings.py +++ b/services/api-server/src/simcore_service_api_server/core/settings.py @@ -1,6 +1,7 @@ from functools import cached_property from typing import Annotated +from common_library.basic_types import DEFAULT_FACTORY from models_library.basic_types import BootModeEnum, LogLevel from pydantic import ( AliasChoices, @@ -28,20 +29,22 @@ class WebServerSettings(WebServerBaseSettings, MixinSessionSettings): - WEBSERVER_SESSION_SECRET_KEY: SecretStr = Field( - ..., - description="Secret key to encrypt cookies. " - 'TIP: python3 -c "from cryptography.fernet import *; print(Fernet.generate_key())"', - min_length=44, - validation_alias=AliasChoices( - "SESSION_SECRET_KEY", "WEBSERVER_SESSION_SECRET_KEY" + WEBSERVER_SESSION_SECRET_KEY: Annotated[ + SecretStr, + Field( + description="Secret key to encrypt cookies. 
" + 'TIP: python3 -c "from cryptography.fernet import *; print(Fernet.generate_key())"', + min_length=44, + validation_alias=AliasChoices( + "SESSION_SECRET_KEY", "WEBSERVER_SESSION_SECRET_KEY" + ), ), - ) + ] WEBSERVER_SESSION_NAME: str = DEFAULT_SESSION_COOKIE_NAME @field_validator("WEBSERVER_SESSION_SECRET_KEY") @classmethod - def check_valid_fernet_key(cls, v): + def _check_valid_fernet_key(cls, v): return cls.do_check_valid_fernet_key(v) @@ -50,12 +53,14 @@ def check_valid_fernet_key(cls, v): class BasicSettings(BaseCustomSettings, MixinLoggingSettings): # DEVELOPMENT - API_SERVER_DEV_FEATURES_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "API_SERVER_DEV_FEATURES_ENABLED", "FAKE_API_SERVER_ENABLED" + API_SERVER_DEV_FEATURES_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "API_SERVER_DEV_FEATURES_ENABLED", "FAKE_API_SERVER_ENABLED" + ), ), - ) + ] = False # LOGGING LOG_LEVEL: Annotated[ @@ -67,20 +72,27 @@ class BasicSettings(BaseCustomSettings, MixinLoggingSettings): ), ] = LogLevel.INFO - API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ), - description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", - ) - API_SERVER_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "API_SERVER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] = False + + API_SERVER_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "API_SERVER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) + ] = DEFAULT_FACTORY @field_validator("LOG_LEVEL", mode="before") @classmethod @@ -98,24 +110,29 @@ class ApplicationSettings(BasicSettings): Field(json_schema_extra={"auto_default_from_env": True}), ] - API_SERVER_RABBITMQ: RabbitSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for service/rabbitmq", - ) + API_SERVER_RABBITMQ: Annotated[ + RabbitSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for service/rabbitmq", + ), + ] # SERVICES with http API - API_SERVER_WEBSERVER: WebServerSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - API_SERVER_CATALOG: CatalogSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - API_SERVER_STORAGE: StorageSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - API_SERVER_DIRECTOR_V2: DirectorV2Settings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + API_SERVER_WEBSERVER: Annotated[ + WebServerSettings | None, + 
Field(json_schema_extra={"auto_default_from_env": True}), + ] + API_SERVER_CATALOG: Annotated[ + CatalogSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + ] + API_SERVER_STORAGE: Annotated[ + StorageSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + ] + API_SERVER_DIRECTOR_V2: Annotated[ + DirectorV2Settings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] API_SERVER_LOG_CHECK_TIMEOUT_SECONDS: NonNegativeInt = 3 * 60 API_SERVER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True API_SERVER_HEALTH_CHECK_TASK_PERIOD_SECONDS: PositiveInt = 30 @@ -123,10 +140,13 @@ class ApplicationSettings(BasicSettings): API_SERVER_ALLOWED_HEALTH_CHECK_FAILURES: PositiveInt = 5 API_SERVER_PROMETHEUS_INSTRUMENTATION_COLLECT_SECONDS: PositiveInt = 5 API_SERVER_PROFILING: bool = False - API_SERVER_TRACING: TracingSettings | None = Field( - description="settings for opentelemetry tracing", - json_schema_extra={"auto_default_from_env": True}, - ) + API_SERVER_TRACING: Annotated[ + TracingSettings | None, + Field( + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, + ), + ] @cached_property def debug(self) -> bool: diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py index 347c4c978c3..ff67aeeaab1 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py @@ -1,16 +1,10 @@ import datetime from functools import cached_property -from typing import Final, Self, cast +from typing import Annotated, Final, Self, cast from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags from fastapi import FastAPI -from models_library.basic_types import ( - BootModeEnum, - BuildTargetEnum, - LogLevel, - PortInt, - VersionTag, -) +from models_library.basic_types import LogLevel, PortInt, VersionTag from models_library.clusters import ClusterAuthentication from models_library.docker import DockerLabelKey from pydantic import ( @@ -18,13 +12,13 @@ AnyUrl, Field, NonNegativeInt, - PositiveInt, TypeAdapter, field_validator, model_validator, ) from pydantic_settings import SettingsConfigDict from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from settings_library.application import BaseApplicationSettings from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings from settings_library.ec2 import EC2Settings @@ -61,79 +55,109 @@ class AutoscalingEC2Settings(EC2Settings): class EC2InstancesSettings(BaseCustomSettings): - EC2_INSTANCES_ALLOWED_TYPES: dict[str, EC2InstanceBootSpecific] = Field( - ..., - description="Defines which EC2 instances are considered as candidates for new EC2 instance and their respective boot specific parameters" - "NOTE: minimum length >0", - ) + EC2_INSTANCES_ALLOWED_TYPES: Annotated[ + dict[str, EC2InstanceBootSpecific], + Field( + description="Defines which EC2 instances are considered as candidates for new EC2 instance and their respective boot specific parameters" + "NOTE: minimum length >0", + ), + ] + + EC2_INSTANCES_KEY_NAME: Annotated[ + str, + Field( + min_length=1, + description="SSH key filename (without ext) to access the instance through SSH" + " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)," + "this is required to start a new EC2 instance", + ), + ] + 
EC2_INSTANCES_MACHINES_BUFFER: Annotated[ + NonNegativeInt, + Field( + description="Constant reserve of drained ready machines for fast(er) usage," + "disabled when set to 0. Uses 1st machine defined in EC2_INSTANCES_ALLOWED_TYPES", + ), + ] = 0 + EC2_INSTANCES_MAX_INSTANCES: Annotated[ + int, + Field( + description="Defines the maximum number of instances the autoscaling app may create", + ), + ] = 10 + EC2_INSTANCES_MAX_START_TIME: Annotated[ + datetime.timedelta, + Field( + description="Usual time an EC2 instance with the given AMI takes to join the cluster " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)." + "NOTE: be careful that this time should always be a factor larger than the real time, as EC2 instances" + "that take longer than this time will be terminated as sometimes it happens that EC2 machines fail on start.", + ), + ] = datetime.timedelta(minutes=1) - EC2_INSTANCES_KEY_NAME: str = Field( - ..., - min_length=1, - description="SSH key filename (without ext) to access the instance through SSH" - " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)," - "this is required to start a new EC2 instance", - ) - EC2_INSTANCES_MACHINES_BUFFER: NonNegativeInt = Field( - default=0, - description="Constant reserve of drained ready machines for fast(er) usage," - "disabled when set to 0. Uses 1st machine defined in EC2_INSTANCES_ALLOWED_TYPES", - ) - EC2_INSTANCES_MAX_INSTANCES: int = Field( - default=10, - description="Defines the maximum number of instances the autoscaling app may create", - ) - EC2_INSTANCES_MAX_START_TIME: datetime.timedelta = Field( - default=datetime.timedelta(minutes=1), - description="Usual time taken an EC2 instance with the given AMI takes to join the cluster " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)."
- "NOTE: be careful that this time should always be a factor larger than the real time, as EC2 instances" - "that take longer than this time will be terminated as sometimes it happens that EC2 machine fail on start.", - ) - EC2_INSTANCES_NAME_PREFIX: str = Field( - default="autoscaling", - min_length=1, - description="prefix used to name the EC2 instances created by this instance of autoscaling", - ) + EC2_INSTANCES_NAME_PREFIX: Annotated[ + str, + Field( + min_length=1, + description="prefix used to name the EC2 instances created by this instance of autoscaling", + ), + ] = "autoscaling" + + EC2_INSTANCES_SECURITY_GROUP_IDS: Annotated[ + list[str], + Field( + min_length=1, + description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" + " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " + " this is required to start a new EC2 instance", + ), + ] + EC2_INSTANCES_SUBNET_ID: Annotated[ + str, + Field( + min_length=1, + description="A subnet is a range of IP addresses in your VPC " + " (https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html), " + "this is required to start a new EC2 instance", + ), + ] + EC2_INSTANCES_TIME_BEFORE_DRAINING: Annotated[ + datetime.timedelta, + Field( + description="Time after which an EC2 instance may be drained (10s<=T<=1 minutes, is automatically capped)" + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)", + ), + ] = datetime.timedelta(seconds=20) - EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field( - ..., - min_length=1, - description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" - " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " - " this is required to start a new EC2 instance", - ) - EC2_INSTANCES_SUBNET_ID: str = Field( - ..., - min_length=1, - description="A subnet is a range of IP addresses in your VPC " - " (https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html), " - "this is required to start a new EC2 instance", - ) - EC2_INSTANCES_TIME_BEFORE_DRAINING: datetime.timedelta = Field( - default=datetime.timedelta(seconds=20), - description="Time after which an EC2 instance may be drained (10s<=T<=1 minutes, is automatically capped)" - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) - EC2_INSTANCES_TIME_BEFORE_TERMINATION: datetime.timedelta = Field( - default=datetime.timedelta(minutes=1), - description="Time after which an EC2 instance may begin the termination process (0<=T<=59 minutes, is automatically capped)" - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) - EC2_INSTANCES_TIME_BEFORE_FINAL_TERMINATION: datetime.timedelta = Field( - default=datetime.timedelta(seconds=30), - description="Time after which an EC2 instance is terminated after draining" - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) - EC2_INSTANCES_CUSTOM_TAGS: EC2Tags = Field( - ..., - description="Allows to define tags that should be added to the created EC2 instance default tags. " - "a tag must have a key and an optional value. 
see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html]", - ) - EC2_INSTANCES_ATTACHED_IAM_PROFILE: str = Field( - ..., - description="ARN the EC2 instance should be attached to (example: arn:aws:iam::XXXXX:role/NAME), to disable pass an empty string", - ) + EC2_INSTANCES_TIME_BEFORE_TERMINATION: Annotated[ + datetime.timedelta, + Field( + description="Time after which an EC2 instance may begin the termination process (0<=T<=59 minutes, is automatically capped)" + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)", + ), + ] = datetime.timedelta(minutes=1) + + EC2_INSTANCES_TIME_BEFORE_FINAL_TERMINATION: Annotated[ + datetime.timedelta, + Field( + description="Time after which an EC2 instance is terminated after draining" + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)", + ), + ] = datetime.timedelta(seconds=30) + + EC2_INSTANCES_CUSTOM_TAGS: Annotated[ + EC2Tags, + Field( + description="Allows to define tags that should be added to the created EC2 instance default tags. " + "a tag must have a key and an optional value. see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html]", + ), + ] + EC2_INSTANCES_ATTACHED_IAM_PROFILE: Annotated[ + str, + Field( + description="ARN the EC2 instance should be attached to (example: arn:aws:iam::XXXXX:role/NAME), to disable pass an empty string", + ), + ] @field_validator("EC2_INSTANCES_TIME_BEFORE_DRAINING") @classmethod @@ -175,146 +199,166 @@ def _check_valid_instance_names_and_not_empty( class NodesMonitoringSettings(BaseCustomSettings): - NODES_MONITORING_NODE_LABELS: list[DockerLabelKey] = Field( - ..., - description="autoscaling will only monitor nodes with the given labels (if empty all nodes will be monitored), these labels will be added to the new created nodes by default", - ) + NODES_MONITORING_NODE_LABELS: Annotated[ + list[DockerLabelKey], + Field( + description="autoscaling will only monitor nodes with the given labels (if empty all nodes will be monitored), these labels will be added to the new created nodes by default", + ), + ] - NODES_MONITORING_SERVICE_LABELS: list[DockerLabelKey] = Field( - ..., - description="autoscaling will only monitor services with the given labels (if empty all services will be monitored)", - ) + NODES_MONITORING_SERVICE_LABELS: Annotated[ + list[DockerLabelKey], + Field( + description="autoscaling will only monitor services with the given labels (if empty all services will be monitored)", + ), + ] - NODES_MONITORING_NEW_NODES_LABELS: list[DockerLabelKey] = Field( - ..., - description="autoscaling will add these labels to any new node it creates (additional to the ones in NODES_MONITORING_NODE_LABELS", - ) + NODES_MONITORING_NEW_NODES_LABELS: Annotated[ + list[DockerLabelKey], + Field( + description="autoscaling will add these labels to any new node it creates (additional to the ones in NODES_MONITORING_NODE_LABELS", + ), + ] class DaskMonitoringSettings(BaseCustomSettings): - DASK_MONITORING_URL: AnyUrl = Field( - ..., description="the url to the dask-scheduler" - ) - DASK_SCHEDULER_AUTH: ClusterAuthentication = Field( - ..., - description="defines the authentication of the clusters created via clusters-keeper (can be None or TLS)", - ) + DASK_MONITORING_URL: Annotated[ + AnyUrl, Field(description="the url to the dask-scheduler") + ] + DASK_SCHEDULER_AUTH: Annotated[ + ClusterAuthentication, + Field( + description="defines the authentication of 
the clusters created via clusters-keeper (can be None or TLS)", + ), + ] -class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): +class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): # CODE STATICS --------------------------------------------------------- API_VERSION: str = API_VERSION APP_NAME: str = APP_NAME API_VTAG: VersionTag = API_VTAG - # IMAGE BUILDTIME ------------------------------------------------------ - # @Makefile - SC_BUILD_DATE: str | None = None - SC_BUILD_TARGET: BuildTargetEnum | None = None - SC_VCS_REF: str | None = None - SC_VCS_URL: str | None = None - - # @Dockerfile - SC_BOOT_MODE: BootModeEnum | None = None - SC_BOOT_TARGET: BuildTargetEnum | None = None - SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( - None, - description="If a single run of the check takes longer than timeout seconds " - "then the check is considered to have failed." - "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", - ) - SC_USER_ID: int | None = None - SC_USER_NAME: str | None = None - # RUNTIME ----------------------------------------------------------- - AUTOSCALING_DEBUG: bool = Field( - default=False, - description="Debug mode", - validation_alias=AliasChoices("AUTOSCALING_DEBUG", "DEBUG"), - ) + AUTOSCALING_DEBUG: Annotated[ + bool, + Field( + description="Debug mode", + validation_alias=AliasChoices("AUTOSCALING_DEBUG", "DEBUG"), + ), + ] = False + AUTOSCALING_REMOTE_DEBUG_PORT: PortInt = 3000 - AUTOSCALING_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, - validation_alias=AliasChoices("AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), - ) - AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED", - "LOG_FORMAT_LOCAL_DEV_ENABLED", + AUTOSCALING_LOGLEVEL: Annotated[ + LogLevel, + Field( + LogLevel.INFO, + validation_alias=AliasChoices( + "AUTOSCALING_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ), - description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", - ) - AUTOSCALING_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "AUTOSCALING_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] + AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), + description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) - - AUTOSCALING_EC2_ACCESS: AutoscalingEC2Settings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - AUTOSCALING_SSM_ACCESS: AutoscalingSSMSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - AUTOSCALING_NODES_MONITORING: NodesMonitoringSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + ] = False + + AUTOSCALING_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "AUTOSCALING_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", + ), + ] + + AUTOSCALING_EC2_ACCESS: Annotated[ + AutoscalingEC2Settings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + AUTOSCALING_SSM_ACCESS: Annotated[ + AutoscalingSSMSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + AUTOSCALING_EC2_INSTANCES: Annotated[ + EC2InstancesSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + AUTOSCALING_NODES_MONITORING: Annotated[ + NodesMonitoringSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + AUTOSCALING_POLL_INTERVAL: Annotated[ + datetime.timedelta, + Field( + description="interval between each resource check " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", + ), + ] = datetime.timedelta(seconds=10) - AUTOSCALING_POLL_INTERVAL: datetime.timedelta = Field( - default=datetime.timedelta(seconds=10), - description="interval between each resource check " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) + AUTOSCALING_RABBITMQ: Annotated[ + RabbitSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + ] - AUTOSCALING_RABBITMQ: RabbitSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + AUTOSCALING_REDIS: Annotated[ + RedisSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] - AUTOSCALING_REDIS: RedisSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - AUTOSCALING_REGISTRY: RegistrySettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + AUTOSCALING_REGISTRY: Annotated[ + RegistrySettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] - AUTOSCALING_DASK: DaskMonitoringSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + AUTOSCALING_DASK: Annotated[ + DaskMonitoringSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] AUTOSCALING_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - AUTOSCALING_DRAIN_NODES_WITH_LABELS: bool = Field( - default=False, - description="If true, drained nodes" - " are maintained as active (in the docker terminology) " - "but a docker node label named osparc-services-ready is attached", - ) - AUTOSCALING_TRACING: TracingSettings | None = 
Field( - description="settings for opentelemetry tracing", - json_schema_extra={"auto_default_from_env": True}, - ) + AUTOSCALING_DRAIN_NODES_WITH_LABELS: Annotated[ + bool, + Field( + description="If true, drained nodes" + " are maintained as active (in the docker terminology) " + "but a docker node label named osparc-services-ready is attached", + ), + ] = False + AUTOSCALING_TRACING: Annotated[ + TracingSettings | None, + Field( + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, + ), + ] - AUTOSCALING_DOCKER_JOIN_DRAINED: bool = Field( - default=True, - description="If true, new nodes join the swarm as drained. If false as active.", - ) + AUTOSCALING_DOCKER_JOIN_DRAINED: Annotated[ + bool, + Field( + description="If true, new nodes join the swarm as drained. If false as active.", + ), + ] = True - AUTOSCALING_WAIT_FOR_CLOUD_INIT_BEFORE_WARM_BUFFER_ACTIVATION: bool = Field( - default=False, - description="If True, then explicitely wait for cloud-init process to be completed before issuing commands. " - "TIP: might be useful when cheap machines are used", - ) + AUTOSCALING_WAIT_FOR_CLOUD_INIT_BEFORE_WARM_BUFFER_ACTIVATION: Annotated[ + bool, + Field( + description="If True, then explicitely wait for cloud-init process to be completed before issuing commands. " + "TIP: might be useful when cheap machines are used", + ), + ] = False @cached_property def LOG_LEVEL(self): # noqa: N802 @@ -326,7 +370,7 @@ def _valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) @model_validator(mode="after") - def exclude_both_dynamic_computational_mode(self) -> Self: + def _exclude_both_dynamic_computational_mode(self) -> Self: if ( self.AUTOSCALING_DASK is not None and self.AUTOSCALING_NODES_MONITORING is not None diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py index 8f521cf6221..eba2176bc81 100644 --- a/services/catalog/src/simcore_service_catalog/core/settings.py +++ b/services/catalog/src/simcore_service_catalog/core/settings.py @@ -2,6 +2,7 @@ from functools import cached_property from typing import Annotated, Final +from common_library.basic_types import DEFAULT_FACTORY from models_library.api_schemas_catalog.services_specifications import ( ServiceSpecifications, ) @@ -59,40 +60,51 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ), ] = LogLevel.INFO - CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ), - description="Enables local development log format. 
diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py index 8f521cf6221..eba2176bc81 100644 --- a/services/catalog/src/simcore_service_catalog/core/settings.py +++ b/services/catalog/src/simcore_service_catalog/core/settings.py @@ -2,6 +2,7 @@ from functools import cached_property from typing import Annotated, Final +from common_library.basic_types import DEFAULT_FACTORY from models_library.api_schemas_catalog.services_specifications import ( ServiceSpecifications, ) @@ -59,40 +60,51 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ), ] = LogLevel.INFO - CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ), - description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", - ) - CATALOG_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "CATALOG_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] = False + CATALOG_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "CATALOG_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) - CATALOG_DEV_FEATURES_ENABLED: bool = Field( - default=False, - description="Enables development features. WARNING: make sure it is disabled in production .env file!", - ) + ] = DEFAULT_FACTORY - CATALOG_POSTGRES: PostgresSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + CATALOG_DEV_FEATURES_ENABLED: Annotated[ + bool, + Field( + description="Enables development features. WARNING: make sure it is disabled in production .env file!", + ), + ] = False - CATALOG_RABBITMQ: RabbitSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) + CATALOG_POSTGRES: Annotated[ + PostgresSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] - CATALOG_CLIENT_REQUEST: ClientRequestSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + CATALOG_RABBITMQ: Annotated[ + RabbitSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] - CATALOG_DIRECTOR: DirectorSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + CATALOG_CLIENT_REQUEST: Annotated[ + ClientRequestSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + CATALOG_DIRECTOR: Annotated[ + DirectorSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] CATALOG_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True @@ -106,10 +118,13 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): CATALOG_SERVICES_DEFAULT_SPECIFICATIONS: ServiceSpecifications = ( _DEFAULT_SERVICE_SPECIFICATIONS ) - CATALOG_TRACING: TracingSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for opentelemetry tracing", - ) + CATALOG_TRACING: Annotated[ + TracingSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", + ), + ] - DIRECTOR_DEFAULT_MAX_MEMORY: NonNegativeInt = Field(default=0) - DIRECTOR_DEFAULT_MAX_NANO_CPUS: NonNegativeInt = Field(default=0) + DIRECTOR_DEFAULT_MAX_MEMORY: NonNegativeInt = 0 + DIRECTOR_DEFAULT_MAX_NANO_CPUS: NonNegativeInt = 0 diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py index 32b5cdae9d1..af120c9d4cc 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py @@ -1,8 +1,9 @@ import datetime from functools import cached_property -from typing import Final, Literal, cast +from typing import Annotated, Final, Literal, cast from aws_library.ec2 import 
EC2InstanceBootSpecific, EC2Tags +from common_library.basic_types import DEFAULT_FACTORY from fastapi import FastAPI from models_library.basic_types import ( BootModeEnum, @@ -74,67 +75,85 @@ class ClustersKeeperSSMSettings(SSMSettings): class WorkersEC2InstancesSettings(BaseCustomSettings): - WORKERS_EC2_INSTANCES_ALLOWED_TYPES: dict[str, EC2InstanceBootSpecific] = Field( - ..., - description="Defines which EC2 instances are considered as candidates for new EC2 instance and their respective boot specific parameters", - ) - - WORKERS_EC2_INSTANCES_KEY_NAME: str = Field( - ..., - min_length=1, - description="SSH key filename (without ext) to access the instance through SSH" - " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)," - "this is required to start a new EC2 instance", - ) + WORKERS_EC2_INSTANCES_ALLOWED_TYPES: Annotated[ + dict[str, EC2InstanceBootSpecific], + Field( + description="Defines which EC2 instances are considered as candidates for new EC2 instance and their respective boot specific parameters", + ), + ] + + WORKERS_EC2_INSTANCES_KEY_NAME: Annotated[ + str, + Field( + min_length=1, + description="SSH key filename (without ext) to access the instance through SSH" + " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)," + "this is required to start a new EC2 instance", + ), + ] # BUFFER is not exposed since we set it to 0 - WORKERS_EC2_INSTANCES_MAX_START_TIME: datetime.timedelta = Field( - default=datetime.timedelta(minutes=1), - description="Usual time taken an EC2 instance with the given AMI takes to join the cluster " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)." - "NOTE: be careful that this time should always be a factor larger than the real time, as EC2 instances" - "that take longer than this time will be terminated as sometimes it happens that EC2 machine fail on start.", - ) - WORKERS_EC2_INSTANCES_MAX_INSTANCES: int = Field( - default=10, - description="Defines the maximum number of instances the clusters_keeper app may create", - ) + WORKERS_EC2_INSTANCES_MAX_START_TIME: Annotated[ + datetime.timedelta, + Field( + description="Usual time an EC2 instance with the given AMI takes to join the cluster " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)." 
+ "NOTE: be careful that this time should always be a factor larger than the real time, as EC2 instances" + "that take longer than this time will be terminated as sometimes it happens that EC2 machine fail on start.", + ), + ] = datetime.timedelta(minutes=1) + WORKERS_EC2_INSTANCES_MAX_INSTANCES: Annotated[ + int, + Field( + description="Defines the maximum number of instances the clusters_keeper app may create", + ), + ] = 10 # NAME PREFIX is not exposed since we override it anyway - WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field( - ..., - min_length=1, - description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" - " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " - " this is required to start a new EC2 instance", - ) - WORKERS_EC2_INSTANCES_SUBNET_ID: str = Field( - ..., - min_length=1, - description="A subnet is a range of IP addresses in your VPC " - " (https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html), " - "this is required to start a new EC2 instance", - ) + WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS: Annotated[ + list[str], + Field( + min_length=1, + description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" + " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " + " this is required to start a new EC2 instance", + ), + ] + WORKERS_EC2_INSTANCES_SUBNET_ID: Annotated[ + str, + Field( + min_length=1, + description="A subnet is a range of IP addresses in your VPC " + " (https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html), " + "this is required to start a new EC2 instance", + ), + ] - WORKERS_EC2_INSTANCES_TIME_BEFORE_DRAINING: datetime.timedelta = Field( - default=datetime.timedelta(minutes=1), - description="Time after which an EC2 instance may be terminated (min 0 max 1 minute) " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) + WORKERS_EC2_INSTANCES_TIME_BEFORE_DRAINING: Annotated[ + datetime.timedelta, + Field( + description="Time after which an EC2 instance may be terminated (min 0 max 1 minute) " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", + ), + ] = datetime.timedelta(minutes=1) - WORKERS_EC2_INSTANCES_TIME_BEFORE_TERMINATION: datetime.timedelta = Field( - default=datetime.timedelta(minutes=3), - description="Time after which an EC2 instance may be terminated (min 0, max 59 minutes) " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) + WORKERS_EC2_INSTANCES_TIME_BEFORE_TERMINATION: Annotated[ + datetime.timedelta, + Field( + description="Time after which an EC2 instance may be terminated (min 0, max 59 minutes) " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", + ), + ] = datetime.timedelta(minutes=3) - WORKERS_EC2_INSTANCES_CUSTOM_TAGS: EC2Tags = Field( - ..., - description="Allows to define tags that should be added to the created EC2 instance default tags. " - "a tag must have a key and an optional value. see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html]", - ) + WORKERS_EC2_INSTANCES_CUSTOM_TAGS: Annotated[ + EC2Tags, + Field( + description="Allows to define tags that should be added to the created EC2 instance default tags. 
" + "a tag must have a key and an optional value. see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html]", + ), + ] @field_validator("WORKERS_EC2_INSTANCES_ALLOWED_TYPES") @classmethod - def check_valid_instance_names( + def _check_valid_instance_names( cls, value: dict[str, EC2InstanceBootSpecific] ) -> dict[str, EC2InstanceBootSpecific]: # NOTE: needed because of a flaw in BaseCustomSettings @@ -144,77 +163,98 @@ def check_valid_instance_names( class PrimaryEC2InstancesSettings(BaseCustomSettings): - PRIMARY_EC2_INSTANCES_ALLOWED_TYPES: dict[str, EC2InstanceBootSpecific] = Field( - ..., - description="Defines which EC2 instances are considered as candidates for new EC2 instance and their respective boot specific parameters", - ) - PRIMARY_EC2_INSTANCES_MAX_INSTANCES: int = Field( - default=10, - description="Defines the maximum number of instances the clusters_keeper app may create", - ) - PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS: list[str] = Field( - ..., - min_length=1, - description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" - " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " - " this is required to start a new EC2 instance", - ) - PRIMARY_EC2_INSTANCES_SUBNET_ID: str = Field( - ..., - min_length=1, - description="A subnet is a range of IP addresses in your VPC " - " (https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html), " - "this is required to start a new EC2 instance", - ) - PRIMARY_EC2_INSTANCES_KEY_NAME: str = Field( - ..., - min_length=1, - description="SSH key filename (without ext) to access the instance through SSH" - " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)," - "this is required to start a new EC2 instance", - ) - PRIMARY_EC2_INSTANCES_CUSTOM_TAGS: EC2Tags = Field( - ..., - description="Allows to define tags that should be added to the created EC2 instance default tags. " - "a tag must have a key and an optional value. see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html]", - ) - PRIMARY_EC2_INSTANCES_ATTACHED_IAM_PROFILE: str = Field( - ..., - description="ARN the EC2 instance should be attached to (example: arn:aws:iam::XXXXX:role/NAME), to disable pass an empty string", - ) - PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_CA: str = Field( - ..., description="Name of the dask TLC CA in AWS Parameter Store" - ) - PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_CERT: str = Field( - ..., description="Name of the dask TLC certificate in AWS Parameter Store" - ) - PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_KEY: str = Field( - ..., description="Name of the dask TLC key in AWS Parameter Store" - ) - PRIMARY_EC2_INSTANCES_PROMETHEUS_USERNAME: str = Field( - ..., description="Username for accessing prometheus data" - ) - PRIMARY_EC2_INSTANCES_PROMETHEUS_PASSWORD: SecretStr = Field( - ..., description="Password for accessing prometheus data" - ) + PRIMARY_EC2_INSTANCES_ALLOWED_TYPES: Annotated[ + dict[str, EC2InstanceBootSpecific], + Field( + description="Defines which EC2 instances are considered as candidates for new EC2 instance and their respective boot specific parameters", + ), + ] - PRIMARY_EC2_INSTANCES_MAX_START_TIME: datetime.timedelta = Field( - default=datetime.timedelta(minutes=2), - description="Usual time taken an EC2 instance with the given AMI takes to startup and be ready to receive jobs " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)." 
- "NOTE: be careful that this time should always be a factor larger than the real time, as EC2 instances" - "that take longer than this time will be terminated as sometimes it happens that EC2 machine fail on start.", - ) + PRIMARY_EC2_INSTANCES_MAX_INSTANCES: Annotated[ + int, + Field( + description="Defines the maximum number of instances the clusters_keeper app may create", + ), + ] = 10 + PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS: Annotated[ + list[str], + Field( + min_length=1, + description="A security group acts as a virtual firewall for your EC2 instances to control incoming and outgoing traffic" + " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html), " + " this is required to start a new EC2 instance", + ), + ] + PRIMARY_EC2_INSTANCES_SUBNET_ID: Annotated[ + str, + Field( + min_length=1, + description="A subnet is a range of IP addresses in your VPC " + " (https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html), " + "this is required to start a new EC2 instance", + ), + ] + + PRIMARY_EC2_INSTANCES_KEY_NAME: Annotated[ + str, + Field( + min_length=1, + description="SSH key filename (without ext) to access the instance through SSH" + " (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-key-pairs.html)," + "this is required to start a new EC2 instance", + ), + ] + PRIMARY_EC2_INSTANCES_CUSTOM_TAGS: Annotated[ + EC2Tags, + Field( + description="Allows to define tags that should be added to the created EC2 instance default tags. " + "a tag must have a key and an optional value. see [https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html]", + ), + ] + PRIMARY_EC2_INSTANCES_ATTACHED_IAM_PROFILE: Annotated[ + str, + Field( + description="ARN the EC2 instance should be attached to (example: arn:aws:iam::XXXXX:role/NAME), to disable pass an empty string", + ), + ] + PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_CA: Annotated[ + str, Field(description="Name of the dask TLC CA in AWS Parameter Store") + ] + PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_CERT: Annotated[ + str, + Field(description="Name of the dask TLC certificate in AWS Parameter Store"), + ] + PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_KEY: Annotated[ + str, Field(description="Name of the dask TLC key in AWS Parameter Store") + ] + PRIMARY_EC2_INSTANCES_PROMETHEUS_USERNAME: Annotated[ + str, Field(description="Username for accessing prometheus data") + ] + PRIMARY_EC2_INSTANCES_PROMETHEUS_PASSWORD: Annotated[ + SecretStr, Field(description="Password for accessing prometheus data") + ] + + PRIMARY_EC2_INSTANCES_MAX_START_TIME: Annotated[ + datetime.timedelta, + Field( + description="Usual time taken an EC2 instance with the given AMI takes to startup and be ready to receive jobs " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)." 
+ "NOTE: be careful that this time should always be a factor larger than the real time, as EC2 instances" + "that take longer than this time will be terminated as sometimes it happens that EC2 machine fail on start.", + ), + ] = datetime.timedelta(minutes=2) - PRIMARY_EC2_INSTANCES_DOCKER_DEFAULT_ADDRESS_POOL: str = Field( - default="172.20.0.0/14", - description="defines the docker swarm default address pool in CIDR format " - "(see https://docs.docker.com/reference/cli/docker/swarm/init/)", - ) + PRIMARY_EC2_INSTANCES_DOCKER_DEFAULT_ADDRESS_POOL: Annotated[ + str, + Field( + description="defines the docker swarm default address pool in CIDR format " + "(see https://docs.docker.com/reference/cli/docker/swarm/init/)", + ), + ] = "172.20.0.0/14" # nosec @field_validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES") @classmethod - def check_valid_instance_names( + def _check_valid_instance_names( cls, value: dict[str, EC2InstanceBootSpecific] ) -> dict[str, EC2InstanceBootSpecific]: # NOTE: needed because of a flaw in BaseCustomSettings @@ -224,7 +264,7 @@ def check_valid_instance_names( @field_validator("PRIMARY_EC2_INSTANCES_ALLOWED_TYPES") @classmethod - def check_only_one_value( + def _check_only_one_value( cls, value: dict[str, EC2InstanceBootSpecific] ) -> dict[str, EC2InstanceBootSpecific]: if len(value) != 1: @@ -250,127 +290,160 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # @Dockerfile SC_BOOT_MODE: BootModeEnum | None = None SC_BOOT_TARGET: BuildTargetEnum | None = None - SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( - None, - description="If a single run of the check takes longer than timeout seconds " - "then the check is considered to have failed." - "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", - ) + SC_HEALTHCHECK_TIMEOUT: Annotated[ + PositiveInt | None, + Field( + description="If a single run of the check takes longer than timeout seconds " + "then the check is considered to have failed." + "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", + ), + ] = None SC_USER_ID: int | None = None SC_USER_NAME: str | None = None # RUNTIME ----------------------------------------------------------- - CLUSTERS_KEEPER_DEBUG: bool = Field( - default=False, - description="Debug mode", - validation_alias=AliasChoices("CLUSTERS_KEEPER_DEBUG", "DEBUG"), - ) - CLUSTERS_KEEPER_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, - validation_alias=AliasChoices( - "CLUSTERS_KEEPER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + CLUSTERS_KEEPER_DEBUG: Annotated[ + bool, + Field( + default=False, + description="Debug mode", + validation_alias=AliasChoices("CLUSTERS_KEEPER_DEBUG", "DEBUG"), ), - ) - CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED", - "LOG_FORMAT_LOCAL_DEV_ENABLED", + ] = False + CLUSTERS_KEEPER_LOGLEVEL: Annotated[ + LogLevel, + Field( + validation_alias=AliasChoices( + "CLUSTERS_KEEPER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ), - description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", - ) - CLUSTERS_KEEPER_LOG_FILTER_MAPPING: dict[ - LoggerName, list[MessageSubstring] - ] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "CLUSTERS_KEEPER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" - ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) - - CLUSTERS_KEEPER_EC2_ACCESS: ClustersKeeperEC2Settings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - CLUSTERS_KEEPER_SSM_ACCESS: ClustersKeeperSSMSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: PrimaryEC2InstancesSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES: WorkersEC2InstancesSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - - CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX: str = Field( - ..., - description="set a prefix to all machines created (useful for testing)", - ) + ] = LogLevel.INFO + CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", + ), + ] = False + CLUSTERS_KEEPER_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "CLUSTERS_KEEPER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", + ), + ] = DEFAULT_FACTORY + + CLUSTERS_KEEPER_EC2_ACCESS: Annotated[ + ClustersKeeperEC2Settings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + CLUSTERS_KEEPER_SSM_ACCESS: Annotated[ + ClustersKeeperSSMSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: Annotated[ + PrimaryEC2InstancesSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES: Annotated[ + WorkersEC2InstancesSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] + + CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX: Annotated[ + str, + Field( + description="set a prefix to all machines created (useful for testing)", + ), + ] - CLUSTERS_KEEPER_RABBITMQ: RabbitSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + CLUSTERS_KEEPER_RABBITMQ: Annotated[ + RabbitSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + ] CLUSTERS_KEEPER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - CLUSTERS_KEEPER_REDIS: RedisSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) + CLUSTERS_KEEPER_REDIS: Annotated[ + RedisSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] - CLUSTERS_KEEPER_REGISTRY: RegistrySettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + CLUSTERS_KEEPER_REGISTRY: Annotated[ + RegistrySettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] - CLUSTERS_KEEPER_TASK_INTERVAL: datetime.timedelta = Field( - 
default=datetime.timedelta(seconds=30), - description="interval between each clusters clean check " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) + CLUSTERS_KEEPER_TASK_INTERVAL: Annotated[ + datetime.timedelta, + Field( + description="interval between each cluster cleanup check " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)", + ), + ] = datetime.timedelta(seconds=30) - SERVICE_TRACKING_HEARTBEAT: datetime.timedelta = Field( - default=datetime.timedelta(seconds=60), - description="Service heartbeat interval (everytime a heartbeat is sent into RabbitMQ) " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) + SERVICE_TRACKING_HEARTBEAT: Annotated[ + datetime.timedelta, + Field( + description="Service heartbeat interval (every time a heartbeat is sent into RabbitMQ) " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)", + ), + ] = datetime.timedelta(seconds=60) - CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION: NonNegativeInt = Field( - default=5, - description="Max number of missed heartbeats before a cluster is terminated", - ) + CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION: Annotated[ + NonNegativeInt, + Field( + description="Max number of missed heartbeats before a cluster is terminated", + ), + ] = 5 - CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG: str = Field( - ..., - description="defines the image tag to use for the computational backend sidecar image (NOTE: it currently defaults to use itisfoundation organisation in Dockerhub)", - ) + CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG: Annotated[ + str, + Field( + description="defines the image tag to use for the computational backend sidecar image (NOTE: it currently defaults to use itisfoundation organisation in Dockerhub)", + ), + ] - CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: ( - ClusterAuthentication - ) = Field( - ..., - description="defines the authentication of the clusters created via clusters-keeper (can be None or TLS)", - ) + CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: Annotated[ + ClusterAuthentication, + Field( + description="defines the authentication of the clusters created via clusters-keeper (can be None or TLS)", + ), + ] - CLUSTERS_KEEPER_DASK_NTHREADS: NonNegativeInt = Field( - ..., - description="overrides the default number of threads in the dask-sidecars, setting it to 0 will use the default (see description in dask-sidecar)", - ) + CLUSTERS_KEEPER_DASK_NTHREADS: Annotated[ + NonNegativeInt, + Field( + description="overrides the default number of threads in the dask-sidecars, setting it to 0 will use the default (see description in dask-sidecar)", + ), + ] - CLUSTERS_KEEPER_DASK_WORKER_SATURATION: NonNegativeFloat | Literal["inf"] = Field( - default="inf", - description="override the dask scheduler 'worker-saturation' field" - ", see https://selectfrom.dev/deep-dive-into-dask-distributed-scheduler-9fdb3b36b7c7", - ) - CLUSTERS_KEEPER_TRACING: TracingSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for opentelemetry tracing", - ) + CLUSTERS_KEEPER_DASK_WORKER_SATURATION: Annotated[ + NonNegativeFloat | Literal["inf"], + Field( + description="override the dask scheduler 'worker-saturation' field" + 
", see https://selectfrom.dev/deep-dive-into-dask-distributed-scheduler-9fdb3b36b7c7", + ), + ] = "inf" - SWARM_STACK_NAME: str = Field( - ..., description="Stack name defined upon deploy (see main Makefile)" - ) + CLUSTERS_KEEPER_TRACING: Annotated[ + TracingSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", + ), + ] + + SWARM_STACK_NAME: Annotated[ + str, Field(description="Stack name defined upon deploy (see main Makefile)") + ] @cached_property def LOG_LEVEL(self) -> LogLevel: # noqa: N802 diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py index d6d8a1924f3..c95f13f0ecc 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py @@ -1,6 +1,7 @@ from functools import cached_property from typing import Annotated +from common_library.basic_types import DEFAULT_FACTORY from models_library.basic_types import BootModeEnum, LogLevel from pydantic import AliasChoices, Field, TypeAdapter, field_validator from pydantic.networks import AnyUrl @@ -34,32 +35,39 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ] = LogLevel.INFO - PENNSIEVE: PennsieveSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) + PENNSIEVE: Annotated[ + PennsieveSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] - DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED", - "LOG_FORMAT_LOCAL_DEV_ENABLED", + DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ), - description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", - ) - DATCORE_ADAPTER_LOG_FILTER_MAPPING: dict[ - LoggerName, list[MessageSubstring] - ] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "DATCORE_ADAPTER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] = False + DATCORE_ADAPTER_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "DATCORE_ADAPTER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) + ] = DEFAULT_FACTORY + DATCORE_ADAPTER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - DATCORE_ADAPTER_TRACING: TracingSettings | None = Field( - description="settings for opentelemetry tracing", - json_schema_extra={"auto_default_from_env": True}, - ) + DATCORE_ADAPTER_TRACING: Annotated[ + TracingSettings | None, + Field( + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, + ), + ] @cached_property def debug(self) -> bool: diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py index 19ae09e588f..2044671bc66 100644 --- a/services/datcore-adapter/tests/unit/conftest.py +++ b/services/datcore-adapter/tests/unit/conftest.py @@ -65,7 +65,7 @@ def pennsieve_mock_dataset_packages(mocks_dir: Path) -> dict[str, Any]: @pytest.fixture() def minimal_app( - app_envs: None, + app_environment: None, ) -> FastAPI: from simcore_service_datcore_adapter.main import the_app @@ -79,7 +79,7 @@ def client(minimal_app: FastAPI) -> TestClient: @pytest.fixture -def app_envs( +def app_environment( mock_env_devel_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch ) -> EnvVarsDict: return setenvs_from_dict( @@ -93,7 +93,7 @@ def app_envs( @pytest.fixture() async def initialized_app( - app_envs: None, minimal_app: FastAPI + app_environment: None, minimal_app: FastAPI ) -> AsyncIterator[FastAPI]: async with LifespanManager(minimal_app): yield minimal_app diff --git a/services/datcore-adapter/tests/unit/test_core_settings.py b/services/datcore-adapter/tests/unit/test_core_settings.py new file mode 100644 index 00000000000..6ab82562ad2 --- /dev/null +++ b/services/datcore-adapter/tests/unit/test_core_settings.py @@ -0,0 +1,42 @@ +# pylint: disable=unused-variable +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name + + +import pytest +from pytest_simcore.helpers.monkeypatch_envs import ( + EnvVarsDict, + delenvs_from_dict, + setenvs_from_dict, +) +from simcore_service_datcore_adapter.core.settings import ApplicationSettings + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, + external_envfile_dict: EnvVarsDict, +) -> EnvVarsDict: + """ + NOTE: To run against repo.config in osparc-config repo + + ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets + pytest --external-envfile=.secrets tests/unit/test_core_settings.py + + """ + if external_envfile_dict: + delenvs_from_dict(monkeypatch, app_environment, raising=False) + return setenvs_from_dict( + monkeypatch, + {**external_envfile_dict}, + ) + return app_environment + + +def 
test_unit_app_environment(app_environment: EnvVarsDict): + assert app_environment + settings = ApplicationSettings.create_from_envs() + print("captured settings: \n", settings.model_dump_json(indent=2)) + + assert settings.PENNSIEVE
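Several of the converted fields end in ] = DEFAULT_FACTORY. That sentinel is an alias of pydantic's PydanticUndefined, so pydantic treats the assignment as "no default given" and falls back to the default_factory declared inside Field(...), while static type checkers still see an assigned default and stop flagging the field as required. A rough sketch of the mechanism with a toy model (not the repo's code):

    from typing import Annotated, Any

    from pydantic import BaseModel, Field
    from pydantic_core import PydanticUndefined

    DEFAULT_FACTORY: Any = PydanticUndefined  # sentinel, mirrors common_library.basic_types


    class _ToyModel(BaseModel):
        # pydantic ignores the PydanticUndefined assignment and calls the factory instead
        mapping: Annotated[dict[str, list[str]], Field(default_factory=dict)] = DEFAULT_FACTORY


    assert _ToyModel().mapping == {}  # the factory supplied the default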
- "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", - ) - SC_USER_ID: int - SC_USER_NAME: str + EFS_USER_ID: Annotated[ + int, Field(description="Linux user ID that the Guardian service will run with") + ] + EFS_USER_NAME: Annotated[ + str, + Field(description="Linux user name that the Guardian service will run with"), + ] + EFS_GROUP_ID: Annotated[ + int, + Field( + description="Linux group ID that the EFS and Simcore linux users are part of" + ), + ] + EFS_GROUP_NAME: Annotated[ + str, + Field( + description="Linux group name that the EFS and Simcore linux users are part of" + ), + ] + EFS_DEFAULT_USER_SERVICE_SIZE_BYTES: ByteSize = TypeAdapter( + ByteSize + ).validate_python("500GiB") - EFS_USER_ID: int = Field( - description="Linux user ID that the Guardian service will run with" - ) - EFS_USER_NAME: str = Field( - description="Linux user name that the Guardian service will run with" - ) - EFS_GROUP_ID: int = Field( - description="Linux group ID that the EFS and Simcore linux users are part of" - ) - EFS_GROUP_NAME: str = Field( - description="Linux group name that the EFS and Simcore linux users are part of" - ) - EFS_DEFAULT_USER_SERVICE_SIZE_BYTES: ByteSize = Field( - default=TypeAdapter(ByteSize).validate_python("500GiB") - ) - EFS_REMOVAL_POLICY_TASK_AGE_LIMIT_TIMEDELTA: datetime.timedelta = Field( - default=datetime.timedelta(days=10), - description="For how long must a project remain unused before we remove its data from the EFS. (default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) + EFS_REMOVAL_POLICY_TASK_AGE_LIMIT_TIMEDELTA: Annotated[ + datetime.timedelta, + Field( + description="For how long must a project remain unused before we remove its data from the EFS. (default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", + ), + ] = datetime.timedelta(days=10) # RUNTIME ----------------------------------------------------------- - EFS_GUARDIAN_DEBUG: bool = Field( - default=False, - description="Debug mode", - validation_alias=AliasChoices("EFS_GUARDIAN_DEBUG", "DEBUG"), - ) - EFS_GUARDIAN_LOGLEVEL: LogLevel = Field( - LogLevel.INFO, - validation_alias=AliasChoices("EFS_GUARDIAN_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), - ) - EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED", - "LOG_FORMAT_LOCAL_DEV_ENABLED", + EFS_GUARDIAN_DEBUG: Annotated[ + bool, + Field( + description="Debug mode", + validation_alias=AliasChoices("EFS_GUARDIAN_DEBUG", "DEBUG"), ), - description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", - ) - EFS_GUARDIAN_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "EFS_GUARDIAN_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] = False + + EFS_GUARDIAN_LOGLEVEL: Annotated[ + LogLevel, + Field( + validation_alias=AliasChoices( + "EFS_GUARDIAN_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) + ] = LogLevel.INFO - EFS_GUARDIAN_AWS_EFS_SETTINGS: AwsEfsSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) - EFS_GUARDIAN_POSTGRES: PostgresSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) - EFS_GUARDIAN_RABBITMQ: RabbitSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) - EFS_GUARDIAN_REDIS: RedisSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) - EFS_GUARDIAN_TRACING: TracingSettings | None = Field( - description="settings for opentelemetry tracing", - json_schema_extra={"auto_default_from_env": True}, - ) + EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", + ), + ] = False + EFS_GUARDIAN_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "EFS_GUARDIAN_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", + ), + ] = DEFAULT_FACTORY + + EFS_GUARDIAN_AWS_EFS_SETTINGS: Annotated[ + AwsEfsSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] + EFS_GUARDIAN_POSTGRES: Annotated[ + PostgresSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] + EFS_GUARDIAN_RABBITMQ: Annotated[ + RabbitSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] + EFS_GUARDIAN_REDIS: Annotated[ + RedisSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] + EFS_GUARDIAN_TRACING: Annotated[ + TracingSettings | None, + Field( + description="settings for opentelemetry tracing", + json_schema_extra={"auto_default_from_env": True}, + ), + ] @cached_property def LOG_LEVEL(self) -> LogLevel: # noqa: N802 @@ -125,7 +121,7 @@ def LOG_LEVEL(self) -> LogLevel: # noqa: N802 @field_validator("EFS_GUARDIAN_LOGLEVEL", mode="before") @classmethod - def valid_log_level(cls, value: str) -> str: + def _valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) diff --git a/services/efs-guardian/tests/unit/test_core_settings.py b/services/efs-guardian/tests/unit/test_core_settings.py index f8fd28355d7..0d72653a8e4 100644 --- a/services/efs-guardian/tests/unit/test_core_settings.py +++ b/services/efs-guardian/tests/unit/test_core_settings.py @@ -8,5 +8,15 @@ def test_settings(app_environment: EnvVarsDict): - settings = ApplicationSettings.create_from_envs() + """ + We validate actual envfiles (e.g. 
repo.config files) by passing them via the CLI + + $ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets + $ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py + + """ + settings = ApplicationSettings() # type: ignore assert settings + + assert settings == ApplicationSettings.create_from_envs() + assert settings.EFS_GUARDIAN_POSTGRES diff --git a/services/invitations/src/simcore_service_invitations/core/settings.py b/services/invitations/src/simcore_service_invitations/core/settings.py index 6d9b1ec3a25..2df10527929 100644 --- a/services/invitations/src/simcore_service_invitations/core/settings.py +++ b/services/invitations/src/simcore_service_invitations/core/settings.py @@ -1,24 +1,19 @@ from functools import cached_property +from typing import Annotated +from common_library.basic_types import DEFAULT_FACTORY from models_library.products import ProductName -from pydantic import ( - AliasChoices, - Field, - HttpUrl, - PositiveInt, - SecretStr, - field_validator, -) +from pydantic import AliasChoices, Field, HttpUrl, SecretStr, field_validator from servicelib.logging_utils_filtering import LoggerName, MessageSubstring -from settings_library.base import BaseCustomSettings -from settings_library.basic_types import BuildTargetEnum, LogLevel, VersionTag +from settings_library.application import BaseApplicationSettings +from settings_library.basic_types import LogLevel, VersionTag from settings_library.tracing import TracingSettings from settings_library.utils_logging import MixinLoggingSettings from .._meta import API_VERSION, API_VTAG, PROJECT_NAME -class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): +class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): """Base settings of any osparc service's app""" # CODE STATICS --------------------------------------------------------- @@ -26,45 +21,38 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): APP_NAME: str = PROJECT_NAME API_VTAG: VersionTag = API_VTAG - # IMAGE BUILDTIME ------------------------------------------------------ - # @Makefile - SC_BUILD_DATE: str | None = None - SC_BUILD_TARGET: BuildTargetEnum | None = None - SC_VCS_REF: str | None = None - SC_VCS_URL: str | None = None - - # @Dockerfile - SC_BOOT_TARGET: BuildTargetEnum | None = None - SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( - default=None, - description="If a single run of the check takes longer than timeout seconds " - "then the check is considered to have failed." 
- "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", - ) - SC_USER_ID: int | None = None - SC_USER_NAME: str | None = None - # RUNTIME ----------------------------------------------------------- - INVITATIONS_LOGLEVEL: LogLevel = Field( - default=LogLevel.INFO, - validation_alias=AliasChoices("INVITATIONS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), - ) - INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED", - "LOG_FORMAT_LOCAL_DEV_ENABLED", + INVITATIONS_LOGLEVEL: Annotated[ + LogLevel, + Field( + validation_alias=AliasChoices( + "INVITATIONS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" + ), + ), + ] = LogLevel.INFO + + INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ), - description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", - ) - INVITATIONS_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "INVITATIONS_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] = False + + INVITATIONS_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "INVITATIONS_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) + ] = DEFAULT_FACTORY @cached_property def LOG_LEVEL(self): @@ -72,7 +60,7 @@ def LOG_LEVEL(self): @field_validator("INVITATIONS_LOGLEVEL", mode="before") @classmethod - def valid_log_level(cls, value: str) -> str: + def _valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) @@ -83,23 +71,27 @@ class MinimalApplicationSettings(_BaseApplicationSettings): are not related to the web server. """ - INVITATIONS_SWAGGER_API_DOC_ENABLED: bool = Field( - default=True, description="If true, it displays swagger doc at /doc" - ) + INVITATIONS_SWAGGER_API_DOC_ENABLED: Annotated[ + bool, Field(description="If true, it displays swagger doc at /doc") + ] = True - INVITATIONS_SECRET_KEY: SecretStr = Field( - ..., - description="Secret key to generate invitations. " - "TIP: simcore-service-invitations generate-key", - min_length=44, - ) - - INVITATIONS_OSPARC_URL: HttpUrl = Field(..., description="Target platform") - INVITATIONS_DEFAULT_PRODUCT: ProductName = Field( - ..., - description="Default product if not specified in the request. " - "WARNING: this product must be defined in INVITATIONS_OSPARC_URL", - ) + INVITATIONS_SECRET_KEY: Annotated[ + SecretStr, + Field( + description="Secret key to generate invitations. " + "TIP: simcore-service-invitations generate-key", + min_length=44, + ), + ] + + INVITATIONS_OSPARC_URL: Annotated[HttpUrl, Field(description="Target platform")] + INVITATIONS_DEFAULT_PRODUCT: Annotated[ + ProductName, + Field( + description="Default product if not specified in the request. 
" + "WARNING: this product must be defined in INVITATIONS_OSPARC_URL", + ), + ] class ApplicationSettings(MinimalApplicationSettings): @@ -108,18 +100,25 @@ class ApplicationSettings(MinimalApplicationSettings): These settings includes extra configuration for the http-API """ - INVITATIONS_USERNAME: str = Field( - ..., - description="Username for HTTP Basic Auth. Required if started as a web app.", - min_length=3, - ) - INVITATIONS_PASSWORD: SecretStr = Field( - ..., - description="Password for HTTP Basic Auth. Required if started as a web app.", - min_length=10, - ) + INVITATIONS_USERNAME: Annotated[ + str, + Field( + description="Username for HTTP Basic Auth. Required if started as a web app.", + min_length=3, + ), + ] + INVITATIONS_PASSWORD: Annotated[ + SecretStr, + Field( + description="Password for HTTP Basic Auth. Required if started as a web app.", + min_length=10, + ), + ] INVITATIONS_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - INVITATIONS_TRACING: TracingSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for opentelemetry tracing", - ) + INVITATIONS_TRACING: Annotated[ + TracingSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", + ), + ] diff --git a/services/payments/src/simcore_service_payments/core/settings.py b/services/payments/src/simcore_service_payments/core/settings.py index 43b2a071745..5d9c69d861b 100644 --- a/services/payments/src/simcore_service_payments/core/settings.py +++ b/services/payments/src/simcore_service_payments/core/settings.py @@ -2,6 +2,7 @@ from functools import cached_property from typing import Annotated +from common_library.basic_types import DEFAULT_FACTORY from models_library.basic_types import NonNegativeDecimal from pydantic import ( AliasChoices, @@ -36,24 +37,33 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- - PAYMENTS_LOGLEVEL: LogLevel = Field( - default=LogLevel.INFO, - validation_alias=AliasChoices("PAYMENTS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), - ) - PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "LOG_FORMAT_LOCAL_DEV_ENABLED", "PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED" - ), - description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", - ) - PAYMENTS_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "LOG_FILTER_MAPPING", "PAYMENTS_LOG_FILTER_MAPPING" - ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) + PAYMENTS_LOGLEVEL: Annotated[ + LogLevel, + Field( + validation_alias=AliasChoices("PAYMENTS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), + ), + ] = LogLevel.INFO + + PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "LOG_FORMAT_LOCAL_DEV_ENABLED", "PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED" + ), + description="Enables local development log format. 
diff --git a/services/payments/src/simcore_service_payments/core/settings.py b/services/payments/src/simcore_service_payments/core/settings.py index 43b2a071745..5d9c69d861b 100644 --- a/services/payments/src/simcore_service_payments/core/settings.py +++ b/services/payments/src/simcore_service_payments/core/settings.py @@ -2,6 +2,7 @@ from functools import cached_property from typing import Annotated +from common_library.basic_types import DEFAULT_FACTORY from models_library.basic_types import NonNegativeDecimal from pydantic import ( AliasChoices, @@ -36,24 +37,33 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): # RUNTIME ----------------------------------------------------------- - PAYMENTS_LOGLEVEL: LogLevel = Field( - default=LogLevel.INFO, - validation_alias=AliasChoices("PAYMENTS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), - ) - PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "LOG_FORMAT_LOCAL_DEV_ENABLED", "PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED" - ), - description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", - ) - PAYMENTS_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "LOG_FILTER_MAPPING", "PAYMENTS_LOG_FILTER_MAPPING" - ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) + PAYMENTS_LOGLEVEL: Annotated[ + LogLevel, + Field( + validation_alias=AliasChoices("PAYMENTS_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL"), + ), + ] = LogLevel.INFO + + PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "LOG_FORMAT_LOCAL_DEV_ENABLED", "PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED" + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", + ), + ] = False + + PAYMENTS_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "LOG_FILTER_MAPPING", "PAYMENTS_LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", + ), + ] = DEFAULT_FACTORY @cached_property def LOG_LEVEL(self): # noqa: N802 @@ -61,7 +71,7 @@ def LOG_LEVEL(self): # noqa: N802 @field_validator("PAYMENTS_LOGLEVEL", mode="before") @classmethod - def valid_log_level(cls, value: str) -> str: + def _valid_log_level(cls, value: str) -> str: return cls.validate_log_level(value) @@ -71,31 +81,37 @@ class ApplicationSettings(_BaseApplicationSettings): These settings includes extra configuration for the http-API """ - PAYMENTS_GATEWAY_URL: HttpUrl = Field( - ..., description="Base url to the payment gateway" - ) - - PAYMENTS_GATEWAY_API_SECRET: SecretStr = Field( - ..., description="Credentials for payments-gateway api" - ) - - PAYMENTS_USERNAME: str = Field( - ..., - description="Username for Auth. Required if started as a web app.", - min_length=3, - ) - PAYMENTS_PASSWORD: SecretStr = Field( - ..., - description="Password for Auth. Required if started as a web app.", - min_length=10, - ) - - PAYMENTS_ACCESS_TOKEN_SECRET_KEY: SecretStr = Field( - ..., - description="To generate a random password with openssl in hex format with 32 bytes, run `openssl rand -hex 32`", - min_length=30, - ) - PAYMENTS_ACCESS_TOKEN_EXPIRE_MINUTES: PositiveFloat = Field(default=30) + PAYMENTS_GATEWAY_URL: Annotated[ + HttpUrl, Field(description="Base url to the payment gateway") + ] + + PAYMENTS_GATEWAY_API_SECRET: Annotated[ + SecretStr, Field(description="Credentials for payments-gateway api") + ] + + PAYMENTS_USERNAME: Annotated[ + str, + Field( + description="Username for Auth. Required if started as a web app.", + min_length=3, + ), + ] + PAYMENTS_PASSWORD: Annotated[ + SecretStr, + Field( + description="Password for Auth. Required if started as a web app.", + min_length=10, + ), + ] + + PAYMENTS_ACCESS_TOKEN_SECRET_KEY: Annotated[ + SecretStr, + Field( + description="To generate a random password with openssl in hex format with 32 bytes, run `openssl rand -hex 32`", + min_length=30, + ), + ] + PAYMENTS_ACCESS_TOKEN_EXPIRE_MINUTES: PositiveFloat = 30.0 PAYMENTS_AUTORECHARGE_MIN_BALANCE_IN_CREDITS: Annotated[ NonNegativeDecimal, @@ -118,50 +134,69 @@ class ApplicationSettings(_BaseApplicationSettings): ), ] = Decimal(10_000) - PAYMENTS_AUTORECHARGE_ENABLED: bool = Field( - default=False, - description="Based on this variable is the auto recharge functionality in Payment service enabled", - ) - - PAYMENTS_BCC_EMAIL: EmailStr | None = Field( - default=None, - description="Special email for finance department. 
Currently used to BCC invoices.", - ) - - PAYMENTS_RABBITMQ: RabbitSettings = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for service/rabbitmq", - ) - - PAYMENTS_TRACING: TracingSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for opentelemetry tracing", - ) - - PAYMENTS_POSTGRES: PostgresSettings = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for postgres service", - ) - - PAYMENTS_STRIPE_URL: HttpUrl = Field( - ..., description="Base url to the payment Stripe" - ) - PAYMENTS_STRIPE_API_SECRET: SecretStr = Field( - ..., description="Credentials for Stripe api" - ) - - PAYMENTS_SWAGGER_API_DOC_ENABLED: bool = Field( - default=True, description="If true, it displays swagger doc at /doc" - ) - - PAYMENTS_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for RUT service", - ) + PAYMENTS_AUTORECHARGE_ENABLED: Annotated[ + bool, + Field( + description="Based on this variable is the auto recharge functionality in Payment service enabled", + ), + ] = False + + PAYMENTS_BCC_EMAIL: Annotated[ + EmailStr | None, + Field( + description="Special email for finance department. Currently used to BCC invoices.", + ), + ] = None + + PAYMENTS_RABBITMQ: Annotated[ + RabbitSettings, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for service/rabbitmq", + ), + ] + + PAYMENTS_TRACING: Annotated[ + TracingSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", + ), + ] + + PAYMENTS_POSTGRES: Annotated[ + PostgresSettings, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for postgres service", + ), + ] + + PAYMENTS_STRIPE_URL: Annotated[ + HttpUrl, Field(description="Base url to the payment Stripe") + ] + PAYMENTS_STRIPE_API_SECRET: Annotated[ + SecretStr, Field(description="Credentials for Stripe api") + ] + + PAYMENTS_SWAGGER_API_DOC_ENABLED: Annotated[ + bool, Field(description="If true, it displays swagger doc at /doc") + ] = True + + PAYMENTS_RESOURCE_USAGE_TRACKER: Annotated[ + ResourceUsageTrackerSettings, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for RUT service", + ), + ] PAYMENTS_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True - PAYMENTS_EMAIL: SMTPSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="optional email (see notifier_email service)", - ) + PAYMENTS_EMAIL: Annotated[ + SMTPSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="optional email (see notifier_email service)", + ), + ] diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/cli.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/cli.py index fefb9df5dd7..b842ddf358d 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/cli.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/cli.py @@ -42,5 +42,5 @@ def evaluate(ctx: typer.Context) -> None: assert ctx # nosec settings = MinimalApplicationSettings.create_from_envs() err_console.print( - f"[yellow]running with configuration:\n{settings.model_dump_json()}[/yellow]" + f"[yellow]running with configuration:\n{settings.model_dump_json(warnings='none')}[/yellow]" ) diff --git 
diff --git a/services/web/server/src/simcore_service_webserver/application_settings.py b/services/web/server/src/simcore_service_webserver/application_settings.py index c3f4e2ed7ae..2e52bba1a07 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings.py +++ b/services/web/server/src/simcore_service_webserver/application_settings.py @@ -3,14 +3,9 @@ from typing import Annotated, Any, Final from aiohttp import web +from common_library.basic_types import DEFAULT_FACTORY from common_library.pydantic_fields_extension import is_nullable -from models_library.basic_types import ( - BootModeEnum, - BuildTargetEnum, - LogLevel, - PortInt, - VersionTag, -) +from models_library.basic_types import LogLevel, PortInt, VersionTag from models_library.utils.change_case import snake_to_camel from pydantic import ( AliasChoices, @@ -21,9 +16,8 @@ model_validator, ) from pydantic.fields import Field -from pydantic.types import PositiveInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring -from settings_library.base import BaseCustomSettings +from settings_library.application import BaseApplicationSettings from settings_library.email import SMTPSettings from settings_library.postgres import PostgresSettings from settings_library.prometheus import PrometheusSettings @@ -59,30 +53,12 @@ _logger = logging.getLogger(__name__) -class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): +class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): # CODE STATICS --------------------------------------------------------- API_VERSION: str = API_VERSION APP_NAME: str = APP_NAME API_VTAG: VersionTag = TypeAdapter(VersionTag).validate_python(API_VTAG) - # IMAGE BUILDTIME ------------------------------------------------------ - # @Makefile - SC_BUILD_DATE: str | None = None - SC_BUILD_TARGET: BuildTargetEnum | None = None - SC_VCS_REF: str | None = None - SC_VCS_URL: str | None = None - - # @Dockerfile - SC_BOOT_MODE: BootModeEnum | None = None - SC_HEALTHCHECK_TIMEOUT: PositiveInt | None = Field( - None, - description="If a single run of the check takes longer than timeout seconds " - "then the check is considered to have failed."
- "It takes retries consecutive failures of the health check for the container to be considered unhealthy.", - ) - SC_USER_ID: int | None = None - SC_USER_NAME: str | None = None - # RUNTIME ----------------------------------------------------------- # settings defined from environs defined when container runs # @@ -90,31 +66,42 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): AIODEBUG_SLOW_DURATION_SECS: float = 0 # Release information: Passed by the osparc-ops-autodeployer - SIMCORE_VCS_RELEASE_TAG: str | None = Field( - default=None, - description="Name of the tag that marks this release, or None if undefined", - examples=["ResistanceIsFutile10"], - ) + SIMCORE_VCS_RELEASE_TAG: Annotated[ + str | None, + Field( + default=None, + description="Name of the tag that marks this release, or None if undefined", + examples=["ResistanceIsFutile10"], + ), + ] - SIMCORE_VCS_RELEASE_URL: AnyHttpUrl | None = Field( - default=None, - description="URL to release notes", - examples=[ - "https://github.com/ITISFoundation/osparc-simcore/releases/tag/staging_ResistanceIsFutile10" - ], - ) + SIMCORE_VCS_RELEASE_URL: Annotated[ + AnyHttpUrl | None, + Field( + default=None, + description="URL to release notes", + examples=[ + "https://github.com/ITISFoundation/osparc-simcore/releases/tag/staging_ResistanceIsFutile10" + ], + ), + ] - SWARM_STACK_NAME: str | None = Field( - None, description="Stack name defined upon deploy (see main Makefile)" - ) + SWARM_STACK_NAME: Annotated[ + str | None, + Field(None, description="Stack name defined upon deploy (see main Makefile)"), + ] + + WEBSERVER_DEV_FEATURES_ENABLED: Annotated[ + bool, + Field( + default=False, + description="Enables development features. WARNING: make sure it is disabled in production .env file!", + ), + ] + WEBSERVER_CREDIT_COMPUTATION_ENABLED: Annotated[ + bool, Field(default=False, description="Enables credit computation features.") + ] - WEBSERVER_DEV_FEATURES_ENABLED: bool = Field( - default=False, - description="Enables development features. WARNING: make sure it is disabled in production .env file!", - ) - WEBSERVER_CREDIT_COMPUTATION_ENABLED: bool = Field( - default=False, description="Enables credit computation features." - ) WEBSERVER_LOGLEVEL: Annotated[ LogLevel, Field( @@ -124,78 +111,125 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): # NOTE: suffix '_LOGLEVEL' is used overall ), ] = LogLevel.WARNING - WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + + WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + default=False, + validation_alias=AliasChoices( + "WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED", "LOG_FORMAT_LOCAL_DEV_ENABLED" + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ), - description="Enables local development log format. 
WARNING: make sure it is disabled if you want to have structured logs!", - ) - WEBSERVER_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "WEBSERVER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] + + WEBSERVER_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "WEBSERVER_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) - # TODO: find a better name!? - WEBSERVER_SERVER_HOST: str = Field( - default="0.0.0.0", # nosec - description="host name to serve within the container." - "NOTE that this different from WEBSERVER_HOST env which is the host seen outside the container", - ) - WEBSERVER_HOST: str | None = Field( - None, validation_alias=AliasChoices("WEBSERVER_HOST", "HOST", "HOSTNAME") - ) + ] = DEFAULT_FACTORY + + WEBSERVER_SERVER_HOST: Annotated[ + # TODO: find a better name!? + str, + Field( + description="host name to serve within the container. " + "NOTE that this is different from WEBSERVER_HOST env, which is the host seen outside the container", + ), + ] = "0.0.0.0" # nosec + + WEBSERVER_HOST: Annotated[ + str | None, + Field( + None, validation_alias=AliasChoices("WEBSERVER_HOST", "HOST", "HOSTNAME") + ), + ] + WEBSERVER_PORT: PortInt = TypeAdapter(PortInt).validate_python(DEFAULT_AIOHTTP_PORT) - WEBSERVER_FRONTEND: FrontEndAppSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="front-end static settings", - ) + WEBSERVER_FRONTEND: Annotated[ + FrontEndAppSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="front-end static settings", + ), + ] # PLUGINS ---------------- - WEBSERVER_ACTIVITY: PrometheusSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="activity plugin", - ) - WEBSERVER_CATALOG: CatalogSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="catalog service client's plugin", - ) - # TODO: Shall be required - WEBSERVER_DB: PostgresSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, description="database plugin" - ) - WEBSERVER_DIAGNOSTICS: DiagnosticsSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="diagnostics plugin", - ) - WEBSERVER_DIRECTOR_V2: DirectorV2Settings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="director-v2 service client's plugin", - ) + WEBSERVER_ACTIVITY: Annotated[ + PrometheusSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="activity plugin", + ), + ] + WEBSERVER_CATALOG: Annotated[ + CatalogSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="catalog service client's plugin", + ), + ] + WEBSERVER_DB: Annotated[ + PostgresSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="database plugin", + ), + ] + WEBSERVER_DIAGNOSTICS: Annotated[ + DiagnosticsSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="diagnostics plugin",
+ ), + ] + WEBSERVER_DIRECTOR_V2: Annotated[ + DirectorV2Settings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="director-v2 service client's plugin", + ), + ] - WEBSERVER_DYNAMIC_SCHEDULER: DynamicSchedulerSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - ) + WEBSERVER_DYNAMIC_SCHEDULER: Annotated[ + DynamicSchedulerSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + ), + ] - WEBSERVER_EMAIL: SMTPSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - WEBSERVER_EXPORTER: ExporterSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, description="exporter plugin" - ) - WEBSERVER_GARBAGE_COLLECTOR: GarbageCollectorSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="garbage collector plugin", - ) + WEBSERVER_EMAIL: Annotated[ + SMTPSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + ] + WEBSERVER_EXPORTER: Annotated[ + ExporterSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="exporter plugin", + ), + ] + WEBSERVER_GARBAGE_COLLECTOR: Annotated[ + GarbageCollectorSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="garbage collector plugin", + ), + ] - WEBSERVER_INVITATIONS: InvitationsSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="invitations plugin", - ) + WEBSERVER_INVITATIONS: Annotated[ + InvitationsSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="invitations plugin", + ), + ] WEBSERVER_LOGIN: Annotated[ LoginSettings | None, @@ -205,35 +239,52 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ), ] - WEBSERVER_PAYMENTS: PaymentsSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="payments plugin settings", - ) + WEBSERVER_PAYMENTS: Annotated[ + PaymentsSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="payments plugin settings", + ), + ] - WEBSERVER_PROJECTS: ProjectsSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + WEBSERVER_PROJECTS: Annotated[ + ProjectsSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] - WEBSERVER_REDIS: RedisSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + WEBSERVER_REDIS: Annotated[ + RedisSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + ] - WEBSERVER_REST: RestSettings | None = Field( - description="rest api plugin", json_schema_extra={"auto_default_from_env": True} - ) + WEBSERVER_REST: Annotated[ + RestSettings | None, + Field( + description="rest api plugin", + json_schema_extra={"auto_default_from_env": True}, + ), + ] - WEBSERVER_RESOURCE_MANAGER: ResourceManagerSettings = Field( - description="resource_manager plugin", - json_schema_extra={"auto_default_from_env": True}, - ) - WEBSERVER_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings | None = Field( - description="resource usage tracker service client's plugin", - json_schema_extra={"auto_default_from_env": True}, - ) - WEBSERVER_SCICRUNCH: SciCrunchSettings | None = Field( - description="scicrunch plugin", - json_schema_extra={"auto_default_from_env": True}, - ) + WEBSERVER_RESOURCE_MANAGER: Annotated[ + ResourceManagerSettings, + Field( + description="resource_manager plugin", + 
json_schema_extra={"auto_default_from_env": True}, + ), + ] + WEBSERVER_RESOURCE_USAGE_TRACKER: Annotated[ + ResourceUsageTrackerSettings | None, + Field( + description="resource usage tracker service client's plugin", + json_schema_extra={"auto_default_from_env": True}, + ), + ] + WEBSERVER_SCICRUNCH: Annotated[ + SciCrunchSettings | None, + Field( + description="scicrunch plugin", + json_schema_extra={"auto_default_from_env": True}, + ), + ] WEBSERVER_SESSION: Annotated[ SessionSettings, Field( @@ -242,22 +293,37 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ), ] - WEBSERVER_STATICWEB: StaticWebserverModuleSettings | None = Field( - description="static-webserver service plugin", - json_schema_extra={"auto_default_from_env": True}, - ) - WEBSERVER_STORAGE: StorageSettings | None = Field( - description="storage service client's plugin", - json_schema_extra={"auto_default_from_env": True}, - ) - WEBSERVER_STUDIES_DISPATCHER: StudiesDispatcherSettings | None = Field( - description="studies dispatcher plugin", - json_schema_extra={"auto_default_from_env": True}, - ) + WEBSERVER_STATICWEB: Annotated[ + StaticWebserverModuleSettings | None, + Field( + description="static-webserver service plugin", + json_schema_extra={"auto_default_from_env": True}, + ), + ] - WEBSERVER_TRACING: TracingSettings | None = Field( - description="tracing plugin", json_schema_extra={"auto_default_from_env": True} - ) + WEBSERVER_STORAGE: Annotated[ + StorageSettings | None, + Field( + description="storage service client's plugin", + json_schema_extra={"auto_default_from_env": True}, + ), + ] + + WEBSERVER_STUDIES_DISPATCHER: Annotated[ + StudiesDispatcherSettings | None, + Field( + description="studies dispatcher plugin", + json_schema_extra={"auto_default_from_env": True}, + ), + ] + + WEBSERVER_TRACING: Annotated[ + TracingSettings | None, + Field( + description="tracing plugin", + json_schema_extra={"auto_default_from_env": True}, + ), + ] WEBSERVER_TRASH: Annotated[ TrashSettings, Field(json_schema_extra={"auto_default_from_env": True}) @@ -285,7 +351,7 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): WEBSERVER_GROUPS: bool = True WEBSERVER_LICENSES: bool = False WEBSERVER_META_MODELING: bool = True - WEBSERVER_NOTIFICATIONS: bool = Field(default=True) + WEBSERVER_NOTIFICATIONS: bool = True WEBSERVER_PRODUCTS: bool = True WEBSERVER_PROFILING: bool = False WEBSERVER_PUBLICATIONS: bool = True @@ -296,12 +362,14 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): WEBSERVER_WALLETS: bool = True WEBSERVER_WORKSPACES: bool = True - # - WEBSERVER_SECURITY: bool = Field( - default=True, - description="This is a place-holder for future settings." - "Currently this is a system plugin and cannot be disabled", - ) + WEBSERVER_SECURITY: Annotated[ + bool, + Field( + default=True, + description="This is a placeholder for future settings. " + "Currently this is a system plugin and cannot be disabled", + ), + ] @model_validator(mode="before") @classmethod
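Most plugin fields above rely on `json_schema_extra={"auto_default_from_env": True}`, the `BaseCustomSettings` convention used throughout this codebase: when the application settings are created, such a sub-settings field is auto-filled from its own environment variables, and a nullable field that cannot be built becomes `None` (disabling the plugin) rather than failing validation. A rough usage sketch under that assumption (the class and field are illustrative, not taken from this patch):

```python
from typing import Annotated

from pydantic import Field
from settings_library.base import BaseCustomSettings
from settings_library.postgres import PostgresSettings


class MySettings(BaseCustomSettings):
    # auto-built from POSTGRES_* env vars; becomes None if they are missing
    MY_DB: Annotated[
        PostgresSettings | None,
        Field(json_schema_extra={"auto_default_from_env": True}),
    ]


# create_from_envs() resolves the auto-defaults against os.environ
settings = MySettings.create_from_envs()
print(settings.MY_DB)
```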
+ "Currently this is a system plugin and cannot be disabled", + ), + ] @model_validator(mode="before") @classmethod diff --git a/services/web/server/tests/unit/isolated/test_application_settings.py b/services/web/server/tests/unit/isolated/test_application_settings.py index 954f85d5129..5aae772d2d9 100644 --- a/services/web/server/tests/unit/isolated/test_application_settings.py +++ b/services/web/server/tests/unit/isolated/test_application_settings.py @@ -131,6 +131,7 @@ def test_disabled_plugins_settings_to_client_statics( assert plugin_name in set(statics["pluginsDisabled"]) +@pytest.mark.filterwarnings("error") def test_avoid_sensitive_info_in_public(app_settings: ApplicationSettings): # avoids display of sensitive info assert not any("pass" in key for key in app_settings.public_dict())