diff --git a/packages/service-library/src/servicelib/project_lock.py b/packages/service-library/src/servicelib/project_lock.py deleted file mode 100644 index f2ae6ce6ddd..00000000000 --- a/packages/service-library/src/servicelib/project_lock.py +++ /dev/null @@ -1,80 +0,0 @@ -import datetime -import logging -from asyncio.log import logger -from collections.abc import AsyncIterator -from contextlib import asynccontextmanager -from typing import Final, TypeAlias - -import redis -import redis.exceptions -from models_library.projects import ProjectID -from models_library.projects_access import Owner -from models_library.projects_state import ProjectLocked, ProjectStatus -from redis.asyncio.lock import Lock - -from .background_task import periodic_task -from .logging_utils import log_context - -_logger = logging.getLogger(__name__) - -PROJECT_REDIS_LOCK_KEY: str = "project_lock:{}" -PROJECT_LOCK_TIMEOUT: Final[datetime.timedelta] = datetime.timedelta(seconds=10) -ProjectLock = Lock - -ProjectLockError: TypeAlias = redis.exceptions.LockError - - -async def _auto_extend_project_lock(project_lock: Lock) -> None: - # NOTE: the background task already catches anything that might raise here - await project_lock.reacquire() - - -@asynccontextmanager -async def lock_project( - redis_lock: Lock, - project_uuid: str | ProjectID, - status: ProjectStatus, - owner: Owner | None = None, -) -> AsyncIterator[None]: - """Context manager to lock and unlock a project by user_id - - Raises: - ProjectLockError: if project is already locked - """ - - try: - if not await redis_lock.acquire( - blocking=False, - token=ProjectLocked( - value=True, - owner=owner, - status=status, - ).model_dump_json(), - ): - msg = f"Lock for project {project_uuid!r} owner {owner!r} could not be acquired" - raise ProjectLockError(msg) - - with log_context( - _logger, - logging.DEBUG, - msg=f"with lock for {owner=}:{project_uuid=}:{status=}", - ): - async with periodic_task( - _auto_extend_project_lock, - 
interval=0.6 * PROJECT_LOCK_TIMEOUT, - task_name=f"{PROJECT_REDIS_LOCK_KEY.format(project_uuid)}_lock_auto_extend", - project_lock=redis_lock, - ): - yield - - finally: - # let's ensure we release that stuff - try: - if await redis_lock.owned(): - await redis_lock.release() - except (redis.exceptions.LockError, redis.exceptions.LockNotOwnedError) as exc: - logger.warning( - "releasing %s unexpectedly raised an exception: %s", - f"{redis_lock=!r}", - f"{exc}", - ) diff --git a/packages/service-library/src/servicelib/redis/__init__.py b/packages/service-library/src/servicelib/redis/__init__.py index 8d78d47ece5..9e63a9f6525 100644 --- a/packages/service-library/src/servicelib/redis/__init__.py +++ b/packages/service-library/src/servicelib/redis/__init__.py @@ -5,19 +5,29 @@ CouldNotAcquireLockError, CouldNotConnectToRedisError, LockLostError, + ProjectLockError, ) from ._models import RedisManagerDBConfig +from ._project_lock import ( + get_project_locked_state, + is_project_locked, + with_project_locked, +) from ._utils import handle_redis_returns_union_types __all__: tuple[str, ...] 
= ( "CouldNotAcquireLockError", "CouldNotConnectToRedisError", "exclusive", + "get_project_locked_state", "handle_redis_returns_union_types", + "is_project_locked", "LockLostError", + "ProjectLockError", "RedisClientSDK", "RedisClientsManager", "RedisManagerDBConfig", + "with_project_locked", ) # nopycln: file diff --git a/packages/service-library/src/servicelib/redis/_errors.py b/packages/service-library/src/servicelib/redis/_errors.py index 998a9c1cb51..7fc3c7823ae 100644 --- a/packages/service-library/src/servicelib/redis/_errors.py +++ b/packages/service-library/src/servicelib/redis/_errors.py @@ -1,3 +1,6 @@ +from typing import TypeAlias + +import redis.exceptions from common_library.errors_classes import OsparcErrorMixin @@ -19,3 +22,6 @@ class LockLostError(BaseRedisError): "TIP: check connection to Redis DBs or look for Synchronous " "code that might block the auto-extender task. Somehow the distributed lock disappeared!" ) + + +ProjectLockError: TypeAlias = redis.exceptions.LockError # NOTE: backwards compatible diff --git a/packages/service-library/src/servicelib/redis/_project_lock.py b/packages/service-library/src/servicelib/redis/_project_lock.py new file mode 100644 index 00000000000..12f25e068d9 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/_project_lock.py @@ -0,0 +1,99 @@ +import functools +from collections.abc import Awaitable, Callable, Coroutine +from typing import Any, Final, ParamSpec, TypeVar + +from models_library.projects import ProjectID +from models_library.projects_access import Owner +from models_library.projects_state import ProjectLocked, ProjectStatus + +from ._client import RedisClientSDK +from ._decorators import exclusive +from ._errors import CouldNotAcquireLockError, ProjectLockError + +_PROJECT_REDIS_LOCK_KEY: Final[str] = "project_lock:{}" + + +P = ParamSpec("P") +R = TypeVar("R") + + +def with_project_locked( + redis_client: RedisClientSDK | Callable[..., RedisClientSDK], + *, + project_uuid: str | 
ProjectID, + status: ProjectStatus, + owner: Owner | None, + notification_cb: Callable[[], Awaitable[None]] | None, +) -> Callable[ + [Callable[P, Coroutine[Any, Any, R]]], Callable[P, Coroutine[Any, Any, R]] +]: + """creates a distributed auto sustained Redis lock for project with project_uuid, keeping its status and owner in the lock data + + Arguments: + redis_client -- the client to use to access redis + project_uuid -- the project UUID + status -- the project status + owner -- the owner of the lock (default: {None}) + notification_cb -- an optional notification callback that will be called AFTER the project is locked and AFTER it was unlocked + + Returns: + the decorated function return value + + Raises: + raises anything from the decorated function and from the optional notification callback + """ + + def _decorator( + func: Callable[P, Coroutine[Any, Any, R]], + ) -> Callable[P, Coroutine[Any, Any, R]]: + @functools.wraps(func) + async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + @exclusive( + redis_client, + lock_key=_PROJECT_REDIS_LOCK_KEY.format(project_uuid), + lock_value=ProjectLocked( + value=True, + owner=owner, + status=status, + ).model_dump_json(), + ) + async def _exclusive_func(*args, **kwargs) -> R: + if notification_cb is not None: + await notification_cb() + return await func(*args, **kwargs) + + try: + result = await _exclusive_func(*args, **kwargs) + # we are now unlocked + if notification_cb is not None: + await notification_cb() + return result + except CouldNotAcquireLockError as e: + raise ProjectLockError from e + + return _wrapper + + return _decorator + + +async def is_project_locked( + redis_client: RedisClientSDK, project_uuid: str | ProjectID +) -> bool: + redis_lock = redis_client.create_lock(_PROJECT_REDIS_LOCK_KEY.format(project_uuid)) + return await redis_lock.locked() + + +async def get_project_locked_state( + redis_client: RedisClientSDK, project_uuid: str | ProjectID +) -> ProjectLocked | None: + """ + Returns: + 
ProjectLocked object if the project project_uuid is locked or None otherwise + """ + if await is_project_locked(redis_client, project_uuid=project_uuid) and ( + lock_value := await redis_client.redis.get( + _PROJECT_REDIS_LOCK_KEY.format(project_uuid) + ) + ): + return ProjectLocked.model_validate_json(lock_value) + return None diff --git a/packages/service-library/tests/redis/test_decorators.py b/packages/service-library/tests/redis/test_decorators.py index 643cfef99d8..e4ca9d51463 100644 --- a/packages/service-library/tests/redis/test_decorators.py +++ b/packages/service-library/tests/redis/test_decorators.py @@ -75,7 +75,7 @@ async def _(): pass -async def test_exclusive_decorator( +async def test_exclusive_decorator_runs_original_method( redis_client_sdk: RedisClientSDK, lock_name: str, sleep_duration: float, diff --git a/packages/service-library/tests/redis/test_project_lock.py b/packages/service-library/tests/redis/test_project_lock.py new file mode 100644 index 00000000000..aa9d7fd1c74 --- /dev/null +++ b/packages/service-library/tests/redis/test_project_lock.py @@ -0,0 +1,144 @@ +# pylint: disable=no-value-for-parameter +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import asyncio +from typing import cast +from unittest import mock +from uuid import UUID + +import pytest +from faker import Faker +from models_library.projects import ProjectID +from models_library.projects_access import Owner +from models_library.projects_state import ProjectLocked, ProjectStatus +from servicelib.async_utils import cancel_wait_task +from servicelib.redis import ( + ProjectLockError, + RedisClientSDK, + get_project_locked_state, + is_project_locked, + with_project_locked, +) +from servicelib.redis._project_lock import _PROJECT_REDIS_LOCK_KEY + +pytest_simcore_core_services_selection = [ + "redis", +] +pytest_simcore_ops_services_selection = [ + "redis-commander", +] + + 
+@pytest.fixture() +def project_uuid(faker: Faker) -> ProjectID: + return cast(UUID, faker.uuid4(cast_to=None)) + + +assert "json_schema_extra" in Owner.model_config +assert isinstance(Owner.model_config["json_schema_extra"], dict) +assert isinstance(Owner.model_config["json_schema_extra"]["examples"], list) + + +@pytest.fixture(params=Owner.model_config["json_schema_extra"]["examples"]) +def owner(request: pytest.FixtureRequest) -> Owner: + return Owner(**request.param) + + +@pytest.fixture +def mocked_notification_cb() -> mock.AsyncMock: + return mock.AsyncMock() + + +@pytest.mark.parametrize( + "project_status", + [ + ProjectStatus.CLOSING, + ProjectStatus.CLONING, + ProjectStatus.EXPORTING, + ProjectStatus.OPENING, + ProjectStatus.MAINTAINING, + ], +) +async def test_with_project_locked( + redis_client_sdk: RedisClientSDK, + project_uuid: ProjectID, + owner: Owner, + project_status: ProjectStatus, + mocked_notification_cb: mock.AsyncMock, +): + @with_project_locked( + redis_client_sdk, + project_uuid=project_uuid, + status=project_status, + owner=owner, + notification_cb=mocked_notification_cb, + ) + async def _locked_fct() -> None: + mocked_notification_cb.assert_called_once() + assert await is_project_locked(redis_client_sdk, project_uuid) is True + locked_state = await get_project_locked_state(redis_client_sdk, project_uuid) + assert locked_state is not None + assert locked_state == ProjectLocked( + value=True, + owner=owner, + status=project_status, + ) + # check lock name formatting is correct + redis_lock = await redis_client_sdk.redis.get( + _PROJECT_REDIS_LOCK_KEY.format(project_uuid) + ) + assert redis_lock + assert ProjectLocked.model_validate_json(redis_lock) == ProjectLocked( + value=True, + owner=owner, + status=project_status, + ) + + mocked_notification_cb.assert_not_called() + assert await get_project_locked_state(redis_client_sdk, project_uuid) is None + assert await is_project_locked(redis_client_sdk, project_uuid) is False + await 
_locked_fct() + assert await is_project_locked(redis_client_sdk, project_uuid) is False + assert await get_project_locked_state(redis_client_sdk, project_uuid) is None + mocked_notification_cb.assert_called() + assert mocked_notification_cb.call_count == 2 + + +@pytest.mark.parametrize( + "project_status", + [ + ProjectStatus.CLOSING, + ProjectStatus.CLONING, + ProjectStatus.EXPORTING, + ProjectStatus.OPENING, + ProjectStatus.MAINTAINING, + ], +) +async def test_lock_already_locked_project_raises( + redis_client_sdk: RedisClientSDK, + project_uuid: ProjectID, + owner: Owner, + project_status: ProjectStatus, +): + started_event = asyncio.Event() + + @with_project_locked( + redis_client_sdk, + project_uuid=project_uuid, + status=project_status, + owner=owner, + notification_cb=None, + ) + async def _locked_fct() -> None: + started_event.set() + await asyncio.sleep(10) + + task1 = asyncio.create_task(_locked_fct(), name="pytest_task_1") + await started_event.wait() + with pytest.raises(ProjectLockError): + await _locked_fct() + + await cancel_wait_task(task1) diff --git a/packages/service-library/tests/test_project_lock.py b/packages/service-library/tests/test_project_lock.py deleted file mode 100644 index 386c14be3fb..00000000000 --- a/packages/service-library/tests/test_project_lock.py +++ /dev/null @@ -1 +0,0 @@ -# NOTE: Tested in osparc-simcore/services/web/server/tests/unit/with_dbs/02/test_project_lock.py diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/rpc/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/rpc/clusters.py index b7552dcfb09..82f84b9d471 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/rpc/clusters.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/rpc/clusters.py @@ -7,8 +7,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from servicelib.rabbitmq import RPCRouter -from servicelib.redis._client import RedisClientSDK 
-from servicelib.redis._decorators import exclusive +from servicelib.redis import RedisClientSDK, exclusive from ..core.settings import get_application_settings from ..modules import clusters diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks.py index d932efec51a..45d89f1bdc1 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks.py @@ -5,11 +5,7 @@ from models_library.projects import ProjectID from models_library.projects_state import ProjectStatus from servicelib.logging_utils import log_context -from servicelib.project_lock import ( - PROJECT_LOCK_TIMEOUT, - PROJECT_REDIS_LOCK_KEY, - lock_project, -) +from servicelib.redis import with_project_locked from simcore_postgres_database.utils_projects import ( DBProjectNotFoundError, ProjectsRepo, @@ -22,6 +18,22 @@ _logger = logging.getLogger(__name__) +async def _lock_project_and_remove_data(app: FastAPI, project_id: ProjectID) -> None: + efs_manager: EfsManager = app.state.efs_manager + + @with_project_locked( + get_redis_lock_client(app), + project_uuid=project_id, + status=ProjectStatus.MAINTAINING, + owner=None, + notification_cb=None, + ) + async def _remove(): + await efs_manager.remove_project_efs_data(project_id) + + await _remove() + + async def removal_policy_task(app: FastAPI) -> None: _logger.info("Removal policy task started") @@ -60,13 +72,4 @@ async def removal_policy_task(app: FastAPI) -> None: logging.INFO, msg=f"Removing data for project {project_id} started, project last change date {_project_last_change_date}, efs removal policy task age limit timedelta {app_settings.EFS_REMOVAL_POLICY_TASK_AGE_LIMIT_TIMEDELTA}", ): - redis_lock = get_redis_lock_client(app).create_lock( - PROJECT_REDIS_LOCK_KEY.format(project_id), - ttl=PROJECT_LOCK_TIMEOUT, - 
) - async with lock_project( - redis_lock, - project_uuid=project_id, - status=ProjectStatus.MAINTAINING, - ): - await efs_manager.remove_project_efs_data(project_id) + await _lock_project_and_remove_data(app, project_id) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py index 77907bc51a5..1b84c02df1d 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/process_messages.py @@ -9,7 +9,7 @@ from servicelib.rabbitmq.rpc_interfaces.dynamic_sidecar.disk_usage import ( update_disk_usage, ) -from servicelib.redis._decorators import exclusive +from servicelib.redis import exclusive from servicelib.utils import fire_and_forget_task from ..core.settings import get_application_settings diff --git a/services/efs-guardian/tests/unit/conftest.py b/services/efs-guardian/tests/unit/conftest.py index 61d2daaba6d..d598fe06ebd 100644 --- a/services/efs-guardian/tests/unit/conftest.py +++ b/services/efs-guardian/tests/unit/conftest.py @@ -17,7 +17,6 @@ from asgi_lifespan import LifespanManager from faker import Faker from fastapi import FastAPI -from httpx._transports.asgi import ASGITransport from pytest_mock import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQRPCClient @@ -119,38 +118,22 @@ async def client(app: FastAPI) -> AsyncIterator[httpx.AsyncClient]: # - Needed for app to trigger start/stop event handlers # - Prefer this client instead of fastapi.testclient.TestClient async with httpx.AsyncClient( - app=app, - base_url="http://efs-guardian.testserver.io", + transport=httpx.ASGITransport(app=app), + base_url=f"http://{app.title}.testserver.io", headers={"Content-Type": "application/json"}, ) as client: - assert isinstance( - client._transport, ASGITransport # pylint: 
disable=protected-access - ) yield client -# -# Redis -# - - @pytest.fixture -def disable_redis_and_background_tasks_setup(mocker: MockerFixture) -> Callable: - def _(): - # The following services are affected if redis is not in place - mocker.patch("simcore_service_efs_guardian.core.application.setup_redis") - mocker.patch( - "simcore_service_efs_guardian.core.application.setup_background_tasks" - ) - - return _ +def with_disabled_background_tasks(mocker: MockerFixture) -> None: + mocker.patch("simcore_service_efs_guardian.core.application.setup_background_tasks") @pytest.fixture -def with_disabled_redis_and_background_tasks( - disable_redis_and_background_tasks_setup: Callable, -): - disable_redis_and_background_tasks_setup() +def with_disabled_redis_and_background_tasks(mocker: MockerFixture) -> None: + mocker.patch("simcore_service_efs_guardian.core.application.setup_redis") + mocker.patch("simcore_service_efs_guardian.core.application.setup_background_tasks") # @@ -159,8 +142,7 @@ def with_disabled_redis_and_background_tasks( @pytest.fixture -async def efs_cleanup(app: FastAPI): - +async def efs_cleanup(app: FastAPI) -> AsyncIterator[None]: yield aws_efs_settings: AwsEfsSettings = app.state.settings.EFS_GUARDIAN_AWS_EFS_SETTINGS diff --git a/services/efs-guardian/tests/unit/test_efs_removal_policy_task.py b/services/efs-guardian/tests/unit/test_efs_removal_policy_task.py index cd57d865002..4000fab0c88 100644 --- a/services/efs-guardian/tests/unit/test_efs_removal_policy_task.py +++ b/services/efs-guardian/tests/unit/test_efs_removal_policy_task.py @@ -102,10 +102,28 @@ async def project_in_db( yield row +# Create a mock object manually +mock_with_locked_project = MagicMock() + + +# The stand-in decorator to replace the original one and record the function call +def mock_decorator(*args, **kwargs): + def _decorator(func): + def wrapper(*args, **kwargs): + mock_with_locked_project(*args, **kwargs) # Log the call + return func(*args, **kwargs) + + return wrapper 
+ + return _decorator + + @patch("simcore_service_efs_guardian.services.background_tasks.get_redis_lock_client") -@patch("simcore_service_efs_guardian.services.background_tasks.lock_project") +@patch( + "simcore_service_efs_guardian.services.background_tasks.with_project_locked", + new=mock_decorator, +) async def test_efs_removal_policy_task( - mock_lock_project: MagicMock, mock_get_redis_lock_client: MagicMock, faker: Faker, app: FastAPI, @@ -116,7 +134,7 @@ async def test_efs_removal_policy_task( ): # 1. Nothing should happen await removal_policy_task(app) - assert not mock_lock_project.called + assert not mock_with_locked_project.called # 2. Lets create some project with data aws_efs_settings: AwsEfsSettings = app.state.settings.EFS_GUARDIAN_AWS_EFS_SETTINGS @@ -148,7 +166,7 @@ async def test_efs_removal_policy_task( # 3. Nothing should happen await removal_policy_task(app) - assert not mock_lock_project.called + assert not mock_with_locked_project.called # 4. We will artifically change the project last change date app_settings: ApplicationSettings = app.state.settings @@ -169,7 +187,7 @@ async def test_efs_removal_policy_task( # 5. 
Now removal policy should remove those data await removal_policy_task(app) - assert mock_lock_project.assert_called_once + mock_with_locked_project.assert_called_once() assert mock_get_redis_lock_client.assert_called_once projects_list = await efs_manager.list_projects_across_whole_efs() assert projects_list == [] diff --git a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py index 97749637f54..07821c489f4 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py @@ -1,18 +1,22 @@ import logging from collections.abc import Callable, Coroutine from contextlib import AsyncExitStack +from pathlib import Path from typing import Any from aiofiles.tempfile import TemporaryDirectory as AioTemporaryDirectory from aiohttp import web +from models_library.projects import ProjectID +from models_library.projects_access import Owner from models_library.projects_state import ProjectStatus +from servicelib.redis import with_project_locked from servicelib.request_keys import RQT_USERID_KEY from .._constants import RQ_PRODUCT_KEY from .._meta import API_VTAG from ..login.decorators import login_required -from ..projects.lock import lock_project -from ..projects.projects_api import retrieve_and_notify_project_locked_state +from ..projects.projects_api import create_user_notification_cb +from ..redis import get_redis_lock_manager_client_sdk from ..security.decorators import permission_required from ..users.api import get_user_fullname from ._formatter.archive import get_sds_archive_path @@ -42,18 +46,20 @@ async def export_project(request: web.Request): user_id = request[RQT_USERID_KEY] project_uuid = request.match_info.get("project_id") assert project_uuid # nosec - delete_tmp_dir: Callable[[], Coroutine[Any, Any, None]] | None = None - try: - async with AsyncExitStack() as 
tmp_dir_stack, lock_project( - request.app, - project_uuid, - ProjectStatus.EXPORTING, - user_id, - await get_user_fullname(request.app, user_id=user_id), - ): - await retrieve_and_notify_project_locked_state( - user_id, project_uuid, request.app - ) + + @with_project_locked( + get_redis_lock_manager_client_sdk(request.app), + project_uuid=project_uuid, + status=ProjectStatus.EXPORTING, + owner=Owner( + user_id=user_id, **await get_user_fullname(request.app, user_id=user_id) + ), + notification_cb=create_user_notification_cb( + user_id, ProjectID(f"{project_uuid}"), request.app + ), + ) + async def _() -> tuple[Callable[[], Coroutine[Any, Any, None]], Path]: + async with AsyncExitStack() as tmp_dir_stack: tmp_dir = await tmp_dir_stack.enter_async_context(AioTemporaryDirectory()) file_to_download = await get_sds_archive_path( app=request.app, @@ -68,14 +74,13 @@ async def export_project(request: web.Request): msg = f"Must provide a file to download, not {file_to_download!s}" raise SDSException(msg) # this allows to transfer deletion of the tmp dir responsibility - delete_tmp_dir = tmp_dir_stack.pop_all().aclose - finally: - await retrieve_and_notify_project_locked_state( - user_id, project_uuid, request.app - ) + return tmp_dir_stack.pop_all().aclose, file_to_download + + delete_tmp_dir_callable, file_to_download = await _() headers = {"Content-Disposition": f'attachment; filename="{file_to_download.name}"'} - assert delete_tmp_dir # nosec return CleanupFileResponse( - remove_tmp_dir_cb=delete_tmp_dir, path=file_to_download, headers=headers + remove_tmp_dir_cb=delete_tmp_dir_callable, + path=file_to_download, + headers=headers, ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index 8b656f67cac..67d11cfc010 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ 
b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -1,7 +1,6 @@ import asyncio import logging from collections.abc import Coroutine -from contextlib import AsyncExitStack from typing import Any, TypeAlias from aiohttp import web @@ -10,6 +9,7 @@ from models_library.api_schemas_long_running_tasks.base import ProgressPercent from models_library.api_schemas_webserver.projects import ProjectGet from models_library.projects import ProjectID +from models_library.projects_access import Owner from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.projects_state import ProjectStatus from models_library.users import UserID @@ -18,6 +18,7 @@ from pydantic import TypeAdapter from servicelib.aiohttp.long_running_tasks.server import TaskProgress from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON +from servicelib.redis import with_project_locked from simcore_postgres_database.utils_projects_nodes import ( ProjectNode, ProjectNodeCreate, @@ -29,6 +30,7 @@ from ..director_v2 import api as director_v2_api from ..dynamic_scheduler import api as dynamic_scheduler_api from ..folders import _folders_repository as folders_db +from ..redis import get_redis_lock_manager_client_sdk from ..storage.api import ( copy_data_folders_from_project, get_project_total_size_simcore_s3, @@ -163,17 +165,7 @@ async def _copy_files_from_source_project( != ProjectTypeDB.TEMPLATE ) - async with AsyncExitStack() as stack: - if needs_lock_source_project: - await stack.enter_async_context( - projects_api.lock_with_notification( - app, - source_project["uuid"], - ProjectStatus.CLONING, - user_id, - await get_user_fullname(app, user_id=user_id), - ) - ) + async def _copy() -> None: starting_value = task_progress.percent async for long_running_task in copy_data_folders_from_project( app, source_project, new_project, nodes_map, user_id @@ -190,6 +182,21 @@ async def _copy_files_from_source_project( if long_running_task.done(): await 
long_running_task.result() + if needs_lock_source_project: + await with_project_locked( + get_redis_lock_manager_client_sdk(app), + project_uuid=source_project["uuid"], + status=ProjectStatus.CLONING, + owner=Owner( + user_id=user_id, **await get_user_fullname(app, user_id=user_id) + ), + notification_cb=projects_api.create_user_notification_cb( + user_id, ProjectID(f"{source_project['uuid']}"), app + ), + )(_copy)() + else: + await _copy() + async def _compose_project_data( app: web.Application, diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py index 2ea0d84b808..2530db3053d 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py @@ -1,4 +1,4 @@ -""" Handlers for STANDARD methods on /projects colletions +"""Handlers for STANDARD methods on /projects collections Standard methods or CRUD that states for Create+Read(Get&List)+Update+Delete @@ -36,12 +36,14 @@ X_SIMCORE_USER_AGENT, ) from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON +from servicelib.redis import get_project_locked_state from servicelib.rest_constants import RESPONSE_MODEL_POLICY from .._meta import API_VTAG as VTAG from ..catalog.client import get_services_for_user_in_product from ..folders.errors import FolderAccessForbiddenError, FolderNotFoundError from ..login.decorators import login_required +from ..redis import get_redis_lock_manager_client_sdk from ..resource_manager.user_sessions import PROJECT_ID_KEY, managed_resource from ..security.api import check_user_permission from ..security.decorators import permission_required @@ -65,7 +67,6 @@ ProjectOwnerNotFoundInTheProjectAccessRightsError, WrongTagIdsInQueryError, ) -from .lock import get_project_locked_state from .models import ProjectDict from .utils import get_project_unavailable_services, 
project_uses_available_services @@ -139,7 +140,8 @@ async def create_project(request: web.Request): project_create: ( ProjectCreateNew | ProjectCopyOverride | EmptyModel ) = await parse_request_body_as( - ProjectCreateNew | ProjectCopyOverride | EmptyModel, request # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950 + ProjectCreateNew | ProjectCopyOverride | EmptyModel, # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950 + request, ) predefined_project = ( project_create.model_dump( @@ -457,7 +459,7 @@ async def delete_project(request: web.Request): ) if project_users: other_user_names = { - await get_user_fullname(request.app, user_id=uid) + f"{await get_user_fullname(request.app, user_id=uid)}" for uid in project_users } raise web.HTTPForbidden( @@ -467,7 +469,8 @@ async def delete_project(request: web.Request): project_locked_state: ProjectLocked | None if project_locked_state := await get_project_locked_state( - app=request.app, project_uuid=path_params.project_id + get_redis_lock_manager_client_sdk(request.app), + project_uuid=path_params.project_id, ): raise web.HTTPConflict( reason=f"Project {path_params.project_id} is locked: {project_locked_state=}" diff --git a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py index 6b6256c00e5..c726d73200c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_states_handlers.py @@ -1,6 +1,4 @@ -""" handlers for project states - -""" +"""handlers for project states""" import contextlib import functools @@ -131,7 +129,7 @@ async def open_project(request: web.Request) -> web.Response: if not await projects_api.try_open_project_for_user( req_ctx.user_id, - project_uuid=f"{path_params.project_id}", + 
project_uuid=path_params.project_id, client_session_id=client_session_id, app=request.app, max_number_of_studies_per_user=product.max_open_studies_per_user, diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index 76cadc26987..1598cdbcced 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -1,10 +1,11 @@ """Defines the different exceptions that may arise in the projects subpackage""" + # mypy: disable-error-code=truthy-function from typing import Any from models_library.projects import ProjectID from models_library.users import UserID -from servicelib.project_lock import ProjectLockError +from servicelib.redis import ProjectLockError from ..errors import WebServerBaseError diff --git a/services/web/server/src/simcore_service_webserver/projects/lock.py b/services/web/server/src/simcore_service_webserver/projects/lock.py deleted file mode 100644 index 84b24c087e7..00000000000 --- a/services/web/server/src/simcore_service_webserver/projects/lock.py +++ /dev/null @@ -1,67 +0,0 @@ -import logging -from collections.abc import AsyncIterator -from contextlib import asynccontextmanager - -from aiohttp import web -from models_library.projects import ProjectID -from models_library.projects_access import Owner -from models_library.projects_state import ProjectLocked, ProjectStatus -from servicelib.project_lock import PROJECT_LOCK_TIMEOUT, PROJECT_REDIS_LOCK_KEY -from servicelib.project_lock import lock_project as common_lock_project - -from ..redis import get_redis_lock_manager_client -from ..users.api import FullNameDict - -_logger = logging.getLogger(__name__) - - -@asynccontextmanager -async def lock_project( - app: web.Application, - project_uuid: str | ProjectID, - status: ProjectStatus, - user_id: int, - user_fullname: FullNameDict, -) -> 
AsyncIterator[None]: - """Context manager to lock and unlock a project by user_id - - Raises: - ProjectLockError: if project is already locked - """ - - redis_lock = get_redis_lock_manager_client(app).lock( - PROJECT_REDIS_LOCK_KEY.format(project_uuid), - timeout=PROJECT_LOCK_TIMEOUT.total_seconds(), - ) - owner = Owner(user_id=user_id, **user_fullname) - - async with common_lock_project( - redis_lock, project_uuid=project_uuid, status=status, owner=owner - ): - yield - - -async def is_project_locked( - app: web.Application, project_uuid: str | ProjectID -) -> bool: - redis_lock = get_redis_lock_manager_client(app).lock( - PROJECT_REDIS_LOCK_KEY.format(project_uuid) - ) - return await redis_lock.locked() - - -async def get_project_locked_state( - app: web.Application, project_uuid: str | ProjectID -) -> ProjectLocked | None: - """ - Returns: - ProjectLocked object if the project project_uuid is locked or None otherwise - """ - if await is_project_locked(app, project_uuid): - redis_locks_client = get_redis_lock_manager_client(app) - - if lock_value := await redis_locks_client.get( - PROJECT_REDIS_LOCK_KEY.format(project_uuid) - ): - return ProjectLocked.model_validate_json(lock_value) - return None diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index fa46afffed2..8d421b9ad42 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -9,7 +9,6 @@ import asyncio import collections -import contextlib import datetime import json import logging @@ -81,7 +80,12 @@ ServiceWaitingForManualInterventionError, ServiceWasNotFoundError, ) -from servicelib.redis._decorators import exclusive +from servicelib.redis import ( + exclusive, + get_project_locked_state, + is_project_locked, + with_project_locked, +) from servicelib.utils import 
fire_and_forget_task, logged_gather from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.utils_projects_nodes import ( @@ -145,7 +149,6 @@ ProjectStartsTooManyDynamicNodesError, ProjectTooManyProjectOpenedError, ) -from .lock import get_project_locked_state, is_project_locked, lock_project from .models import ProjectDict, ProjectPatchExtended from .settings import ProjectsSettings, get_plugin_settings from .utils import extract_dns_without_default_port @@ -282,7 +285,7 @@ async def patch_project( "delete": True, } user: dict = await get_user(app, project_db.prj_owner) - _prj_owner_primary_group = f'{user["primary_gid"]}' + _prj_owner_primary_group = f"{user['primary_gid']}" if _prj_owner_primary_group not in new_prj_access_rights: raise ProjectOwnerNotFoundInTheProjectAccessRightsError if new_prj_access_rights[_prj_owner_primary_group] != _prj_required_permissions: @@ -361,8 +364,8 @@ async def _get_default_pricing_and_hardware_info( service_pricing_plan_get = await rut_api.get_default_service_pricing_plan( app, product_name=product_name, - service_key=ServiceKey(service_key), - service_version=ServiceVersion(service_version), + service_key=service_key, + service_version=service_version, ) if service_pricing_plan_get.pricing_units: for unit in service_pricing_plan_get.pricing_units: @@ -378,9 +381,9 @@ async def _get_default_pricing_and_hardware_info( ) -_MACHINE_TOTAL_RAM_SAFE_MARGIN_RATIO: Final[ - float -] = 0.1 # NOTE: machines always have less available RAM than advertised +_MACHINE_TOTAL_RAM_SAFE_MARGIN_RATIO: Final[float] = ( + 0.1 # NOTE: machines always have less available RAM than advertised +) _SIDECARS_OPS_SAFE_RAM_MARGIN: Final[ByteSize] = TypeAdapter(ByteSize).validate_python( "1GiB" ) @@ -1150,7 +1153,7 @@ async def _trigger_connected_service_retrieve( app: web.Application, project: dict, updated_node_uuid: str, changed_keys: list[str] ) -> None: project_id = project["uuid"] - if await is_project_locked(app, 
project_id): + if await is_project_locked(get_redis_lock_manager_client_sdk(app), project_id): # NOTE: we log warn since this function is fire&forget and raise an exception would not be anybody to handle it log.warning( "Skipping service retrieval because project with %s is currently locked." @@ -1241,9 +1244,18 @@ async def _clean_user_disconnected_clients( await user_session.remove(PROJECT_ID_KEY) +def create_user_notification_cb( + user_id: UserID, project_uuid: ProjectID, app: web.Application +): + async def _notification_cb() -> None: + await retrieve_and_notify_project_locked_state(user_id, f"{project_uuid}", app) + + return _notification_cb + + async def try_open_project_for_user( user_id: UserID, - project_uuid: str, + project_uuid: ProjectID, client_session_id: str, app: web.Application, max_number_of_studies_per_user: int | None, @@ -1256,14 +1268,17 @@ async def try_open_project_for_user( False if cannot be opened (e.g. locked, ) """ try: - async with lock_with_notification( - app, - project_uuid, - ProjectStatus.OPENING, - user_id, - await get_user_fullname(app, user_id=user_id), - notify_users=False, - ): + + @with_project_locked( + get_redis_lock_manager_client_sdk(app), + project_uuid=project_uuid, + status=ProjectStatus.OPENING, + owner=Owner( + user_id=user_id, **await get_user_fullname(app, user_id=user_id) + ), + notification_cb=None, + ) + async def _open_project() -> bool: with managed_resource(user_id, client_session_id, app) as user_session: # NOTE: if max_number_of_studies_per_user is set, the same # project shall still be openable if the tab was closed @@ -1288,11 +1303,11 @@ async def try_open_project_for_user( sessions_with_project: list[ UserSessionID ] = await user_session.find_users_of_resource( - app, PROJECT_ID_KEY, project_uuid + app, PROJECT_ID_KEY, f"{project_uuid}" ) if not sessions_with_project: # no one has the project so we assign it - await user_session.add(PROJECT_ID_KEY, project_uuid) + await 
user_session.add(PROJECT_ID_KEY, f"{project_uuid}") return True # Otherwise if this is the only user (NOTE: a session = user_id + client_seesion_id !) @@ -1308,7 +1323,7 @@ async def try_open_project_for_user( app, ): # steal the project - await user_session.add(PROJECT_ID_KEY, project_uuid) + await user_session.add(PROJECT_ID_KEY, f"{project_uuid}") await _clean_user_disconnected_clients( sessions_with_project, app ) @@ -1316,6 +1331,8 @@ async def try_open_project_for_user( return False + return await _open_project() + except ProjectLockError: # the project is currently locked return False @@ -1401,7 +1418,7 @@ async def _get_project_lock_state( f"{user_id=}", ) prj_locked_state: ProjectLocked | None = await get_project_locked_state( - app, project_uuid + get_redis_lock_manager_client_sdk(app), project_uuid ) if prj_locked_state: log.debug( @@ -1495,26 +1512,27 @@ async def add_project_states_for_user( lock_state = await _get_project_lock_state(user_id, project["uuid"], app) running_state = RunningState.UNKNOWN - if not is_template: - if computation_task := await director_v2_api.get_computation_task( + if not is_template and ( + computation_task := await director_v2_api.get_computation_task( app, user_id, project["uuid"] - ): - # get the running state - running_state = computation_task.state - # get the nodes individual states - for ( - node_id, - node_state, - ) in computation_task.pipeline_details.node_states.items(): - prj_node = project["workbench"].get(str(node_id)) - if prj_node is None: - continue - node_state_dict = json.loads( - node_state.model_dump_json(by_alias=True, exclude_unset=True) - ) - prj_node.setdefault("state", {}).update(node_state_dict) - prj_node_progress = node_state_dict.get("progress", None) or 0 - prj_node.update({"progress": round(prj_node_progress * 100.0)}) + ) + ): + # get the running state + running_state = computation_task.state + # get the nodes individual states + for ( + node_id, + node_state, + ) in 
computation_task.pipeline_details.node_states.items(): + prj_node = project["workbench"].get(str(node_id)) + if prj_node is None: + continue + node_state_dict = json.loads( + node_state.model_dump_json(by_alias=True, exclude_unset=True) + ) + prj_node.setdefault("state", {}).update(node_state_dict) + prj_node_progress = node_state_dict.get("progress", None) or 0 + prj_node.update({"progress": round(prj_node_progress * 100.0)}) project["state"] = ProjectState( locked=lock_state, state=ProjectRunningState(value=running_state) @@ -1748,14 +1766,18 @@ async def remove_project_dynamic_services( save_state = False # ------------------- - async with lock_with_notification( - app, - project_uuid, - ProjectStatus.CLOSING, - user_id, - user_name_data, - notify_users=notify_users, - ): + @with_project_locked( + get_redis_lock_manager_client_sdk(app), + project_uuid=project_uuid, + status=ProjectStatus.CLOSING, + owner=Owner(user_id=user_id, **user_name_data), + notification_cb=( + create_user_notification_cb(user_id, ProjectID(project_uuid), app) + if notify_users + else None + ), + ) + async def _locked_stop_dynamic_serivces_in_project() -> None: # save the state if the user is not a guest. if we do not know we save in any case. 
with suppress( RPCServerError, @@ -1771,6 +1793,8 @@ async def remove_project_dynamic_services( save_state=save_state, ) + await _locked_stop_dynamic_serivces_in_project() + # # NOTIFICATIONS & LOCKS ----------------------------------------------------- @@ -1848,45 +1872,6 @@ async def retrieve_and_notify_project_locked_state( ) -@contextlib.asynccontextmanager -async def lock_with_notification( - app: web.Application, - project_uuid: str, - status: ProjectStatus, - user_id: int, - user_name: FullNameDict, - *, - notify_users: bool = True, -): - try: - async with lock_project( - app, - project_uuid, - status, - user_id, - user_name, - ): - if notify_users: - await retrieve_and_notify_project_locked_state( - user_id, project_uuid, app - ) - yield - except ProjectLockError: - # someone else has already the lock? - prj_states: ProjectState = await get_project_states_for_user( - user_id, project_uuid, app - ) - log.exception( - "Project [%s] already locked in state '%s'. Please check with support.", - f"{project_uuid=}", - f"{prj_states.locked.status=}", - ) - raise - finally: - if notify_users: - await retrieve_and_notify_project_locked_state(user_id, project_uuid, app) - - async def get_project_inactivity( app: web.Application, project_id: ProjectID ) -> GetProjectInactivityResponse: diff --git a/services/web/server/tests/unit/with_dbs/02/test_project_lock.py b/services/web/server/tests/unit/with_dbs/02/test_project_lock.py deleted file mode 100644 index 71d4602a1f8..00000000000 --- a/services/web/server/tests/unit/with_dbs/02/test_project_lock.py +++ /dev/null @@ -1,194 +0,0 @@ -# pylint: disable=no-value-for-parameter -# pylint: disable=protected-access -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -import pytest -import redis.asyncio as aioredis -from aiohttp.test_utils import TestClient -from faker import Faker -from models_library.projects import ProjectID -from models_library.projects_access 
import Owner -from models_library.projects_state import ProjectLocked, ProjectStatus -from models_library.users import UserID -from pydantic import TypeAdapter -from simcore_service_webserver.projects.exceptions import ProjectLockError -from simcore_service_webserver.projects.lock import ( - PROJECT_REDIS_LOCK_KEY, - get_project_locked_state, - is_project_locked, - lock_project, -) -from simcore_service_webserver.users.api import FullNameDict - - -@pytest.fixture() -def project_uuid(faker: Faker) -> ProjectID: - return faker.uuid4(cast_to=None) - - -async def test_lock_project( - client: TestClient, - user_id: UserID, - project_uuid: ProjectID, - redis_locks_client: aioredis.Redis, - faker: Faker, -): - assert client.app - user_fullname: FullNameDict = { - "first_name": faker.first_name(), - "last_name": faker.last_name(), - } - async with lock_project( - app=client.app, - project_uuid=project_uuid, - status=ProjectStatus.EXPORTING, - user_id=user_id, - user_fullname=user_fullname, - ): - redis_value = await redis_locks_client.get( - PROJECT_REDIS_LOCK_KEY.format(project_uuid) - ) - assert redis_value - lock_value = TypeAdapter(ProjectLocked).validate_json(redis_value) - assert lock_value == ProjectLocked( - value=True, - owner=Owner(user_id=user_id, **user_fullname), - status=ProjectStatus.EXPORTING, - ) - - # once the lock is released, the value goes away - redis_value = await redis_locks_client.get( - PROJECT_REDIS_LOCK_KEY.format(project_uuid) - ) - assert not redis_value - - -async def test_lock_already_locked_project_raises( - client: TestClient, - user_id: UserID, - project_uuid: ProjectID, - redis_locks_client: aioredis.Redis, - faker: Faker, -): - assert client.app - user_name: FullNameDict = { - "first_name": faker.first_name(), - "last_name": faker.last_name(), - } - async with lock_project( - app=client.app, - project_uuid=project_uuid, - status=ProjectStatus.EXPORTING, - user_id=user_id, - user_fullname=user_name, - ): - # locking again is not 
permitted - with pytest.raises(ProjectLockError): - async with lock_project( - app=client.app, - project_uuid=project_uuid, - status=ProjectStatus.OPENING, - user_id=user_id, - user_fullname=user_name, - ): - ... - - -async def test_raise_exception_while_locked_release_lock( - client: TestClient, - user_id: UserID, - project_uuid: ProjectID, - redis_locks_client: aioredis.Redis, - faker: Faker, -): - assert client.app - user_name: FullNameDict = { - "first_name": faker.first_name(), - "last_name": faker.last_name(), - } - with pytest.raises(ValueError): - async with lock_project( - app=client.app, - project_uuid=project_uuid, - status=ProjectStatus.EXPORTING, - user_id=user_id, - user_fullname=user_name, - ): - # here we have the project locked - redis_value = await redis_locks_client.get( - PROJECT_REDIS_LOCK_KEY.format(project_uuid) - ) - assert redis_value - # now raising an exception - msg = "pytest exception" - raise ValueError(msg) - # now the lock shall be released - redis_value = await redis_locks_client.get( - PROJECT_REDIS_LOCK_KEY.format(project_uuid) - ) - assert not redis_value - - -async def test_is_project_locked( - client: TestClient, - user_id: UserID, - project_uuid: ProjectID, - faker: Faker, -): - assert client.app - assert await is_project_locked(client.app, project_uuid) is False - user_name: FullNameDict = { - "first_name": faker.first_name(), - "last_name": faker.last_name(), - } - async with lock_project( - app=client.app, - project_uuid=project_uuid, - status=ProjectStatus.EXPORTING, - user_id=user_id, - user_fullname=user_name, - ): - assert await is_project_locked(client.app, project_uuid) is True - - -@pytest.mark.parametrize( - "lock_status", - [ - ProjectStatus.CLOSING, - ProjectStatus.CLONING, - ProjectStatus.EXPORTING, - ProjectStatus.OPENING, - ], -) -async def test_get_project_locked_state( - client: TestClient, - user_id: UserID, - project_uuid: ProjectID, - faker: Faker, - lock_status: ProjectStatus, -): - assert client.app - # 
no lock - assert await get_project_locked_state(client.app, project_uuid) is None - - assert await is_project_locked(client.app, project_uuid) is False - user_name: FullNameDict = { - "first_name": faker.first_name(), - "last_name": faker.last_name(), - } - async with lock_project( - app=client.app, - project_uuid=project_uuid, - status=lock_status, - user_id=user_id, - user_fullname=user_name, - ): - locked_state = await get_project_locked_state(client.app, project_uuid) - expected_locked_state = ProjectLocked( - value=bool(lock_status not in [ProjectStatus.CLOSED, ProjectStatus.OPENED]), - owner=Owner(user_id=user_id, **user_name), - status=lock_status, - ) - assert locked_state == expected_locked_state diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py index 80f7f4a7bd3..6dbcbe488ac 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py @@ -11,7 +11,6 @@ from unittest.mock import MagicMock, call import pytest -import redis.asyncio as aioredis import sqlalchemy as sa from aiohttp.test_utils import TestClient from faker import Faker @@ -20,6 +19,7 @@ DynamicServiceStop, ) from models_library.projects import ProjectID +from models_library.projects_access import Owner from models_library.projects_state import ProjectStatus from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.webserver_login import UserInfoDict @@ -30,13 +30,14 @@ ) from servicelib.aiohttp import status from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from servicelib.redis import with_project_locked from simcore_postgres_database.models.products import products from simcore_postgres_database.models.projects_to_products import projects_to_products from 
simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects import _crud_api_delete from simcore_service_webserver.projects.models import ProjectDict -from simcore_service_webserver.projects.projects_api import lock_with_notification +from simcore_service_webserver.redis import get_redis_lock_manager_client_sdk from socketio.exceptions import ConnectionError as SocketConnectionError @@ -147,7 +148,6 @@ async def test_delete_multiple_opened_project_forbidden( user_role: UserRole, expected_ok: HTTPStatus, expected_forbidden: HTTPStatus, - redis_client: aioredis.Redis, ): assert client.app @@ -223,17 +223,16 @@ async def test_delete_project_while_it_is_locked_raises_error( logged_user: UserInfoDict, user_project: ProjectDict, expected: ExpectedResponse, + faker: Faker, ): assert client.app project_uuid = user_project["uuid"] user_id = logged_user["id"] - async with lock_with_notification( - app=client.app, + await with_project_locked( + get_redis_lock_manager_client_sdk(client.app), project_uuid=project_uuid, status=ProjectStatus.CLOSING, - user_id=user_id, - user_name={"first_name": "test", "last_name": "test"}, - notify_users=False, - ): - await _request_delete_project(client, user_project, expected.conflict) + owner=Owner(user_id=user_id, first_name=faker.name(), last_name=faker.name()), + notification_cb=None, + )(_request_delete_project)(client, user_project, expected.conflict)