From 7d7dd8b72c9faf1cba4ca82fc39a27ddf187d3d1 Mon Sep 17 00:00:00 2001 From: EPajares Date: Sat, 14 Sep 2024 20:18:00 +0000 Subject: [PATCH 1/8] WIP layer sharing --- src/core/config.py | 3 +- src/crud/crud_layer.py | 87 ++++++++++++- src/crud/crud_report.py | 9 -- src/db/models/__init__.py | 8 +- src/db/models/_link_model.py | 207 +++++++++++++++++++++++++++++- src/db/models/layer.py | 9 +- src/db/models/organization.py | 40 ++++++ src/db/models/project.py | 16 ++- src/db/models/report.py | 63 --------- src/db/models/role.py | 50 ++++++++ src/db/models/scenario_feature.py | 1 + src/db/models/team.py | 38 ++++++ src/db/models/user.py | 4 + src/db/sql/recreate_trigger.py | 62 +++++++++ src/endpoints/v2/api.py | 2 - src/endpoints/v2/layer.py | 45 +++---- src/endpoints/v2/project.py | 33 +---- src/endpoints/v2/report.py | 199 ---------------------------- src/schemas/layer.py | 14 ++ src/schemas/project.py | 1 - src/schemas/report.py | 47 ------- tests/api/test_layer.py | 11 ++ tests/conftest.py | 120 +++++++++++++++-- 23 files changed, 656 insertions(+), 413 deletions(-) delete mode 100644 src/crud/crud_report.py create mode 100644 src/db/models/organization.py delete mode 100644 src/db/models/report.py create mode 100644 src/db/models/role.py create mode 100644 src/db/models/team.py create mode 100644 src/db/sql/recreate_trigger.py delete mode 100644 src/endpoints/v2/report.py delete mode 100644 src/schemas/report.py diff --git a/src/core/config.py b/src/core/config.py index 784e93b..2c3d9d1 100644 --- a/src/core/config.py +++ b/src/core/config.py @@ -15,6 +15,7 @@ class SyncPostgresDsn(PostgresDsn): class Settings(BaseSettings): + AUTH: Optional[bool] = False TEST_MODE: Optional[bool] = False ENVIRONMENT: Optional[str] = "dev" API_V2_STR: str = "/api/v2" @@ -23,6 +24,7 @@ class Settings(BaseSettings): PROJECT_NAME: Optional[str] = "GOAT Core API" USER_DATA_SCHEMA: Optional[str] = "user_data" CUSTOMER_SCHEMA: Optional[str] = "customer" + ACCOUNTS_SCHEMA: Optional[str] = "accounts" REGION_MAPPING_PT_TABLE: Optional[str] = "basic.region_mapping_pt" BASE_STREET_NETWORK: Optional[UUID] = "903ecdca-b717-48db-bbce-0219e41439cf" STREET_NETWORK_EDGE_DEFAULT_LAYER_PROJECT_ID = 36126 @@ -177,4 +179,3 @@ class Config: settings = Settings() -settings = Settings() diff --git a/src/crud/crud_layer.py b/src/crud/crud_layer.py index 5b8e6ee..31a0ed8 100644 --- a/src/crud/crud_layer.py +++ b/src/crud/crud_layer.py @@ -11,8 +11,9 @@ from fastapi_pagination import Params as PaginationParams from geoalchemy2.shape import WKTElement from pydantic import BaseModel -from sqlalchemy import and_, func, or_, select, text +from sqlalchemy import and_, func, or_, select, text, cast from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import joinedload, load_only, contains_eager from sqlmodel import SQLModel from starlette.datastructures import UploadFile @@ -28,7 +29,14 @@ ) from src.crud.base import CRUDBase from src.crud.crud_layer_project import layer_project as crud_layer_project -from src.db.models.layer import Layer +from src.db.models import ( + Layer, + LayerOrganizationLink, + LayerTeamLink, + Role, + Team, + Organization, +) from src.schemas.error import ( ColumnNotFoundError, LayerNotFoundError, @@ -582,6 +590,8 @@ async def get_layers_with_filter( order: str, page_params: PaginationParams, params: ILayerGet | ICatalogLayerGet, + team_id: UUID = None, + organization_id: UUID = None, ): """Get layer with filter.""" @@ -599,8 +609,46 @@ async def get_layers_with_filter( # Get base filter 
filters = await self.get_base_filter(user_id=user_id, params=params) - # Build query - query = select(Layer).where(and_(*filters)) + # If it has team_id then do a join with the LayerTeamLink table + if team_id: + query = ( + select(Layer, Role.name, Team.name, Team.id, Team.avatar) + .join(LayerTeamLink, LayerTeamLink.layer_id == Layer.id) + .join(Role, LayerTeamLink.role_id == Role.id) + .join(Team, LayerTeamLink.team_id == Team.id) + .where( + and_( + LayerTeamLink.team_id == team_id, + *filters, + ) + ) + .options(contains_eager(Layer.team_links)) + ) + elif organization_id: + query = ( + select( + Layer, + Role.name, + Organization.name, + Organization.id, + Organization.avatar, + ) + .join(LayerOrganizationLink, LayerOrganizationLink.layer_id == Layer.id) + .join(Role, LayerOrganizationLink.role_id == Role.id) + .join( + Organization, + LayerOrganizationLink.organization_id == Organization.id, + ) + .where( + and_( + LayerOrganizationLink.organization_id == organization_id, + *filters, + ) + ) + .options(contains_eager(Layer.organization_links)) + ) + else: + query = select(Layer).where(and_(*filters)) # Build params params = { @@ -617,6 +665,37 @@ async def get_layers_with_filter( page_params=page_params, **params, ) + + # Build shared_with object + layers_arr = [] + if team_id: + shared_with_key = "teams" + elif organization_id: + shared_with_key = "organizations" + + if team_id or organization_id: + for layer in layers.items: + # Check if layer[1] is of type LayerTeamLink + shared_with = {shared_with_key: []} + layer_obj = layer[0] + role = layer[1] + name = layer[2] + team_id = layer[3] + avatar = layer[4] + + shared_with[shared_with_key].append( + { + "role": role, + "team_id": team_id, + "team_name": name, + "team_avatar": avatar, + } + ) + layers_arr.append({"layer": layer_obj, "shared_with": shared_with}) + else: + layers_arr = [{"layer": layer} for layer in layers.items] + + layers.items = layers_arr return layers async def metadata_aggregate( diff --git a/src/crud/crud_report.py b/src/crud/crud_report.py deleted file mode 100644 index 15bd88f..0000000 --- a/src/crud/crud_report.py +++ /dev/null @@ -1,9 +0,0 @@ -from .base import CRUDBase -from src.db.models.report import Report - - -class CRUDReport(CRUDBase): - pass - - -report = CRUDReport(Report) diff --git a/src/db/models/__init__.py b/src/db/models/__init__.py index 1deacd1..edfbc00 100644 --- a/src/db/models/__init__.py +++ b/src/db/models/__init__.py @@ -1,12 +1,14 @@ -from ._link_model import LayerProjectLink, ScenarioScenarioFeatureLink +from ._link_model import LayerProjectLink, ScenarioScenarioFeatureLink, UserProjectLink, LayerOrganizationLink, LayerTeamLink from .data_store import DataStore from .folder import Folder from .job import Job from .layer import Layer from .project import Project -from .report import Report from .scenario import Scenario from .scenario_feature import ScenarioFeature from .system_setting import SystemSetting -from .user import User from .system_task import SystemTask +from .user import User +from .organization import Organization +from .team import Team +from .role import Role diff --git a/src/db/models/_link_model.py b/src/db/models/_link_model.py index bbec868..a77ffa0 100644 --- a/src/db/models/_link_model.py +++ b/src/db/models/_link_model.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING, List, Optional from uuid import UUID from sqlalchemy.dialects.postgresql import JSONB @@ -14,12 +14,18 @@ ) from src.db.models._base_class import 
DateTimeBase - +from src.core.config import settings +from sqlmodel import SQLModel +from src.db.models.organization import Organization if TYPE_CHECKING: from .layer import Layer from .project import Project from .scenario import Scenario from .scenario_feature import ScenarioFeature + from .user import User + from .team import Team + from .role import Role + from src.db.models.organization import Organization class LayerProjectLink(DateTimeBase, table=True): @@ -136,3 +142,200 @@ class UserProjectLink(DateTimeBase, table=True): UniqueConstraint( UserProjectLink.project_id, UserProjectLink.user_id, name="unique_user_project" ) + + +# class UserTeamLink(SQLModel, table=True): +# """ +# A table representing the relation between users and teams. + +# Attributes: +# id (int): The unique identifier for the user team. +# team_id (str): The unique identifier for the team the user belongs to. +# user_id (str): The unique identifier for the user that belongs to the team. +# """ + +# __tablename__ = "user_team" +# __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} + +# id: Optional[int] = Field( +# sa_column=Column(Integer, primary_key=True, autoincrement=True) +# ) +# team_id: UUID = Field( +# sa_column=Column( +# UUID_PG(as_uuid=True), +# ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.team.id", ondelete="CASCADE"), +# nullable=False, +# ) +# ) +# user_id: UUID = Field( +# sa_column=Column( +# UUID_PG(as_uuid=True), +# ForeignKey(f"{settings.CUSTOMER_SCHEMA}.user.id", ondelete="CASCADE"), +# nullable=False, +# ) +# ) + +# Relationships +# user: "User" = Relationship(back_populates="team_links") +# team: "Team" = Relationship(back_populates="user_links") + + +class LayerOrganizationLink(SQLModel, table=True): + """ + A table representing the relation between layers and organizations. + + Attributes: + id (int): The unique identifier for the layer organization. + organization_id (str): The unique identifier for the organization the layer belongs to. + layer_id (str): The unique identifier for the layer that belongs to the organization. + """ + + __tablename__ = "layer_organization" + __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} + + id: Optional[int] = Field( + sa_column=Column(Integer, primary_key=True, autoincrement=True) + ) + organization_id: Optional[UUID] = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey( + f"{settings.ACCOUNTS_SCHEMA}.organization.id", ondelete="CASCADE" + ), + nullable=False, + ) + ) + layer_id: Optional[UUID] = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.layer.id", ondelete="CASCADE"), + nullable=False, + ) + ) + role_id: UUID = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.role.id"), + nullable=False, + ) + ) + + # Relationships + layer: "Layer" = Relationship(back_populates="organization_links") + organization: "Organization" = Relationship(back_populates="layer_links") + + +class LayerTeamLink(SQLModel, table=True): + """ + A table representing the relation between layers and teams. + + Attributes: + id (int): The unique identifier for the layer team. + team_id (str): The unique identifier for the team the layer belongs to. + layer_id (str): The unique identifier for the layer that belongs to the team. 
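+        role_id (str): The unique identifier for the role the team has on the layer.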
+ """ + + __tablename__ = "layer_team" + __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} + + id: Optional[int] = Field( + sa_column=Column(Integer, primary_key=True, autoincrement=True) + ) + team_id: Optional[UUID] = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.team.id", ondelete="CASCADE"), + nullable=False, + ) + ) + layer_id: Optional[UUID] = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.layer.id", ondelete="CASCADE"), + nullable=False, + ) + ) + role_id: UUID = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.role.id"), + nullable=False, + ) + ) + + # Relationships + layer: "Layer" = Relationship(back_populates="team_links") + team: "Team" = Relationship(back_populates="layer_links") + + +class ProjectTeamLink(SQLModel, table=True): + """ + A table representing the relation between projects and teams. + + Attributes: + id (int): The unique identifier for the project team. + team_id (str): The unique identifier for the team the project belongs to. + project_id (str): The unique identifier for the project that belongs to the team. + """ + + __tablename__ = "project_team" + __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} + + id: Optional[int] = Field( + sa_column=Column(Integer, primary_key=True, autoincrement=True) + ) + team_id: Optional[UUID] = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.team.id", ondelete="CASCADE"), + nullable=False, + ) + ) + project_id: Optional[UUID] = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.project.id", ondelete="CASCADE"), + nullable=False, + ) + ) + + # Relationships + project: "Project" = Relationship(back_populates="team_links") + team: "Team" = Relationship(back_populates="project_links") + +class ProjectOrganizationLink(SQLModel, table=True): + """ + A table representing the relation between projects and organizations. + + Attributes: + id (int): The unique identifier for the project organization. + organization_id (str): The unique identifier for the organization the project belongs to. + project_id (str): The unique identifier for the project that belongs to the organization. 
+ """ + + __tablename__ = "project_organization" + __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} + + id: Optional[int] = Field( + sa_column=Column(Integer, primary_key=True, autoincrement=True) + ) + organization_id: Optional[UUID] = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey( + f"{settings.ACCOUNTS_SCHEMA}.organization.id", ondelete="CASCADE" + ), + nullable=False, + ) + ) + project_id: Optional[UUID] = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.project.id", ondelete="CASCADE"), + nullable=False, + ) + ) + + # Relationships + project: "Project" = Relationship(back_populates="organization_links") + organization: "Organization" = Relationship(back_populates="project_links") \ No newline at end of file diff --git a/src/db/models/layer.py b/src/db/models/layer.py index 16557fd..5c2d37e 100644 --- a/src/db/models/layer.py +++ b/src/db/models/layer.py @@ -26,8 +26,7 @@ if TYPE_CHECKING: from src.db.models.folder import Folder - - from ._link_model import LayerProjectLink + from ._link_model import LayerProjectLink, LayerOrganizationLink, LayerTeamLink from .data_store import DataStore @@ -448,6 +447,12 @@ class Layer(LayerBase, GeospatialAttributes, DateTimeBase, table=True): back_populates="layer", sa_relationship_kwargs={"cascade": "all, delete-orphan"} ) folder: "Folder" = Relationship(back_populates="layers") + organization_links: List["LayerOrganizationLink"] = Relationship( + back_populates="layer", sa_relationship_kwargs={"cascade": "all, delete-orphan"} + ) + team_links: List["LayerTeamLink"] = Relationship( + back_populates="layer", sa_relationship_kwargs={"cascade": "all, delete-orphan"} + ) @validator("extent", pre=True) def wkt_to_geojson(cls, v): diff --git a/src/db/models/organization.py b/src/db/models/organization.py new file mode 100644 index 0000000..2e228e1 --- /dev/null +++ b/src/db/models/organization.py @@ -0,0 +1,40 @@ +from typing import List, TYPE_CHECKING +from sqlmodel import Field, Relationship, Text +from src.db.models._base_class import DateTimeBase +from sqlalchemy import Column, text +from sqlalchemy.dialects.postgresql import UUID as UUID_PG +from uuid import UUID +from src.core.config import settings + +if TYPE_CHECKING: + from ._link_model import LayerOrganizationLink, ProjectOrganizationLink + +class Organization(DateTimeBase, table=True): + """ + A stub representation of the Layer model from another repository. 
+ """ + + __tablename__ = "organization" + __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} + + id: UUID | None = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + primary_key=True, + nullable=False, + server_default=text("uuid_generate_v4()"), + ), + description="Organization ID", + ) + name: str = Field( + sa_column=Column(Text, nullable=False), description="Organization name", max_length=255 + ) + avatar: str | None = Field(sa_column=Column(Text, nullable=True)) + + # Relationships + layer_links: List["LayerOrganizationLink"] = Relationship( + back_populates="organization", sa_relationship_kwargs={"cascade": "all, delete-orphan"} + ) + project_links: List["ProjectOrganizationLink"] = Relationship( + back_populates="organization", sa_relationship_kwargs={"cascade": "all, delete-orphan"} + ) \ No newline at end of file diff --git a/src/db/models/project.py b/src/db/models/project.py index 8ca1def..9230ff8 100644 --- a/src/db/models/project.py +++ b/src/db/models/project.py @@ -20,8 +20,7 @@ from src.db.models.layer import ContentBaseAttributes if TYPE_CHECKING: - from _link_model import LayerProjectLink, UserProjectLink - from .report import Report + from _link_model import LayerProjectLink, UserProjectLink, ProjectTeamLink, ProjectOrganizationLink from .scenario import Scenario @@ -74,10 +73,6 @@ class Project(ContentBaseAttributes, DateTimeBase, table=True): default=settings.DEFAULT_PROJECT_THUMBNAIL, ) # Relationships - reports: List["Report"] = Relationship( - back_populates="project", - sa_relationship_kwargs={"cascade": "all, delete-orphan"}, - ) user_projects: List["UserProjectLink"] = Relationship( back_populates="project", sa_relationship_kwargs={"cascade": "all, delete-orphan"}, @@ -90,6 +85,13 @@ class Project(ContentBaseAttributes, DateTimeBase, table=True): back_populates="project", sa_relationship_kwargs={"cascade": "all, delete-orphan"}, ) - + team_links: List["ProjectTeamLink"] = Relationship( + back_populates="project", + sa_relationship_kwargs={"cascade": "all, delete-orphan"}, + ) + organization_links: List["ProjectOrganizationLink"] = Relationship( + back_populates="project", + sa_relationship_kwargs={"cascade": "all, delete-orphan"}, + ) UniqueConstraint(Project.__table__.c.folder_id, Project.__table__.c.name) diff --git a/src/db/models/report.py b/src/db/models/report.py deleted file mode 100644 index 53c3663..0000000 --- a/src/db/models/report.py +++ /dev/null @@ -1,63 +0,0 @@ -from typing import TYPE_CHECKING -from uuid import UUID - -from sqlalchemy.dialects.postgresql import JSONB -from sqlalchemy import Text -from sqlalchemy.sql import text -from sqlmodel import Column, Field, ForeignKey, Relationship -from sqlalchemy.dialects.postgresql import UUID as UUID_PG -from src.db.models._base_class import DateTimeBase, ContentBaseAttributes -from pydantic import HttpUrl -from src.core.config import settings - -if TYPE_CHECKING: - from .project import Project - - -class Report(DateTimeBase, ContentBaseAttributes, table=True): - __tablename__ = "report" - __table_args__ = {"schema": "customer"} - - id: UUID | None = Field( - sa_column=Column( - UUID_PG(as_uuid=True), - primary_key=True, - nullable=False, - server_default=text("uuid_generate_v4()"), - ), - description="Layer ID", - ) - user_id: UUID = Field( - sa_column=Column( - UUID_PG(as_uuid=True), - ForeignKey("customer.user.id", ondelete="CASCADE"), - nullable=False, - ), - description="Project owner ID", - ) - folder_id: UUID = Field( - sa_column=Column( - UUID_PG(as_uuid=True), - 
ForeignKey("customer.folder.id", ondelete="CASCADE"), - nullable=False, - ), - description="Project folder ID", - ) - project_id: UUID = Field( - sa_column=Column( - UUID_PG(as_uuid=True), - ForeignKey("customer.project.id", ondelete="CASCADE"), - nullable=False, - ), - description="Project ID that contains the report. This is mandatory for reports as they are always contained in a project.", - ) - report: dict = Field(sa_column=Column(JSONB), description="Report object in JSON Format") - thumbnail_url: HttpUrl | None = Field( - sa_column=Column(Text, nullable=True), description="Report thumbnail URL", - default=settings.DEFAULT_REPORT_THUMBNAIL, - ) - - # Relationships - project: "Project" = Relationship(back_populates="reports") - -Report.update_forward_refs() diff --git a/src/db/models/role.py b/src/db/models/role.py new file mode 100644 index 0000000..6fdb395 --- /dev/null +++ b/src/db/models/role.py @@ -0,0 +1,50 @@ +from typing import List, TYPE_CHECKING +from sqlmodel import ( + Column, + Field, + Relationship, + Text, + text, +) +from src.db.models._base_class import DateTimeBase +from enum import Enum +from sqlalchemy.dialects.postgresql import UUID as UUID_PG +from uuid import UUID +from src.core.config import settings + + +if TYPE_CHECKING: + from ._link_model import LayerTeamLink, ProjectTeamLink, LayerOrganizationLink, ProjectOrganizationLink + + +class RessourceTypeEnum(str, Enum): + organization = "organization" + team = "team" + layer = "layer" + project = "project" + + +class Role(DateTimeBase, table=True): + """ + A table representing a role. A role is a collection of permissions. + + Attributes: + id (str): The unique identifier for the role. + name (str): The name of the role. + permissions (List[Permission]): A list of permission objects associated with the role. This is a relation to the permission table. + users (List[User]): A list of user objects associated with the role. This is a relation to the + """ + + __tablename__ = "role" + __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} + + id: UUID | None = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + primary_key=True, + nullable=False, + server_default=text("uuid_generate_v4()"), + ), + description="Organization ID", + ) + name: str = Field(sa_column=Column(Text, nullable=False), max_length=255) \ No newline at end of file diff --git a/src/db/models/scenario_feature.py b/src/db/models/scenario_feature.py index 2f567a7..f2def29 100644 --- a/src/db/models/scenario_feature.py +++ b/src/db/models/scenario_feature.py @@ -144,6 +144,7 @@ class ScenarioFeature(DateTimeBase, UserData, table=True): edit_type: ScenarioFeatureEditType = Field( sa_column=Column(Text, nullable=False), description="Type of the edit" ) + # Relationships layer_project: "LayerProjectLink" = Relationship(back_populates="scenario_features") diff --git a/src/db/models/team.py b/src/db/models/team.py new file mode 100644 index 0000000..d8796cd --- /dev/null +++ b/src/db/models/team.py @@ -0,0 +1,38 @@ +from typing import TYPE_CHECKING, List +from sqlmodel import Field, Text, Relationship +from src.db.models._base_class import DateTimeBase +from sqlalchemy import Column, text +from sqlalchemy.dialects.postgresql import UUID as UUID_PG +from uuid import UUID +from src.core.config import settings + +if TYPE_CHECKING: + from ._link_model import LayerTeamLink, ProjectTeamLink + +class Team(DateTimeBase, table=True): + """ + A stub representation of the Layer model from another repository. 
+ """ + + __tablename__ = "team" + __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} + + id: UUID | None = Field( + sa_column=Column( + UUID_PG(as_uuid=True), + primary_key=True, + nullable=False, + server_default=text("uuid_generate_v4()"), + ), + description="Team ID", + ) + name: str = Field( + sa_column=Column(Text, nullable=False), description="Team name", max_length=255 + ) + avatar: str | None = Field(sa_column=Column(Text, nullable=True)) + layer_links: List["LayerTeamLink"] = Relationship( + back_populates="team", sa_relationship_kwargs={"cascade": "all, delete-orphan"} + ) + project_links: List["ProjectTeamLink"] = Relationship( + back_populates="team", sa_relationship_kwargs={"cascade": "all, delete-orphan"} + ) diff --git a/src/db/models/user.py b/src/db/models/user.py index c52f2bb..b103929 100644 --- a/src/db/models/user.py +++ b/src/db/models/user.py @@ -14,6 +14,7 @@ from .job import Job from .scenario import Scenario from .system_setting import SystemSetting + #from ._link_model import UserTeamLink class User(SQLModel, table=True): @@ -36,3 +37,6 @@ class User(SQLModel, table=True): jobs: List["Job"] = Relationship( back_populates="user", sa_relationship_kwargs={"cascade": "all, delete-orphan"} ) + # team_links: List["UserTeamLink"] = Relationship( + # back_populates="user", sa_relationship_kwargs={"cascade": "all, delete-orphan"} + # ) diff --git a/src/db/sql/recreate_trigger.py b/src/db/sql/recreate_trigger.py new file mode 100644 index 0000000..47667b5 --- /dev/null +++ b/src/db/sql/recreate_trigger.py @@ -0,0 +1,62 @@ +# WIP: Recreate trigger for all tables in user_data schema +# Get all tables from user_data +from src.core.config import settings +from sqlalchemy import create_engine + +engine = create_engine(settings.POSTGRES_DATABASE_URI) + + +def get_tables(): + with engine.connect() as conn: + result = conn.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'user_data';" + ) + tables = result.fetchall() + return tables + + +def add_uuid_constraint(tables): + with engine.connect() as conn: + for table in tables: + table_name = table[0] + # Set default to basic.uuid_generate_v7() but don't recreate column + # print(f"ALTER TABLE user_data.{table_name} ALTER COLUMN id SET DEFAULT basic.uuid_generate_v7();") + # Check type of id column + is_uuid = conn.execute( + f"SELECT data_type FROM information_schema.columns WHERE table_name = '{table_name}' AND column_name = 'id';" + ) + is_uuid = is_uuid.fetchone() + # If is uuid type, set default to basic.uuid_generate_v7() + if is_uuid[0] == "uuid": + conn.execute( + f"ALTER TABLE user_data.{table_name} ALTER COLUMN id SET DEFAULT basic.uuid_generate_v7();" + ) + else: + continue + # Commit changes + + +def add_trigger(tables): + with engine.connect() as conn: + for table in tables: + table_name = table[0] + table_type = table_name.split("_")[0] + + if table_type in ("point", "line", "polygon"): + # Check if trigger exists + trigger_exists = conn.execute( + f"SELECT trigger_name FROM information_schema.triggers WHERE trigger_name = 'trigger_{settings.USER_DATA_SCHEMA}_{table_name}';" + ) + trigger_exists = trigger_exists.fetchone() + if not trigger_exists: + # Create Trigger + sql_create_trigger = f"""CREATE TRIGGER trigger_{settings.USER_DATA_SCHEMA}_{table_name} + BEFORE INSERT OR UPDATE ON {settings.USER_DATA_SCHEMA}."{table_name}" + FOR EACH ROW EXECUTE FUNCTION basic.set_user_data_h3(); + """ + conn.execute(sql_create_trigger) + + +# tables = get_tables() +# add_uuid_constraint(tables) 
+# add_trigger(tables)
diff --git a/src/endpoints/v2/api.py b/src/endpoints/v2/api.py
index d99ff75..062d960 100644
--- a/src/endpoints/v2/api.py
+++ b/src/endpoints/v2/api.py
@@ -7,7 +7,6 @@
     layer,
     motorized_mobility,
     project,
-    report,
     system,
     tool,
     user,
@@ -19,7 +18,6 @@
 router.include_router(folder.router, prefix="/folder", tags=["Folder"])
 router.include_router(layer.router, prefix="/layer", tags=["Layer"])
 router.include_router(project.router, prefix="/project", tags=["Project"])
-router.include_router(report.router, prefix="/report", tags=["Report"])
 router.include_router(job.router, prefix="/job", tags=["Job"])
 router.include_router(system.router, prefix="/system", tags=["System Settings"])
 router.include_router(
diff --git a/src/endpoints/v2/layer.py b/src/endpoints/v2/layer.py
index e71d7e7..95cd1ea 100644
--- a/src/endpoints/v2/layer.py
+++ b/src/endpoints/v2/layer.py
@@ -55,6 +55,7 @@
     ILayerFromDatasetCreate,
     ILayerGet,
     ILayerRead,
+    ILayerReadShared,
     IMetadataAggregate,
     IMetadataAggregateRead,
     IRasterCreate,
@@ -367,35 +368,9 @@ async def read_layer(
         async_session=async_session, id=layer_id, model=Layer, crud_content=crud_layer
     )
 
-
-@router.post(
-    "/get-by-ids",
-    summary="Retrieve a list of layers by their IDs",
-    response_model=Page[ILayerRead],
-    response_model_exclude_none=True,
-    status_code=200,
-)
-async def read_layers_by_ids(
-    async_session: AsyncSession = Depends(get_db),
-    page_params: PaginationParams = Depends(),
-    ids: ContentIdList = Body(
-        ...,
-        example=layer_request_examples["get"],
-        description="List of layer IDs to retrieve",
-    ),
-):
-    return await read_contents_by_ids(
-        async_session=async_session,
-        ids=ids,
-        model=Layer,
-        crud_content=crud_layer,
-        page_params=page_params,
-    )
-
-
 @router.post(
     "",
-    response_model=Page[ILayerRead],
+    response_model=Page[ILayerReadShared],
     response_model_exclude_none=True,
     status_code=200,
     summary="Retrieve a list of layers using different filters including a spatial filter. If no filter is specified, all layers will be returned.",
@@ -409,6 +384,16 @@ async def read_layers(
         examples={},
         description="Layer to get",
     ),
+    team_id: UUID | None = Query(
+        None,
+        description="The ID of the team to get the layers from",
+        example="3fa85f64-5717-4562-b3fc-2c963f66afa6",
+    ),
+    organization_id: UUID | None = Query(
+        None,
+        description="The ID of the organization to get the layers from",
+        example="3fa85f64-5717-4562-b3fc-2c963f66afa6",
+    ),
     order_by: str = Query(
         None,
         description="Specify the column name that should be used to order. You can check the Layer model to see which column names exist.",
         example="created_at",
     ),
@@ -423,6 +408,10 @@
     """This endpoint returns a list of layers based on the specified filters."""
 
     with HTTPErrorHandler():
+        # Make sure that team_id and organization_id are not both set
+        if team_id is not None and organization_id is not None:
+            raise ValueError("Only one of team_id and organization_id can be set.")
+
         # Get layers from CRUD
         layers = await crud_layer.get_layers_with_filter(
             async_session=async_session,
@@ -431,6 +420,8 @@ async def read_layers(
             order_by=order_by,
             order=order,
             page_params=page_params,
+            team_id=team_id,
+            organization_id=organization_id,
         )
 
         return layers
diff --git a/src/endpoints/v2/project.py b/src/endpoints/v2/project.py
index e75be9a..5749ce4 100644
--- a/src/endpoints/v2/project.py
+++ b/src/endpoints/v2/project.py
@@ -64,7 +64,7 @@ async def create_project(
         ..., example=project_request_examples["create"], description="Project to create"
     ),
 ):
-    """This will create an empty project with a default initial view state. The project does not contains layers or reports."""
+    """This will create an empty project with a default initial view state. The project does not contain layers."""
 
     # Create project
     return await crud_project.create(
@@ -139,37 +139,6 @@ async def read_projects(
 
     return projects
 
-
-@router.post(
-    "/get-by-ids",
-    summary="Retrieve a list of projects by their IDs",
-    response_model=Page[IProjectRead],
-    response_model_exclude_none=True,
-    status_code=200,
-)
-async def read_projects_by_ids(
-    async_session: AsyncSession = Depends(get_db),
-    page_params: PaginationParams = Depends(),
-    user_id: UUID4 = Depends(get_user_id),
-    ids: ContentIdList = Body(
-        ...,
-        example=project_request_examples["get"],
-        description="List of project IDs to retrieve",
-    ),
-):
-    """Retrieve a list of projects by their IDs."""
-
-    # Get projects by ids
-    projects = await crud_project.get_projects(
-        async_session=async_session,
-        user_id=user_id,
-        page_params=page_params,
-        ids=ids.ids,
-    )
-
-    return projects
-
-
 @router.put(
     "/{project_id}",
     response_model=IProjectRead,
diff --git a/src/endpoints/v2/report.py b/src/endpoints/v2/report.py
deleted file mode 100644
index 5ad26de..0000000
--- a/src/endpoints/v2/report.py
+++ /dev/null
@@ -1,199 +0,0 @@
-from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, status
-from fastapi_pagination import Page
-from fastapi_pagination import Params as PaginationParams
-from pydantic import UUID4
-from sqlalchemy import and_, select
-
-from src.core.content import (
-    create_content,
-    delete_content_by_id,
-    read_content_by_id,
-    read_contents_by_ids,
-    update_content_by_id,
-)
-from src.crud.crud_report import report as crud_report
-from src.db.models.report import Report
-from src.db.session import AsyncSession
-from src.endpoints.deps import get_db, get_user_id
-from src.schemas.common import ContentIdList, OrderEnum
-from src.schemas.report import (
-    IReportCreate,
-    IReportRead,
-    IReportUpdate,
-)
-from src.schemas.report import (
-    request_examples as report_request_examples,
-)
-
-router = APIRouter()
-
-
-# Report endpoints
-@router.post(
-    "",
-    summary="Create a new report",
-    response_model=IReportRead,
-    response_model_exclude_none=True,
-    status_code=201,
-)
-async def create_report(
-    async_session: AsyncSession = Depends(get_db),
-    user_id: UUID4 = Depends(get_user_id),
-    *,
-    report_in: IReportCreate = Body(
-        ..., example=report_request_examples["create"], description="Project to create"
-    ),
-):
- """This will create an empty report.""" - return await create_content( - async_session=async_session, - model=Report, - crud_content=crud_report, - content_in=report_in, - other_params={"user_id": user_id}, - ) - - -@router.post( - "/get-by-ids", - summary="Retrieve a list of reports by their IDs", - response_model=Page[IReportRead], - response_model_exclude_none=True, - status_code=200, -) -async def read_reports_by_ids( - async_session: AsyncSession = Depends(get_db), - page_params: PaginationParams = Depends(), - ids: ContentIdList = Body( - ..., - example=report_request_examples["get"], - description="List of report IDs to retrieve", - ), -): - return await read_contents_by_ids( - async_session=async_session, - ids=ids, - model=Report, - crud_content=crud_report, - page_params=page_params, - ) - - -@router.get( - "/{report_id}", - summary="Retrieve a report by its ID", - response_model=IReportRead, - response_model_exclude_none=True, - status_code=200, -) -async def read_report( - async_session: AsyncSession = Depends(get_db), - report_id: UUID4 = Path( - ..., - description="The ID of the report to get", - example="3fa85f64-5717-4562-b3fc-2c963f66afa6", - ), -): - """Retrieve a report by its ID.""" - return await read_content_by_id( - async_session=async_session, - id=report_id, - model=Report, - crud_content=crud_report, - ) - - -@router.get( - "", - summary="Retrieve a list of reports", - response_model=Page[IReportRead], - response_model_exclude_none=True, - status_code=200, -) -async def read_reports( - async_session: AsyncSession = Depends(get_db), - page_params: PaginationParams = Depends(), - folder_id: UUID4 | None = Query(None, description="Folder ID"), - user_id: UUID4 = Depends(get_user_id), - search: str = Query(None, description="Searches the name of the report"), - order_by: str = Query( - None, - description="Specify the column name that should be used to order. You can check the Report model to see which column names exist.", - example="created_at", - ), - order: OrderEnum = Query( - "descendent", - description="Specify the order to apply. 
There are the option ascendent or descendent.",
-        example="descendent",
-    ),
-):
-    """Retrieve a list of reports."""
-
-    if folder_id is None:
-        sql_and_filters = [Report.user_id == user_id]
-    else:
-        sql_and_filters = [Report.user_id == user_id, Report.folder_id == folder_id]
-
-    query = select(Report).where(and_(*sql_and_filters))
-    reports = await crud_report.get_multi(
-        async_session,
-        query=query,
-        page_params=page_params,
-        search_text={"name": search} if search else {},
-        order_by=order_by,
-        order=order,
-    )
-
-    if len(reports.items) == 0:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND, detail="No Reports Found"
-        )
-
-    return reports
-
-
-@router.put(
-    "/{report_id}",
-    response_model=IReportRead,
-    response_model_exclude_none=True,
-    status_code=200,
-)
-async def update_report(
-    async_session: AsyncSession = Depends(get_db),
-    report_id: UUID4 = Path(
-        ...,
-        description="The ID of the report to update",
-        example="3fa85f64-5717-4562-b3fc-2c963f66afa6",
-    ),
-    report_in: IReportUpdate = Body(
-        ..., example=report_request_examples["update"], description="Report to update"
-    ),
-):
-    return await update_content_by_id(
-        async_session=async_session,
-        id=report_id,
-        model=Report,
-        crud_content=crud_report,
-        content_in=report_in,
-    )
-
-
-@router.delete(
-    "/{report_id}",
-    response_model=None,
-    status_code=204,
-)
-async def delete_report(
-    async_session: AsyncSession = Depends(get_db),
-    report_id: UUID4 = Path(
-        ...,
-        description="The ID of the report to update",
-        example="3fa85f64-5717-4562-b3fc-2c963f66afa6",
-    ),
-):
-    return await delete_content_by_id(
-        async_session=async_session,
-        id=report_id,
-        model=Report,
-        crud_content=crud_report,
-    )
diff --git a/src/schemas/layer.py b/src/schemas/layer.py
index 3690160..ea68ddc 100644
--- a/src/schemas/layer.py
+++ b/src/schemas/layer.py
@@ -129,6 +129,9 @@ class LayerReadBaseAttributes(BaseModel):
     id: UUID = Field(..., description="Content ID of the layer", alias="id")
     user_id: UUID = Field(..., description="User ID of the owner")
     type: LayerType = Field(..., description="Layer type")
+    shared_with: dict | None = Field(
+        None, description="Teams and organizations the layer is shared with"
+    )
 
 class LayerProperties(BaseModel):
@@ -504,6 +507,17 @@
         )
         return layer_read_class(**kwargs)
 
+class ILayerReadShared(BaseModel):
+    def __new__(cls, *args, **kwargs):
+        layer = kwargs["layer"]
+        shared_with = kwargs.get("shared_with", None)
+        layer_read_class = get_layer_class(
+            "read",
+            layer_creator_class,
+            **layer,
+        )
+        return layer_read_class(**layer, shared_with=shared_with)
+
 
 class ILayerUpdate(BaseModel):
     def __new__(cls, *args, **kwargs):
diff --git a/src/schemas/project.py b/src/schemas/project.py
index 7c47790..b6b517a 100644
--- a/src/schemas/project.py
+++ b/src/schemas/project.py
@@ -23,7 +23,6 @@
 ################################################################################
 class ProjectContentType(str, Enum):
     layer = "layer"
-    report = "report"
 
 
 class InitialViewState(BaseModel):
diff --git a/src/schemas/report.py b/src/schemas/report.py
deleted file mode 100644
index 8943e48..0000000
--- a/src/schemas/report.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from enum import Enum
-from uuid import UUID
-
-from src.db.models.report import Report
-from pydantic import Field
-from src.db.models._base_class import ContentBaseAttributes
-
-
-class ReportExportType(str, Enum):
-    pdf = "pdf"
-    png = "png"
-    html = "html"
-
-
-class IReportCreate(ContentBaseAttributes):
-    project_id:
UUID = Field(..., description="Project ID") - report: dict = Field(..., description="Report object in JSON Format") - - -class IReportUpdate(ContentBaseAttributes): - report: dict = Field(..., description="Report object in JSON Format") - - -class IReportRead(Report): - pass - - -request_examples = { - "get": { - "ids": ["39e16c27-2b03-498e-8ccc-68e798c64b8d", "e7dcaae4-1750-49b7-89a5-9510bf2761ad"], - }, - "create": { - "folder_id": "39e16c27-2b03-498e-8ccc-68e798c64b8d", - "name": "My first report", - "description": "This is my first report", - "tags": ["tag1", "tag2"], - "project_id": "39e16c27-2b03-498e-8ccc-68e798c64b8d", - "report": {"no_structure": "no_structure"}, - }, - "update": { - "folder_id": "39e16c27-2b03-498e-8ccc-68e798c64b8d", - "name": "My updated report", - "description": "This is my updated report", - "tags": ["tag1", "tag2"], - "report": {"no_structure": "no_structure"}, - }, -} diff --git a/tests/api/test_layer.py b/tests/api/test_layer.py index 4d650dc..042b09e 100644 --- a/tests/api/test_layer.py +++ b/tests/api/test_layer.py @@ -479,6 +479,17 @@ async def test_get_layers(client: AsyncClient, fixture_create_multiple_layers): assert response.status_code == 200 assert len(response.json()["items"]) == 4 +async def test_get_shared_team_layers(client: AsyncClient, fixture_create_shared_team_layers): + team_id = fixture_create_shared_team_layers["teams"][0].id + response = await client.post(f"{settings.API_V2_STR}/layer?team_id={team_id}") + assert response.status_code == 200 + assert len(response.json()["items"]) == 5 + +async def test_get_shared_organization_layers(client: AsyncClient, fixture_create_shared_organization_layers): + organization_id = fixture_create_shared_organization_layers["organizations"][0].id + response = await client.post(f"{settings.API_V2_STR}/layer?organization_id={organization_id}") + assert response.status_code == 200 + assert len(response.json()["items"]) == 5 # Get metadata aggregate for layers based on different filters async def test_get_catalog_layers(client: AsyncClient, fixture_create_catalog_layers): diff --git a/tests/conftest.py b/tests/conftest.py index a238567..4dc5eed 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,7 @@ # Local application imports from src.core.config import settings +from src.db.models import LayerTeamLink, LayerOrganizationLink, Role, Team, Organization from src.db.models.layer import LayerType from src.endpoints.deps import get_db, session_manager from src.main import app @@ -39,7 +40,8 @@ def set_test_mode(): settings.RUN_AS_BACKGROUND_TASK = True settings.USER_DATA_SCHEMA = "test_user_data" - settings.CUSTOMER_SCHEMA = "test_customer" + settings.CUSTOMER_SCHEMA = "test_customer1" + settings.ACCOUNTS_SCHEMA = "test_accounts1" settings.MAX_FOLDER_COUNT = 15 settings.TEST_MODE = True @@ -64,15 +66,21 @@ def event_loop(): async def session_fixture(event_loop): session_manager.init(settings.ASYNC_SQLALCHEMY_DATABASE_URI) session_manager._engine.update_execution_options( - schema_translate_map={"customer": settings.CUSTOMER_SCHEMA} + schema_translate_map={ + "customer": settings.CUSTOMER_SCHEMA, + "accounts": settings.ACCOUNTS_SCHEMA, + } ) async with session_manager.connect() as connection: - await connection.execute( - text(f"""CREATE SCHEMA IF NOT EXISTS {settings.CUSTOMER_SCHEMA}""") - ) - await connection.execute( - text(f"""CREATE SCHEMA IF NOT EXISTS {settings.USER_DATA_SCHEMA}""") - ) + for schema in [ + settings.CUSTOMER_SCHEMA, + settings.USER_DATA_SCHEMA, + settings.ACCOUNTS_SCHEMA, + 
]: + await connection.execute( + text(f"""DROP SCHEMA IF EXISTS {schema} CASCADE""") + ) + await connection.execute(text(f"""CREATE SCHEMA IF NOT EXISTS {schema}""")) await session_manager.drop_all(connection) await session_manager.create_all(connection) await connection.commit() @@ -80,12 +88,6 @@ async def session_fixture(event_loop): logging.info("Starting session_fixture finalizer") async with session_manager.connect() as connection: pass - await connection.execute( - text(f"""DROP SCHEMA IF EXISTS {settings.CUSTOMER_SCHEMA} CASCADE""") - ) - await connection.execute( - text(f"""DROP SCHEMA IF EXISTS {settings.USER_DATA_SCHEMA} CASCADE""") - ) await session_manager.close() logging.info("Finished session_fixture finalizer") @@ -1076,6 +1078,96 @@ async def generic_post_fixture( return generic_post_fixture +@pytest.fixture +async def fixture_create_shared_team_layers( + client: AsyncClient, fixture_create_folder, db_session +): + + # Create five layers + layers = [] + for _i in range(5): + layer = await create_raster_layer(client, fixture_create_folder) + layers.append(layer) + + # Create a team + team1 = Team(name="test_team", avatar="https://www.plan4better.de/logo.png") + team2 = Team(name="test_team2", avatar="https://www.plan4better.de/logo.png") + + # Create role + role = Role(name="team_member") + db_session.add(role) + await db_session.commit() + await db_session.refresh(role) + + # Create layer team links + layer_teams1 = [] + layer_teams2 = [] + for layer in layers: + layer_team1 = LayerTeamLink( + layer_id=layer["id"], team_id=team1.id, role_id=role.id + ) + layer_team2 = LayerTeamLink( + layer_id=layer["id"], team_id=team2.id, role_id=role.id + ) + layer_teams1.append(layer_team1) + layer_teams2.append(layer_team2) + + team1.layer_links = layer_teams1 + team2.layer_links = layer_teams2 + db_session.add(team1) + db_session.add(team2) + await db_session.commit() + + return {"teams": [team1, team2], "layers": layers} + + +@pytest.fixture +async def fixture_create_shared_organization_layers( + client: AsyncClient, fixture_create_folder, db_session +): + + # Create five layers + layers = [] + for _i in range(5): + layer = await create_raster_layer(client, fixture_create_folder) + layers.append(layer) + + # Create organization + organization1 = Organization( + name="test_organization", avatar="https://www.plan4better.de/logo.png" + ) + organization2 = Organization( + name="test_organization2", avatar="https://www.plan4better.de/logo.png" + ) + + # Create role + role = Role(name="organization_member") + db_session.add(role) + await db_session.commit() + await db_session.refresh(role) + + # Create layer organization links + layer_organizations1 = [] + layer_organizations2 = [] + for layer in layers: + layer_organization1 = LayerOrganizationLink( + layer_id=layer["id"], organization_id=organization1.id, role_id=role.id + ) + layer_organization2 = LayerOrganizationLink( + layer_id=layer["id"], organization_id=organization2.id, role_id=role.id + ) + layer_organizations1.append(layer_organization1) + layer_organizations2.append(layer_organization2) + + organization1.layer_links = layer_organizations1 + organization2.layer_links = layer_organizations2 + db_session.add(organization1) + db_session.add(organization2) + await db_session.commit() + + return {"organizations": [organization1, organization2], "layers": layers} + + fixture_catchment_area_active_mobility = create_generic_toolbox_fixture( "/active-mobility/catchment-area", 
active_mobility_request_examples["catchment_area_active_mobility"], From b664218fcb4e3a26390c980c7431bb7660f58282 Mon Sep 17 00:00:00 2001 From: EPajares Date: Sat, 14 Sep 2024 21:03:15 +0000 Subject: [PATCH 2/8] Added shared-with to layers when reading --- src/crud/crud_layer.py | 90 +++++++++++++++++++++++++++++++++-------- tests/api/test_layer.py | 6 +-- 2 files changed, 77 insertions(+), 19 deletions(-) diff --git a/src/crud/crud_layer.py b/src/crud/crud_layer.py index 31a0ed8..c68cdbc 100644 --- a/src/crud/crud_layer.py +++ b/src/crud/crud_layer.py @@ -11,9 +11,9 @@ from fastapi_pagination import Params as PaginationParams from geoalchemy2.shape import WKTElement from pydantic import BaseModel -from sqlalchemy import and_, func, or_, select, text, cast +from sqlalchemy import and_, func, or_, select, text, exists from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.orm import joinedload, load_only, contains_eager +from sqlalchemy.orm import contains_eager, selectinload from sqlmodel import SQLModel from starlette.datastructures import UploadFile @@ -609,7 +609,12 @@ async def get_layers_with_filter( # Get base filter filters = await self.get_base_filter(user_id=user_id, params=params) - # If it has team_id then do a join with the LayerTeamLink table + # Get roles + roles = await CRUDBase(Role).get_all( + async_session, + ) + role_mapping = {role.id: role.name for role in roles} + if team_id: query = ( select(Layer, Role.name, Team.name, Team.id, Team.avatar) @@ -648,7 +653,46 @@ async def get_layers_with_filter( .options(contains_eager(Layer.organization_links)) ) else: - query = select(Layer).where(and_(*filters)) + query = ( + select(Layer) + .where( + or_( + exists().where( + and_( + LayerTeamLink.layer_id == Layer.id, + LayerTeamLink.team_id.isnot(None), + ) + ), + exists().where( + and_( + LayerOrganizationLink.layer_id == Layer.id, + LayerOrganizationLink.organization_id.isnot(None), + ) + ), + ), + *filters, + ) + .options( + selectinload(Layer.team_links).selectinload(LayerTeamLink.team), + selectinload(Layer.organization_links).selectinload( + LayerOrganizationLink.organization + ), + ) + ) + query = ( + select(Layer) + .outerjoin(LayerTeamLink, LayerTeamLink.layer_id == Layer.id) # Left join to include layers without team links + .outerjoin(LayerOrganizationLink, LayerOrganizationLink.layer_id == Layer.id) # Left join to include layers without org links + .where( + and_( + *filters, # Apply your other filters here + ) + ) + .options( + selectinload(Layer.team_links).selectinload(LayerTeamLink.team), + selectinload(Layer.organization_links).selectinload(LayerOrganizationLink.organization), + ) + ) # Build params params = { @@ -677,23 +721,37 @@ async def get_layers_with_filter( for layer in layers.items: # Check if layer[1] is of type LayerTeamLink shared_with = {shared_with_key: []} - layer_obj = layer[0] - role = layer[1] - name = layer[2] - team_id = layer[3] - avatar = layer[4] - shared_with[shared_with_key].append( { - "role": role, - "team_id": team_id, - "team_name": name, - "team_avatar": avatar, + "role": layer[1], + "team_id": layer[3], + "team_name": layer[2], + "team_avatar": layer[4], } ) - layers_arr.append({"layer": layer_obj, "shared_with": shared_with}) + layers_arr.append({"layer": layer[0], "shared_with": shared_with}) else: - layers_arr = [{"layer": layer} for layer in layers.items] + for layer in layers.items: + shared_with = {"teams": [], "organizations": []} + for team_link in layer.team_links: + shared_with["teams"].append( + { + "role": 
role_mapping[team_link.role_id], + "team_id": team_link.team.id, + "team_name": team_link.team.name, + "team_avatar": team_link.team.avatar, + } + ) + for organization_link in layer.organization_links: + shared_with["organizations"].append( + { + "role": role_mapping[organization_link.role_id], + "organization_id": organization_link.organization.id, + "organization_name": organization_link.organization.name, + "organization_avatar": organization_link.organization.avatar, + } + ) + layers_arr.append({"layer": layer, "shared_with": shared_with}) layers.items = layers_arr return layers diff --git a/tests/api/test_layer.py b/tests/api/test_layer.py index 042b09e..c412c8d 100644 --- a/tests/api/test_layer.py +++ b/tests/api/test_layer.py @@ -492,10 +492,10 @@ async def test_get_shared_organization_layers(client: AsyncClient, fixture_creat assert len(response.json()["items"]) == 5 # Get metadata aggregate for layers based on different filters -async def test_get_catalog_layers(client: AsyncClient, fixture_create_catalog_layers): - response = await client.post(f"{settings.API_V2_STR}/layer/catalog") +async def test_get_layers_with_shared(client: AsyncClient, fixture_create_shared_team_layers, fixture_create_shared_organization_layers): + response = await client.post(f"{settings.API_V2_STR}/layer") assert response.status_code == 200 - assert len(response.json()["items"]) == 4 + assert len(response.json()["items"]) == 10 # Get metadata aggregate for layers based on different filters From 0d872ae0272d048a1c5359d18506896ca018f390 Mon Sep 17 00:00:00 2001 From: EPajares Date: Sat, 14 Sep 2024 21:08:42 +0000 Subject: [PATCH 3/8] Unified attribute naming --- src/crud/crud_layer.py | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/src/crud/crud_layer.py b/src/crud/crud_layer.py index c68cdbc..5741a97 100644 --- a/src/crud/crud_layer.py +++ b/src/crud/crud_layer.py @@ -11,7 +11,7 @@ from fastapi_pagination import Params as PaginationParams from geoalchemy2.shape import WKTElement from pydantic import BaseModel -from sqlalchemy import and_, func, or_, select, text, exists +from sqlalchemy import and_, exists, func, or_, select, text from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import contains_eager, selectinload from sqlmodel import SQLModel @@ -33,9 +33,9 @@ Layer, LayerOrganizationLink, LayerTeamLink, + Organization, Role, Team, - Organization, ) from src.schemas.error import ( ColumnNotFoundError, @@ -681,8 +681,12 @@ async def get_layers_with_filter( ) query = ( select(Layer) - .outerjoin(LayerTeamLink, LayerTeamLink.layer_id == Layer.id) # Left join to include layers without team links - .outerjoin(LayerOrganizationLink, LayerOrganizationLink.layer_id == Layer.id) # Left join to include layers without org links + .outerjoin( + LayerTeamLink, LayerTeamLink.layer_id == Layer.id + ) # Left join to include layers without team links + .outerjoin( + LayerOrganizationLink, LayerOrganizationLink.layer_id == Layer.id + ) # Left join to include layers without org links .where( and_( *filters, # Apply your other filters here @@ -690,7 +694,9 @@ async def get_layers_with_filter( ) .options( selectinload(Layer.team_links).selectinload(LayerTeamLink.team), - selectinload(Layer.organization_links).selectinload(LayerOrganizationLink.organization), + selectinload(Layer.organization_links).selectinload( + LayerOrganizationLink.organization + ), ) ) @@ -724,9 +730,9 @@ async def get_layers_with_filter( shared_with[shared_with_key].append( { 
"role": layer[1], - "team_id": layer[3], - "team_name": layer[2], - "team_avatar": layer[4], + "id": layer[3], + "name": layer[2], + "avatar": layer[4], } ) layers_arr.append({"layer": layer[0], "shared_with": shared_with}) @@ -737,18 +743,18 @@ async def get_layers_with_filter( shared_with["teams"].append( { "role": role_mapping[team_link.role_id], - "team_id": team_link.team.id, - "team_name": team_link.team.name, - "team_avatar": team_link.team.avatar, + "id": team_link.team.id, + "name": team_link.team.name, + "avatar": team_link.team.avatar, } ) for organization_link in layer.organization_links: shared_with["organizations"].append( { "role": role_mapping[organization_link.role_id], - "organization_id": organization_link.organization.id, - "organization_name": organization_link.organization.name, - "organization_avatar": organization_link.organization.avatar, + "id": organization_link.organization.id, + "name": organization_link.organization.name, + "avatar": organization_link.organization.avatar, } ) layers_arr.append({"layer": layer, "shared_with": shared_with}) From a848638fddee0573c133722b53e3df8350c32e71 Mon Sep 17 00:00:00 2001 From: EPajares Date: Sun, 15 Sep 2024 07:29:53 +0000 Subject: [PATCH 4/8] Read shared projects --- src/core/content.py | 191 ++++++++++++++++++++++++++++++++++- src/crud/crud_layer.py | 184 +++++++++------------------------ src/crud/crud_project.py | 96 ++++++++++++++---- src/db/models/__init__.py | 10 +- src/db/models/_link_model.py | 86 +++++++++------- src/db/models/team.py | 5 +- src/db/models/user.py | 8 +- src/endpoints/v2/layer.py | 2 +- src/endpoints/v2/project.py | 12 +++ src/schemas/project.py | 1 + tests/api/test_project.py | 15 +++ tests/conftest.py | 41 +++++++- 12 files changed, 449 insertions(+), 202 deletions(-) diff --git a/src/core/content.py b/src/core/content.py index e2c735a..4b7dfb1 100644 --- a/src/core/content.py +++ b/src/core/content.py @@ -7,6 +7,8 @@ from sqlmodel import SQLModel from src.db.session import AsyncSession from src.schemas.common import ContentIdList +from sqlalchemy import select, and_ +from sqlalchemy.orm import contains_eager, selectinload ### Generic helper functions for content @@ -52,7 +54,9 @@ async def read_contents_by_ids( """Read contents by their IDs.""" # Read contents by IDs query = select(model).where(model.id.in_(ids.ids)) - contents = await crud_content.get_multi(async_session, query=query, page_params=page_params) + contents = await crud_content.get_multi( + async_session, query=query, page_params=page_params + ) # Check if all contents were found if len(contents.items) != len(ids.ids): @@ -100,3 +104,188 @@ async def delete_content_by_id( ) await crud_content.remove(async_session, id=id) return + + +def create_query_shared_content( + model, + team_link_model, + organization_link_model, + team_model, + organization_model, + role_model, + filters, + team_id=None, + organization_id=None, +): + """ + Creates a dynamic query for a given model (Layer or Project) and its associated team and organization links. 
+ + :param model: The main model (Layer or Project) + :param team_link_model: The model linking the main model with teams (LayerTeamLink or ProjectTeamLink) + :param organization_link_model: The model linking the main model with organizations (LayerOrganizationLink or ProjectOrganizationLink) + :param team_model: The Team model + :param organization_model: The Organization model + :param role_model: The Role model + :param filters: Additional filters to apply + :param team_id: ID of the team (optional) + :param organization_id: ID of the organization (optional) + :return: A SQLAlchemy query object + """ + + # Determine the link field based on the model + link_field = f"{model.__tablename__}_id" + + if team_id: + query = ( + select( + model, + role_model.name, + team_model.name, + team_model.id, + team_model.avatar, + ) + .join( + team_link_model, getattr(team_link_model, link_field) == model.id + ) # Dynamically replace `layer_id` or `project_id` + .join(role_model, team_link_model.role_id == role_model.id) + .join(team_model, team_link_model.team_id == team_model.id) + .where( + and_( + team_link_model.team_id == team_id, + *filters, + ) + ) + .options( + contains_eager(getattr(model, "team_links")) + ) # Adjust field as needed for relationships + ) + elif organization_id: + query = ( + select( + model, + role_model.name, + organization_model.name, + organization_model.id, + organization_model.avatar, + ) + .join( + organization_link_model, + getattr(organization_link_model, link_field) == model.id, + ) # Dynamically replace `layer_id` or `project_id` + .join(role_model, organization_link_model.role_id == role_model.id) + .join( + organization_model, + organization_link_model.organization_id == organization_model.id, + ) + .where( + and_( + organization_link_model.organization_id == organization_id, + *filters, + ) + ) + .options( + contains_eager(getattr(model, "organization_links")) + ) # Adjust field as needed for relationships + ) + else: + query = ( + select(model) + .outerjoin( + team_link_model, getattr(team_link_model, link_field) == model.id + ) # Dynamically replace `layer_id` or `project_id` + .outerjoin( + organization_link_model, + getattr(organization_link_model, link_field) == model.id, + ) # Dynamically replace `layer_id` or `project_id` + .where(and_(*filters)) + .options( + selectinload(getattr(model, "team_links")).selectinload( + getattr(team_link_model, "team") + ), # Adjust fields as needed + selectinload(getattr(model, "organization_links")).selectinload( + getattr(organization_link_model, "organization") + ), # Adjust fields as needed + ) + ) + + return query + + +def build_shared_with_object( + items, + role_mapping, + team_key="team_links", + org_key="organization_links", + model_name="layer", + team_id=None, + organization_id=None, +): + """ + Builds the shared_with object for both Layer and Project models. 
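+    Each entry of the returned list pairs the serialized model with a
+    "shared_with" mapping of teams and organizations.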
+ + :param items: The list of Layer or Project items + :param role_mapping: The mapping of role IDs to role names + :param team_key: The attribute name for team links (default is "team_links") + :param org_key: The attribute name for organization links (default is "organization_links") + :param model_name: The name of the model ("layer" or "project") + :param team_id: Optional ID for team-specific sharing + :param organization_id: Optional ID for organization-specific sharing + :return: A list of dictionaries containing the model and the shared_with data + """ + result_arr = [] + + # Determine shared_with key + if team_id: + shared_with_key = "teams" + elif organization_id: + shared_with_key = "organizations" + + # Case where shared_with is for a specific team or organization + if team_id or organization_id: + for item in items: + shared_with = {shared_with_key: []} + shared_with[shared_with_key].append( + { + "role": item[1], # Role name + "id": item[3], # Team or Organization ID + "name": item[2], # Team or Organization name + "avatar": item[4], # Team or Organization avatar + } + ) + result_arr.append({**item[0].dict(), "shared_with": shared_with}) + else: + # Case where shared_with includes both teams and organizations + for item in items: + shared_with = {"teams": [], "organizations": []} + + # Process team links + team_links = getattr(item, team_key) + for team_link in team_links: + shared_with["teams"].append( + { + "role": role_mapping[ + team_link.role_id + ], # Role based on role_mapping + "id": team_link.team.id, + "name": team_link.team.name, + "avatar": team_link.team.avatar, + } + ) + + # Process organization links + organization_links = getattr(item, org_key) + for organization_link in organization_links: + shared_with["organizations"].append( + { + "role": role_mapping[ + organization_link.role_id + ], # Role based on role_mapping + "id": organization_link.organization.id, + "name": organization_link.organization.name, + "avatar": organization_link.organization.avatar, + } + ) + + result_arr.append({**item.dict(), "shared_with": shared_with}) + + return result_arr diff --git a/src/crud/crud_layer.py b/src/crud/crud_layer.py index 5741a97..0a29e46 100644 --- a/src/crud/crud_layer.py +++ b/src/crud/crud_layer.py @@ -20,6 +20,7 @@ # Local application imports from src.core.config import settings from src.core.job import CRUDFailedJob, job_init, job_log, run_background_or_immediately +from src.core.content import create_query_shared_content, build_shared_with_object from src.core.layer import ( CRUDLayerBase, FileUpload, @@ -532,6 +533,8 @@ async def get_base_filter( user_id: UUID, params: ILayerGet | ICatalogLayerGet, attributes_to_exclude: list = [], + team_id: UUID = None, + organization_id: UUID = None, ): """Get filter for get layer queries.""" filters = [] @@ -546,22 +549,34 @@ async def get_base_filter( ) and value is not None ): + # Avoid adding folder_id in case team_id or organization_id is provided + if key == "folder_id" and (team_id or organization_id): + continue + # Convert value to list if not list if not isinstance(value, list): value = [value] filters.append(getattr(Layer, key).in_(value)) - # Check if ILayer get then it is for user owned layers + # Check if ILayer get then it is organization layers if isinstance(params, ILayerGet): if params.in_catalog is not None: - filters.append( - and_( - Layer.in_catalog == bool(params.in_catalog), - Layer.user_id == user_id, + if not team_id and not organization_id: + filters.append( + and_( + Layer.in_catalog == 
bool(params.in_catalog), + Layer.user_id == user_id, + ) + ) + else: + filters.append( + and_( + Layer.in_catalog == bool(params.in_catalog), + ) ) - ) else: - filters.append(Layer.user_id == user_id) + if not team_id and not organization_id: + filters.append(Layer.user_id == user_id) else: filters.append(Layer.in_catalog == bool(True)) @@ -607,7 +622,12 @@ async def get_layers_with_filter( detail="Feature layer type can only be set when layer type is feature", ) # Get base filter - filters = await self.get_base_filter(user_id=user_id, params=params) + filters = await self.get_base_filter( + user_id=user_id, + params=params, + team_id=team_id, + organization_id=organization_id, + ) # Get roles roles = await CRUDBase(Role).get_all( @@ -615,90 +635,17 @@ async def get_layers_with_filter( ) role_mapping = {role.id: role.name for role in roles} - if team_id: - query = ( - select(Layer, Role.name, Team.name, Team.id, Team.avatar) - .join(LayerTeamLink, LayerTeamLink.layer_id == Layer.id) - .join(Role, LayerTeamLink.role_id == Role.id) - .join(Team, LayerTeamLink.team_id == Team.id) - .where( - and_( - LayerTeamLink.team_id == team_id, - *filters, - ) - ) - .options(contains_eager(Layer.team_links)) - ) - elif organization_id: - query = ( - select( - Layer, - Role.name, - Organization.name, - Organization.id, - Organization.avatar, - ) - .join(LayerOrganizationLink, LayerOrganizationLink.layer_id == Layer.id) - .join(Role, LayerOrganizationLink.role_id == Role.id) - .join( - Organization, - LayerOrganizationLink.organization_id == Organization.id, - ) - .where( - and_( - LayerOrganizationLink.organization_id == organization_id, - *filters, - ) - ) - .options(contains_eager(Layer.organization_links)) - ) - else: - query = ( - select(Layer) - .where( - or_( - exists().where( - and_( - LayerTeamLink.layer_id == Layer.id, - LayerTeamLink.team_id.isnot(None), - ) - ), - exists().where( - and_( - LayerOrganizationLink.layer_id == Layer.id, - LayerOrganizationLink.organization_id.isnot(None), - ) - ), - ), - *filters, - ) - .options( - selectinload(Layer.team_links).selectinload(LayerTeamLink.team), - selectinload(Layer.organization_links).selectinload( - LayerOrganizationLink.organization - ), - ) - ) - query = ( - select(Layer) - .outerjoin( - LayerTeamLink, LayerTeamLink.layer_id == Layer.id - ) # Left join to include layers without team links - .outerjoin( - LayerOrganizationLink, LayerOrganizationLink.layer_id == Layer.id - ) # Left join to include layers without org links - .where( - and_( - *filters, # Apply your other filters here - ) - ) - .options( - selectinload(Layer.team_links).selectinload(LayerTeamLink.team), - selectinload(Layer.organization_links).selectinload( - LayerOrganizationLink.organization - ), - ) - ) + query = create_query_shared_content( + Layer, + LayerTeamLink, + LayerOrganizationLink, + Team, + Organization, + Role, + filters, + team_id=team_id, + organization_id=organization_id, + ) # Build params params = { @@ -715,50 +662,15 @@ async def get_layers_with_filter( page_params=page_params, **params, ) - - # Build shared_with object - layers_arr = [] - if team_id: - shared_with_key = "teams" - elif organization_id: - shared_with_key = "organizations" - - if team_id or organization_id: - for layer in layers.items: - # Check if layer[1] is of type LayerTeamLink - shared_with = {shared_with_key: []} - shared_with[shared_with_key].append( - { - "role": layer[1], - "id": layer[3], - "name": layer[2], - "avatar": layer[4], - } - ) - layers_arr.append({"layer": layer[0], 
"shared_with": shared_with}) - else: - for layer in layers.items: - shared_with = {"teams": [], "organizations": []} - for team_link in layer.team_links: - shared_with["teams"].append( - { - "role": role_mapping[team_link.role_id], - "id": team_link.team.id, - "name": team_link.team.name, - "avatar": team_link.team.avatar, - } - ) - for organization_link in layer.organization_links: - shared_with["organizations"].append( - { - "role": role_mapping[organization_link.role_id], - "id": organization_link.organization.id, - "name": organization_link.organization.name, - "avatar": organization_link.organization.avatar, - } - ) - layers_arr.append({"layer": layer, "shared_with": shared_with}) - + layers_arr = build_shared_with_object( + layers.items, + role_mapping, + team_key="team_links", + org_key="organization_links", + model_name="layer", + team_id=team_id, + organization_id=organization_id, + ) layers.items = layers_arr return layers diff --git a/src/crud/crud_project.py b/src/crud/crud_project.py index 355864f..52551e2 100644 --- a/src/crud/crud_project.py +++ b/src/crud/crud_project.py @@ -1,21 +1,36 @@ from uuid import UUID -from fastapi_pagination import Page, Params as PaginationParams + +from fastapi_pagination import Page +from fastapi_pagination import Params as PaginationParams from sqlalchemy import and_, select from sqlalchemy.ext.asyncio import AsyncSession -from src.core.content import update_content_by_id + from src.core.config import settings -from src.db.models.project import Project +from src.core.content import ( + create_query_shared_content, + update_content_by_id, + build_shared_with_object, +) +from src.crud.base import CRUDBase +from src.crud.crud_layer_project import layer_project as crud_layer_project +from src.crud.crud_user_project import user_project as crud_user_project +from src.db.models import ( + Organization, + Project, + ProjectOrganizationLink, + ProjectTeamLink, + Role, + Team, +) +from src.db.models._link_model import UserProjectLink from src.schemas.common import OrderEnum from src.schemas.project import ( + InitialViewState, IProjectBaseUpdate, - IProjectRead, IProjectCreate, - InitialViewState, + IProjectRead, ) -from src.crud.crud_user_project import user_project as crud_user_project -from src.crud.crud_layer_project import layer_project as crud_layer_project -from src.crud.base import CRUDBase -from src.db.models._link_model import UserProjectLink + class CRUDProject(CRUDBase): async def create( @@ -32,7 +47,15 @@ async def create( obj_in=project_in, ) # Default initial view state - initial_view_state = {"zoom": 5, "pitch": 0, "bearing": 0, "latitude": 51.01364693631891, "max_zoom": 20, "min_zoom": 0, "longitude": 9.576740589534126} + initial_view_state = { + "zoom": 5, + "pitch": 0, + "bearing": 0, + "latitude": 51.01364693631891, + "max_zoom": 20, + "min_zoom": 0, + "longitude": 9.576740589534126, + } # Create link between user and project for initial view state await crud_user_project.create( @@ -49,8 +72,8 @@ async def create( await crud_layer_project.create( async_session=async_session, project_id=project.id, - layer_ids=[settings.BASE_STREET_NETWORK] - ) + layer_ids=[settings.BASE_STREET_NETWORK], + ) # Doing unneeded type conversion to make sure the relations of project are not loaded return IProjectRead(**project.dict()) @@ -64,23 +87,44 @@ async def get_projects( order_by: str = None, order: OrderEnum = None, ids: list = None, + team_id: UUID = None, + organization_id: UUID = None, ) -> Page[IProjectRead]: """Get projects for a user and 
folder""" # If ids are provided apply filter by ids, otherwise apply filter by folder_id and user_id + if team_id or organization_id: + filters = [] + elif folder_id: + filters = [ + Project.user_id == user_id, + Project.folder_id == folder_id, + ] + else: + filters = [Project.user_id == user_id] + if ids: query = select(Project).where(Project.id.in_(ids)) else: - if not folder_id: - query = select(Project).where(Project.user_id == user_id) - else: - query = select(Project).where( - and_( - Project.user_id == user_id, - Project.folder_id == folder_id, - ) - ) + query = create_query_shared_content( + Project, + ProjectTeamLink, + ProjectOrganizationLink, + Team, + Organization, + Role, + filters, + team_id=team_id, + organization_id=organization_id, + ) + + # Get roles + roles = await CRUDBase(Role).get_all( + async_session, + ) + role_mapping = {role.id: role.name for role in roles} + # Get projects projects = await self.get_multi( async_session, query=query, @@ -89,7 +133,15 @@ async def get_projects( order_by=order_by, order=order, ) - + projects.items = build_shared_with_object( + projects.items, + role_mapping, + team_key="team_links", + org_key="organization_links", + model_name="project", + team_id=team_id, + organization_id=organization_id, + ) return projects async def update_base( diff --git a/src/db/models/__init__.py b/src/db/models/__init__.py index edfbc00..ee192e4 100644 --- a/src/db/models/__init__.py +++ b/src/db/models/__init__.py @@ -1,4 +1,12 @@ -from ._link_model import LayerProjectLink, ScenarioScenarioFeatureLink, UserProjectLink, LayerOrganizationLink, LayerTeamLink +from ._link_model import ( + LayerProjectLink, + ScenarioScenarioFeatureLink, + UserProjectLink, + LayerOrganizationLink, + LayerTeamLink, + ProjectTeamLink, + ProjectOrganizationLink, +) from .data_store import DataStore from .folder import Folder from .job import Job diff --git a/src/db/models/_link_model.py b/src/db/models/_link_model.py index a77ffa0..571f49c 100644 --- a/src/db/models/_link_model.py +++ b/src/db/models/_link_model.py @@ -17,6 +17,7 @@ from src.core.config import settings from sqlmodel import SQLModel from src.db.models.organization import Organization + if TYPE_CHECKING: from .layer import Layer from .project import Project @@ -144,40 +145,40 @@ class UserProjectLink(DateTimeBase, table=True): ) -# class UserTeamLink(SQLModel, table=True): -# """ -# A table representing the relation between users and teams. - -# Attributes: -# id (int): The unique identifier for the user team. -# team_id (str): The unique identifier for the team the user belongs to. -# user_id (str): The unique identifier for the user that belongs to the team. -# """ - -# __tablename__ = "user_team" -# __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA} - -# id: Optional[int] = Field( -# sa_column=Column(Integer, primary_key=True, autoincrement=True) -# ) -# team_id: UUID = Field( -# sa_column=Column( -# UUID_PG(as_uuid=True), -# ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.team.id", ondelete="CASCADE"), -# nullable=False, -# ) -# ) -# user_id: UUID = Field( -# sa_column=Column( -# UUID_PG(as_uuid=True), -# ForeignKey(f"{settings.CUSTOMER_SCHEMA}.user.id", ondelete="CASCADE"), -# nullable=False, -# ) -# ) - -# Relationships -# user: "User" = Relationship(back_populates="team_links") -# team: "Team" = Relationship(back_populates="user_links") +class UserTeamLink(SQLModel, table=True): + """ + A table representing the relation between users and teams. + + Attributes: + id (int): The unique identifier for the user team. 
+
+        team_id (UUID): The unique identifier for the team the user belongs to.
+        user_id (UUID): The unique identifier for the user that belongs to the team.
+    """
+
+    __tablename__ = "user_team"
+    __table_args__ = {"schema": settings.ACCOUNTS_SCHEMA}
+
+    id: Optional[int] = Field(
+        sa_column=Column(Integer, primary_key=True, autoincrement=True)
+    )
+    team_id: UUID = Field(
+        sa_column=Column(
+            UUID_PG(as_uuid=True),
+            ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.team.id", ondelete="CASCADE"),
+            nullable=False,
+        )
+    )
+    user_id: UUID = Field(
+        sa_column=Column(
+            UUID_PG(as_uuid=True),
+            ForeignKey(f"{settings.CUSTOMER_SCHEMA}.user.id", ondelete="CASCADE"),
+            nullable=False,
+        )
+    )
+
+    # Relationships
+    user: "User" = Relationship(back_populates="team_links")
+    team: "Team" = Relationship(back_populates="user_links")
 
 
 class LayerOrganizationLink(SQLModel, table=True):
@@ -298,11 +299,19 @@ class ProjectTeamLink(SQLModel, table=True):
             nullable=False,
         )
     )
+    role_id: UUID = Field(
+        sa_column=Column(
+            UUID_PG(as_uuid=True),
+            ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.role.id"),
+            nullable=False,
+        )
+    )
 
     # Relationships
     project: "Project" = Relationship(back_populates="team_links")
     team: "Team" = Relationship(back_populates="project_links")
 
+
 class ProjectOrganizationLink(SQLModel, table=True):
     """
     A table representing the relation between projects and organizations.
@@ -335,7 +344,14 @@ class ProjectOrganizationLink(SQLModel, table=True):
             nullable=False,
         )
     )
+    role_id: UUID = Field(
+        sa_column=Column(
+            UUID_PG(as_uuid=True),
+            ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.role.id"),
+            nullable=False,
+        )
+    )
 
     # Relationships
     project: "Project" = Relationship(back_populates="organization_links")
-    organization: "Organization" = Relationship(back_populates="project_links")
\ No newline at end of file
+    organization: "Organization" = Relationship(back_populates="project_links")
diff --git a/src/db/models/team.py b/src/db/models/team.py
index d8796cd..98045bd 100644
--- a/src/db/models/team.py
+++ b/src/db/models/team.py
@@ -7,7 +7,7 @@ from src.core.config import settings
 
 if TYPE_CHECKING:
-    from ._link_model import LayerTeamLink, ProjectTeamLink
+    from ._link_model import LayerTeamLink, ProjectTeamLink, UserTeamLink
 
 
 class Team(DateTimeBase, table=True):
     """
@@ -36,3 +36,6 @@ class Team(DateTimeBase, table=True):
     project_links: List["ProjectTeamLink"] = Relationship(
         back_populates="team", sa_relationship_kwargs={"cascade": "all, delete-orphan"}
     )
+    user_links: List["UserTeamLink"] = Relationship(
+        back_populates="team", sa_relationship_kwargs={"cascade": "all, delete-orphan"}
+    )
diff --git a/src/db/models/user.py b/src/db/models/user.py
index b103929..bb7d85d 100644
--- a/src/db/models/user.py
+++ b/src/db/models/user.py
@@ -14,7 +14,7 @@
 from .job import Job
 from .scenario import Scenario
 from .system_setting import SystemSetting
-    #from ._link_model import UserTeamLink
+    from ._link_model import UserTeamLink
 
 
 class User(SQLModel, table=True):
@@ -37,6 +37,6 @@
     jobs: List["Job"] = Relationship(
         back_populates="user", sa_relationship_kwargs={"cascade": "all, delete-orphan"}
     )
-    # team_links: List["UserTeamLink"] = Relationship(
-    #     back_populates="user", sa_relationship_kwargs={"cascade": "all, delete-orphan"}
-    # )
+    team_links: List["UserTeamLink"] = Relationship(
+        back_populates="user", sa_relationship_kwargs={"cascade": "all, delete-orphan"}
+    )
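
With the link models in place, sharing content is a plain row insert into the link table. A sketch (assuming an open AsyncSession and existing layer, team, and role rows; not part of the diff):

# Sketch: share a layer with a team under a given role.
from src.db.models import LayerTeamLink

async def share_layer_with_team(async_session, layer_id, team_id, role_id):
    # The referenced Role row (e.g. "team_member") determines what
    # members of the team may do with the shared layer.
    link = LayerTeamLink(layer_id=layer_id, team_id=team_id, role_id=role_id)
    async_session.add(link)
    await async_session.commit()
    return link

diff --git a/src/endpoints/v2/layer.py b/src/endpoints/v2/layer.py
index 95cd1ea..91ba66c 100644
--- 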
a/src/endpoints/v2/layer.py
+++ b/src/endpoints/v2/layer.py
@@ -370,7 +370,7 @@ async def read_layer(
 
 @router.post(
     "",
-    response_model=Page[ILayerReadShared],
+    response_model=Page[ILayerRead],
     response_model_exclude_none=True,
     status_code=200,
     summary="Retrieve a list of layers using different filters including a spatial filter. If no filter is specified, all layers will be returned.",
diff --git a/src/endpoints/v2/project.py b/src/endpoints/v2/project.py
index 5749ce4..c4e99c2 100644
--- a/src/endpoints/v2/project.py
+++ b/src/endpoints/v2/project.py
@@ -113,6 +113,16 @@ async def read_projects(
     page_params: PaginationParams = Depends(),
     folder_id: UUID4 | None = Query(None, description="Folder ID"),
     user_id: UUID4 = Depends(get_user_id),
+    team_id: UUID | None = Query(
+        None,
+        description="The ID of the team to get the projects from",
+        example="3fa85f64-5717-4562-b3fc-2c963f66afa6",
+    ),
+    organization_id: UUID | None = Query(
+        None,
+        description="The ID of the organization to get the projects from",
+        example="3fa85f64-5717-4562-b3fc-2c963f66afa6",
+    ),
     search: str = Query(None, description="Searches the name of the project"),
     order_by: str = Query(
         None,
@@ -135,6 +145,8 @@ async def read_projects(
         search=search,
         order_by=order_by,
         order=order,
+        team_id=team_id,
+        organization_id=organization_id,
     )
 
     return projects
diff --git a/src/schemas/project.py b/src/schemas/project.py
index b6b517a..6bba18a 100644
--- a/src/schemas/project.py
+++ b/src/schemas/project.py
@@ -71,6 +71,7 @@ class IProjectRead(ContentBaseAttributes, DateTimeBase):
     layer_order: list[int] | None = Field(None, description="Layer order in project")
     thumbnail_url: HttpUrl | None = Field(description="Project thumbnail URL")
     active_scenario_id: UUID | None = Field(None, description="Active scenario ID")
+    shared_with: dict | None = Field(None, description="Teams and organizations the project is shared with")
 
 
 @optional
diff --git a/tests/api/test_project.py b/tests/api/test_project.py
index 57fdf61..b03e48c 100644
--- a/tests/api/test_project.py
+++ b/tests/api/test_project.py
@@ -35,6 +35,21 @@ async def test_get_projects(
     assert len(response.json()["items"]) == len(fixture_create_projects)
 
 
+@pytest.mark.asyncio
+async def test_get_shared_projects(
+    client: AsyncClient,
+    fixture_create_shared_team_projects,
+):
+    team_id = str(fixture_create_shared_team_projects["teams"][0].id)
+    response = await client.get(
+        f"{settings.API_V2_STR}/project?team_id={team_id}",
+    )
+    assert response.status_code == 200
+    assert len(response.json()["items"]) == len(
+        fixture_create_shared_team_projects["projects"]
+    )
+
+
 @pytest.mark.asyncio
 async def test_get_project_wrong_id(client: AsyncClient, fixture_create_project):
     await get_with_wrong_id(client, "project")
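
The new query parameters can be exercised from any HTTP client. A minimal sketch (base URL and team UUID are placeholder assumptions; auth is omitted; not part of the diff):

# Sketch: list projects shared with a team via GET /api/v2/project?team_id=...
import asyncio

import httpx

async def list_team_projects(base_url: str, team_id: str) -> list[dict]:
    async with httpx.AsyncClient(base_url=base_url) as client:
        response = await client.get("/api/v2/project", params={"team_id": team_id})
        response.raise_for_status()
        # Items carry the project fields plus the new "shared_with" object.
        return response.json()["items"]

asyncio.run(
    list_team_projects("http://localhost:8000", "3fa85f64-5717-4562-b3fc-2c963f66afa6")
)

diff --git a/tests/conftest.py b/tests/conftest.py
index 4dc5eed..63f1bc7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -11,7 +11,7 @@
 
 # Local application imports
 from src.core.config import settings
-from src.db.models import LayerTeamLink, LayerOrganizationLink, Role, Team, Organization
+from src.db.models import LayerTeamLink, LayerOrganizationLink, Role, Team, Organization, ProjectTeamLink, ProjectOrganizationLink
 from src.db.models.layer import LayerType
 from src.endpoints.deps import get_db, session_manager
 from src.main import app
@@ -1167,6 +1167,45 @@ async def fixture_create_shared_organization_layers(
     return {"organizations": [organization1, organization2], "layers": layers}
 
 
+@pytest.fixture
+async def fixture_create_shared_team_projects(
+    client: AsyncClient, fixture_create_folder, 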
fixture_create_projects, db_session +): + + # Create projects + projects = fixture_create_projects + + # Create a team + team1 = Team(name="test_team", avatar="https://www.plan4better.de/logo.png") + team2 = Team(name="test_team2", avatar="https://www.plan4better.de/logo.png") + + # Create role + role = Role(name="team_member") + db_session.add(role) + await db_session.commit() + await db_session.refresh(role) + + # Create layer team links + project_teams1 = [] + project_teams2 = [] + for project in projects: + project_team1 = ProjectTeamLink( + project_id=project["id"], team_id=team1.id, role_id=role.id + ) + project_team2 = ProjectTeamLink( + project_id=project["id"], team_id=team2.id, role_id=role.id + ) + project_teams1.append(project_team1) + project_teams2.append(project_team2) + + team1.project_links = project_teams1 + team2.project_links = project_teams2 + db_session.add(team1) + db_session.add(team2) + await db_session.commit() + + return {"teams": [team1, team2], "projects": projects} + fixture_catchment_area_active_mobility = create_generic_toolbox_fixture( "/active-mobility/catchment-area", From fc7fafa36287dac3c64f2f56e4eb37b447908ced Mon Sep 17 00:00:00 2001 From: EPajares Date: Sun, 15 Sep 2024 09:58:08 +0000 Subject: [PATCH 5/8] Deleted legacy code --- src/crud/legacy/__init__.py | 0 src/crud/legacy/crud_check_data.py | 17 - src/crud/legacy/crud_customization.py | 363 ---------- src/crud/legacy/crud_geostore.py | 10 - src/crud/legacy/crud_indicator.py | 143 ---- src/crud/legacy/crud_layer.py | 210 ------ src/crud/legacy/crud_layer_library.py | 27 - src/crud/legacy/crud_opportunity_config.py | 30 - src/crud/legacy/crud_organization.py | 10 - src/crud/legacy/crud_poi_aoi.py | 49 -- src/crud/legacy/crud_r5.py | 152 ---- src/crud/legacy/crud_role.py | 15 - src/crud/legacy/crud_scenario.py | 404 ----------- src/crud/legacy/crud_static_layer.py | 42 -- src/crud/legacy/crud_study_area.py | 28 - src/crud/legacy/crud_study_area_geostore.py | 10 - src/crud/legacy/crud_system.py | 7 - src/crud/legacy/crud_upload.py | 330 --------- src/crud/legacy/crud_user.py | 133 ---- src/db/legacy/aoi.py | 84 --- src/db/legacy/building.py | 86 --- src/db/legacy/config_validation.py | 59 -- src/db/legacy/customization.py | 72 -- src/db/legacy/data_upload.py | 55 -- src/db/legacy/edge.py | 124 ---- src/db/legacy/geostore.py | 32 - src/db/legacy/isochrone.py | 51 -- src/db/legacy/layer_library.py | 93 --- src/db/legacy/node.py | 42 -- src/db/legacy/opportunity_config.py | 118 ---- src/db/legacy/organization.py | 20 - src/db/legacy/poi.py | 98 --- src/db/legacy/population.py | 78 --- src/db/legacy/role.py | 20 - src/db/legacy/static_layer.py | 52 -- src/db/legacy/study_area.py | 97 --- src/db/legacy/system.py | 20 - src/db/models/role.py | 2 - src/endpoints/legacy/deps.py | 116 --- src/endpoints/v1/__init__.py | 0 src/endpoints/v1/api.py | 75 -- src/endpoints/v1/customizations.py | 275 -------- src/endpoints/v1/data_preparation.py | 133 ---- src/endpoints/v1/geostores.py | 142 ---- src/endpoints/v1/indicators.py | 297 -------- src/endpoints/v1/isochrones.py | 113 --- src/endpoints/v1/layer_library.py | 134 ---- src/endpoints/v1/layers.py | 430 ------------ src/endpoints/v1/login.py | 105 --- src/endpoints/v1/opportunities.py | 36 - src/endpoints/v1/opportunity_config.py | 78 --- src/endpoints/v1/organizations.py | 124 ---- src/endpoints/v1/poi_aoi.py | 36 - src/endpoints/v1/projects.py | 64 -- src/endpoints/v1/r5.py | 268 ------- src/endpoints/v1/roles.py | 26 - src/endpoints/v1/scenarios.py | 317 
--------- src/endpoints/v1/static_layers.py | 65 -- src/endpoints/v1/static_layers_extra.py | 147 ---- src/endpoints/v1/study_area.py | 63 -- src/endpoints/v1/system.py | 23 - src/endpoints/v1/upload.py | 160 ----- src/endpoints/v1/users.py | 341 --------- src/endpoints/v1/utils.py | 47 -- src/schemas/legacy/building.py | 1 - src/schemas/legacy/customization.py | 16 - src/schemas/legacy/data_preparation.py | 190 ----- src/schemas/legacy/geostore.py | 57 -- src/schemas/legacy/heatmap.py | 553 --------------- src/schemas/legacy/isochrone.py | 737 -------------------- src/schemas/legacy/item.py | 39 -- src/schemas/legacy/layer_library.py | 195 ------ src/schemas/legacy/mapbox.py | 55 -- src/schemas/legacy/opportunity_config.py | 47 -- src/schemas/legacy/organization.py | 26 - src/schemas/legacy/r5.py | 173 ----- src/schemas/legacy/role.py | 26 - src/schemas/legacy/study_area.py | 116 --- src/schemas/legacy/style.py | 22 - src/schemas/legacy/system.py | 7 - src/schemas/legacy/token.py | 12 - src/schemas/legacy/upload.py | 9 - src/schemas/legacy/user.py | 129 ---- src/schemas/legacy/vector_tile.py | 174 ----- src/schemas/legacy/way.py | 1 - 85 files changed, 9383 deletions(-) delete mode 100644 src/crud/legacy/__init__.py delete mode 100644 src/crud/legacy/crud_check_data.py delete mode 100644 src/crud/legacy/crud_customization.py delete mode 100644 src/crud/legacy/crud_geostore.py delete mode 100644 src/crud/legacy/crud_indicator.py delete mode 100644 src/crud/legacy/crud_layer.py delete mode 100644 src/crud/legacy/crud_layer_library.py delete mode 100644 src/crud/legacy/crud_opportunity_config.py delete mode 100644 src/crud/legacy/crud_organization.py delete mode 100644 src/crud/legacy/crud_poi_aoi.py delete mode 100644 src/crud/legacy/crud_r5.py delete mode 100644 src/crud/legacy/crud_role.py delete mode 100644 src/crud/legacy/crud_scenario.py delete mode 100644 src/crud/legacy/crud_static_layer.py delete mode 100644 src/crud/legacy/crud_study_area.py delete mode 100644 src/crud/legacy/crud_study_area_geostore.py delete mode 100644 src/crud/legacy/crud_system.py delete mode 100644 src/crud/legacy/crud_upload.py delete mode 100644 src/crud/legacy/crud_user.py delete mode 100644 src/db/legacy/aoi.py delete mode 100644 src/db/legacy/building.py delete mode 100644 src/db/legacy/config_validation.py delete mode 100644 src/db/legacy/customization.py delete mode 100644 src/db/legacy/data_upload.py delete mode 100644 src/db/legacy/edge.py delete mode 100644 src/db/legacy/geostore.py delete mode 100644 src/db/legacy/isochrone.py delete mode 100644 src/db/legacy/layer_library.py delete mode 100644 src/db/legacy/node.py delete mode 100644 src/db/legacy/opportunity_config.py delete mode 100644 src/db/legacy/organization.py delete mode 100644 src/db/legacy/poi.py delete mode 100644 src/db/legacy/population.py delete mode 100644 src/db/legacy/role.py delete mode 100644 src/db/legacy/static_layer.py delete mode 100644 src/db/legacy/study_area.py delete mode 100644 src/db/legacy/system.py delete mode 100644 src/endpoints/legacy/deps.py delete mode 100644 src/endpoints/v1/__init__.py delete mode 100644 src/endpoints/v1/api.py delete mode 100644 src/endpoints/v1/customizations.py delete mode 100644 src/endpoints/v1/data_preparation.py delete mode 100644 src/endpoints/v1/geostores.py delete mode 100644 src/endpoints/v1/indicators.py delete mode 100644 src/endpoints/v1/isochrones.py delete mode 100644 src/endpoints/v1/layer_library.py delete mode 100644 src/endpoints/v1/layers.py delete mode 100644 
src/endpoints/v1/login.py delete mode 100644 src/endpoints/v1/opportunities.py delete mode 100644 src/endpoints/v1/opportunity_config.py delete mode 100644 src/endpoints/v1/organizations.py delete mode 100644 src/endpoints/v1/poi_aoi.py delete mode 100644 src/endpoints/v1/projects.py delete mode 100644 src/endpoints/v1/r5.py delete mode 100644 src/endpoints/v1/roles.py delete mode 100644 src/endpoints/v1/scenarios.py delete mode 100644 src/endpoints/v1/static_layers.py delete mode 100644 src/endpoints/v1/static_layers_extra.py delete mode 100644 src/endpoints/v1/study_area.py delete mode 100644 src/endpoints/v1/system.py delete mode 100644 src/endpoints/v1/upload.py delete mode 100644 src/endpoints/v1/users.py delete mode 100644 src/endpoints/v1/utils.py delete mode 100644 src/schemas/legacy/building.py delete mode 100644 src/schemas/legacy/customization.py delete mode 100644 src/schemas/legacy/data_preparation.py delete mode 100644 src/schemas/legacy/geostore.py delete mode 100644 src/schemas/legacy/heatmap.py delete mode 100644 src/schemas/legacy/isochrone.py delete mode 100644 src/schemas/legacy/item.py delete mode 100644 src/schemas/legacy/layer_library.py delete mode 100644 src/schemas/legacy/mapbox.py delete mode 100644 src/schemas/legacy/opportunity_config.py delete mode 100644 src/schemas/legacy/organization.py delete mode 100644 src/schemas/legacy/r5.py delete mode 100644 src/schemas/legacy/role.py delete mode 100644 src/schemas/legacy/study_area.py delete mode 100644 src/schemas/legacy/style.py delete mode 100644 src/schemas/legacy/system.py delete mode 100644 src/schemas/legacy/token.py delete mode 100644 src/schemas/legacy/upload.py delete mode 100644 src/schemas/legacy/user.py delete mode 100644 src/schemas/legacy/vector_tile.py delete mode 100644 src/schemas/legacy/way.py diff --git a/src/crud/legacy/__init__.py b/src/crud/legacy/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/crud/legacy/crud_check_data.py b/src/crud/legacy/crud_check_data.py deleted file mode 100644 index 1e0ddd1..0000000 --- a/src/crud/legacy/crud_check_data.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Dict -from sqlalchemy.future import select -from sentry_sdk import capture_exception - -class CRUDCheckData(): - async def table_is_empty(self, db, ModelDB) -> Dict: - try: - result = await db.execute(select(ModelDB).limit(1)) - except Exception as e: - capture_exception(e) - - if result.scalars().first() is not None: - return True - else: - return False - -check_data = CRUDCheckData() diff --git a/src/crud/legacy/crud_customization.py b/src/crud/legacy/crud_customization.py deleted file mode 100644 index 1b5bd99..0000000 --- a/src/crud/legacy/crud_customization.py +++ /dev/null @@ -1,363 +0,0 @@ -from fastapi import HTTPException -from fastapi.encoders import jsonable_encoder -from geoalchemy2.shape import from_shape, to_shape -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.future import select -from sqlalchemy.sql import and_, delete, func, select - -from src import crud -from src.crud.base import CRUDBase -from src.db import models -from src.db.models.config_validation import * -from src.db.models.customization import Customization -from src.utils import web_mercator_to_wgs84, wgs84_to_web_mercator - - -class CRUDCustomization( - CRUDBase[models.Customization, models.Customization, models.Customization] -): - pass - - -class CRUDUserCustomization( - CRUDBase[models.UserCustomization, models.UserCustomization, models.UserCustomization] -): - 
pass - - -customization = CRUDCustomization(models.Customization) -user_customization = CRUDUserCustomization(models.UserCustomization) - - -class CRUDDynamicCustomization: - def layer_arr_to_dict(self, arr_dict): - result = {} - for elem in arr_dict: - result.update(elem) - return result - - async def build_layer_category_obj(self, db: AsyncSession, layer_name: str): - """ "This function will build the layer category obj for a specific layer name""" - layer = await crud.layer_library.get_by_multi_keys( - db=db, keys={"name": layer_name}, extra_fields=[models.LayerLibrary.style_library] - ) - if layer == []: - HTTPException(status_code=400, detail="The layer %s does not exist." % layer_name) - - layer = layer[0] - layer_attributes = {} - for key in [ - "url", - "legend_urls", - "type", - "map_attribution", - "access_token", - "max_resolution", - "max_resolution", - "doc_url" - ]: - if getattr(layer, key) is not None: - layer_attributes[key] = getattr(layer, key) - - if layer.style_library is not None: - if layer.style_library.style is not None: - layer_attributes["style"] = layer.style_library.style - if layer.style_library.translation is not None: - layer_attributes["translation"] = layer.style_library.translation - - if layer.date is not None and layer.source is not None: - source_obj = {} - source_obj["date"] = layer.date - source_obj["source"] = layer.source - - if layer.date_1 is not None and layer.source_1 is not None: - source_obj["date"] = source_obj["date"] + "," + layer.date_1 - source_obj["source"] = source_obj["source"] + "," + layer.source_1 - - layer_attributes["attributes"] = source_obj - - if layer.type == "BING" and layer.special_attribute is not None: - layer_attributes["imagery_set"] = layer.special_attribute["imagery_set"] - - layer_obj = {layer_name: layer_attributes} - if check_dict_schema(LayerCategory, layer_obj) is False: - HTTPException( - status_code=400, detail="For %s the layer object is not valid." % layer_name - ) - - return layer_obj - - async def build_layer_group_obj(self, db: AsyncSession, list_groups, layer_group): - """This function will build the layer group obj for a specific group""" - layers = [] - group_name = list(layer_group.keys())[0] - for category in layer_group[group_name]: - category_obj = await self.build_layer_category_obj(db, category) - layers.append(category_obj) - - group_obj = {group_name: {"icon": list_groups[group_name], "children": layers}} - if check_dict_schema(LayerGroup, group_obj) is False: - HTTPException( - status_code=400, detail="For %s the group object is not valid." 
% group_name - ) - return group_obj - - async def merge_layer_groups( - self, db: AsyncSession, list_groups, default_groups, study_area_groups - ): - """This function will merge the default layer groups with the study area layer groups""" - default_groups = self.layer_arr_to_dict(default_groups) - study_area_groups = self.layer_arr_to_dict(study_area_groups) - combined_group_objs = [] - - for group in list_groups.keys(): - - if group in default_groups.keys() and group in study_area_groups.keys(): - merge_groups = default_groups[group] + list( - set(study_area_groups[group]) - set(default_groups[group]) - ) - elif group in default_groups.keys(): - merge_groups = default_groups[group] - elif group in study_area_groups.keys(): - merge_groups = study_area_groups[group] - else: - continue - - if merge_groups != []: - group_obj = await self.build_layer_group_obj( - db, list_groups, {group: merge_groups} - ) - combined_group_objs.append(group_obj) - else: - continue - - if check_dict_schema(LayerGroups, {"layer_groups": combined_group_objs}) is False: - HTTPException(status_code=400, detail="The layer group object is not valid.") - - return combined_group_objs - - async def prepare_settings_dict(self, db: AsyncSession, sql_query): - settings = await db.execute(sql_query) - settings = settings.all() - settings_dict = {} - - if settings is not None: - for setting in settings: - settings_dict.update(setting[0].setting) - - return settings_dict - - async def get_all_default_poi_categories(self, db: AsyncSession): - """This will get a list of all default POI categories""" - - stmt = ( - select(models.OpportunityDefaultConfig.category) - .join(models.OpportunityGroup) - .where( - and_( - models.OpportunityGroup.type == "poi", - models.OpportunityDefaultConfig.opportunity_group_id - == models.OpportunityGroup.id, - ) - ) - ) - poi_categories = await db.execute(stmt) - poi_categories = poi_categories.all() - poi_categories = [category[0] for category in poi_categories] - return poi_categories - - async def build_main_setting_json(self, *, db: AsyncSession, current_user: models.User): - """This function builds the main setting json for one specific user.""" - combined_settings = {} - - # Get default customization - default_settings = await self.prepare_settings_dict(db, select(models.Customization)) - - # Get user customization - sql_query = select(models.UserCustomization).where( - and_( - models.UserCustomization.user_id == current_user.id, - models.UserCustomization.study_area_id == current_user.active_study_area_id, - ) - ) - user_settings = await self.prepare_settings_dict(db, sql_query) - - # Get study area customization - study_area_settings = await self.prepare_settings_dict( - db, - select(models.StudyArea).where( - models.StudyArea.id == current_user.active_study_area_id - ), - ) - # Get active POI settings - combined_poi_settings = await db.execute( - func.basic.active_opportunities_json( - "poi", current_user.id, current_user.active_study_area_id - ) - ) - combined_poi_settings = combined_poi_settings.first() - - if check_dict_schema(PoiGroups, {"poi_groups": combined_poi_settings[0]}) is False: - HTTPException(status_code=400, detail="Build POI groups are invalid.") - - combined_aoi_settings = await db.execute( - func.basic.active_opportunities_json( - "aoi", current_user.id, current_user.active_study_area_id - ) - ) - combined_aoi_settings = combined_aoi_settings.first() - - if check_dict_schema(PoiGroups, {"aoi_groups": combined_aoi_settings[0]}) is False: - HTTPException(status_code=400, 
detail="Build POI groups are invalid.") - - - # Combine settings for layers - combined_layer_groups = await self.merge_layer_groups( - db, - default_settings["app_ui"]["layer_tree"]["group_icons"], - default_settings["layer_groups"], - study_area_settings["layer_groups"], - ) - - - # TODO: Manage other settings then layers and POIs - # Loop through default_settings and merge settings - for setting_key in default_settings: - combined_settings.update({setting_key: default_settings[setting_key]}) - if setting_key in user_settings: - combined_settings.update({setting_key: user_settings[setting_key]}) - - if setting_key in study_area_settings: - combined_settings.update({setting_key: study_area_settings[setting_key]}) - - combined_settings["layer_groups"] = combined_layer_groups - combined_settings["aoi_groups"] = combined_aoi_settings[0] - combined_settings["poi_groups"] = combined_poi_settings[0] - - # Added geostores to settings - study_area_obj = await crud.study_area.get(db, id=current_user.active_study_area_id, extra_fields=[models.StudyArea.geostores]) - combined_settings["geostores"] = jsonable_encoder(study_area_obj.geostores) - - # Remove transit modes that are not operating in study area from settings - transit = {} - for index_mode, mode in enumerate(combined_settings["routing"]): - if mode["type"] == 'transit': - transit = mode - index_transit = index_mode - break - - if transit != {}: - filtered_transit_modes = [] - for transit_type in transit["transit_modes"]: - # Check if station type is in study area buffer - study_area_geom = to_shape(study_area_obj.geom) - study_area_geom = wgs84_to_web_mercator(study_area_geom) - study_area_geom = study_area_geom.buffer(60000) - study_area_geom = web_mercator_to_wgs84(study_area_geom) - study_area_geom = from_shape(study_area_geom, srid=4326) - - statement = select(models.Poi).where( - and_( - models.Poi.geom.ST_Intersects(study_area_geom), - models.Poi.category == transit_type["poi_category"], - ) - ).limit(1) - result = await db.execute(statement) - result = result.first() - - if result is not None: - filtered_transit_modes.append(transit_type) - - combined_settings["routing"][index_transit]["transit_modes"] = filtered_transit_modes - - return combined_settings - - async def insert_opportunity_setting( - self, - *, - db: AsyncSession, - current_user: models.User, - insert_settings: dict, - data_upload_id: int = None - ): - # Check if there is a default category - category = list(insert_settings.keys())[0] - existing_user_setting = await crud.opportunity_user_config.get_by_multi_keys( - db=db, keys={"user_id": current_user.id, "category": category, "study_area_id": current_user.active_study_area_id} - ) - - if existing_user_setting == []: - default_setting = await crud.opportunity_default_config.get_by_key( - db=db, key="category", value=category - ) - study_area_setting = await crud.opportunity_study_area_config.get_by_multi_keys( - db=db, keys={"category": category, "study_area_id": current_user.active_study_area_id} - ) - if default_setting != []: - opportunity_group_id = default_setting[0].opportunity_group_id - elif study_area_setting != []: - opportunity_group_id = study_area_setting[0].opportunity_group_id - else: - opportunity_group_id = await crud.opportunity_group.get_by_key( - db=db, key="group", value="other" - ) - opportunity_group_id = opportunity_group_id[0].id - - new_setting = models.OpportunityUserConfig( - category=category, - opportunity_group_id=opportunity_group_id, - icon=insert_settings[category]["icon"], - 
color=insert_settings[category]["color"], - study_area_id=current_user.active_study_area_id, - user_id=current_user.id, - data_upload_id=data_upload_id, - ) - await crud.opportunity_user_config.create(db=db, obj_in=new_setting) - else: - await crud.opportunity_user_config.update( - db=db, - db_obj=existing_user_setting[0], - obj_in={ - "icon": insert_settings[category]["icon"], - "color": insert_settings[category]["color"], - }, - ) - - async def delete_opportunity_setting( - self, *, db: AsyncSession, current_user: models.User, setting_type: str, category: str - ): - await db.execute( - delete(models.OpportunityUserConfig).where( - and_( - models.OpportunityUserConfig.user_id == current_user.id, - models.OpportunityUserConfig.study_area_id == current_user.active_study_area_id, - models.OpportunityUserConfig.category == category - ) - ) - ) - - await db.commit() - return {"msg": "Features deleted successfully"} - - async def get_user_settings( - self, *, db: AsyncSession, current_user: models.User, setting_type - ): - """Get user settings for specific user and its active study area""" - # Get relevant user customization - query_user_customization = ( - select(models.UserCustomization) - .join(Customization) - .where( - and_( - models.UserCustomization.user_id == current_user.id, - models.UserCustomization.study_area_id == current_user.active_study_area_id, - models.UserCustomization.customization_id == Customization.id, - Customization.type == setting_type, - ) - ) - ) - - user_customizations = await db.execute(query_user_customization) - return user_customizations.first() - -dynamic_customization = CRUDDynamicCustomization() diff --git a/src/crud/legacy/crud_geostore.py b/src/crud/legacy/crud_geostore.py deleted file mode 100644 index 27b5485..0000000 --- a/src/crud/legacy/crud_geostore.py +++ /dev/null @@ -1,10 +0,0 @@ -from src.crud.base import CRUDBase -from src.db import models - - -class CRUDGeostore(CRUDBase[models.Geostore, models.Geostore, models.Geostore]): - pass - - -geostore = CRUDGeostore(models.Geostore) - diff --git a/src/crud/legacy/crud_indicator.py b/src/crud/legacy/crud_indicator.py deleted file mode 100644 index d45e9ab..0000000 --- a/src/crud/legacy/crud_indicator.py +++ /dev/null @@ -1,143 +0,0 @@ -import bisect -import json -from datetime import timedelta -from typing import Any - -import pyproj -from geojson import Feature, FeatureCollection -from geopandas import read_postgis -from pandas.io.sql import read_sql -from shapely import wkb -from shapely.ops import transform, unary_union - -from src.db.session import legacy_engine as db_sync_engine - - -class CRUDIndicator: - async def count_pt_service_stations( - self, start_time, end_time, weekday, study_area_id - ) -> Any: - """Get count of public transport stations for every service.""" - stations_count = read_postgis( - f""" - SELECT * FROM basic.count_public_transport_services_station({study_area_id}, - '{timedelta(seconds=start_time)}', - '{timedelta(seconds=end_time)}', - {weekday}) - """, - con=db_sync_engine, - ) - stations_count = json.loads(stations_count.to_json()) - - return stations_count - - async def compute_oev_gueteklassen( - self, - start_time, - end_time, - weekday, - study_area_ids, - station_config, - ) -> FeatureCollection: - """ - Calculate the OEV-Gueteklassen for a given time period and weekday. 
- """ - # TODO: Use isochrone calculation instead of buffer - - time_window = (end_time - start_time) / 60 - - # Get max buffer size from config to find buffer size for study area - buffer_distances = [] - for cls in station_config["classification"].items(): - buffer_distances = buffer_distances + list(cls[1].keys()) - max_buffer_distance = max(map(int, buffer_distances)) - - stations = [] - for study_area_id in study_area_ids: - - fetched_stations = read_sql( - f""" - SELECT trip_cnt, ST_TRANSFORM(geom, 3857) as geom - FROM basic.count_public_transport_services_station({study_area_id}, - '{timedelta(seconds=start_time)}', - '{timedelta(seconds=end_time)}', - {weekday}, - {max_buffer_distance}, - ARRAY[{list(station_config["groups"].keys())}]) - """, - con=db_sync_engine, - ) - - fetched_stations = list(fetched_stations.to_records(index=False)) - stations = stations + fetched_stations - - project = pyproj.Transformer.from_crs( - pyproj.CRS("EPSG:3857"), pyproj.CRS("EPSG:4326"), always_xy=True - ).transform - classificiation_buffers = {} - for station in stations: - station_geom = wkb.loads(station.geom, hex=True) - trip_cnt = station["trip_cnt"] - # - find station group - station_groups = [] # list of station groups e.g [A, B, C] - station_group_trip_count = 0 # accumulated trips per station group - for route_type, trip_count in trip_cnt.items(): - station_group = station_config["groups"].get(str(route_type)) - if station_group: - station_groups.append(station_group) - station_group_trip_count += trip_count - - station_group = min(station_groups) # the highest priority (e.g A ) - if station_group_trip_count == 0: - continue - station_group_trip_time_frequency = time_window / ( - station_group_trip_count / 2 - ) - # - find station category based on time frequency and group - time_interval = bisect.bisect_left( - station_config["time_frequency"], station_group_trip_time_frequency - ) - if time_interval == len(station_config["time_frequency"]): - continue # no category found - station_category = station_config["categories"][time_interval - 1].get( - station_group - ) - - if not station_category: - continue - # - find station classification based on category - station_classification = station_config["classification"][ - str(station_category) - ] - for buffer_dist, classification in station_classification.items(): - - buffer_geom = station_geom.buffer(int(buffer_dist)) - # add geom in classfication_shapes - if classification not in classificiation_buffers: - classificiation_buffers[classification] = [buffer_geom] - else: - classificiation_buffers[classification].append(buffer_geom) - - features = [] - agg_union = None - for classification, shapes in dict( - sorted(classificiation_buffers.items()) - ).items(): - union_geom = unary_union(shapes) - difference_geom = union_geom - if agg_union: - difference_geom = union_geom.difference(agg_union) - agg_union = agg_union.union(union_geom) - else: - agg_union = union_geom - feature = Feature( - geometry=transform(project, difference_geom), - properties={"class": classification}, - ) - if feature.geometry is not None: - features.append(feature) - - return FeatureCollection(features) - - -indicator = CRUDIndicator() diff --git a/src/crud/legacy/crud_layer.py b/src/crud/legacy/crud_layer.py deleted file mode 100644 index 0986fbd..0000000 --- a/src/crud/legacy/crud_layer.py +++ /dev/null @@ -1,210 +0,0 @@ -from typing import Any - -import morecantile -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.sql import text -from 
src.core.config import settings -from src.schemas.layer import VectorTileFunction, VectorTileTable - -class CRUDLayer: - # === FETCH TABLES AND FUNCTIONS ===# - async def table_index(self, db: AsyncSession) -> Any: - sql = text( - """ - WITH geo_tables AS ( - SELECT - f_table_schema, - f_table_name, - f_geometry_column, - type, - srid - FROM - geometry_columns - WHERE srid <> 0 - ), t AS ( - SELECT - f_table_schema, - f_table_name, - f_geometry_column, - type, - srid, - jsonb_object( - array_agg(column_name), - array_agg(udt_name) - ) as coldict, - ( - SELECT - ARRAY[ - ST_XMin(extent.geom), - ST_YMin(extent.geom), - ST_XMax(extent.geom), - ST_YMax(extent.geom) - ] - FROM ( - SELECT - coalesce( - ST_Transform(ST_SetSRID(ST_EstimatedExtent(f_table_schema, f_table_name, f_geometry_column), srid), 4326), - ST_MakeEnvelope(-180, -90, 180, 90, 4326) - ) as geom - ) AS extent - ) AS bounds - FROM - information_schema.columns, - geo_tables - WHERE - f_table_schema=table_schema - AND - f_table_name=table_name - GROUP BY - f_table_schema, - f_table_name, - f_geometry_column, - type, - srid - ) - SELECT - jsonb_agg( - jsonb_build_object( - 'id', concat(f_table_schema, '.', f_table_name), - 'schema', f_table_schema, - 'table', f_table_name, - 'geometry_column', f_geometry_column, - 'srid', srid, - 'geometry_type', type, - 'properties', coldict, - 'bounds', bounds - ) - ) - FROM t - ; - """ - ) - result = await db.execute(sql) - - rows = [] - for row in result: - dict_row = dict(row) - if dict_row and dict_row["jsonb_agg"]: - layers = dict_row["jsonb_agg"] - rows.extend(layers) - - return rows - - # === VECTOR TILE LAYERS ===# - async def tile_from_table( - self, - db: AsyncSession, - tile: morecantile.Tile, - tms: morecantile.TileMatrixSet, - obj_in: VectorTileTable, - **kwargs: Any, - ) -> Any: - """Get Tile Data.""" - bbox = tms.xy_bounds(tile) - - limit = kwargs.get( - "limit", str(settings.MAX_FEATURES_PER_TILE) - ) # Number of features to write to a tile. - columns = kwargs.get( - "columns" - ) # Comma-seprated list of properties (column's name) to include in the tile - resolution = kwargs.get("resolution", str(settings.TILE_RESOLUTION)) # Tile's resolution - buffer = kwargs.get( - "buffer", str(settings.TILE_BUFFER) - ) # Size of extra data to add for a tile. 
- - limitstr = f"LIMIT {limit}" if int(limit) > -1 else "" - # create list of columns to return - geometry_column = obj_in.geometry_column - cols = obj_in.properties - if geometry_column in cols: - del cols[geometry_column] - - if columns is not None: - include_cols = [c.strip() for c in columns.split(",")] - for c in cols.copy(): - if c not in include_cols: - del cols[c] - - # Add double quote to avoid problem with capital letters in column names - colstring = ", ".join([f'"{c}"' for c in cols]) - - segSize = bbox.right - bbox.left - sql_query = f""" - WITH - bounds AS ( - SELECT - ST_Segmentize( - ST_MakeEnvelope( - :xmin, - :ymin, - :xmax, - :ymax, - {tms.crs.to_epsg()} - ), - :seg_size - ) AS geom - ), - mvtgeom AS ( - SELECT ST_AsMVTGeom( - ST_Transform(t.{geometry_column}, {tms.crs.to_epsg()}), - bounds.geom, - :tile_resolution, - :tile_buffer - ) AS geom, {colstring} - FROM {obj_in.id} t, bounds - WHERE ST_Intersects( - ST_Transform(t.{geometry_column}, 4326), - ST_Transform(bounds.geom, 4326) - ) {limitstr} - ) - SELECT ST_AsMVT(mvtgeom.*) FROM mvtgeom - """ - input_data = { - "xmin": bbox.left, - "ymin": bbox.bottom, - "xmax": bbox.right, - "ymax": bbox.top, - "seg_size": segSize, - "tile_resolution": int(resolution), - "tile_buffer": int(buffer), - } - cursor = await db.execute(sql_query, input_data) - feature = cursor.first()["st_asmvt"] - return feature - - async def tile_from_function( - self, - db: AsyncSession, - tile: morecantile.Tile, - tms: morecantile.TileMatrixSet, - obj_in: VectorTileFunction, - **kwargs: Any, - ) -> Any: - """Get Tile Data.""" - bbox = tms.xy_bounds(tile) - async with pool.acquire() as conn: - transaction = conn.transaction() - await transaction.start() - await conn.execute(obj_in.sql) - - function_params = ":xmin, :ymin, :xmax, :ymax, :epsg" - if kwargs: - params = ", ".join([f"{k} => {v}" for k, v in kwargs.items()]) - function_params += f", {params}" - sql_query = text(f"SELECT {obj_in.function_name}({function_params})") - content = await conn.fetchval_b( - sql_query, - xmin=bbox.left, - ymin=bbox.bottom, - xmax=bbox.right, - ymax=bbox.top, - epsg=tms.crs.to_epsg(), - ) - - await transaction.rollback() - - return content - - -layer = CRUDLayer() diff --git a/src/crud/legacy/crud_layer_library.py b/src/crud/legacy/crud_layer_library.py deleted file mode 100644 index 2532fe7..0000000 --- a/src/crud/legacy/crud_layer_library.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import List - -from sqlalchemy.future import select -from sqlalchemy.orm import Session - -from src.crud.base import CRUDBase -from src.db import models - - -class CRUDLayerLibrary(CRUDBase[models.LayerLibrary, models.LayerLibrary, models.LayerLibrary]): - - # Used Session in order to use in validator - def get_all_layer_names(self, db: Session) -> List[str]: - statement = select(self.model.name) - layers = db.execute(statement) - layers = layers.scalars().all() - return layers - - -layer_library = CRUDLayerLibrary(models.LayerLibrary) - - -class CRUDStyleLibrary(CRUDBase[models.StyleLibrary, models.StyleLibrary, models.StyleLibrary]): - pass - - -style_library = CRUDStyleLibrary(models.StyleLibrary) diff --git a/src/crud/legacy/crud_opportunity_config.py b/src/crud/legacy/crud_opportunity_config.py deleted file mode 100644 index ce4da76..0000000 --- a/src/crud/legacy/crud_opportunity_config.py +++ /dev/null @@ -1,30 +0,0 @@ -from src.crud.base import CRUDBase -from src.db import models - - -class CRUDOpportunityGroup(CRUDBase[models.OpportunityGroup, models.OpportunityGroup, 
models.OpportunityGroup]): - pass - - -opportunity_group = CRUDOpportunityGroup(models.OpportunityGroup) - - -class CRUDOpportunityDefaultConfig(CRUDBase[models.OpportunityDefaultConfig, models.OpportunityDefaultConfig, models.OpportunityDefaultConfig]): - pass - - -opportunity_default_config = CRUDOpportunityDefaultConfig(models.OpportunityDefaultConfig) - - -class CRUDOpportunityStudyAreaConfig(CRUDBase[models.OpportunityStudyAreaConfig, models.OpportunityStudyAreaConfig, models.OpportunityStudyAreaConfig]): - pass - - -opportunity_study_area_config = CRUDOpportunityStudyAreaConfig(models.OpportunityStudyAreaConfig) - -class CRUDOpportunityUserConfig(CRUDBase[models.OpportunityUserConfig, models.OpportunityUserConfig, models.OpportunityUserConfig]): - pass - - -opportunity_user_config = CRUDOpportunityStudyAreaConfig(models.OpportunityUserConfig) - diff --git a/src/crud/legacy/crud_organization.py b/src/crud/legacy/crud_organization.py deleted file mode 100644 index 2afcfb9..0000000 --- a/src/crud/legacy/crud_organization.py +++ /dev/null @@ -1,10 +0,0 @@ -from src.crud.base import CRUDBase -from src.db import models -from src.schemas.organization import OrganizationCreate, OrganizationUpdate - - -class CRUDOrganization(CRUDBase[models.Organization, OrganizationCreate, OrganizationUpdate]): - pass - - -organization = CRUDOrganization(models.Organization) diff --git a/src/crud/legacy/crud_poi_aoi.py b/src/crud/legacy/crud_poi_aoi.py deleted file mode 100644 index 4b163a1..0000000 --- a/src/crud/legacy/crud_poi_aoi.py +++ /dev/null @@ -1,49 +0,0 @@ -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.sql import text - -from src.db import models -from src.resources.enums import ReturnType, SQLReturnTypes - - -class CRUDPoiAoi: - async def poi_aoi_visualization( - self, - db: AsyncSession, - *, - scenario_id: int, - current_user: models.User, - return_type: ReturnType, - ): - _return_type = return_type.value - if return_type == ReturnType.geobuf.value: - _return_type = "db_geobuf" - template_sql = SQLReturnTypes[_return_type].value - - attributes = 'uid, id, category, name, geom, opening_hours, street, housenumber, zipcode, edit_type' - - query = f""" - SELECT {attributes}, NULL as grouped - FROM basic.poi_aoi_visualization(:user_id, :scenario_id, :active_upload_ids, :active_study_area_id, FALSE) - UNION ALL - SELECT NULL AS uid, row_number() over() AS id, category, name, ST_CENTROID(ST_COLLECT(ST_ClusterWithin(geom, 0.001))) AS geom, - NULL, NULL, NULL, NULL, NULL, true - FROM basic.poi_aoi_visualization(:user_id, :scenario_id, :active_upload_ids, :active_study_area_id, TRUE) - GROUP BY category, name - """ - sql = text( - template_sql - % query - ) - result = await db.execute( - sql, - { - "user_id": current_user.id, - "scenario_id": scenario_id, - "active_upload_ids": current_user.active_data_upload_ids, - "active_study_area_id": current_user.active_study_area_id - }, - ) - return result.fetchall()[0][0] - - -poi_aoi = CRUDPoiAoi() diff --git a/src/crud/legacy/crud_r5.py b/src/crud/legacy/crud_r5.py deleted file mode 100644 index 8312aa0..0000000 --- a/src/crud/legacy/crud_r5.py +++ /dev/null @@ -1,152 +0,0 @@ -import datetime -from typing import Any, List - -from bson.objectid import ObjectId -from fastapi import HTTPException -from motor.motor_asyncio import AsyncIOMotorClient - -from src.schemas.r5 import ( - R5ProjectCreateDTO, - R5ProjectInDB, - R5ProjectUpdateDTO, - R5RegionCreateDTO, - R5RegionInDB, -) - -database_name = "analysis" - - -class CRUDR5: - # 
--------------------------REGION CRUD------------------------------ - # ------------------------------------------------------------------- - async def get_all_regions(self, db: AsyncIOMotorClient) -> List[R5RegionInDB]: - """ - Get all regions. - """ - regions = [] - rows = db[database_name].regions.find() - async for row in rows: - regions.append(R5RegionInDB(**row)) - return regions - - async def get_region(self, db: AsyncIOMotorClient, region_id: str) -> R5RegionInDB: - """ - Get region. - """ - region = await db[database_name].regions.find_one({"_id": region_id}) - - return region - - async def create_region( - self, db: AsyncIOMotorClient, region_in: R5RegionCreateDTO - ) -> R5RegionInDB: - """ - Create new region. - """ - region_db_obj = R5RegionInDB( - **region_in.dict(), - createdAt=datetime.datetime.now(), - updatedAt=datetime.datetime.now(), - nonce=str(ObjectId()) - ) - if hasattr(region_db_obj, "id"): - delattr(region_db_obj, "id") - region_db_dict = region_db_obj.dict(by_alias=True) - region_db_dict["_id"] = str(ObjectId()) - ret = await db[database_name].regions.insert_one(region_db_dict) - region_db_obj.id = ret.inserted_id - return region_db_obj - - # delete region - async def delete_region(self, db: AsyncIOMotorClient, region_id: str) -> Any: - """ - Delete region. - """ - await db[database_name].regions.delete_one({"_id": region_id}) - return {"msg": "Region deleted successfully"} - - # -------------------------PROJECT CRUD----------------------------- - # ------------------------------------------------------------------ - async def get_all_projects(self, db: AsyncIOMotorClient) -> List[R5ProjectInDB]: - """ - Get all projects. - """ - projects = [] - rows = db[database_name].projects.find() - async for row in rows: - projects.append(R5ProjectInDB(**row)) - return projects - - async def get_projects_for_region( - self, db: AsyncIOMotorClient, region_id: str - ) -> List[R5ProjectInDB]: - """ - Get all projects for a region. - """ - projects = [] - rows = db[database_name].projects.find({"regionId": region_id}) - async for row in rows: - projects.append(R5ProjectInDB(**row)) - return projects - - # get project by id - async def get_project(self, db: AsyncIOMotorClient, project_id: str) -> R5ProjectInDB: - """ - Get project. - """ - project = await db[database_name].projects.find_one({"_id": project_id}) - return project - - async def create_project( - self, db: AsyncIOMotorClient, project_in: R5ProjectCreateDTO - ) -> R5ProjectInDB: - """ - Create new project. - """ - region = await db[database_name].regions.find_one({"_id": project_in.regionId}) - bundle = await db[database_name].bundles.find_one({"_id": project_in.bundleId}) - if region is None: - raise HTTPException( - status_code=400, - detail="Region does not exist", - ) - if bundle is None: - raise HTTPException( - status_code=400, - detail="Bundle does not exist", - ) - - project_db_obj = R5ProjectInDB( - **project_in.dict(), - createdAt=datetime.datetime.now(), - updatedAt=datetime.datetime.now(), - nonce=str(ObjectId()) - ) - if hasattr(project_db_obj, "id"): - delattr(project_db_obj, "id") - project_db_dict = project_db_obj.dict(by_alias=True) - project_db_dict["_id"] = str(ObjectId()) - ret = await db[database_name].projects.insert_one(project_db_dict) - project_db_obj.id = ret.inserted_id - return project_db_obj - - # update project - async def update_project(self, db: AsyncIOMotorClient, project_in: R5ProjectUpdateDTO) -> Any: - """ - Update project. 
- """ - await db[database_name].projects.update_one( - {"_id": project_in.id}, {"$set": project_in.dict()} - ) - return {"msg": "Project updated successfully"} - - # delete project - async def delete_project(self, db: AsyncIOMotorClient, project_id: str) -> Any: - """ - Delete project. - """ - await db[database_name].projects.delete_one({"_id": project_id}) - return {"msg": "Project deleted successfully"} - - -r5 = CRUDR5() diff --git a/src/crud/legacy/crud_role.py b/src/crud/legacy/crud_role.py deleted file mode 100644 index 7eb470c..0000000 --- a/src/crud/legacy/crud_role.py +++ /dev/null @@ -1,15 +0,0 @@ -from src import schemas -from src.crud.base import CRUDBase -from src.db import models - - -class CRUDRole(CRUDBase[models.Role, schemas.RoleCreate, schemas.RoleUpdate]): - pass - -role = CRUDRole(models.Role) - - -class CRUDUserRole(CRUDBase[models.UserRole, models.UserRole, models.UserRole]): - pass - -user_role = CRUDUserRole(models.UserRole) diff --git a/src/crud/legacy/crud_scenario.py b/src/crud/legacy/crud_scenario.py deleted file mode 100644 index d40be0e..0000000 --- a/src/crud/legacy/crud_scenario.py +++ /dev/null @@ -1,404 +0,0 @@ -import enum -import uuid -from typing import Any, List - -import pyproj -from fastapi import HTTPException -from geoalchemy2.shape import WKTElement, to_shape -from shapely import Polygon -from shapely.ops import transform -from sqlalchemy import and_, func, or_, text -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.sql import delete, select -from src import schemas -from src.core.opportunity import opportunity -from src.core.config import settings -from src.crud.base import CRUDBase -from src.core.heatmap.heatmap_compute import ComputeHeatmap -from src.db import models -from src.db.session import legacy_engine -from shapely import wkb -import shutil -import h3 - -from src.schemas.legacy.isochrone import ( - IsochroneDTO, - IsochroneWalkingProfile, - request_examples, -) - -scenario_layer_models = { - schemas.ScenarioLayersNoPoisEnum.way.value: models.Edge, - schemas.ScenarioLayersNoPoisEnum.way_modified.value: models.WayModified, - schemas.ScenarioLayersNoPoisEnum.building.value: models.Building, - schemas.ScenarioLayersNoPoisEnum.building_modified.value: models.BuildingModified, - schemas.ScenarioLayersNoPoisEnum.population.value: models.Population, - schemas.ScenarioLayersNoPoisEnum.population_modified.value: models.PopulationModified, - "poi": models.Poi, - "poi_modified": models.PoiModified, -} - -# TODO: Check if geometries are within study area -# TODO: Check geometry CRS - - -class CRUDScenario(CRUDBase[models.Scenario, schemas.ScenarioCreate, schemas.ScenarioUpdate]): - async def read_scenario_features( - self, - db: AsyncSession, - current_user: models.User, - scenario_id: int, - layer_name: str, - intersect: str, - ) -> Any: - layer = scenario_layer_models[layer_name.value] - - if "_modified" not in layer_name.value and intersect is None: - raise HTTPException( - status_code=400, detail="Intersect parameter is required for non-modified layers" - ) - - polygon = None - if intersect is not None: - try: - polygon = WKTElement(intersect, srid=4326) - # Check if area of polygon is smaller than 10 km2 - project = pyproj.Transformer.from_crs( - pyproj.CRS("EPSG:4326"), pyproj.CRS("EPSG:3857"), always_xy=True - ).transform - projected_area = transform(project, to_shape(polygon)).area - if (projected_area / 1000000) > 10: - raise HTTPException( - status_code=400, - detail="The area of the polygon is too large. 
Please select a smaller area.", - ) - except: - raise HTTPException(status_code=400, detail="Invalid geometry") - statement = select(layer) - - if layer_name.value == schemas.ScenarioLayersNoPoisEnum.way.value: - excluded_ids_results = await db.execute( - func.basic.select_customization("excluded_class_id_walking", current_user.active_study_area_id) - ) - excluded_ids = excluded_ids_results.fetchall() - excluded_ids_list = dict(excluded_ids[0])["select_customization_1"] - - excluded_foot_results = await db.execute( - func.basic.select_customization("categories_no_foot", current_user.active_study_area_id) - ) - excluded_foot = excluded_foot_results.fetchall() - excluded_foot_list = dict(excluded_foot[0])["select_customization_1"] - - statement = statement.where( - and_( - layer.class_id.notin_(excluded_ids_list), - or_(layer.foot.notin_(excluded_foot_list), layer.foot.is_(None)), - layer.geom.ST_Intersects(polygon), - layer.scenario_id is None, - ) - ) - - elif "_modified" in layer_name.value and intersect is None: - statement = statement.where(layer.scenario_id == scenario_id) - elif "_modified" in layer_name.value: - statement = statement.where( - and_( - layer.geom.ST_Intersects(polygon), - layer.scenario_id == scenario_id, - ) - ) - - else: - statement = statement.where(layer.geom.ST_Intersects(polygon)) - - result = await db.execute(statement) - result = result.scalars().all() - return result - - async def delete_scenario_features( - self, db: AsyncSession, current_user: models.User, scenario_id: int, layer_name: str - ) -> Any: - layer = scenario_layer_models[layer_name.value] - await db.execute(delete(layer).where(layer.scenario_id == scenario_id)) - await db.commit() - scenario_dir = f"{settings.CACHE_PATH}/user/scenario/{scenario_id}" - shutil.rmtree(scenario_dir, ignore_errors=True) - return {"msg": "Features deleted successfully"} - - async def delete_scenario_feature( - self, - db: AsyncSession, - current_user: models.User, - scenario_id: int, - layer_name: str, - feature_ids: List[int], - ) -> Any: - layer = scenario_layer_models[layer_name.value] - # check if feature exists in the table - await db.execute( - delete(layer).where(and_(layer.id.in_(feature_ids), layer.scenario_id == scenario_id)) - ) - await db.commit() - if layer_name.value == schemas.ScenarioLayerFeatureEnum.population_modified.value: - await db.execute(func.basic.population_modification(scenario_id)) - await db.commit() - # Recalculate opportunity matrices for poi_modified and population_modified - features_in_db = await db.execute( - select(layer).where(and_(layer.scenario_id == scenario_id, layer.id.in_(feature_ids))) - ) - features_in_db = features_in_db.scalars().fetchall() - - return {"msg": "Features deleted successfully"} - - async def create_scenario_features( - self, - db: AsyncSession, - current_user: models.User, - scenario_id: int, - layer_name: str, - feature_in: schemas.ScenarioFeatureCreate, - ) -> Any: - layer = scenario_layer_models[layer_name.value] - features = feature_in.features - features_in_db = [] - for feature in features: - feature_dict = {} - - feature_dict["scenario_id"] = scenario_id - - # Check if population modified intersect with sub study area - if layer_name.value == schemas.ScenarioLayerFeatureEnum.population_modified.value: - point = WKTElement(feature.geom, srid=4326) - statement = select(models.SubStudyArea).where( - and_(models.SubStudyArea.geom.ST_Intersects(point)) - ) - sub_study_area_result = await db.execute(statement) - sub_study_area_result = 
sub_study_area_result.scalars().all() - if len(sub_study_area_result) == 0: - raise HTTPException( - status_code=400, - detail="The population feature does not intersect with any sub study area", - ) - feature_dict["sub_study_area_id"] = sub_study_area_result[0].id - try: - for key, value in feature: - if ( - key == "uid" - and layer_name.value == schemas.ScenarioLayerFeatureEnum.poi_modified.value - ): - if value is None: - # new POI - feature_dict["uid"] = uuid.uuid4().hex - else: - # existing POI - feature_dict["uid"] = value - splited_values = value.split("_") - if len(splited_values) >= 5: - feature_dict["data_upload_id"] = int( - splited_values[-1].replace("u", "") - ) - - # TODO: check if uid is valid (poi / poi_user) - elif ( - layer_name.value == schemas.ScenarioLayerFeatureEnum.way_modified.value - and key == "way_id" - ): - if value is not None: - feature_dict["way_id"] = value - - # TODO: For population check if geometry and building with {building_modified_id} intersect - - elif isinstance(value, enum.Enum): - feature_dict[key] = value.value - elif key == "class_id" and value is None: - feature_dict[key] = 100 - elif value is None: - continue - else: - feature_dict[key] = value - feature_obj = layer.from_orm(layer(**feature_dict)) - features_in_db.append(feature_obj) - except Exception: - raise HTTPException(status_code=400, detail="Invalid feature") - - db.add_all(features_in_db) - await db.commit() - # Execute population distribution on population modified - if layer_name.value in ( - schemas.ScenarioLayerFeatureEnum.building_modified.value, - schemas.ScenarioLayerFeatureEnum.population_modified.value, - ): - await db.execute(func.basic.population_modification(scenario_id)) - await db.commit() - - for feature in features_in_db: - await db.refresh(feature) - - # Recalculate opportunity matrices for poi_modified and population_modified - if layer_name.value in ("poi_modified"): - await self.compute_scenario_opportunity_matrices( - scenario_id, layer_name.value.split("_")[0], features_in_db, current_user - ) - - return features_in_db - - async def update_scenario_features( - self, - db: AsyncSession, - current_user: models.User, - scenario_id: int, - layer_name: str, - feature_in: schemas.ScenarioFeatureUpdate, - ) -> Any: - layer = scenario_layer_models[layer_name.value] - features = feature_in.features - features_obj = {} - feature_ids = [] - for feature in features: - features_obj[feature.id] = {} - feature_dict = {} - # Check if population modified intersect with sub study area - if layer_name.value == schemas.ScenarioLayerFeatureEnum.population_modified.value: - point = WKTElement(feature.geom, srid=4326) - statement = select(models.SubStudyArea).where( - and_(models.SubStudyArea.geom.ST_Intersects(point)) - ) - sub_study_area_result = await db.execute(statement) - sub_study_area_result = sub_study_area_result.scalars().all() - if len(sub_study_area_result) == 0: - raise HTTPException( - status_code=400, - detail="The population feature does not intersect with any sub study area", - ) - feature_dict["sub_study_area_id"] = sub_study_area_result[0].id - try: - for key, value in feature: - if key == "id": - feature_ids.append(value) - continue - elif isinstance(value, enum.Enum): - feature_dict[key] = value.value - else: - feature_dict[key] = value - except Exception: - raise HTTPException(status_code=400, detail="Invalid feature") - - features_obj[feature.id] = feature_dict - - features_in_db = await db.execute( - select(layer).where(and_(layer.scenario_id == scenario_id, 
layer.id.in_(feature_ids))) - ) - features_in_db = features_in_db.scalars().fetchall() - - for db_feature in features_in_db: - feature_id = db_feature.id - for key, value in features_obj[feature_id].items(): - if value is not None: - # TODO: For population check if geometry and building with {building_modified_id} intersect - setattr(db_feature, key, value) - - db.add_all(features_in_db) - await db.commit() - - # Execute population distribution on population modified - if layer_name.value in ( - schemas.ScenarioLayerFeatureEnum.building_modified.value, - schemas.ScenarioLayerFeatureEnum.population_modified.value, - ): - await db.execute(func.basic.population_modification(scenario_id)) - await db.commit() - - for feature in features_in_db: - await db.refresh(feature) - - # Recalculate opportunity matrices for poi_modified and population_modified - if layer_name.value in ("poi_modified"): - await self.compute_scenario_opportunity_matrices( - scenario_id, layer_name.value.split("_")[0], features_in_db, current_user - ) - - return features_in_db - - async def remove_multi_by_id_and_userid( - self, db: AsyncSession, *, ids: List[int], user_id: int - ) -> Any: - statement = ( - delete(self.model).where(self.model.id.in_(ids)).where(self.model.user_id == user_id) - ) - await db.execute(statement) - await db.commit() - if statement.is_delete is True: - # Remove scenario cache - for id in ids: - scenario_dir = f"{settings.CACHE_PATH}/user/scenario/{id}" - shutil.rmtree(scenario_dir, ignore_errors=True) - else: - raise HTTPException(status_code=400, detail="Invalid scenario id") - - return ids - - async def is_scenario_broken(self, db: AsyncSession, scenario_id: int) -> bool: - # Cheking if any of the modified objects has a outdatad column set to true - query = text( - """SELECT EXISTS (SELECT 1 FROM customer.poi_modified pm WHERE outdated = true and pm.scenario_id=:scenario_id) - OR EXISTS (SELECT 1 FROM customer.way_modified wm WHERE outdated = true and wm.scenario_id=:scenario_id) - OR EXISTS (SELECT 1 FROM customer.building_modified bm WHERE outdated = true and bm.scenario_id=:scenario_id) - AS broken;""" - ) - result = await db.execute(query, {"scenario_id": scenario_id}) - result = result.scalars().first() - - return bool(result) - - async def compute_scenario_opportunity_matrices( - self, scenario_id: int, opportunity_type: str, features, user - ) -> None: - # -> - s3_folder = "" - crud_compute_heatmap = ComputeHeatmap(current_user=user) - # todo: the isochrone settings can be dynamic in the future. 
Currently scenario is only for walking and 20 minutes, 5km/h - isochrone_dto = IsochroneDTO( - **request_examples["isochrone"]["single_walking_default"]["value"] - ) - isochrone_dto.settings.travel_time = 20 # minutes - isochrone_dto.settings.speed = 5 # km/h - isochrone_dto.settings.walking_profile = IsochroneWalkingProfile.STANDARD - isochrone_dto.output.resolution = 12 - # -> - bulk_id_affected = [] - for feature in features: - if feature.edit_type == "d": - continue - point_shape = wkb.loads(feature.geom.data) - h3_id = h3.geo_to_h3(point_shape.y, point_shape.x, 6) - bulk_id_affected.append(h3_id) - - bulk_id_affected = list(set(bulk_id_affected)) - - for bulk_id in bulk_id_affected: - bulk_geom = Polygon(h3.h3_to_geo_boundary(h=bulk_id, geo_json=True)) - opportunities_modified = opportunity.read_modified_data( - db=legacy_engine, - layer=opportunity_type, - bbox_wkt=bulk_geom.wkt, - scenario_id=scenario_id, - edit_type=["n", "m"], - ) - travel_time_matrices = await crud_compute_heatmap.read_travel_time_matrices( - bulk_id=bulk_id, isochrone_dto=isochrone_dto, s3_folder=s3_folder - ) - if len(opportunities_modified) == 0: - return None - await crud_compute_heatmap.compute_opportunity_matrix( - bulk_id, - isochrone_dto, - opportunity_type, - opportunities=opportunities_modified, - travel_time_matrices=travel_time_matrices, - output_path=f"{settings.CACHE_PATH}/user/scenario/{scenario_id}", - s3_folder=s3_folder, - ) - return bulk_id_affected - - -scenario = CRUDScenario(models.Scenario) diff --git a/src/crud/legacy/crud_static_layer.py b/src/crud/legacy/crud_static_layer.py deleted file mode 100644 index 6c01b6f..0000000 --- a/src/crud/legacy/crud_static_layer.py +++ /dev/null @@ -1,42 +0,0 @@ -from sqlalchemy import Column, MetaData, String, Table, select -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.schema import DropTable - -from src.crud.base import CRUDBase -from src.db import models -from src.utils import tablify - - -class CRUDStaticLayer(CRUDBase[models.StaticLayer, models.StaticLayer, models.StaticLayer]): - async def drop_postgis_table(self, db: AsyncSession, table_name: str): - metadata_obj = MetaData(schema="extra") - table = Table(table_name, metadata_obj) - await db.execute(DropTable(table, if_exists=True)) - - async def list_static_layer_table_names(self, db: AsyncSession, name_like: str = ""): - metadata_obj = MetaData(schema="information_schema") - tables = Table( - "tables", - metadata_obj, - Column("table_name", String()), - Column("table_schema", String()), - ) - query = select(tables).where(tables.columns.table_name.like(name_like + "%")) - result = await db.execute(query) - tables = result.scalars().all() - return tables - - async def uniquify_static_layer_name(self, db: AsyncSession, file_name: str): - original_name = file_name.split(".")[0] - original_name = tablify(original_name) - name = original_name - static_layer_table_names = await self.list_static_layer_table_names(db=db, name_like=name) - counter = 1 - while name in static_layer_table_names: - name = f"{original_name}_{counter}" - counter += 1 - - return name - - -static_layer = CRUDStaticLayer(models.StaticLayer) diff --git a/src/crud/legacy/crud_study_area.py b/src/crud/legacy/crud_study_area.py deleted file mode 100644 index 985c0a0..0000000 --- a/src/crud/legacy/crud_study_area.py +++ /dev/null @@ -1,28 +0,0 @@ -from sqlalchemy.ext.asyncio import AsyncSession - -from src.crud.base import CRUDBase -from src.db import models - -from .base import ModelType -from 
.crud_study_area_geostore import study_area_geostore as crud_study_area_geostore - - -class CRUDStudyArea(CRUDBase[models.StudyArea, models.StudyArea, models.StudyArea]): - async def get_first(self, db: AsyncSession) -> ModelType: - all_study_areas = await self.get_all(db=db) - if all_study_areas: - return all_study_areas[0] - else: - return None - - async def add_geostore_to_study_area( - self, db: AsyncSession, study_area_id: int, geostore_id: int - ): - study_area_geostore = models.StudyAreaGeostore( - study_area_id=study_area_id, geostore_id=geostore_id - ) - - return await crud_study_area_geostore.create(db, obj_in=study_area_geostore) - - -study_area = CRUDStudyArea(models.StudyArea) diff --git a/src/crud/legacy/crud_study_area_geostore.py b/src/crud/legacy/crud_study_area_geostore.py deleted file mode 100644 index 7489e87..0000000 --- a/src/crud/legacy/crud_study_area_geostore.py +++ /dev/null @@ -1,10 +0,0 @@ - -from src.crud.base import CRUDBase -from src.db import models - - - -class CRUDStudyAreaGeostore(CRUDBase[models.StudyAreaGeostore, models.StudyAreaGeostore, models.StudyAreaGeostore]): - pass - -study_area_geostore = CRUDStudyAreaGeostore(models.StudyAreaGeostore) diff --git a/src/crud/legacy/crud_system.py b/src/crud/legacy/crud_system.py deleted file mode 100644 index 3daf84a..0000000 --- a/src/crud/legacy/crud_system.py +++ /dev/null @@ -1,7 +0,0 @@ -from src.crud.base import CRUDBase -from src.db import models - -class CRUDSystem(CRUDBase[models.System, models.System, models.System]): - pass - -system = CRUDSystem(models.System) diff --git a/src/crud/legacy/crud_upload.py b/src/crud/legacy/crud_upload.py deleted file mode 100644 index 344256b..0000000 --- a/src/crud/legacy/crud_upload.py +++ /dev/null @@ -1,330 +0,0 @@ -import os -import random -import shutil - -from fastapi import HTTPException, UploadFile -from geoalchemy2.shape import to_shape -from geopandas import read_file as gpd_read_file -from sqlalchemy import and_, delete, text, update -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.future import select -from sqlalchemy.orm.attributes import flag_modified - -from src import crud, schemas -from src.crud.base import CRUDBase -from src.db import models -from src.db.models.config_validation import PoiCategory, check_dict_schema -from src.db.session import legacy_engine -from src.resources.enums import UploadFileTypes -from src.utils import clean_unpacked_zip, delete_file - - -class CRUDDataUpload(CRUDBase[models.Customization, models.Customization, models.Customization]): - pass - - -data_upload = CRUDDataUpload(models.DataUpload) - - -class CRUDUploadFile: - async def upload_custom_pois( - self, - *, - db: AsyncSession, - file: UploadFile, - file_dir: str, - file_name: str, - poi_category: str, - current_user: models.User, - ): - - """Handle uploaded custom pois.""" - # Check if poi_category is already uploaded for study area - try: - query_poi_features = ( - select(models.PoiUser.category) - .join(models.DataUpload) - .where( - and_( - models.DataUpload.user_id == current_user.id, - models.DataUpload.study_area_id == current_user.active_study_area_id, - models.PoiUser.data_upload_id == models.DataUpload.id, - models.PoiUser.category == poi_category, - ) - ) - .limit(1) - ) - - poi_features = await db.execute(query_poi_features) - poi_features = poi_features.first() - except: - delete_file(file_dir) - raise HTTPException( - status_code=400, - detail="Failed reading the file.", - ) - - if poi_features is not None: - delete_file(file_dir) 
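# Aside: a minimal sketch of the duplicate-category guard used just above, rewritten
# as a standalone helper. It assumes the same models and an AsyncSession; the name
# `has_poi_category` and the EXISTS formulation are illustrative, not this repo's API.
from sqlalchemy import and_, exists, select
from sqlalchemy.ext.asyncio import AsyncSession

from src.db import models

async def has_poi_category(db: AsyncSession, user: models.User, poi_category: str) -> bool:
    # EXISTS lets the database stop at the first match instead of fetching a row.
    stmt = select(
        exists().where(
            and_(
                models.DataUpload.user_id == user.id,
                models.DataUpload.study_area_id == user.active_study_area_id,
                models.PoiUser.data_upload_id == models.DataUpload.id,
                models.PoiUser.category == poi_category,
            )
        )
    )
    return bool((await db.execute(stmt)).scalar())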
- raise HTTPException( - status_code=400, - detail="The chosen custom poi category already exists. Please delete the old data-set first in case you want to replace it with the new one", - ) - - required_attributes = ["geometry"] - optional_attributes = [ - "opening_hours", - "name", - "street", - "housenumber", - "zipcode", - "opening_hours", - "wheelchair", - ] - # Get active study area - - study_area_obj = await crud.study_area.get( - db=db, id=current_user.active_study_area_id, extra_fields=["geom"] - ) - study_area_geom = to_shape(study_area_obj.geom) - - if UploadFileTypes.geojson.value in file_name: - try: - gdf = gpd_read_file(file_dir, driver="GeoJSON") - delete_file(file_dir) - except: - delete_file(file_dir) - raise HTTPException( - status_code=400, - detail="Failed reading the file in GeodataFrame", - ) - elif UploadFileTypes.zip.value in file_name: - unzipped_file_dir = ( - os.path.splitext(file_dir)[0] - + "/" - + file.filename.replace(UploadFileTypes.zip.value, "") - ) - - # Create directory - try: - shutil.unpack_archive(file_dir, os.path.splitext(file_dir)[0], "zip") - except: - clean_unpacked_zip( - zip_path=file_dir, dir_path=file_dir.replace(UploadFileTypes.zip.value, "") - ) - raise HTTPException(status_code=400, detail="Could not read or process file.") - - # List shapefiles - try: - available_shapefiles = [ - f for f in os.listdir(unzipped_file_dir) if f.endswith(".shp") - ] - except: - clean_unpacked_zip( - zip_path=file_dir, dir_path=file_dir.replace(UploadFileTypes.zip.value, "") - ) - raise HTTPException(status_code=400, detail="No shapefiles inside folder.") - - # Read shapefiles and append to GeoDataFrame - if len(available_shapefiles) == 1: - gdf = gpd_read_file(f"{unzipped_file_dir}/{available_shapefiles[0]}") - elif len(available_shapefiles) > 1: - clean_unpacked_zip( - zip_path=file_dir, dir_path=file_dir.replace(UploadFileTypes.zip.value, "") - ) - raise HTTPException( - status_code=400, detail="More then one shapefiles inside folder." - ) - else: - raise HTTPException(status_code=400, detail="No shapefiles inside folder.") - clean_unpacked_zip( - zip_path=file_dir, dir_path=file_dir.replace(UploadFileTypes.zip.value, "") - ) - else: - raise HTTPException(status_code=400, detail="Invalid file type") - - # Convert to EPSG 4326 - gdf_schema = dict(gdf.dtypes) - if gdf.crs.name == "unknown": - raise HTTPException(status_code=400, detail="Invalid CRS") - else: - gdf.to_crs(epsg=4326, inplace=True) - gdf.set_crs(epsg=4326) - gdf = gdf.clip(study_area_geom) - - # Drop not needed columns - columns_to_drop = [] - for attribute in gdf_schema: - if attribute not in optional_attributes + required_attributes: - columns_to_drop.append(attribute) - - gdf = gdf.drop(columns_to_drop, axis=1) - if len(gdf) == 0: - raise HTTPException( - status_code=400, detail="No valid data in file or data outside the study area." 
- ) - - # Assign specified category to all points - gdf["category"] = poi_category - - # Create entry in upload table - upload_obj = models.DataUpload( - data_type=file.content_type, - upload_type=models.PoiUser.__table__.name, - user_id=current_user.id, - upload_size=int(file.file.tell() / 1000), - study_area_id=current_user.active_study_area_id, - ) - upload_obj = await data_upload.create(db=db, obj_in=upload_obj) - - # Write to database - try: - gdf["uid"] = ( - gdf.centroid.map( - lambda p: str(format(round(p.x, 4), ".4f")).replace(".", "") - + "_" - + str(format(round(p.y, 4), ".4f")).replace(".", "") - ) - + "_" - + str(poi_category) - ) - gdf["count_uid"] = gdf.groupby(["uid"]).cumcount() + 1 - gdf["uid"] = ( - gdf["uid"] + "_" + gdf["count_uid"].astype(str) + "_u" + str(upload_obj.id) - ) - gdf["data_upload_id"] = upload_obj.id - - gdf.rename_geometry("geom", inplace=True) - gdf.drop(["count_uid"], axis=1, inplace=True) - - gdf.to_postgis( - name="poi_user", - schema="customer", - con=legacy_engine, - if_exists="append", - chunksize=1000, - ) - - except: - await db.execute( - """DELETE FROM customer.data_upload WHERE id = :data_upload_id""", - {"data_upload_id": upload_obj.id}, - ) - await db.commit() - raise HTTPException( - status_code=400, - detail="An error happened when writing the data into the database.", - ) - - try: - default_poi_categories = ( - await crud.dynamic_customization.get_all_default_poi_categories(db) - ) - if poi_category not in default_poi_categories: - hex_color = "#%06x" % random.randint(0, 0xFFFFFF) - new_setting = {poi_category: {"icon": "fas fa-question", "color": [hex_color]}} - - if check_dict_schema(PoiCategory, new_setting) is False: - raise HTTPException(status_code=400, detail="Invalid JSON-schema") - - await crud.dynamic_customization.insert_opportunity_setting( - db=db, current_user=current_user, insert_settings=new_setting, data_upload_id=upload_obj.id - ) - - except: - await db.execute( - """DELETE FROM customer.data_upload WHERE id = :data_upload_id""", - {"data_upload_id": upload_obj.id}, - ) - await db.commit() - raise HTTPException( - status_code=400, - detail="An error happened when writing new settings to the database.", - ) - - return {"msg": "Upload successful"} - - async def delete_custom_pois( - self, *, db: AsyncSession, data_upload_id: int, current_user: models.User - ): - """Delete uploaded custom pois.""" - - category_name = await db.execute( - select(models.PoiUser.category).where(models.PoiUser.data_upload_id == data_upload_id) - ) - category_name = category_name.first()[0] - - # Check if poi_category is default - default_category = await crud.opportunity_default_config.get_by_key(db, key="category", value=category_name) - - try: - # Delete uploaded data - await db.execute( - delete(models.DataUpload).where(models.DataUpload.id == data_upload_id) - ) - - # Delete related scenarios - sql = text( - """DELETE FROM customer.scenario WHERE data_upload_ids && :data_upload_id AND user_id = :user_id""" - ) - await db.execute(sql, {"data_upload_id": [data_upload_id], "user_id": current_user.id}) - - # Delete customization for uploaded pois - if default_category == [] and category_name is not None: - await crud.dynamic_customization.delete_opportunity_setting( - db=db, - current_user=current_user, - category=category_name, - setting_type="poi", - ) - - if ( - current_user.active_data_upload_ids != [] - and data_upload_id in current_user.active_data_upload_ids - ): - current_user.active_data_upload_ids.remove(data_upload_id) - await 
db.execute( - update(models.User) - .where(models.User.id == current_user.id) - .values(active_data_upload_ids=current_user.active_data_upload_ids) - ) - - await db.commit() - except Exception: - await db.rollback() - raise HTTPException( - status_code=400, detail="Could not delete %s data." % category_name - ) - - async def set_active_state_of_custom_poi( - self, *, db: AsyncSession, obj_in: schemas.CutomDataUploadState, current_user: models.User - ): - """Set active state of custom poi.""" - data_upload_obj = await db.execute( - select(models.DataUpload).filter(models.DataUpload.id == obj_in.data_upload_id) - ) - data_upload_obj = data_upload_obj.scalars().first() - if data_upload_obj.user_id != current_user.id: - raise HTTPException(status_code=400, detail="User ID does not match") - - data_upload_ids_obj = current_user.active_data_upload_ids - - if obj_in.state is False and data_upload_obj.id in data_upload_ids_obj: - try: - data_upload_ids_obj.remove(obj_in.data_upload_id) - except ValueError: - print("Data upload doesn't exist") - elif obj_in.state is True and data_upload_obj.id not in data_upload_ids_obj: - data_upload_ids_obj.append(obj_in.data_upload_id) - else: - return current_user - - current_user.active_data_upload_ids = data_upload_ids_obj - flag_modified(current_user, "active_data_upload_ids") - db.add(current_user) - await db.commit() - await db.refresh(current_user) - return current_user - - -upload = CRUDUploadFile() diff --git a/src/crud/legacy/crud_user.py b/src/crud/legacy/crud_user.py deleted file mode 100644 index 45d3299..0000000 --- a/src/crud/legacy/crud_user.py +++ /dev/null @@ -1,133 +0,0 @@ -from typing import Any, Dict, Optional, Union - -from geoalchemy2.shape import from_shape, to_shape -from shapely.geometry import Polygon -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select -from shapely import Point -from src.core.security import get_password_hash, verify_password -from src.crud.base import CRUDBase -from src.db import models -from src.schemas.legacy.user import UserCreate, UserUpdate -from src.schemas.legacy.isochrone import IsochroneStartingPointCoord - - -class CRUDUser(CRUDBase[models.User, UserCreate, UserUpdate]): - async def create(self, db: AsyncSession, *, obj_in: UserCreate) -> models.User: - db_obj = models.User.from_orm(obj_in) - db_obj.hashed_password = get_password_hash(obj_in.password) - roles = await db.execute( - select(models.Role).filter(models.Role.name.in_(obj_in.roles)) - ) - db_obj.roles = roles.scalars().all() - - # combine study_area_ids with active_study_area_id - user_study_area_ids = set() - if obj_in.active_study_area_id: - user_study_area_ids.add(obj_in.active_study_area_id) - user_study_area_ids.update(obj_in.study_areas) - - study_areas = await db.execute( - select(models.StudyArea).filter( - models.StudyArea.id.in_(user_study_area_ids) - ) - ) - db_obj.study_areas = study_areas.scalars().all() - db.add(db_obj) - await db.commit() - await db.refresh(db_obj) - return db_obj - - async def update( - self, - db: AsyncSession, - *, - db_obj: models.User, - obj_in: Union[UserUpdate, Dict[str, Any]], - ) -> models.User: - if isinstance(obj_in, dict): - update_data = obj_in - else: - update_data = obj_in.dict(exclude_unset=True) - if update_data.get("password"): - hashed_password = get_password_hash(update_data["password"]) - del update_data["password"] - update_data["hashed_password"] = hashed_password - if update_data.get("roles") or update_data.get("roles") == []: - roles = await db.execute( - 
select(models.Role).filter(models.Role.name.in_(obj_in.roles)) - ) - db_obj.roles = roles.scalars().all() - del update_data["roles"] - if update_data.get("study_areas") or update_data.get("study_areas") == []: - study_areas = await db.execute( - select(models.StudyArea).filter( - models.StudyArea.id.in_(obj_in.study_areas) - ) - ) - db_obj.study_areas = study_areas.scalars().all() - del update_data["study_areas"] - - return await super().update(db, db_obj=db_obj, obj_in=update_data) - - async def authenticate( - self, db: AsyncSession, *, email: str, password: str - ) -> Optional[models.User]: - user = await self.get_by_key(db, key="email", value=email) - - if not user or len(user) == 0: - return None - else: - user = user[0] - if not verify_password(password, user.hashed_password): - return None - return user - - async def get_active_study_area(self, db: AsyncSession, user: models.User): - study_area = await CRUDBase(models.StudyArea).get( - db, id=user.active_study_area_id - ) - - world_extent = Polygon( - [[-180, 85], [-180, -85], [180, -85], [180, 85], [-180, 85]] - ) - study_area_geom = to_shape(study_area.geom) - buffer_geom_heatmap = to_shape(study_area.buffer_geom_heatmap) - - study_area_crop = world_extent.difference(study_area_geom) - study_area.geom = from_shape(study_area_crop) - - study_area_dict = dict(study_area) - study_area_dict["bounds"] = buffer_geom_heatmap.bounds - - return study_area_dict - - def is_active(self, user: models.User) -> bool: - return user.is_active - - def is_superuser(self, user: models.User) -> bool: - role = [r for r in user.roles if r.name == "superuser"] - if len(role) > 0: - return True - return False - - async def user_study_area_starting_point_access( - self, db: AsyncSession, user_id: int, points: list[IsochroneStartingPointCoord] - ) -> bool: - user = await self.get(db, id=user_id, extra_fields=[models.User.study_areas]) - - points = [Point(point.lon, point.lat) for point in points] - study_area_geoms = [study_area.shape_of_geom for study_area in user.study_areas] - for point in points: - for study_area in study_area_geoms: - if study_area.contains(point): - break - else: - # if no study area contains the point, return False - return False - - # if all if statements breaked (i.e. 
all points are in a study area), return True - return True - - -user = CRUDUser(models.User) diff --git a/src/db/legacy/aoi.py b/src/db/legacy/aoi.py deleted file mode 100644 index ca5d9ee..0000000 --- a/src/db/legacy/aoi.py +++ /dev/null @@ -1,84 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, Optional - -from geoalchemy2 import Geometry -from sqlmodel import ( - Column, - DateTime, - Field, - ForeignKey, - Index, - Integer, - Relationship, - SQLModel, - Text, - text, -) -from sqlalchemy.dialects.postgresql import JSONB -if TYPE_CHECKING: - from ..data_upload import DataUpload - from ..scenario import Scenario - - -class AoiBase(SQLModel): - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - category: str = Field(sa_column=Column(Text, index=True, nullable=False)) - name: Optional[str] = Field(sa_column=Column(Text)) - opening_hours: Optional[str] = Field(sa_column=Column(Text)) - wheelchair: Optional[str] = Field(sa_column=Column(Text)) - tags: Optional[dict] = Field(sa_column=Column(JSONB)) - geom: str = Field( - sa_column=Column( - Geometry(geometry_type="MultiPolygon", srid="4326", spatial_index=False), - nullable=False, - ) - ) - - -class Aoi(AoiBase, table=True): - __tablename__ = "aoi" - __table_args__ = {"schema": "basic"} - - -Index("idx_aoi_geom", Aoi.__table__.c.geom, postgresql_using="gist") - - -class AoiModified(AoiBase, table=True): - __tablename__ = "aoi_modified" - __table_args__ = {"schema": "customer"} - - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - scenario_id: Optional[int] = Field( - sa_column=Column( - Integer, ForeignKey("customer.scenario.id", ondelete="CASCADE"), index=True - ) - ) - - scenario: Optional["Scenario"] = Relationship(back_populates="aois_modified") - - -Index("idx_aoi_modified_geom", AoiModified.__table__.c.geom, postgresql_using="gist") - - -class AoiUser(AoiBase, table=True): - __tablename__ = "aoi_user" - __table_args__ = {"schema": "customer"} - - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - data_upload_id: int = Field( - sa_column=Column( - Integer, - ForeignKey("customer.data_upload.id", ondelete="CASCADE"), - index=True, - nullable=False, - ) - ) - - data_upload: Optional["DataUpload"] = Relationship(back_populates="aois_user") - - -Index("idx_aoi_user_geom", AoiUser.__table__.c.geom, postgresql_using="gist") diff --git a/src/db/legacy/building.py b/src/db/legacy/building.py deleted file mode 100644 index 618d7cf..0000000 --- a/src/db/legacy/building.py +++ /dev/null @@ -1,86 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, List, Optional - -from geoalchemy2 import Geometry -from sqlalchemy import SmallInteger -from sqlmodel import ( - Column, - DateTime, - Field, - Float, - ForeignKey, - Index, - Integer, - Relationship, - SQLModel, - Text, - text, - Boolean -) - -if TYPE_CHECKING: - from .population import Population, PopulationModified - from .scenario import Scenario - - -class BuildingBase(SQLModel): - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - building_type: Optional[str] = Field(sa_column=Column(Text)) - amenity: Optional[str] = Field(sa_column=Column(Text)) - residential_status: Optional[str] = Field(sa_column=Column(Text)) - housenumber: Optional[str] = Field(sa_column=Column(Text)) - street: Optional[str] = Field(sa_column=Column(Text)) - 
building_levels: Optional[int] = Field(sa_column=Column(SmallInteger)) - building_levels_residential: Optional[int] = Field(sa_column=Column(SmallInteger)) - roof_levels: Optional[int] = Field(sa_column=Column(SmallInteger)) - height: Optional[float] = Field(sa_column=Column(Float(53))) - area: Optional[int] - gross_floor_area_residential: Optional[int] - geom: str = Field( - sa_column=Column( - Geometry(geometry_type="Polygon", srid="4326", spatial_index=False), - nullable=False, - ) - ) - - -class Building(BuildingBase, table=True): - __tablename__ = "building" - __table_args__ = {"schema": "basic"} - - osm_id: Optional[int] - populations: List["Population"] = Relationship(back_populates="building") - -Index("idx_building_geom", Building.__table__.c.geom, postgresql_using="gist") - - -class BuildingModified(BuildingBase, table=True): - __tablename__ = "building_modified" - __table_args__ = {"schema": "customer"} - - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - building_id: Optional[int] = Field( - sa_column=Column( - Integer, default=None - ), - ) - scenario_id: int = Field( - sa_column=Column( - Integer, - ForeignKey("customer.scenario.id", ondelete="CASCADE"), - index=True, - nullable=False, - ), - ) - population: Optional[float] = Field(sa_column=Column(Float(53))) - scenario: "Scenario" = Relationship(back_populates="buildings_modified") - populations_modified: Optional[List["PopulationModified"]] = Relationship( - back_populates="building_modified" - ) - edit_type: str = Field(sa_column=Column(Text, nullable=False, index=True)) - outdated: Optional[bool] = Field(sa_column=Column(Boolean, default=False)) - - -Index("idx_building_modified_geom", BuildingModified.__table__.c.geom, postgresql_using="gist") diff --git a/src/db/legacy/config_validation.py b/src/db/legacy/config_validation.py deleted file mode 100644 index 3505007..0000000 --- a/src/db/legacy/config_validation.py +++ /dev/null @@ -1,59 +0,0 @@ -from schema import And, Optional, Schema, SchemaError - - -# Function to check if dict schema is valid -def check_dict_schema(conf_schema, conf): - try: - conf_schema.validate(conf) - return True - except SchemaError: - return False - -mapping_setting_type = {"poi": "poi_groups", "layer": "layer_groups"} - -# Schemas for POIs and Layers -PoiCategory = Schema( - { - str: { - "icon": str, - "color": [str], - Optional("multiple_entrance"): bool, - Optional("sensitivity"): int, - } - } -) - -PoiGroup = Schema({str: {"icon": str, "color": [str], "children": [PoiCategory]}}) - -PoiGroups = Schema({"poi_groups": [PoiGroup]}) - -OtherPoiGroupDummy = {"other": { - "icon": "fas fa-circle", - "color": ["#717171"], - "children": {} - }} - - -LayerCategory = Schema( - { - str: { - Optional("url"): str, - "type": And( - lambda n: n in ["OSM", "BING", "MVT", "XYZ", "GEOJSON", "WMS", "WFS", "GEOBUF"], - str, - ), - Optional("map_attribution"): str, - Optional("imagery_set"): str, - Optional("access_token"): str, - Optional("attributes"): dict, - Optional("translation"): dict, - Optional("style"): dict, - } - } -) - -LayerGroup = Schema({str: {"icon": str, "children": [LayerCategory]}}) - -LayerGroups = Schema({"layer_groups": [LayerGroup]}) - -HeatmapConfiguration = Schema({str: {"sensitivity": int, "weight": int}}) diff --git a/src/db/legacy/customization.py b/src/db/legacy/customization.py deleted file mode 100644 index e738f76..0000000 --- a/src/db/legacy/customization.py +++ /dev/null @@ -1,72 +0,0 @@ -from datetime 
import datetime -from typing import TYPE_CHECKING, List, Optional - -from sqlalchemy.dialects.postgresql import JSONB -from sqlmodel import ( - Column, - DateTime, - Field, - ForeignKey, - Integer, - Relationship, - SQLModel, - Text, - text, -) - -if TYPE_CHECKING: - from .role import Role - from .study_area import StudyArea - from .user import User - - -class CustomizationBase(SQLModel): - type: str - setting: dict - role_id: int - - -class Customization(CustomizationBase, table=True): - __tablename__ = "customization" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - type: str = Field(sa_column=Column(Text, nullable=False)) - setting: dict = Field(sa_column=Column(JSONB, nullable=False)) - role_id: int = Field( - sa_column=Column( - Integer, ForeignKey("customer.role.id", ondelete="CASCADE"), nullable=False - ) - ) - role: "Role" = Relationship(back_populates="customizations") - user_customizations: Optional[List["UserCustomization"]] = Relationship( - back_populates="customizations" - ) - - -class UserCustomization(SQLModel, table=True): - __tablename__ = "user_customization" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - setting: dict = Field(sa_column=Column(JSONB, nullable=False)) - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - user_id: int = Field( - default=None, - sa_column=Column( - Integer, ForeignKey("customer.user.id", ondelete="CASCADE"), nullable=False - ), - ) - customization_id: int = Field( - sa_column=Column( - Integer, ForeignKey("customer.customization.id", ondelete="CASCADE"), nullable=False - ) - ) - study_area_id: int = Field( - sa_column=Column(Integer, ForeignKey("basic.study_area.id"), nullable=False) - ) - customizations: "Customization" = Relationship(back_populates="user_customizations") - users: "User" = Relationship(back_populates="user_customizations") - study_areas: "StudyArea" = Relationship(back_populates="user_customizations") diff --git a/src/db/legacy/data_upload.py b/src/db/legacy/data_upload.py deleted file mode 100644 index 98a9052..0000000 --- a/src/db/legacy/data_upload.py +++ /dev/null @@ -1,55 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, List, Optional - -from sqlmodel import ( - ARRAY, - Column, - DateTime, - Field, - ForeignKey, - Integer, - Relationship, - SQLModel, - Text, - text, -) - -if TYPE_CHECKING: - from .legacy.aoi import AoiUser - from .poi import PoiUser - from .user import User - from .study_area import StudyArea - - -class DataUpload(SQLModel, table=True): - - __tablename__ = "data_upload" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - data_type: str = Field(sa_column=Column(Text, nullable=False)) - upload_type: str = Field(sa_column=Column(Text, nullable=False)) - upload_grid_ids: Optional[List[str]] = Field( - sa_column=Column(ARRAY(Text()), nullable=False, server_default=text("'{}'::text[]")) - ) - upload_size: int = Field(nullable=False) - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - user_id: int = Field( - default=None, - sa_column=Column( - Integer, ForeignKey("customer.user.id", ondelete="CASCADE"), nullable=False - ) - ) - study_area_id: int = Field( - sa_column=Column( - 
Integer, ForeignKey("basic.study_area.id"), nullable=False - ) - ) - reached_poi_heatmap_computed: Optional[bool] - - user: "User" = Relationship(back_populates="data_uploads") - study_area: "StudyArea" = Relationship(back_populates="data_uploads") - pois_user: List["PoiUser"] = Relationship(back_populates="data_upload") - aois_user: List["AoiUser"] = Relationship(back_populates="data_upload") diff --git a/src/db/legacy/edge.py b/src/db/legacy/edge.py deleted file mode 100644 index c9e9f87..0000000 --- a/src/db/legacy/edge.py +++ /dev/null @@ -1,124 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, Optional - -from geoalchemy2 import Geometry -from sqlalchemy import SmallInteger -from sqlmodel import ( - JSON, - BigInteger, - Column, - DateTime, - Field, - Float, - ForeignKey, - Index, - Integer, - Relationship, - SQLModel, - Text, - text, - Boolean -) - -if TYPE_CHECKING: - from .node import Node - from .scenario import Scenario - - -class EdgeBase(SQLModel): - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True)) - class_id: int = Field(nullable=False) - name: Optional[str] = Field(sa_column=Column(Text)) - one_way: Optional[int] - maxspeed_forward: Optional[int] - maxspeed_backward: Optional[int] - osm_id: Optional[int] = Field(sa_column=Column(BigInteger())) - bicycle: Optional[str] = Field(sa_column=Column(Text, index=True)) - foot: Optional[str] = Field(sa_column=Column(Text, index=True)) - oneway: Optional[str] = Field(sa_column=Column(Text)) - crossing: Optional[str] = Field(sa_column=Column(Text)) - one_link_crossing: Optional[bool] - crossing_delay_category: Optional[int] = Field(sa_column=Column(SmallInteger)) - bicycle_road: Optional[str] = Field(sa_column=Column(Text)) - cycleway: Optional[str] = Field(sa_column=Column(Text)) - highway: Optional[str] = Field(sa_column=Column(Text)) - incline: Optional[str] = Field(sa_column=Column(Text)) - incline_percent: Optional[int] - lanes: Optional[float] = Field(sa_column=Column(Float(53))) - lit: Optional[str] = Field(sa_column=Column(Text)) - lit_classified: Optional[str] = Field(sa_column=Column(Text)) - parking: Optional[str] = Field(sa_column=Column(Text)) - parking_lane_both: Optional[str] = Field(sa_column=Column(Text)) - parking_lane_right: Optional[str] = Field(sa_column=Column(Text)) - parking_lane_left: Optional[str] = Field(sa_column=Column(Text)) - segregated: Optional[str] = Field(sa_column=Column(Text)) - sidewalk: Optional[str] = Field(sa_column=Column(Text)) - sidewalk_both_width: Optional[float] = Field(sa_column=Column(Float(53))) - sidewalk_left_width: Optional[float] = Field(sa_column=Column(Float(53))) - sidewalk_right_width: Optional[float] = Field(sa_column=Column(Float(53))) - smoothness: Optional[str] = Field(sa_column=Column(Text)) - surface: Optional[str] = Field(sa_column=Column(Text)) - wheelchair: Optional[str] = Field(sa_column=Column(Text)) - wheelchair_classified: Optional[str] = Field(sa_column=Column(Text)) - width: Optional[float] = Field(sa_column=Column(Float(53))) - s_imp: Optional[float] = Field(sa_column=Column(Float(53))) - rs_imp: Optional[float] = Field(sa_column=Column(Float(53))) - impedance_surface: Optional[float] = Field(sa_column=Column(Float(53))) - death_end: Optional[int] - geom: str = Field( - sa_column=Column( - Geometry(geometry_type="Linestring", srid="4326", spatial_index=False), - nullable=False, - ) - ) - - -class Edge(EdgeBase, table=True): - __tablename__ = "edge" - __table_args__ = {"schema": "basic"} - - length_m: float = 
Field(sa_column=Column(Float(53), nullable=False)) - length_3857: float = Field(sa_column=Column(Float(53), nullable=False)) - coordinates_3857: Optional[dict] = Field(sa_column=Column(JSON, nullable=False)) - source: int = Field(index=True, nullable=False, foreign_key="basic.node.id") - target: int = Field(index=True, nullable=False, foreign_key="basic.node.id") - edge_id: Optional[int] = Field(index=True, default=None, foreign_key="basic.edge.id") - scenario_id: Optional[int] = Field( - sa_column=Column( - Integer, ForeignKey("customer.scenario.id", ondelete="CASCADE"), index=True - ) - ) - - node_source: "Node" = Relationship( - sa_relationship_kwargs={"primaryjoin": "Edge.source==Node.id", "lazy": "joined"} - ) - node_target: "Node" = Relationship( - sa_relationship_kwargs={"primaryjoin": "Edge.target==Node.id", "lazy": "joined"} - ) - scenario: Optional["Scenario"] = Relationship(back_populates="edges") - - -Index("idx_edge_geom", Edge.__table__.c.geom, postgresql_using="gist") - - -class WayModified(EdgeBase, table=True): - __tablename__ = "way_modified" - __table_args__ = {"schema": "customer"} - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - way_type: Optional[str] = Field(sa_column=Column(Text)) - edit_type: Optional[str] = Field(sa_column=Column(Text)) - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - way_id: Optional[int] = Field(index=True, default=None) - scenario_id: Optional[int] = Field( - sa_column=Column( - Integer, ForeignKey("customer.scenario.id", ondelete="CASCADE"), index=True - ) - ) - outdated: Optional[bool] = Field(sa_column=Column(Boolean, default=False)) - scenario: Optional["Scenario"] = Relationship(back_populates="ways_modified") - - - -Index("idx_way_modified_geom", WayModified.__table__.c.geom, postgresql_using="gist") diff --git a/src/db/legacy/geostore.py b/src/db/legacy/geostore.py deleted file mode 100644 index 0f8b064..0000000 --- a/src/db/legacy/geostore.py +++ /dev/null @@ -1,32 +0,0 @@ -from typing import TYPE_CHECKING, List, Optional - -from sqlalchemy.dialects.postgresql import JSONB - -if TYPE_CHECKING: - from .study_area import StudyArea - -from sqlmodel import Column, Field, Integer, Relationship, SQLModel, Text, UniqueConstraint - -from src.resources.enums import GeostoreType - -from ._link_model import StudyAreaGeostore - - -class Geostore(SQLModel, table=True): - __tablename__ = "geostore" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - name: str = Field(sa_column=Column(Text), nullable=False) - type: GeostoreType = Field(sa_column=Column(Text), nullable=False) - url: str = Field(sa_column=Column(Text), nullable=False) - configuration: Optional[dict] = Field(sa_column=Column(JSONB)) - attribution: str = Field(sa_column=Column(Text), nullable=False) - thumbnail_url: str = Field(sa_column=Column(Text), nullable=False) - study_areas: List["StudyArea"] = Relationship( - back_populates="geostores", - link_model=StudyAreaGeostore - ) - - -UniqueConstraint(Geostore.__table__.c.name) diff --git a/src/db/legacy/isochrone.py b/src/db/legacy/isochrone.py deleted file mode 100644 index 57a60fd..0000000 --- a/src/db/legacy/isochrone.py +++ /dev/null @@ -1,51 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, Optional - -from sqlmodel import ( - Column, - DateTime, - Field, - Float, - ForeignKey, - Integer, - Relationship, - 
SQLModel, - Text, - text, -) - -if TYPE_CHECKING: - from .scenario import Scenario - from .user import User - - -class IsochroneCalculation(SQLModel, table=True): - __tablename__ = "isochrone_calculation" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - calculation_type: str = Field(sa_column=Column(Text, nullable=False)) - starting_point: str = Field(sa_column=Column(Text, nullable=False)) - routing_profile: str = Field(sa_column=Column(Text, nullable=False)) - speed: float = Field(sa_column=Column(Float(53), nullable=False)) - modus: str = Field(sa_column=Column(Text, nullable=False)) - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - parent_id: Optional[int] = Field( - sa_column=Column( - Integer, ForeignKey("customer.isochrone_calculation.id", ondelete="CASCADE") - ) - ) - scenario_id: Optional[int] = Field( - sa_column=Column(Integer, ForeignKey("customer.scenario.id", ondelete="CASCADE")) - ) - user_id: int = Field( - default=None, - sa_column=Column( - Integer, ForeignKey("customer.user.id", ondelete="CASCADE"), nullable=False - ), - ) - - scenario: Optional["Scenario"] = Relationship(back_populates="isochrone_calculations") - user: "User" = Relationship(back_populates="isochrone_calculations") diff --git a/src/db/legacy/layer_library.py b/src/db/legacy/layer_library.py deleted file mode 100644 index d69fc4c..0000000 --- a/src/db/legacy/layer_library.py +++ /dev/null @@ -1,93 +0,0 @@ -from typing import TYPE_CHECKING, List, Optional - -from sqlalchemy.dialects.postgresql import JSONB -from sqlmodel import ( - ARRAY, - Column, - Field, - ForeignKey, - Integer, - Relationship, - SQLModel, - Text, - UniqueConstraint, -) - -if TYPE_CHECKING: - pass - - -class StyleLibrary(SQLModel, table=True): - __tablename__ = "style_library" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - name: str = Field(sa_column=Column(Text(), nullable=False, index=True)) - style: dict = Field(sa_column=Column(JSONB)) - translation: Optional[dict] = Field(sa_column=Column(JSONB)) - layer_libraries: "LayerLibrary" = Relationship(back_populates="style_library") - - -UniqueConstraint(StyleLibrary.__table__.c.name) - - -class LayerLibraryBase(SQLModel): - name: str = Field(sa_column=Column(Text(), nullable=False, index=True)) - url: Optional[str] = Field(sa_column=Column(Text)) - legend_urls: Optional[List[str]] = Field(sa_column=Column(ARRAY(Text()))) - special_attribute: Optional[dict] = Field(sa_column=Column(JSONB)) - access_token: Optional[str] = Field(sa_column=Column(Text)) - type: str = Field(sa_column=Column(Text(), nullable=False, index=True)) - map_attribution: Optional[str] = Field(sa_column=Column(Text)) - date: Optional[str] = Field(sa_column=Column(Text)) - source: Optional[str] - date_1: Optional[str] = Field(sa_column=Column(Text)) - source_1: Optional[str] - style_library_name: Optional[str] - max_resolution: Optional[str] = Field(sa_column=Column(Text, nullable=True)) - min_resolution: Optional[str] = Field(sa_column=Column(Text, nullable=True)) - doc_url: Optional[str] = Field(sa_column=Column(Text, nullable=True)) - - -class LayerLibrary(LayerLibraryBase, table=True): - __tablename__ = "layer_library" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - source: 
Optional[str] = Field( - sa_column=Column( - Text, - ForeignKey("customer.layer_source.name", onupdate="CASCADE"), - nullable=True, - ) - ) - source_1: Optional[str] = Field( - sa_column=Column( - Text, - ForeignKey("customer.layer_source.name", onupdate="CASCADE"), - nullable=True, - ) - ) - style_library_name: Optional[str] = Field( - sa_column=Column( - Text, - ForeignKey("customer.style_library.name", onupdate="CASCADE"), - nullable=True, - ) - ) - - style_library: "StyleLibrary" = Relationship(back_populates="layer_libraries") - - -UniqueConstraint(LayerLibrary.__table__.c.name) - - -class LayerSource(SQLModel, table=True): - __tablename__ = "layer_source" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - name: str = Field(sa_column=Column(Text(), nullable=False, index=True)) - - -UniqueConstraint(LayerSource.__table__.c.name) diff --git a/src/db/legacy/node.py b/src/db/legacy/node.py deleted file mode 100644 index 422bfde..0000000 --- a/src/db/legacy/node.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import TYPE_CHECKING, Optional - -from geoalchemy2 import Geometry -from sqlmodel import ( - BigInteger, - Column, - Field, - ForeignKey, - Index, - Integer, - Relationship, - SQLModel, -) - -if TYPE_CHECKING: - from .scenario import Scenario - - -class Node(SQLModel, table=True): - __tablename__ = "node" - __table_args__ = {"schema": "basic"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True)) - osm_id: Optional[int] = Field(sa_column=Column(BigInteger())) - geom: str = Field( - sa_column=Column( - Geometry(geometry_type="Point", srid="4326", spatial_index=False), - nullable=False, - ) - ) - scenario_id: Optional[int] = Field( - sa_column=Column( - Integer, ForeignKey("customer.scenario.id", ondelete="CASCADE"), index=True - ) - ) - - scenario: Optional["Scenario"] = Relationship(back_populates="nodes") - - # TODO: Add edge_source and edge_node here.. 
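# These legacy geometry models all disable GeoAlchemy2's implicit spatial
# index (spatial_index=False) and declare a named, module-level GiST index
# instead, which keeps index names under explicit control. A minimal sketch
# of the pattern, assuming a hypothetical `Stop` table:
#
#     from typing import Optional
#     from geoalchemy2 import Geometry
#     from sqlmodel import Column, Field, Index, SQLModel
#
#     class Stop(SQLModel, table=True):
#         __tablename__ = "stop"
#         id: Optional[int] = Field(default=None, primary_key=True)
#         geom: str = Field(
#             sa_column=Column(
#                 Geometry(geometry_type="Point", srid="4326", spatial_index=False),
#                 nullable=False,
#             )
#         )
#
#     Index("idx_stop_geom", Stop.__table__.c.geom, postgresql_using="gist")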
- - -Index("idx_node_geom", Node.__table__.c.geom, postgresql_using="gist") diff --git a/src/db/legacy/opportunity_config.py b/src/db/legacy/opportunity_config.py deleted file mode 100644 index 6d81b5d..0000000 --- a/src/db/legacy/opportunity_config.py +++ /dev/null @@ -1,118 +0,0 @@ -from typing import TYPE_CHECKING, List, Optional - -from sqlmodel import ( - Column, - Field, - ForeignKey, - Integer, - Relationship, - SQLModel, - Text, - ARRAY, - UniqueConstraint, - Boolean, -) - -if TYPE_CHECKING: - from .user import User - from .study_area import StudyArea - - -class OpportunityGroup(SQLModel, table=True): - __tablename__ = "opportunity_group" - __table_args__ = {"schema": "basic"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - type: str = Field(sa_column=Column(Text, nullable=False)) - group: str = Field(sa_column=Column(Text, nullable=False)) - icon: str = Field(sa_column=Column(Text, nullable=False)) - color: List[str] = Field(sa_column=Column(ARRAY(Text()), nullable=False)) - - opportunity_default_configs: Optional[List["OpportunityDefaultConfig"]] = Relationship( - back_populates="opportunity_group" - ) - opportunity_study_area_configs: Optional[List["OpportunityStudyAreaConfig"]] = Relationship( - back_populates="opportunity_group" - ) - opportunity_user_configs: Optional[List["OpportunityUserConfig"]] = Relationship( - back_populates="opportunity_group" - ) - -UniqueConstraint(OpportunityGroup.__table__.c.group) - - -class OpportunityConfigBase(SQLModel): - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - category: str = Field(sa_column=Column(Text, nullable=False)) - icon: str = Field(sa_column=Column(Text, nullable=False)) - color: List[str] = Field(sa_column=Column(ARRAY(Text()), nullable=False)) - sensitivity: Optional[int] = Field(sa_column=Column(Integer)) - multiple_entrance: Optional[bool] = Field(sa_column=Column(Boolean)) - - -class OpportunityDefaultConfig(OpportunityConfigBase, table=True): - __tablename__ = "opportunity_default_config" - __table_args__ = {"schema": "basic"} - - opportunity_group_id: Optional[int] = Field( - sa_column=Column(Integer, ForeignKey("basic.opportunity_group.id"), nullable=False) - ) - opportunity_group: "OpportunityGroup" = Relationship( - back_populates="opportunity_default_configs" - ) - - -UniqueConstraint(OpportunityDefaultConfig.__table__.c.category) - - -class OpportunityStudyAreaConfig(OpportunityConfigBase, table=True): - __tablename__ = "opportunity_study_area_config" - __table_args__ = {"schema": "basic"} - - opportunity_group_id: Optional[int] = Field( - sa_column=Column(Integer, ForeignKey("basic.opportunity_group.id"), nullable=False) - ) - study_area_id: int = Field( - sa_column=Column(Integer, ForeignKey("basic.study_area.id"), nullable=False) - ) - is_active: bool = Field(sa_column=Column(Boolean, nullable=False)) - study_area: "StudyArea" = Relationship(back_populates="opportunity_study_area_configs") - - opportunity_group: "OpportunityGroup" = Relationship( - back_populates="opportunity_study_area_configs" - ) - - -UniqueConstraint( - OpportunityStudyAreaConfig.__table__.c.category, - OpportunityStudyAreaConfig.__table__.c.study_area_id, -) - - -class OpportunityUserConfig(OpportunityConfigBase, table=True): - __tablename__ = "opportunity_user_config" - __table_args__ = {"schema": "customer"} - - opportunity_group_id: Optional[int] = Field( - sa_column=Column(Integer, ForeignKey("basic.opportunity_group.id"), nullable=False) - ) 
- study_area_id: int = Field(sa_column=Column(Integer, ForeignKey("basic.study_area.id"))) - user_id: int = Field( - default=None, - sa_column=Column(Integer, ForeignKey("customer.user.id", ondelete="CASCADE")) - ) - data_upload_id: Optional[int] = Field( - sa_column=Column(Integer, ForeignKey("customer.data_upload.id", ondelete="CASCADE")) - ) - - study_area: "StudyArea" = Relationship(back_populates="opportunity_user_configs") - user: "User" = Relationship(back_populates="opportunity_user_configs") - - opportunity_group: "OpportunityGroup" = Relationship(back_populates="opportunity_user_configs") - - -UniqueConstraint( - OpportunityUserConfig.__table__.c.category, - OpportunityUserConfig.__table__.c.study_area_id, - OpportunityUserConfig.__table__.c.user_id, -) diff --git a/src/db/legacy/organization.py b/src/db/legacy/organization.py deleted file mode 100644 index 4599514..0000000 --- a/src/db/legacy/organization.py +++ /dev/null @@ -1,20 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, List, Optional - -from sqlmodel import Column, DateTime, Field, Relationship, SQLModel, Text, text, Integer - -if TYPE_CHECKING: - from .user import User - - -class Organization(SQLModel, table=True): - __tablename__ = "organization" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - name: Optional[str] = Field(sa_column=Column(Text, nullable=False)) - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - - users: List["User"] = Relationship(back_populates="organization") diff --git a/src/db/legacy/poi.py b/src/db/legacy/poi.py deleted file mode 100644 index 530b71f..0000000 --- a/src/db/legacy/poi.py +++ /dev/null @@ -1,98 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, Optional - -from geoalchemy2 import Geometry -from sqlmodel import ( - Column, - DateTime, - Field, - ForeignKey, - Index, - Integer, - Relationship, - SQLModel, - Text, - UniqueConstraint, - text, - Boolean -) -from sqlalchemy.dialects.postgresql import JSONB -if TYPE_CHECKING: - from .data_upload import DataUpload - from .scenario import Scenario - - -class PoiBase(SQLModel): - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - category: str = Field(sa_column=Column(Text(), nullable=False, index=True)) - name: Optional[str] = Field(sa_column=Column(Text)) - street: Optional[str] = Field(sa_column=Column(Text)) - housenumber: Optional[str] = Field(sa_column=Column(Text)) - zipcode: Optional[str] = Field(sa_column=Column(Text)) - opening_hours: Optional[str] = Field(sa_column=Column(Text)) - wheelchair: Optional[str] = Field(sa_column=Column(Text)) - tags: Optional[dict] = Field(sa_column=Column(JSONB)) - geom: str = Field( - sa_column=Column( - Geometry(geometry_type="Point", srid="4326", spatial_index=False), - nullable=False, - ) - ) - - -class Poi(PoiBase, table=True): - __tablename__ = "poi" - __table_args__ = {"schema": "basic"} - - uid: str = Field(sa_column=Column(Text, nullable=False, index=True)) - -Index("idx_poi_geom", Poi.__table__.c.geom, postgresql_using="gist") -UniqueConstraint(Poi.__table__.c.uid) - - -class PoiModified(PoiBase, table=True): - __tablename__ = "poi_modified" - __table_args__ = {"schema": "customer"} - uid: str = Field(sa_column=Column(Text, nullable=False, index=True)) - edit_type: str = Field(sa_column=Column(Text, nullable=False, index=True)) - creation_date: 
Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - scenario_id: Optional[int] = Field( - sa_column=Column( - Integer, ForeignKey("customer.scenario.id", ondelete="CASCADE"), nullable=False, index=True - ), - ) - data_upload_id: Optional[int] = Field( - sa_column=Column( - Integer, ForeignKey("customer.data_upload.id", ondelete="CASCADE"), index=True - ), - ) - outdated: Optional[bool] = Field(sa_column=Column(Boolean, default=False)) - scenario: Optional["Scenario"] = Relationship(back_populates="pois_modified") - - -Index("idx_poi_modified_geom", PoiModified.__table__.c.geom, postgresql_using="gist") - - -class PoiUser(PoiBase, table=True): - __tablename__ = "poi_user" - __table_args__ = {"schema": "customer"} - - uid: str = Field(sa_column=Column(Text, nullable=False, index=True)) - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - data_upload_id: int = Field( - sa_column=Column( - Integer, - ForeignKey("customer.data_upload.id", ondelete="CASCADE"), - index=True, - nullable=False, - ), - ) - - data_upload: Optional["DataUpload"] = Relationship(back_populates="pois_user") - - -Index("idx_poi_user_geom", PoiUser.__table__.c.geom, postgresql_using="gist") diff --git a/src/db/legacy/population.py b/src/db/legacy/population.py deleted file mode 100644 index fe3d092..0000000 --- a/src/db/legacy/population.py +++ /dev/null @@ -1,78 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, Optional - -from geoalchemy2 import Geometry -from sqlmodel import ( - Column, - DateTime, - Field, - Float, - ForeignKey, - Index, - Integer, - Relationship, - SQLModel, - text, -) -from sqlalchemy.dialects.postgresql import JSONB -if TYPE_CHECKING: - from .building import Building, BuildingModified - from .scenario import Scenario - from .study_area import SubStudyArea - - -class PopulationBase(SQLModel): - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - sub_study_area_id: int = Field(sa_column=Column(Integer, ForeignKey("basic.sub_study_area.id", ondelete="CASCADE"), index=True)) - population: Optional[float] = Field(sa_column=Column(Float(53))) - geom: str = Field( - sa_column=Column( - Geometry(geometry_type="Point", srid="4326", spatial_index=False), - nullable=False, - ) - ) - sub_study_area: Optional["SubStudyArea"] = Relationship(back_populates="populations") - -class Population(PopulationBase, table=True): - __tablename__ = "population" - __table_args__ = {"schema": "basic"} - - demography: Optional[dict] = Field(sa_column=Column(JSONB)) - building_id: Optional[int] = Field( - sa_column=Column(Integer, ForeignKey("basic.building.id", ondelete="CASCADE"), index=True), - ) - building: Optional["Building"] = Relationship(back_populates="populations") - - -Index("idx_population_geom", Population.__table__.c.geom, postgresql_using="gist") - - -class PopulationModified(PopulationBase, table=True): - __tablename__ = "population_modified" - __table_args__ = {"schema": "customer"} - - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - building_modified_id: int = Field( - sa_column=Column( - Integer, - ForeignKey("customer.building_modified.id", ondelete="CASCADE"), - index=True, - nullable=False, - ), - ) - - scenario_id: Optional[int] = Field( - sa_column=Column( - Integer, - ForeignKey("customer.scenario.id", ondelete="CASCADE"), - index=True, - 
nullable=False, - ), - ) - building_modified: "BuildingModified" = Relationship(back_populates="populations_modified") - scenario: Optional["Scenario"] = Relationship(back_populates="populations_modified") - - -Index("idx_population_modified_geom", PopulationModified.__table__.c.geom, postgresql_using="gist") diff --git a/src/db/legacy/role.py b/src/db/legacy/role.py deleted file mode 100644 index 1c8baaa..0000000 --- a/src/db/legacy/role.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import TYPE_CHECKING, List, Optional - -from sqlmodel import Column, Field, Integer, Relationship, SQLModel, Text - -if TYPE_CHECKING: - from .customization import Customization - from .user import User - -from ._link_model import UserRole - - -class Role(SQLModel, table=True): - __tablename__ = "role" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - name: str = Field(sa_column=Column(Text, nullable=False)) - - customizations: List["Customization"] = Relationship(back_populates="role") - users: List["User"] = Relationship(back_populates="roles", link_model=UserRole) diff --git a/src/db/legacy/static_layer.py b/src/db/legacy/static_layer.py deleted file mode 100644 index d952214..0000000 --- a/src/db/legacy/static_layer.py +++ /dev/null @@ -1,52 +0,0 @@ -from datetime import datetime -from tokenize import String -from typing import TYPE_CHECKING, Optional - -import sqlalchemy -from sqlalchemy.dialects import postgresql -from sqlmodel import ( - Column, - DateTime, - Field, - ForeignKey, - Integer, - Relationship, - SQLModel, - String, - text, -) - -if TYPE_CHECKING: - from .user import User - - -class StaticLayer(SQLModel, table=True): - - __tablename__ = "static_layer" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - creation_date: Optional[datetime] = Field( - sa_column=Column(DateTime, server_default=text("CURRENT_TIMESTAMP")) - ) - user_id: int = Field( - default=None, - sa_column=Column( - Integer, ForeignKey("customer.user.id", ondelete="CASCADE"), nullable=False - ) - ) - table_name: str = Field(sa_column=Column(String(63), nullable=False, unique=True)) - - user: "User" = Relationship(back_populates="static_layers") - - def data_frame_raw_sql(self, limit: int = 100, offset: int = 0) -> str: - """ - Raw sql to get data frame using geopands - """ - metadata_obj = sqlalchemy.MetaData() - table = sqlalchemy.Table(self.table_name, metadata_obj, schema="extra") - query = sqlalchemy.select(table, "*").limit(limit).offset(offset) - raw_query = query.compile( - dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True} - ) - return str(raw_query) diff --git a/src/db/legacy/study_area.py b/src/db/legacy/study_area.py deleted file mode 100644 index a5f69da..0000000 --- a/src/db/legacy/study_area.py +++ /dev/null @@ -1,97 +0,0 @@ -from typing import TYPE_CHECKING, List, Optional - -from geoalchemy2 import Geometry -from pydantic import validator -from sqlalchemy.dialects.postgresql import JSONB -from sqlmodel import ( - Column, - Field, - Float, - Index, - Integer, - Relationship, - SmallInteger, - SQLModel, - Text, -) - -from shapely.wkb import loads -from geoalchemy2.shape import to_shape - -if TYPE_CHECKING: - from .user import User - from .customization import UserCustomization - from .data_upload import DataUpload - from .opportunity_config import OpportunityStudyAreaConfig, OpportunityUserConfig - from .geostore import 
Geostore - -from ._link_model import UserStudyArea, StudyAreaGeostore -from ._pydantic_geometry import dump_geom - - -class StudyArea(SQLModel, table=True): - __tablename__ = "study_area" - __table_args__ = {"schema": "basic"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - name: str = Field(sa_column=Column(Text, nullable=False)) - population: int = Field(nullable=False) - setting: dict = Field(sa_column=Column(JSONB, nullable=False)) - geom: str = Field( - sa_column=Column( - Geometry(geometry_type="MultiPolygon", srid="4326", spatial_index=False), - nullable=False, - ) - ) - # Buffer geom by 1600 meters which is the flying bird distance for 20 minutes walking with a speed of 5km/h - buffer_geom_heatmap: str = Field( - sa_column=Column( - Geometry(geometry_type="MultiPolygon", srid="4326", spatial_index=False), - nullable=False, - ) - ) - sub_study_areas: List["SubStudyArea"] = Relationship(back_populates="study_area") - users: List["User"] = Relationship(back_populates="study_areas", link_model=UserStudyArea) - user_customizations: List["UserCustomization"] = Relationship(back_populates="study_areas") - data_uploads: List["DataUpload"] = Relationship(back_populates="study_area") - opportunity_study_area_configs: List["OpportunityStudyAreaConfig"] = Relationship( - back_populates="study_area" - ) - opportunity_user_configs: List["OpportunityUserConfig"] = Relationship( - back_populates="study_area" - ) - geostores: List["Geostore"] = Relationship( - back_populates="study_areas", link_model=StudyAreaGeostore - ) - _validate_geom = validator("geom", pre=True, allow_reuse=True)(dump_geom) - - @property - def shape_of_geom(self): - return loads(bytes.fromhex(to_shape(self.geom).wkb_hex)) - - -Index("idx_study_area_geom", StudyArea.__table__.c.geom, postgresql_using="gist") - - -class SubStudyArea(SQLModel, table=True): - __tablename__ = "sub_study_area" - __table_args__ = {"schema": "basic"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - name: str = Field(sa_column=Column(Text, nullable=False)) - population: int = Field(nullable=False) - default_building_levels: Optional[int] = Field(sa_column=Column(SmallInteger)) - default_roof_levels: Optional[int] = Field(sa_column=Column(SmallInteger)) - area: Optional[float] = Field(sa_column=Column(Float(53))) - geom: str = Field( - sa_column=Column( - Geometry(geometry_type="MultiPolygon", srid="4326", spatial_index=False), - nullable=False, - ) - ) - study_area_id: int = Field(foreign_key="basic.study_area.id", index=True, nullable=False) - - study_area: "StudyArea" = Relationship(back_populates="sub_study_areas") - - -Index("idx_sub_study_area_geom", SubStudyArea.__table__.c.geom, postgresql_using="gist") diff --git a/src/db/legacy/system.py b/src/db/legacy/system.py deleted file mode 100644 index 2c75411..0000000 --- a/src/db/legacy/system.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Optional - -from sqlmodel import ( - Column, - Field, - Integer, - SQLModel, - Text -) - -from sqlalchemy.dialects.postgresql import JSONB - - -class System(SQLModel, table=True): - __tablename__ = "system" - __table_args__ = {"schema": "customer"} - - id: Optional[int] = Field(sa_column=Column(Integer, primary_key=True, autoincrement=True)) - type: str = Field(sa_column=Column(Text, nullable=False)) - setting: dict = Field(sa_column=Column(JSONB, nullable=False)) diff --git a/src/db/models/role.py b/src/db/models/role.py index 6fdb395..d322460 100644 --- 
a/src/db/models/role.py +++ b/src/db/models/role.py @@ -31,8 +31,6 @@ class Role(DateTimeBase, table=True): Attributes: id (str): The unique identifier for the role. name (str): The name of the role. - permissions (List[Permission]): A list of permission objects associated with the role. This is a relation to the permission table. - users (List[User]): A list of user objects associated with the role. This is a relation to the """ __tablename__ = "role" diff --git a/src/endpoints/legacy/deps.py b/src/endpoints/legacy/deps.py deleted file mode 100644 index e0f3aa4..0000000 --- a/src/endpoints/legacy/deps.py +++ /dev/null @@ -1,116 +0,0 @@ -from typing import Generator - -from fastapi import Depends, HTTPException, status -from fastapi.security import OAuth2PasswordBearer -from jose import jwt -from motor.motor_asyncio import AsyncIOMotorClient -from pydantic import ValidationError -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud, schemas -from src.crud.crud_scenario import scenario as crud_scenario -from src.core import security -from src.core.config import settings -from src.db import models -from src.db.session import async_session, r5_mongo_db_client - -reusable_oauth2 = OAuth2PasswordBearer(tokenUrl=f"{settings.API_V1_STR}/login/access-token") - - -async def get_db() -> Generator: - async with async_session() as session: - yield session - - -async def get_r5_mongo_db() -> AsyncIOMotorClient: - return r5_mongo_db_client - - -async def get_current_user( - db: AsyncSession = Depends(get_db), token: str = Depends(reusable_oauth2) -) -> models.User: - try: - payload = jwt.decode(token, settings.API_SECRET_KEY, algorithms=[security.ALGORITHM]) - token_data = schemas.TokenPayload(**payload) - except (jwt.JWTError, ValidationError): - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Could not validate credentials", - ) - user = await crud.user.get(db, id=token_data.sub, extra_fields=[models.User.roles]) - if not user: - raise HTTPException(status_code=404, detail="User not found") - return user - - -def get_current_active_user( - current_user: models.User = Depends(get_current_user), -) -> models.User: - if not crud.user.is_active(current_user): - raise HTTPException(status_code=400, detail="Inactive user") - return current_user - - -def get_current_active_superuser( - current_user: models.User = Depends(get_current_user), -) -> models.User: - if not crud.user.is_superuser(current_user): - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="The user doesn't have enough privileges") - return current_user - - -async def check_user_owns_scenario( - db: AsyncSession, - scenario_id: int, - current_user: models.User, -) -> int: - - if scenario_id == 0 or scenario_id is None: - return 0 - - scenario = await crud_scenario.get_by_multi_keys( - db, keys={"id": scenario_id, "user_id": current_user.id} - ) - if scenario == []: - raise HTTPException(status_code=400, detail="Scenario not found") - - return scenario[0].id - - -async def check_user_owns_study_area( - db: AsyncSession, - study_area_id: int, - current_user: models.User -) -> int: - - #TODO: Write generic function to check user owns study area - - return study_area_id - - -def check_user_owns_data_uploads( - data_upload_ids: list[int], current_user: models.User -) -> list[int]: - - if data_upload_ids == [0] or data_upload_ids is None: - return [0] - - if set(data_upload_ids).issubset(set(current_user.active_data_upload_ids)) is False: - raise HTTPException(status_code=400, detail="Data 
upload not found") - - return data_upload_ids - - -async def check_user_owns_isochrone_calculation( - db: AsyncSession, - isochrone_calculation_id: int, - current_user: models.User, -) -> int: - - isochrone_calculation = await crud.isochrone_calculation.get_by_multi_keys( - db, keys={"id": isochrone_calculation_id, "user_id": current_user.id} - ) - if isochrone_calculation == []: - raise HTTPException(status_code=400, detail="Isochrone calculation not found") - - return isochrone_calculation[0].id diff --git a/src/endpoints/v1/__init__.py b/src/endpoints/v1/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/endpoints/v1/api.py b/src/endpoints/v1/api.py deleted file mode 100644 index 5a941d2..0000000 --- a/src/endpoints/v1/api.py +++ /dev/null @@ -1,75 +0,0 @@ -from fastapi import APIRouter - -from src.endpoints.v1 import ( - customizations, - data_preparation, - geostores, - indicators, - layer_library, - layers, - login, - opportunities, - opportunity_config, - organizations, - poi_aoi, - r5, - roles, - scenarios, - static_layers, - static_layers_extra, - study_area, - system, - upload, - users, - utils, - projects, -) - -api_router = APIRouter() -api_router.include_router(login.router, tags=["Login"]) -api_router.include_router(system.router, prefix="", tags=["Health Check"]) - -api_router.include_router(organizations.router, prefix="/organizations", tags=["Organizations"]) -api_router.include_router(roles.router, prefix="/roles", tags=["Roles"]) -api_router.include_router(users.router, prefix="/users", tags=["Users"]) -api_router.include_router(customizations.router, prefix="/customizations", tags=["Customizations"]) -api_router.include_router(utils.router, prefix="/utils", tags=["Utils"]) -api_router.include_router(upload.router, prefix="/custom-data", tags=["Custom Data"]) -api_router.include_router(indicators.router, prefix="/indicators", tags=["Indicators"]) -api_router.include_router(scenarios.router, prefix="/scenarios", tags=["Scenarios"]) -api_router.include_router(poi_aoi.router, prefix="/pois-aois", tags=["POIs and AOIs"]) -api_router.include_router( - static_layers.router, prefix="/read/table", tags=["Read Selected Tables"] -) -api_router.include_router( - static_layers_extra.router, prefix="/config/layers/vector", tags=["Manage extra layers"] -) -api_router.include_router(projects.router, prefix="/projects", tags=["Projects"]) - -# LAYER: Vector tile endpoints. 
-layer_tiles_prefix = "/layers/tiles" -layer_tiles = layers.VectorTilerFactory( - router_prefix=layer_tiles_prefix, - with_tables_metadata=True, - with_functions_metadata=False, - with_viewer=False, -) - -api_router.include_router(layer_tiles.router, prefix=layer_tiles_prefix, tags=["Layers"]) -api_router.include_router(r5.router, prefix="/r5", tags=["PT-R5"]) -api_router.include_router( - layer_library.styles_router, prefix="/config/layers/library/styles", tags=["Layer Library"] -) -api_router.include_router( - layer_library.router, prefix="/config/layers/library", tags=["Layer Library"] -) -api_router.include_router(study_area.router, prefix="/config/study-area", tags=["Layer Library"]) -api_router.include_router(geostores.router, prefix="/config/geostores", tags=["Geostores"]) -api_router.include_router( - opportunities.router, prefix="/config/opportunities", tags=["Opportunities"] -) -api_router.include_router(data_preparation.router, prefix="/config/data-preparation", tags=["Data Preparation"]) -api_router.include_router( - opportunity_config.router, prefix="/config/opportunity-study-area", tags=["Opportunities"] -) - diff --git a/src/endpoints/v1/customizations.py b/src/endpoints/v1/customizations.py deleted file mode 100644 index b870803..0000000 --- a/src/endpoints/v1/customizations.py +++ /dev/null @@ -1,275 +0,0 @@ -from typing import Any - -from fastapi import APIRouter, Body, Depends, HTTPException, Path -from fastapi.encoders import jsonable_encoder -from fastapi.responses import JSONResponse -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud, schemas -from src.crud.base import CRUDBase -from src.crud.crud_customization import dynamic_customization -from src.db import models -from src.db.models.legacy.config_validation import * -from src.endpoints.legacy import deps -from src.resources.enums import SettingToModify -from src.schemas.customization import request_examples - -router = APIRouter() - - -@router.get("/me", response_class=JSONResponse) -async def get_user_settings_me( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Get customization settings for user. - """ - settings = await dynamic_customization.build_main_setting_json( - db=db, current_user=current_user - ) - - return settings - - -@router.post("/user/insert/{setting_type}", response_class=JSONResponse) -async def insert_user_settings( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - setting_type: SettingToModify, - obj_in: Any = Body(..., examples=request_examples.get("user_customization_insert")), -) -> Any: - """ - Insert settings for POIs. 
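    When setting_type is poi_groups, the payload must validate against the
    PoiCategory schema; invalid payloads are rejected with a 400 before any
    setting is inserted.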
- """ - obj_dict = jsonable_encoder(obj_in) - if setting_type.value == "poi_groups": - if check_dict_schema(PoiCategory, obj_dict) is False: - raise HTTPException(status_code=400, detail="Invalid JSON-schema") - - await dynamic_customization.insert_opportunity_setting( - db=db, current_user=current_user, insert_settings=obj_dict - ) - - update_settings = await dynamic_customization.build_main_setting_json( - db=db, current_user=current_user - ) - return update_settings - - -@router.delete("/user/reset-style/{setting_type}/{category}", response_class=JSONResponse) -async def delete_user_settings( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - setting_type: SettingToModify, - category: str = Path(..., description="Category name", example="nursery"), -) -> Any: - """ - Reset styles for POIs - """ - if category not in await crud.dynamic_customization.get_all_default_poi_categories(db=db): - raise HTTPException(status_code=400, detail="Cannot reset custom POI category.") - - await dynamic_customization.delete_opportunity_setting( - db=db, current_user=current_user, setting_type=setting_type.value, category=category - ) - update_settings = await dynamic_customization.build_main_setting_json( - db=db, current_user=current_user - ) - return update_settings - - -@router.get("/{user_id}/{study_area_id}", response_class=JSONResponse) -async def get_user_settings( - *, - db: AsyncSession = Depends(deps.get_db), - study_area_id: int = None, - user_id: int = None, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Get customization settings for user. - """ - customizations = await crud.customization.get_multi(db) - settings = {} - for customization in customizations: - settings.update(customization.setting) - user_customizations = await CRUDBase(models.UserCustomization).get_by_key( - db, key="user_id", value=user_id - ) - study_area = await CRUDBase(models.StudyArea).get(db, id=study_area_id) - if study_area is not None and study_area.setting: - settings.update(study_area.setting) - - if user_customizations is not None: - for user_customization in user_customizations: - settings.update(user_customization.setting) - - return settings - - -# create user customization -@router.post("/{user_id}/{study_area_id}", response_class=JSONResponse) -async def update_user_settings( - *, - db: AsyncSession = Depends(deps.get_db), - user_id: int, - study_area_id: int, - current_user: models.User = Depends(deps.get_current_active_user), - user_customizations: Any = Body(..., example=schemas.customization.request_examples["create"]), -) -> Any: - """ - Create or Update customization settings for user. 
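    The request body maps customization types to their setting objects, e.g.
    {"poi_groups": {...}}; each key is matched against the customization table
    and the setting is created or updated for the given user and study area.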
- """ - is_superuser = crud.user.is_superuser(current_user) - if user_id != current_user.id and not is_superuser: - raise HTTPException( - status_code=400, detail="The user cannot update another user's settings" - ) - for user_customization_key in user_customizations: - # check if key exists in customization table - customization = await CRUDBase(models.Customization).get_by_key( - db, key="type", value=user_customization_key - ) - - if customization is not None: - customization = customization[0] - user_customization = await CRUDBase(models.UserCustomization).get_by_multi_keys( - db, keys={"user_id": user_id, "customization_id": customization.id} - ) - user_customization_in = models.UserCustomization( - setting=user_customizations[user_customization_key], - user_id=user_id, - customization_id=customization.id, - study_area_id=study_area_id, - ) - if user_customization is not None and len(user_customization) > 0: - del user_customization_in.id - del user_customization_in.creation_date - await CRUDBase(models.UserCustomization).update( - db, db_obj=user_customization[0], obj_in=user_customization_in - ) - else: - await CRUDBase(models.UserCustomization).create(db, obj_in=user_customization_in) - - return {"msg": "ok"} - - -# delete user customization -@router.delete("/{user_id}/{study_area_id}/{customization}", response_class=JSONResponse) -async def delete_user_setting( - *, - db: AsyncSession = Depends(deps.get_db), - user_id: int, - study_area_id: int, - customization: str, - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Delete customization for user. - """ - is_superuser = crud.user.is_superuser(current_user) - if user_id != current_user.id and not is_superuser: - raise HTTPException( - status_code=400, detail="The user cannot delete another user's settings" - ) - customization = await CRUDBase(models.Customization).get_by_key( - db, key="type", value=customization - ) - if not customization: - raise HTTPException(status_code=400, detail="Customization not found") - else: - customization = customization[0] - - user_customization = await CRUDBase(models.UserCustomization).get_by_multi_keys( - db, keys={"user_id": user_id, "customization_id": customization.id} - ) - - if user_customization is not None and len(user_customization) > 0: - await CRUDBase(models.UserCustomization).remove(db, id=user_customization[0].id) - return {"msg": "Customization deleted"} - - -@router.get("/base", response_class=JSONResponse) -async def list_base_customizations( - *, - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - ordering: str = None, - q: str = None, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - settings = await crud.customization.get_multi( - db, skip=skip, limit=limit, ordering=ordering, query=q - ) - - return settings - - -@router.get("/base/{id}", response_class=JSONResponse) -async def get_base_customization_by_id( - *, - db: AsyncSession = Depends(deps.get_db), - id: int, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - customization = await crud.customization.get(db, id=id) - if not customization: - raise HTTPException( - status_code=404, - detail="Customization not found.", - ) - - return customization - - -@router.post("/base", response_class=JSONResponse) -async def create_base_customization( - *, - db: AsyncSession = Depends(deps.get_db), - customization_in: models.CustomizationBase, - current_user: models.User = 
Depends(deps.get_current_active_superuser), -) -> Any: - customization = await crud.customization.create(db, obj_in=customization_in) - return customization - - -@router.put("/base/{id}", response_class=JSONResponse) -async def update_base_customization( - *, - db: AsyncSession = Depends(deps.get_db), - id: int, - customization_in: models.CustomizationBase, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - customization = await crud.customization.get(db, id=id) - if not customization: - raise HTTPException( - status_code=404, - detail="Customization not found.", - ) - customization = await crud.customization.update( - db, db_obj=customization, obj_in=customization_in - ) - return customization - - -@router.delete("/base/{id}", response_class=JSONResponse) -async def delete_base_customization( - *, - db: AsyncSession = Depends(deps.get_db), - id: int, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - customization = await crud.customization.get(db, id=id) - if not customization: - raise HTTPException( - status_code=404, - detail="Customization not found.", - ) - customization = await crud.customization.remove(db, id=id) - return customization diff --git a/src/endpoints/v1/data_preparation.py b/src/endpoints/v1/data_preparation.py deleted file mode 100644 index 4996559..0000000 --- a/src/endpoints/v1/data_preparation.py +++ /dev/null @@ -1,133 +0,0 @@ -import json -import linecache -import os -import tracemalloc - -from fastapi import APIRouter, Body, Depends -from fastapi.responses import JSONResponse -from sqlalchemy.ext.asyncio import AsyncSession - -from src.core.config import settings -from src.core.heatmap.heatmap_compute import ComputeHeatmap -from src.db import models -from src.endpoints.legacy import deps -from src.schemas import data_preparation as schemas -from src.workers import heatmap_active_mobility, heatmap_motorized_transport, method_connector - -router = APIRouter() - - -def display_top(snapshot, key_type="lineno", limit=3): - snapshot = snapshot.filter_traces( - ( - tracemalloc.Filter(False, ""), - tracemalloc.Filter(False, ""), - ) - ) - top_stats = snapshot.statistics(key_type) - - print("Top %s lines" % limit) - for index, stat in enumerate(top_stats[:limit], 1): - frame = stat.traceback[0] - # replace "/path/to/module/file.py" with "module/file.py" - filename = os.sep.join(frame.filename.split(os.sep)[-2:]) - print("#%s: %s:%s: %.1f KiB" % (index, filename, frame.lineno, stat.size / 1024)) - line = linecache.getline(frame.filename, frame.lineno).strip() - if line: - print(" %s" % line) - - other = top_stats[limit:] - if other: - size = sum(stat.size for stat in other) - print("%s other: %.1f KiB" % (len(other), size / 1024)) - total = sum(stat.size for stat in top_stats) - print("Total allocated size: %.1f KiB" % (total / 1024)) - - -@router.post("/bulk-ids") -async def get_bulk_ids_for_study_area( - *, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), - parameters: schemas.BulkIdParameters = Body(..., example=schemas.BulkIdParametersExample), -): - crud_compute_heatmap = ComputeHeatmap(current_super_user) - return await crud_compute_heatmap.get_bulk_ids(**parameters.dict()) - - -@router.post("/traveltime-matrices") -async def create_traveltime_matrices( - *, - current_super_user: models.User = Depends(deps.get_current_active_superuser), - parameters: schemas.TravelTimeMatrixParameters = Body( - ..., 
examples=schemas.examples["travel_time_matrix"] - ), -): - parameters = json.loads(parameters.json()) - parameters_serialized = parameters.copy() - current_super_user = json.loads(current_super_user.json()) - for bulk_id in parameters["bulk_id"]: - parameters_serialized["bulk_id"] = bulk_id - if settings.CELERY_BROKER_URL: - if parameters["isochrone_dto"]["mode"] != "transit": - heatmap_active_mobility.create_traveltime_matrices_sync.delay( - current_super_user, parameters_serialized - ) - else: - heatmap_motorized_transport.create_r5_traveltime_matrices_sync.delay( - current_super_user, parameters_serialized - ) - else: - await method_connector.create_traveltime_matrices_async( - current_super_user, parameters_serialized - ) - return JSONResponse("Ok") - - -@router.post("/opportunity-matrices") -async def create_opportunity_matrices( - *, - current_super_user: models.User = Depends(deps.get_current_active_superuser), - parameters: schemas.OpportunityMatrixParameters = Body( - ..., examples=schemas.examples["opportunity_matrix"] - ), -): - parameters = json.loads(parameters.json()) - parameters_serialized = parameters.copy() - current_super_user = json.loads(current_super_user.json()) - for bulk_id in parameters["bulk_id"]: - parameters_serialized["bulk_id"] = bulk_id - if settings.CELERY_BROKER_URL: - heatmap_active_mobility.create_opportunity_matrices_sync.delay( - current_super_user, parameters_serialized - ) - else: - await method_connector.create_opportunity_matrices_async( - current_super_user, parameters_serialized - ) - return JSONResponse("Ok") - - -@router.post("/connectivity-matrices") -async def create_connectivity_matrices( - *, - current_super_user: models.User = Depends(deps.get_current_active_superuser), - parameters: schemas.ConnectivityMatrixParameters = Body( - ..., example=schemas.ConnectivityMatrixExample - ), -): - parameters = json.loads(parameters.json()) - parameters_serialized = parameters.copy() - current_super_user = json.loads(current_super_user.json()) - for bulk_id in parameters["bulk_id"]: - parameters_serialized["bulk_id"] = bulk_id - if settings.CELERY_BROKER_URL: - heatmap_active_mobility.create_connectivity_matrices_sync.delay( - current_super_user, parameters_serialized - ) - else: - await method_connector.create_connectivity_matrices_async( - current_super_user, parameters_serialized - ) - - return JSONResponse("Ok") diff --git a/src/endpoints/v1/geostores.py b/src/endpoints/v1/geostores.py deleted file mode 100644 index 43178e5..0000000 --- a/src/endpoints/v1/geostores.py +++ /dev/null @@ -1,142 +0,0 @@ -from typing import List - -from fastapi import APIRouter, Depends, HTTPException, Query -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud, schemas -from src.db import models -from src.endpoints.legacy import deps - -router = APIRouter() - - -@router.get("", response_model=List[models.Geostore]) -async def list_geostores( - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - geostores = await crud.geostore.get_multi(db, skip=skip, limit=limit) - if not geostores: - raise HTTPException(status_code=404, detail="there is no (more) geostores.") - return geostores - - -@router.get("/{id:int}", response_model=models.Geostore) -async def read_geostore_by_id( - id: int, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - geostore = await 
crud.geostore.get(db, id=id) - if not geostore: - raise HTTPException(status_code=404, detail="geostore not found.") - return geostore - - -@router.post("", response_model=models.Geostore) -async def create_a_new_geostore( - geostore_in: schemas.CreateGeostore, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - geostore = await crud.geostore.create(db, obj_in=geostore_in) - return geostore - - -@router.put("/{id:int}", response_model=models.Geostore) -async def update_a_geostore( - id: int, - geostore_in: schemas.CreateGeostore, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - geostore_in_db = await crud.geostore.get(db, id=id) - if not geostore_in_db: - raise HTTPException(status_code=404, detail="geostore not found.") - - geostore = await crud.geostore.update(db, db_obj=geostore_in_db, obj_in=geostore_in) - return geostore - - -@router.delete("/") -async def delete_geostores( - id: List[int] = Query(default=None, gt=0), - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - return await crud.geostore.remove_multi(db, ids=id) - - -@router.get("/study_area/{study_area_id:int}", response_model=List[models.Geostore]) -async def list_study_area_geostores( - study_area_id: int, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - study_area = await crud.study_area.get( - db, id=study_area_id, extra_fields=[models.StudyArea.geostores] - ) - if not study_area: - raise HTTPException(status_code=404, detail="study area not found.") - - geostores = study_area.geostores - if not geostores: - raise HTTPException(status_code=404, detail="this study area has no geostores.") - return geostores - - -@router.post( - "/study_area/{study_area_id:int}/add/{geostore_id:int}", response_model=List[models.Geostore] -) -async def add_geostore_to_study_area( - study_area_id: int, - geostore_id: int, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - study_area = await crud.study_area.get( - db, id=study_area_id, extra_fields=[models.StudyArea.geostores] - ) - if not study_area: - raise HTTPException(status_code=404, detail="study area not found.") - - geostore_in = await crud.geostore.get(db, id=geostore_id) - if not geostore_in: - raise HTTPException(status_code=404, detail="geostore not found.") - - geostores = study_area.geostores - geostore_ids = [geostore.id for geostore in geostores] - if geostore_in.id not in geostore_ids: - await crud.study_area.add_geostore_to_study_area(db, study_area.id, geostore_in.id) - geostores.append(geostore_in) - - return geostores - - -@router.delete( - "/study_area/{study_area_id:int}/remove/{geostore_id:int}", - response_model=List[models.Geostore], -) -async def delete_geostore_to_study_area( - study_area_id: int, - geostore_id: int, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - study_area = await crud.study_area.get( - db, id=study_area_id, extra_fields=[models.StudyArea.geostores] - ) - if not study_area: - raise HTTPException(status_code=404, detail="study area not found.") - geostores = study_area.geostores - geostores = [geo for geo in geostores if geo.id != geostore_id] - study_area_geostore_relationships = 
await crud.study_area_geostore.get_by_multi_keys( - db, keys={"study_area_id": study_area_id, "geostore_id": geostore_id} - ) - - for relationship in study_area_geostore_relationships: - await crud.study_area_geostore.remove(db, id=relationship.id) - - return geostores diff --git a/src/endpoints/v1/indicators.py b/src/endpoints/v1/indicators.py deleted file mode 100644 index 7ada86e..0000000 --- a/src/endpoints/v1/indicators.py +++ /dev/null @@ -1,297 +0,0 @@ -import json -from typing import Any, Dict, Optional - -from fastapi import APIRouter, Body, Depends, HTTPException, Query, status -from fastapi.responses import StreamingResponse -from sqlalchemy.ext.asyncio.session import AsyncSession -from starlette.responses import JSONResponse - -from src import crud -from src.crud.base import CRUDBase -from src.core.config import settings -from src.db import models -from src.endpoints.legacy import deps -from src.resources.enums import ( - IsochroneExportType, - IndicatorResultsReturnType, -) -from src.schemas.heatmap import HeatmapSettings, ReturnTypeHeatmap -from src.schemas.heatmap import request_examples as heatmap_request_examples -from src.schemas.indicators import ( - CalculateOevGueteklassenParameters, - oev_gueteklasse_config_example, -) - -from src.schemas.legacy.isochrone import ( - IsochroneDTO, - IsochroneMultiCountPois, - request_examples, -) -from src.schemas.utils import validate_return_type -from src.utils import read_results -from src.workers.method_connector import ( - read_heatmap_async, - read_pt_oev_gueteklassen_async, - read_pt_station_count_async, -) -from src.workers.read_heatmap import ( - read_heatmap_task, - read_pt_oev_gueteklassen_task, - read_pt_station_count_task, -) -from src.workers.celery_app import celery_app -from celery.result import AsyncResult - -from src.workers.isochrone import task_calculate_isochrone - - -router = APIRouter() - - -@router.post("/isochrone") -async def calculate_isochrone( - *, - db: AsyncSession = Depends(deps.get_db), - isochrone_in: IsochroneDTO = Body(..., examples=request_examples["isochrone"]), - current_user: models.User = Depends(deps.get_current_active_user), -): - """ - Calculate isochrone indicator. - """ - if isochrone_in.scenario.id: - await deps.check_user_owns_scenario(db, isochrone_in.scenario.id, current_user) - - # TODO: Check to remove study area bounds from here. We can have it in the downstream function only. Where it is used. - study_area = await crud.user.get_active_study_area(db, current_user) - study_area_bounds = study_area["bounds"] - isochrone_in = json.loads(isochrone_in.json()) - current_user = json.loads(current_user.json()) - - if not settings.CELERY_BROKER_URL: - results = task_calculate_isochrone(isochrone_in, current_user, study_area_bounds) - return read_results(results) - - else: - task = task_calculate_isochrone.delay(isochrone_in, current_user, study_area_bounds) - return {"task_id": task.id} - - -@router.post("/isochrone/multi/count-pois", response_class=JSONResponse) -async def count_pois_multi_isochrones( - *, - db: AsyncSession = Depends(deps.get_db), - isochrone_in: IsochroneMultiCountPois = Body( - ..., examples=request_examples["pois_multi_isochrone_count_pois"] - ), - current_user: models.User = Depends(deps.get_current_active_user), -): - """ - Count Isochrone pois under study area. 
- """ - isochrone_in.scenario_id = await deps.check_user_owns_scenario( - db=db, scenario_id=isochrone_in.scenario_id, current_user=current_user - ) - isochrone_in.active_upload_ids = current_user.active_data_upload_ids - isochrone_in.user_id = current_user.id - cnt = await crud.isochrone.count_opportunity(db=db, obj_in=isochrone_in) - return cnt - - -@router.post("/isochrone/export", response_class=StreamingResponse) -async def export_isochrones( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - geojson: Dict = Body(..., examples=request_examples["to_export"]), - return_type: IsochroneExportType = Query( - description="Return type of the response", default=IsochroneExportType.CSV - ), -) -> Any: - """ - Export isochrones from GeoJSON data. - """ - - result = { - "data": {"geojson": geojson}, - "return_type": return_type.value, - "hexlified": False, - "data_source": "isochrone", - } - - return read_results(result) - - -@router.post("/heatmap") -async def calculate_heatmap( - *, - current_user: models.User = Depends(deps.get_current_active_user), - heatmap_settings: HeatmapSettings = Body(..., examples=heatmap_request_examples), -): - """ - Calculate a heatmap for a given set of parameters. - """ - current_user = json.loads(current_user.json()) - #heatmap_settings = json.loads(heatmap_settings.json()) - if settings.CELERY_BROKER_URL: - task = read_heatmap_task.delay( - current_user=current_user, - heatmap_settings=heatmap_settings, - ) - return {"task_id": task.id} - - else: - results = await read_heatmap_async(current_user=current_user, heatmap_settings=heatmap_settings) - return read_results(results) - - -@router.get("/pt-station-count") -async def count_pt_service_stations( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - start_time: Optional[int] = Query( - description="Start time in seconds since midnight (Default: 07:00)", - default=25200, - ge=0, - le=86400, - ), - end_time: Optional[int] = Query( - description="End time in seconds since midnight (Default: 09:00)", - default=32400, - ge=0, - le=86400, - ), - weekday: Optional[int] = Query( - description="Weekday (1 = Monday, 7 = Sunday) (Default: Monday)", - default=1, - ge=1, - le=7, - ), - study_area_id: Optional[int] = Query( - default=None, description="Study area id (Default: User active study area)" - ), - return_type: ReturnTypeHeatmap = Query( - default=ReturnTypeHeatmap.GEOJSON, description="Return type of the response" - ), -): - """ - Return the number of trips for every route type on every station given a time period and weekday. 
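    With the defaults (start_time=25200, end_time=32400, weekday=1) this counts
    trips between 07:00 and 09:00 on a Monday, since 25200 s / 3600 = 7 h and
    32400 s / 3600 = 9 h.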
- """ - if start_time >= end_time: - raise HTTPException(status_code=422, detail="Start time must be before end time") - - is_superuser = crud.user.is_superuser(current_user) - if study_area_id is not None and not is_superuser: - owns_study_area = await CRUDBase(models.UserStudyArea).get_by_multi_keys( - db, keys={"user_id": current_user.id, "study_area_id": study_area_id} - ) - if owns_study_area == []: - raise HTTPException( - status_code=400, - detail="The user doesn't own the study area or user doesn't have enough privileges", - ) - else: - study_area_id = study_area_id or current_user.active_study_area_id - - current_user = json.loads(current_user.json()) - payload = { - "start_time": start_time, - "end_time": end_time, - "weekday": weekday, - "study_area_id": study_area_id, - } - - if settings.CELERY_BROKER_URL: - task = read_pt_station_count_task.delay( - current_user=current_user, - payload=payload, - return_type=return_type.value, - ) - else: - results = await read_pt_station_count_async( - current_user=current_user, payload=payload, return_type=return_type.value - ) - return read_results(results) - return {"task_id": task.id} - - -@router.post("/pt-oev-gueteklassen") -async def calculate_oev_gueteklassen( - *, - current_user: models.User = Depends(deps.get_current_active_user), - params: CalculateOevGueteklassenParameters = Body(..., example=oev_gueteklasse_config_example), - return_type: ReturnTypeHeatmap = Query( - default=ReturnTypeHeatmap.GEOJSON, description="Return type of the response" - ), -): - """ - ÖV-Güteklassen (The public transport quality classes) is an indicator for access to public transport. - The indicator makes it possible to identify locations which, thanks to their good access to public transport, have great potential as focal points for development. - The calculation in an automated process from the data in the electronic timetable (GTFS). 
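    In broad terms, the average service frequency per station during the chosen
    time window is combined with the transport mode to derive a station
    category, and distance buffers around the stations then yield the quality
    classes.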
- """ - if params.start_time >= params.end_time: - raise HTTPException(status_code=422, detail="Start time must be before end time") - - is_superuser = crud.user.is_superuser(current_user) - - if is_superuser and params.study_area_ids is not None: - study_area_ids = params.study_area_ids - elif not is_superuser and params.study_area_ids and len(params.study_area_ids) > 0: - return HTTPException( - status_code=400, - detail="The user doesn't have enough privileges to calculate the indicator for other study areas", - ) - else: - study_area_ids = [current_user.active_study_area_id] - - current_user = json.loads(current_user.json()) - payload = json.loads(params.json()) - payload["study_area_ids"] = study_area_ids - if settings.CELERY_BROKER_URL: - task = read_pt_oev_gueteklassen_task.delay( - current_user=current_user, - payload=payload, - return_type=return_type.value, - ) - else: - results = await read_pt_oev_gueteklassen_async( - current_user=current_user, payload=payload, return_type=return_type.value - ) - return read_results(results) - return {"task_id": task.id} - - -@router.get("/result/{task_id}") -async def get_indicators_result( - task_id: str, - current_user: models.User = Depends(deps.get_current_active_user), - return_type: IndicatorResultsReturnType = Query( - ..., description="Return type of the response" - ), -): - """Fetch result for given task_id""" - - result = AsyncResult(task_id, app=celery_app) - if result.ready(): - try: - result = result.get() - except Exception: - raise HTTPException(status_code=500, detail="Task failed") - - try: - validate_return_type(result, return_type.value) - except ValueError as e: - raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(e)) - - except HTTPException as e: - raise e - return read_results(result, return_type.value) - - elif result.failed(): - raise HTTPException(status_code=500, detail="Task failed") - else: - content = { - "task-status": result.status, - "details": "Task is still running, please try again later", - } - return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content=content) diff --git a/src/endpoints/v1/isochrones.py b/src/endpoints/v1/isochrones.py deleted file mode 100644 index 74361ee..0000000 --- a/src/endpoints/v1/isochrones.py +++ /dev/null @@ -1,113 +0,0 @@ -from typing import Any, Dict - -from fastapi import APIRouter, Body, Depends, HTTPException, Query, Response, status -from fastapi.responses import StreamingResponse -from sqlalchemy.ext.asyncio.session import AsyncSession -from starlette.responses import JSONResponse -import json -import binascii -from src import crud -from src.workers.isochrone import task_calculate_isochrone -from src.db import models -from src.endpoints.legacy import deps -from src.resources.enums import IsochroneExportType -from src.schemas.legacy.isochrone import ( - IsochroneDTO, - IsochroneMultiCountPois, - request_examples, -) - -router = APIRouter() - - -@router.post("") -async def calculate_isochrone( - *, - db: AsyncSession = Depends(deps.get_db), - isochrone_in: IsochroneDTO = Body(..., examples=request_examples["isochrone"]), - current_user: models.User = Depends(deps.get_current_active_user), -): - """ - Calculate isochrone. 
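    The calculation is dispatched as a Celery task and the task id is returned;
    the result can be polled from the task endpoint below.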
- """ - if isochrone_in.scenario.id: - await deps.check_user_owns_scenario(db, isochrone_in.scenario.id, current_user) - - study_area = await crud.user.get_active_study_area(db, current_user) - study_area_bounds = study_area["bounds"] - isochrone_in = json.loads(isochrone_in.json()) - current_user = json.loads(current_user.json()) - - task = task_calculate_isochrone.delay(isochrone_in, current_user, study_area_bounds) - return {"task_id": task.id} - - -@router.get("/task/{task_id}") -async def get_task( - task_id: str, - current_user: models.User = Depends(deps.get_current_active_user), -): - task = task_calculate_isochrone.AsyncResult(task_id) - if task.ready(): - try: - result = task.get() - response = Response( - bytes(binascii.unhexlify(bytes(result, "utf-8"))), - media_type="application/octet-stream", - ) - return response - except Exception: - raise HTTPException(status_code=500, detail="Task failed") - - elif task.failed(): - raise HTTPException(status_code=500, detail="Task failed") - else: - content = { - "task-status": task.status, - "details": "Task is still running, please try again later", - } - return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content=content) - - -@router.post("/multi/count-pois", response_class=JSONResponse) -async def count_pois_multi_isochrones( - *, - db: AsyncSession = Depends(deps.get_db), - isochrone_in: IsochroneMultiCountPois = Body( - ..., examples=request_examples["pois_multi_isochrone_count_pois"] - ), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Count pois under study area. - """ - isochrone_in.scenario_id = await deps.check_user_owns_scenario( - db=db, scenario_id=isochrone_in.scenario_id, current_user=current_user - ) - isochrone_in.active_upload_ids = current_user.active_data_upload_ids - isochrone_in.user_id = current_user.id - cnt = await crud.isochrone.count_opportunity(db=db, obj_in=isochrone_in) - return cnt - - -@router.post("/export", response_class=StreamingResponse) -async def export_isochrones( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - geojson: Dict = Body(..., examples=request_examples["to_export"]), - return_type: IsochroneExportType = Query( - description="Return type of the response", default=IsochroneExportType.geojson - ), -) -> Any: - """ - Export isochrones from GeoJSON data. 
- """ - - file_response = await crud.isochrone.export_isochrone( - db=db, - current_user=current_user, - return_type=return_type.value, - geojson_dictionary=geojson, - ) - return file_response diff --git a/src/endpoints/v1/layer_library.py b/src/endpoints/v1/layer_library.py deleted file mode 100644 index 9f1f792..0000000 --- a/src/endpoints/v1/layer_library.py +++ /dev/null @@ -1,134 +0,0 @@ -from typing import List - -from fastapi import APIRouter, Depends, HTTPException, Query -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud, schemas -from src.db import models -from src.endpoints.legacy import deps - -router = APIRouter() - - -@router.get("", response_model=List[models.LayerLibrary]) -async def list_layer_libraries( - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - current_user: models.User = Depends(deps.get_current_active_superuser), -): - layers = await crud.layer_library.get_multi(db, skip=skip, limit=limit) - if not layers: - raise HTTPException(status_code=404, detail="there is no (more) layer libraries.") - return layers - - -@router.get("/{name}", response_model=models.LayerLibrary) -async def read_layer_library_by_name( - name: str, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - layer = await crud.layer_library.get_by_key(db, key="name", value=name) - if layer: - layer = layer[0] - else: - raise HTTPException(status_code=404, detail="layer not found.") - return layer - - -@router.post("", response_model=models.LayerLibrary) -async def create_a_new_layer_library( - layer_in: schemas.CreateLayerLibrary, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - layer = await crud.layer_library.create(db, obj_in=layer_in) - return layer - - -@router.put("/{name}", response_model=models.LayerLibrary) -async def update_a_layer_library( - name: str, - layer_in: schemas.CreateLayerLibrary, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - layer_in_db = await crud.layer_library.get_by_key(db, key="name", value=name) - if not layer_in_db: - raise HTTPException(status_code=404, detail="layer library not found.") - - layer = await crud.layer_library.update(db, db_obj=layer_in_db[0], obj_in=layer_in) - return layer - - -@router.delete("/") -async def delete_layer_libraries( - name: List[str] = Query(default=None), - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - return await crud.layer_library.remove_multi_by_key(db, key="name", values=name) - - -styles_router = APIRouter() - - -@styles_router.get("", response_model=List[models.StyleLibrary]) -async def list_styles( - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - current_user: models.User = Depends(deps.get_current_active_user), -): - styles = await crud.style_library.get_multi(db, skip=skip, limit=limit) - if not styles: - raise HTTPException(status_code=404, detail="there is no (more) style libraries.") - return styles - - -@styles_router.get("/{name}", response_model=models.StyleLibrary) -async def read_style_by_name( - name: str, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -): - style = await crud.style_library.get_by_key(db, key="name", value=name) - if style: - style = style[0] - else: - raise 
HTTPException(status_code=404, detail="style not found.") - return style - - -@styles_router.post("", response_model=models.StyleLibrary) -async def create_style( - style_in: schemas.CreateStyleLibrary, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - style = await crud.style_library.create(db, obj_in=style_in) - return style - - -@styles_router.put("/{name}", response_model=models.StyleLibrary) -async def update_style( - name: str, - style_in: schemas.CreateStyleLibrary, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - style_in_db = await crud.style_library.get_by_key(db, key="name", value=name) - if not style_in_db: - raise HTTPException(status_code=404, detail="style library not found.") - style = await crud.style_library.update(db, db_obj=style_in_db[0], obj_in=style_in) - return style - - -@styles_router.delete("/") -async def delete_style_libraries( - name: List[str] = Query(default=None), - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - return await crud.style_library.remove_multi_by_key(db, key="name", values=name) diff --git a/src/endpoints/v1/layers.py b/src/endpoints/v1/layers.py deleted file mode 100644 index 9167d35..0000000 --- a/src/endpoints/v1/layers.py +++ /dev/null @@ -1,430 +0,0 @@ -# MIT License - -# Copyright (c) 2020 Development Seed -# Copyright (c) 2021 Plan4Better - -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. - -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
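A rough sketch of consuming the tile route this (deleted) factory exposes at
/{layer}/{z}/{x}/{y}.pbf; the host, route prefix, z/x/y values, and the
third-party mapbox-vector-tile decoder are assumptions, not part of this repo:

    import requests
    import mapbox_vector_tile  # assumption: pip install mapbox-vector-tile

    # "building" is aliased to basic.building by LayerParams below.
    url = "https://example.com/api/v1/layers/building/14/8712/5687.pbf"
    pbf = requests.get(url, headers={"Authorization": "Bearer <token>"}).content

    # Decode the protobuf payload into per-layer feature dicts.
    print(list(mapbox_vector_tile.decode(pbf)))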
- -from dataclasses import dataclass, field -from enum import Enum -from pathlib import Path as directoryPath -from typing import Any, Callable, Dict, List, Optional, Type -from urllib.parse import urlencode - -from fastapi import APIRouter, Depends, HTTPException, Path, Query -from morecantile import Tile, TileMatrixSet, tms -from sqlalchemy.ext.asyncio.session import AsyncSession -from starlette.requests import Request -from starlette.responses import HTMLResponse, Response -from starlette.routing import NoMatchFound -from starlette.templating import Jinja2Templates - -from src.core.config import settings -from src.crud.legacy.crud_layer import layer as crud_layer -from src.db import models -from src.endpoints.legacy import deps -from src.resources import tms as custom_tms -from src.resources.enums import MimeTypes -from src.schemas.layer import ( - ExternalVector, - TileMatrixSetList, - VectorTileFunction, - VectorTileTable, -) -from src.schemas.layer import registry as FunctionRegistry -from src.schemas.mapbox import TileJSON - -try: - pass # type: ignore -except ImportError: - # Try backported to PY<39 `importlib_resources`. - pass # type: ignore - -templates = Jinja2Templates(directory=directoryPath(settings.LAYER_TEMPLATES_DIR)) # type: ignore - - -TILE_RESPONSE_PARAMS: Dict[str, Any] = { - "responses": {200: {"content": {"application/x-protobuf": {}}}}, - "response_class": Response, -} - -# Register Custom TMS -tms = tms.register([custom_tms.EPSG3413, custom_tms.EPSG6933]) - -TileMatrixSetNames = Enum( # type: ignore - "TileMatrixSetNames", [(a, a) for a in sorted(tms.list())] -) - - -def TileParams( - z: int = Path(..., ge=0, le=30, description="Tiles's zoom level"), - x: int = Path(..., description="Tiles's column"), - y: int = Path(..., description="Tiles's row"), -) -> Tile: - """Tile parameters.""" - return Tile(x, y, z) - - -def TileMatrixSetParams( - TileMatrixSetId: TileMatrixSetNames = Query( - TileMatrixSetNames.WebMercatorQuad, # type: ignore - description="TileMatrixSet Name (default: 'WebMercatorQuad')", - ), -) -> TileMatrixSet: - """TileMatrixSet parameters.""" - return tms.get(TileMatrixSetId.name) - - -def LayerParams( - request: Request, - layer: str = Path(..., description="Layer Name"), -) -> ExternalVector: - """Return Layer Object.""" - - if layer == "building": - layer = "basic.building" - else: - layer = "extra." 
+ layer - - for r in request.app.state.table_catalog: - if r["id"] == layer: - return VectorTileTable(**r) - - raise HTTPException(status_code=404, detail=f"Table/Function '{layer}' not found.") - - -@dataclass -class VectorTilerFactory: - """VectorTiler Factory.""" - - # FastAPI router - router: APIRouter = field(default_factory=APIRouter) - - # Enum of supported TMS - supported_tms: Type[TileMatrixSetNames] = TileMatrixSetNames - # TileMatrixSet dependency - tms_dependency: Callable[..., TileMatrixSet] = TileMatrixSetParams - - # Table/Function dependency - layer_dependency: Callable[..., ExternalVector] = LayerParams - - with_tables_metadata: bool = False - with_functions_metadata: bool = False - with_viewer: bool = False - # Router Prefix is needed to find the path for routes when prefixed - # e.g if you mount the route with `/foo` prefix, router_prefix foo is injected - router_prefix: str = "" - - def __post_init__(self): - """Post Init: register route and configure specific options.""" - self.register_routes() - self.router_prefix = settings.API_V2_STR + self.router_prefix - - def register_routes(self): - """Register Routes.""" - self.register_tiles() - self.register_tiles_matrix_sets() - - if self.with_tables_metadata: - self.register_tables_metadata() - - if self.with_functions_metadata: - self.register_functions_metadata() - - if self.with_viewer: - self.register_viewer() - - def url_for(self, request: Request, name: str, **path_params: Any) -> str: - """Return full url (with prefix) for a specific endpoint.""" - url_path = self.router.url_path_for(name, **path_params) - base_url = str(request.base_url) - if self.router_prefix: - base_url += self.router_prefix.lstrip("/") - return url_path.make_absolute_url(base_url=base_url) - - def register_tiles(self): - """Register /tiles endpoints.""" - - @self.router.get(r"/{layer}/{z}/{x}/{y}.pbf", **TILE_RESPONSE_PARAMS) - @self.router.get( - r"/{TileMatrixSetId}/{layer}/{z}/{x}/{y}.pbf", **TILE_RESPONSE_PARAMS - ) - async def tile( - *, - db: AsyncSession = Depends(deps.get_db), - request: Request, - tile: Tile = Depends(TileParams), - tms: TileMatrixSet = Depends(self.tms_dependency), - layer=Depends(self.layer_dependency), - current_user: models.User = Depends(deps.get_current_active_user), - ): - """Return vector tile.""" - qs_key_to_remove = ["tilematrixsetid"] - kwargs = { - key: value - for (key, value) in request.query_params._list - if key.lower() not in qs_key_to_remove - } - - content = await crud_layer.tile_from_table(db, tile, tms, layer, **kwargs) - return Response(bytes(content), media_type=MimeTypes.pbf.value) - - @self.router.get( - r"/{layer}/tilejson.json", - response_model=TileJSON, - responses={200: {"description": "Return a tilejson"}}, - response_model_exclude_none=True, - ) - @self.router.get( - r"/{TileMatrixSetId}/{layer}/tilejson.json", - response_model=TileJSON, - responses={200: {"description": "Return a tilejson"}}, - response_model_exclude_none=True, - ) - async def tilejson( - request: Request, - layer=Depends(self.layer_dependency), - tms: TileMatrixSet = Depends(self.tms_dependency), - minzoom: Optional[int] = Query( - None, description="Overwrite default minzoom." - ), - maxzoom: Optional[int] = Query( - None, description="Overwrite default maxzoom." 
- ), - current_user: models.User = Depends(deps.get_current_active_user), - ): - """Return TileJSON document.""" - path_params: Dict[str, Any] = { - "TileMatrixSetId": tms.identifier, - "layer": layer.id, - "z": "{z}", - "x": "{x}", - "y": "{y}", - } - tile_endpoint = self.url_for(request, "tile", **path_params) - qs_key_to_remove = ["tilematrixsetid", "minzoom", "maxzoom"] - query_params = { - key: value - for (key, value) in request.query_params._list - if key.lower() not in qs_key_to_remove - } - - if query_params: - tile_endpoint += f"?{urlencode(query_params)}" - - minzoom = minzoom if minzoom is not None else (layer.minzoom or tms.minzoom) - maxzoom = maxzoom if maxzoom is not None else (layer.maxzoom or tms.maxzoom) - - return { - "minzoom": minzoom, - "maxzoom": maxzoom, - "name": layer.id, - "bounds": layer.bounds, - "tiles": [tile_endpoint], - } - - def register_tiles_matrix_sets(self): - @self.router.get( - r"/tileMatrixSets", - response_model=TileMatrixSetList, - response_model_exclude_none=True, - ) - async def TileMatrixSet_list( - request: Request, - current_user: models.User = Depends(deps.get_current_active_user), - ): - """ - Return list of supported TileMatrixSets. - - Specs: http://docs.opengeospatial.org/per/19-069.html#_tilematrixsets - """ - return { - "tileMatrixSets": [ - { - "id": tms.name, - "title": tms.name, - "links": [ - { - "href": self.url_for( - request, - "TileMatrixSet_info", - TileMatrixSetId=tms.name, - ), - "rel": "item", - "type": "application/json", - } - ], - } - for tms in self.supported_tms - ] - } - - @self.router.get( - r"/tileMatrixSets/{TileMatrixSetId}", - response_model=TileMatrixSet, - response_model_exclude_none=True, - ) - async def TileMatrixSet_info( - tms: TileMatrixSet = Depends(self.tms_dependency), - current_user: models.User = Depends(deps.get_current_active_user), - ): - """Return TileMatrixSet JSON document.""" - return tms - - def register_tables_metadata(self): - """Register metadata endpoints.""" - - @self.router.get( - r"/tables.json", - response_model=List[VectorTileTable], - response_model_exclude_none=True, - ) - async def tables_index( - request: Request, - current_user: models.User = Depends(deps.get_current_active_user), - ): - """Index of tables.""" - - def _get_tiles_url(id) -> str: - try: - return self.url_for( - request, "tile", layer=id, z="{z}", x="{x}", y="{y}" - ) - except NoMatchFound: - return None - - return [ - VectorTileTable(**r, tileurl=_get_tiles_url(r["id"])) - for r in request.app.state.table_catalog - ] - - @self.router.get( - r"/table/{layer}.json", - response_model=VectorTileTable, - responses={200: {"description": "Return table metadata"}}, - response_model_exclude_none=True, - ) - async def table_metadata( - request: Request, - layer=Depends(self.layer_dependency), - current_user: models.User = Depends(deps.get_current_active_user), - ): - """Return table metadata.""" - - def _get_tiles_url(id) -> str: - try: - return self.url_for( - request, "tile", layer=id, z="{z}", x="{x}", y="{y}" - ) - except NoMatchFound: - return None - - layer.tileurl = _get_tiles_url(layer.id) - return layer - - def register_functions_metadata(self): # noqa - """Register function metadata endpoints.""" - - @self.router.get( - r"/functions.json", - response_model=List[VectorTileFunction], - response_model_exclude_none=True, - response_model_exclude={"sql"}, - ) - async def functions_index(request: Request): - """Index of functions.""" - - def _get_tiles_url(id) -> str: - try: - return self.url_for( - request, "tile", 
layer=id, z="{z}", x="{x}", y="{y}" - ) - except NoMatchFound: - return None - - return [ - VectorTileFunction( - **func.dict(exclude_none=True), tileurl=_get_tiles_url(id) - ) - for id, func in FunctionRegistry.funcs.items() - ] - - @self.router.get( - r"/function/{layer}.json", - response_model=VectorTileFunction, - responses={200: {"description": "Return Function metadata"}}, - response_model_exclude_none=True, - response_model_exclude={"sql"}, - ) - async def function_metadata( - request: Request, - layer=Depends(self.layer_dependency), - current_user: models.User = Depends(deps.get_current_active_user), - ): - """Return table metadata.""" - - def _get_tiles_url(id) -> str: - try: - return self.url_for( - request, "tile", layer=id, z="{z}", x="{x}", y="{y}" - ) - except NoMatchFound: - return None - - layer.tileurl = _get_tiles_url(layer.id) - return layer - - def register_viewer(self): - """Register viewer.""" - - @self.router.get( - r"/list", - response_class=HTMLResponse, - ) - async def list_layers( - request: Request, - current_user: models.User = Depends(deps.get_current_active_user), - ): - """Display layer list.""" - return templates.TemplateResponse( - name="index.html", - context={"index": request.app.state.table_catalog, "request": request}, - media_type="text/html", - ) - - @self.router.get( - r"/{layer}/viewer", - response_class=HTMLResponse, - ) - async def demo( - request: Request, - layer=Depends(LayerParams), - current_user: models.User = Depends(deps.get_current_active_user), - ): - """Demo for each table.""" - tile_url = self.url_for(request, "tilejson", layer=layer.id) - return templates.TemplateResponse( - name="viewer.html", - context={ - "endpoint": tile_url, - "request": request, - "bounds": layer.bounds, - }, - media_type="text/html", - ) diff --git a/src/endpoints/v1/login.py b/src/endpoints/v1/login.py deleted file mode 100644 index e78d481..0000000 --- a/src/endpoints/v1/login.py +++ /dev/null @@ -1,105 +0,0 @@ -from datetime import timedelta -from typing import Any - -from fastapi import APIRouter, Body, Depends, HTTPException -from fastapi.security import OAuth2PasswordRequestForm -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud, schemas -from src.core import security -from src.core.config import settings -from src.core.security import get_password_hash -from src.db import models -from src.endpoints.legacy import deps -from src.utils import generate_token, send_email, verify_token - -router = APIRouter() - - -@router.post("/login/access-token", response_model=schemas.Token) -async def login_access_token( - db: AsyncSession = Depends(deps.get_db), - form_data: OAuth2PasswordRequestForm = Depends(), -) -> Any: - """ - OAuth2 compatible token login, get an access token for future requests - """ - user = await crud.user.authenticate(db, email=form_data.username, password=form_data.password) - if not user: - raise HTTPException(status_code=400, detail="Incorrect email or password") - elif not crud.user.is_active(user): - raise HTTPException(status_code=400, detail="Inactive user") - access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) - return { - "access_token": security.create_access_token( - user.id, expires_delta=access_token_expires, scopes=form_data.scopes - ), - "token_type": "bearer", - } - - -@router.post( - "/login/test-token", response_model=models.User, response_model_exclude={"hashed_password"} -) -async def test_token(current_user: models.User = Depends(deps.get_current_user)) -> Any: - """ - Test 
access token - """ - return current_user - - -@router.post("/password-recovery/{email}", response_model=schemas.Msg) -async def recover_password(email: str, db: AsyncSession = Depends(deps.get_db)) -> Any: - """ - Password Recovery - """ - user = await crud.user.get_by_key(db, key="email", value=email) - - if not user: - raise HTTPException( - status_code=404, - detail="The user with this username does not exist in the system.", - ) - else: - user = user[0] - - password_reset_token = generate_token(email=email) - send_email( - type="password_recovery", - email_to=user.email, - name=user.name, - surname=user.surname, - token=password_reset_token, - email_language=user.language_preference, - ) - return {"msg": "Password recovery email sent"} - - -@router.post("/reset-password", response_model=schemas.Msg) -async def reset_password( - token: str = Body(...), - new_password: str = Body(...), - db: AsyncSession = Depends(deps.get_db), -) -> Any: - """ - Reset password - """ - email = verify_token(token) - if not email: - raise HTTPException(status_code=400, detail="Invalid token") - user = await crud.user.get_by_key(db, key="email", value=email) - if not user: - raise HTTPException( - status_code=404, - detail="The user with this username does not exist in the system.", - ) - elif not crud.user.is_active(user[0]): - raise HTTPException(status_code=400, detail="Inactive user") - else: - user = user[0] - - hashed_password = get_password_hash(new_password) - user.hashed_password = hashed_password - db.add(user) - await db.commit() - return {"msg": "Password updated successfully"} diff --git a/src/endpoints/v1/opportunities.py b/src/endpoints/v1/opportunities.py deleted file mode 100644 index f3fbcfc..0000000 --- a/src/endpoints/v1/opportunities.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import List - -from fastapi import APIRouter, Depends, HTTPException -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud -from src.db import models -from src.endpoints.legacy import deps - -router = APIRouter() - - -@router.get("", response_model=List[models.OpportunityDefaultConfig]) -async def list_opportunities( - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - current_user: models.User = Depends(deps.get_current_active_superuser), -): - opportunities = await crud.opportunity_default_config.get_multi(db, skip=skip, limit=limit) - if not opportunities: - raise HTTPException(status_code=404, detail="there is no (more) opportunities.") - return opportunities - - -@router.get("/groups", response_model=List[models.OpportunityGroup]) -async def list_opportunity_groups( - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - current_user: models.User = Depends(deps.get_current_active_superuser), -): - groups = await crud.opportunity_group.get_multi(db, skip=skip, limit=limit) - if not groups: - raise HTTPException(status_code=404, detail="there is no (more) groups.") - return groups diff --git a/src/endpoints/v1/opportunity_config.py b/src/endpoints/v1/opportunity_config.py deleted file mode 100644 index 6a800dd..0000000 --- a/src/endpoints/v1/opportunity_config.py +++ /dev/null @@ -1,78 +0,0 @@ -from typing import List - -from fastapi import APIRouter, Depends, HTTPException -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud, schemas -from src.db import models -from src.endpoints.legacy import deps - -router = APIRouter() - - -@router.get("", response_model=List[models.OpportunityStudyAreaConfig]) -async def 
list_opportunity_study_area_configs( - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - opportunities = await crud.opportunity_study_area_config.get_multi(db, skip=skip, limit=limit) - if not opportunities: - raise HTTPException(status_code=404, detail="there is no (more) opportunities.") - return opportunities - - -@router.get("/{id:int}", response_model=List[models.OpportunityStudyAreaConfig]) -async def read_opportunity_study_area_config_by_id( - id: int, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - opportunity = await crud.opportunity_study_area_config.get_multi_by_key( - db, key="study_area_id", value=id - ) - if not opportunity: - raise HTTPException(status_code=404, detail="opportunity not found.") - return opportunity - - -@router.post("", response_model=models.OpportunityStudyAreaConfig) -async def create_a_new_opportunity_study_area_config( - opportunity_in: schemas.CreateOpportunityStudyAreaConfig, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - opportunity = await crud.opportunity_study_area_config.create(db, obj_in=opportunity_in) - return opportunity - - -@router.put("/{id:int}", response_model=models.OpportunityStudyAreaConfig) -async def update_an_opportunity_study_area_config( - id: int, - opportunity_in: schemas.CreateOpportunityStudyAreaConfig, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - opportunity_in_db = await crud.opportunity_study_area_config.get(db, id=id) - if not opportunity_in_db: - raise HTTPException(status_code=404, detail="opportunity not found.") - - opportunity = await crud.opportunity_study_area_config.update( - db, db_obj=opportunity_in_db, obj_in=opportunity_in - ) - return opportunity - - -@router.delete("/{id:int}", response_model=models.OpportunityStudyAreaConfig) -async def delete_an_opportunity_study_area_config( - id: int, - db: AsyncSession = Depends(deps.get_db), - current_super_user: models.User = Depends(deps.get_current_active_superuser), -): - opportunity = await crud.opportunity_study_area_config.get(db, id=id) - if not opportunity: - raise HTTPException(status_code=404, detail="opportunity not found.") - - opportunity = await crud.opportunity_study_area_config.remove(db, id=opportunity.id) - return opportunity diff --git a/src/endpoints/v1/organizations.py b/src/endpoints/v1/organizations.py deleted file mode 100644 index 61481be..0000000 --- a/src/endpoints/v1/organizations.py +++ /dev/null @@ -1,124 +0,0 @@ -from typing import Any, List - -from fastapi import APIRouter, Body, Depends, HTTPException -from fastapi.encoders import jsonable_encoder -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud, schemas -from src.db import models -from src.endpoints.legacy import deps -from src.schemas.organization import request_examples - -router = APIRouter() - - -@router.post("/", response_model=models.Organization) -async def create_organization( - *, - db: AsyncSession = Depends(deps.get_db), - organization_in: schemas.OrganizationCreate = Body(..., example=request_examples["create"]), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Create new organization. 
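    For illustration only: `name` is implied by the duplicate check in the
    handler body, while the full OrganizationCreate schema lives elsewhere
    (see request_examples in src.schemas.organization). A hypothetical call:

        import requests

        requests.post(
            "https://example.com/api/v1/organizations/",  # assumed mount point
            json={"name": "Plan4Better"},  # hypothetical payload
            headers={"Authorization": "Bearer <token>"},
        )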
- """ - is_superuser = crud.user.is_superuser(current_user) - if not is_superuser: - raise HTTPException(status_code=400, detail="The user doesn't have enough privileges") - - organization = await crud.organization.get_by_key(db, key="name", value=organization_in.name) - if organization: - raise HTTPException( - status_code=400, - detail="The organization with this name already exists in the system.", - ) - organization = await crud.organization.create(db, obj_in=organization_in) - return organization - - -@router.get("/", response_model=List[models.Organization]) -async def read_organizations( - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Retrieve organizations. - """ - is_superuser = crud.user.is_superuser(current_user) - if not is_superuser: - raise HTTPException(status_code=400, detail="The user doesn't have enough privileges") - - organizations = await crud.organization.get_multi(db, extra_fields=[models.Organization.users]) - return organizations - - -@router.get( - "/{organization_id}/users", - response_model=List[models.User], - response_model_exclude={"hashed_password"}, -) -async def get_users_for_organization( - *, - db: AsyncSession = Depends(deps.get_db), - organization_id: int, - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Get all users for an organization. - """ - is_superuser = crud.user.is_superuser(current_user) - if not is_superuser: - raise HTTPException(status_code=400, detail="The user doesn't have enough privileges") - - organization = await crud.organization.get( - db, id=organization_id, extra_fields=[models.Organization.users] - ) - if not organization: - raise HTTPException(status_code=404, detail="Organization not found") - users = organization.users - return users - - -@router.put("/{organization_id}", response_model=models.Organization) -async def update_organization( - *, - db: AsyncSession = Depends(deps.get_db), - organization_id: int, - organization_in: schemas.OrganizationUpdate = Body(..., example=request_examples["update"]), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Update an organization. - """ - is_superuser = crud.user.is_superuser(current_user) - if not is_superuser: - raise HTTPException(status_code=400, detail="The user doesn't have enough privileges") - - organization = await crud.organization.get(db, id=organization_id) - if not organization: - raise HTTPException(status_code=404, detail="Organization not found") - organization = await crud.organization.update( - db, db_obj=organization, obj_in=jsonable_encoder(organization_in) - ) - return organization - - -@router.delete("/{organization_id}", response_model=models.Organization) -async def delete_organization( - *, - db: AsyncSession = Depends(deps.get_db), - organization_id: int, - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Delete an organization. 
- """ - is_superuser = crud.user.is_superuser(current_user) - if not is_superuser: - raise HTTPException(status_code=400, detail="The user doesn't have enough privileges") - - organization = await crud.organization.get(db, id=organization_id) - if not organization: - raise HTTPException(status_code=404, detail="Organization not found") - organization = await crud.organization.remove(db, id=organization_id) - return organization diff --git a/src/endpoints/v1/poi_aoi.py b/src/endpoints/v1/poi_aoi.py deleted file mode 100644 index c4053e1..0000000 --- a/src/endpoints/v1/poi_aoi.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import Any, Optional - -from fastapi import APIRouter, Depends, Query -from sqlalchemy.ext.asyncio.session import AsyncSession - -from src import crud -from src.db import models -from src.endpoints.legacy import deps -from src.resources.enums import ReturnType -from src.utils import return_geojson_or_geobuf - -router = APIRouter() - - -@router.get("/visualization", response_model=Any) -async def poi_aoi_visualization( - return_type: ReturnType, - scenario_id: Optional[int] = Query( - description="The scenario id to get the POIs in case the modus is 'scenario' or 'comparison'.", - default=0, - example=1, - ), - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Visualize POIs and AOIs based on settings specified by the user. - """ - scenario_id = await deps.check_user_owns_scenario(db, scenario_id, current_user) - pois = await crud.poi_aoi.poi_aoi_visualization( - db=db, scenario_id=scenario_id, current_user=current_user, return_type=return_type - ) - _return_type = return_type.value - if return_type == ReturnType.geobuf.value: - _return_type = "db_geobuf" - return return_geojson_or_geobuf(pois, _return_type) diff --git a/src/endpoints/v1/projects.py b/src/endpoints/v1/projects.py deleted file mode 100644 index be805d8..0000000 --- a/src/endpoints/v1/projects.py +++ /dev/null @@ -1,64 +0,0 @@ - -from fastapi import Depends, APIRouter -from sqlalchemy.ext.asyncio import AsyncSession -from starlette.responses import JSONResponse - -from src.endpoints.legacy import deps -from src.schemas.project import dummy_projects -import uuid - -router = APIRouter() - - -@router.get("", response_class=JSONResponse) -async def list_projects( - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, -): - return dummy_projects - - -@router.get("/{id:str}", response_class=JSONResponse) -async def read_project_by_id( - id: uuid.UUID, - db: AsyncSession = Depends(deps.get_db), -): - for project in dummy_projects: - if project["id"] == id: - return project - - -# @router.post("", response_class=JSONResponse) -# async def create_a_new_project( -# project_in: , -# db: AsyncSession = Depends(deps.get_db), -# current_super_user: models.User = Depends(deps.get_current_active_superuser), -# ): -# project = await crud.project.create(db, obj_in=project_in) -# return project - - -# @router.put("/{id:int}", response_model=models.Project) -# async def update_a_project( -# id: int, -# project_in: schemas.CreateProject, -# db: AsyncSession = Depends(deps.get_db), -# current_super_user: models.User = Depends(deps.get_current_active_superuser), -# ): -# project_in_db = await crud.project.get(db, id=id) -# if not project_in_db: -# raise HTTPException(status_code=404, detail="project not found.") - -# project = await crud.project.update(db, db_obj=project_in_db, obj_in=project_in) -# return project - - -# 
@router.delete("/") -# async def delete_projects( -# id: List[int] = Query(default=None, gt=0), -# db: AsyncSession = Depends(deps.get_db), -# current_super_user: models.User = Depends(deps.get_current_active_superuser), -# ): - -# return await crud.project.remove_multi(db, ids=id) diff --git a/src/endpoints/v1/r5.py b/src/endpoints/v1/r5.py deleted file mode 100644 index af545d8..0000000 --- a/src/endpoints/v1/r5.py +++ /dev/null @@ -1,268 +0,0 @@ -from typing import Any, List - -import requests -from fastapi import ( - APIRouter, - Body, - Depends, - File, - Form, - HTTPException, - UploadFile, -) -from sqlalchemy.ext.asyncio.session import AsyncSession -from starlette.responses import JSONResponse - -from src import crud -from src.core.config import settings -from src.db import models -from src.endpoints.legacy import deps -from src.schemas.msg import Msg -from src.schemas.r5 import ( - R5ProjectCreateDTO, - R5ProjectInDB, - R5ProjectUpdateDTO, - R5RegionCreateDTO, - R5RegionInDB, - request_examples, -) - -router = APIRouter() - -# ----------------------ACTIVITY ENDPOINTS------------------------ -# ---------------------------------------------------------------- - -headers = {} -if settings.R5_AUTHORIZATION: - headers["Authorization"] = settings.R5_AUTHORIZATION - - -@router.get("/activity") -async def get_activity( - current_user: models.User = Depends(deps.get_current_active_superuser), -): - """ - Get all activities. - """ - response = requests.delete(settings.R5_API_URL + "/activity", headers=headers) - return response.json() - - -# ------------------------REGION ENDPOINTS------------------------ -# ---------------------------------------------------------------- - - -@router.get("/region", response_model=List[R5RegionInDB]) -async def get_regions( - *, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Get all regions. - """ - regions = await crud.r5.get_all_regions(db) - return regions - - -@router.get("/region/{region_id}", response_model=R5RegionInDB) -async def get_region( - *, - region_id: str, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Get region. - """ - region = await crud.r5.get_region(db, region_id) - return region - - -@router.get("/region/{region_id}/project", response_model=List[R5ProjectInDB]) -async def get_projects_for_region( - *, - region_id: str, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Get all projects. - """ - projects = await crud.r5.get_projects_for_region(db, region_id) - return projects - - -@router.post("/region", response_model=R5RegionInDB) -async def region_create( - *, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - region_in: R5RegionCreateDTO = Body(..., example=request_examples["region"]["create"]), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Create new region. - """ - region = await crud.r5.create_region(db=db, region_in=region_in) - return region - - -# TODO: Add region update - - -@router.delete("/region/{region_id}", response_model=Msg) -async def region_delete( - *, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - region_id: str, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Delete region. 
- """ - region = await crud.r5.get_region(db=db, region_id=region_id) - if not region: - raise HTTPException(status_code=400, detail="The region doesn't exist") - - result = await crud.r5.delete_region(db=db, region_id=region_id) - - return result - - -# -----------------------PROJECT ENDPOINTS------------------------ -# ---------------------------------------------------------------- - - -@router.get("/project", response_model=List[R5ProjectInDB]) -async def get_projects( - *, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Get all projects. - """ - projects = await crud.r5.get_all_projects(db) - return projects - - -@router.get("/project/{project_id}", response_model=R5ProjectInDB) -async def get_project( - *, - project_id: str, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Get project. - """ - project = await crud.r5.get_project(db, project_id) - return project - - -@router.post("/project", response_model=R5ProjectInDB) -async def project_create( - *, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - project_in: R5ProjectCreateDTO = Body(..., example=request_examples["project"]["create"]), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Create new project. - """ - project = await crud.r5.create_project(db=db, project_in=project_in) - return project - - -@router.put("/project", response_model=Msg) -async def project_update( - *, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - project_in: R5ProjectUpdateDTO = Body(..., example=request_examples["project"]["update"]), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Update project. - """ - project = await crud.r5.update_project(db=db, project_in=project_in) - return project - - -@router.delete("/project/{project_id}", response_model=Msg) -async def project_delete( - *, - db: AsyncSession = Depends(deps.get_r5_mongo_db), - project_id: str, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Delete project. - """ - result = await crud.r5.delete_project(db=db, project_id=project_id) - return result - - -# ------------------------BUNDLE ENDPOINTS------------------------ -# ---------------------------------------------------------------- - - -@router.get("/bundle", response_class=JSONResponse) -async def get_bundles( - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Get all bundles. - """ - result = requests.get(settings.R5_API_URL + "/bundle", headers=headers) - return result.json() - - -@router.get("/bundle/{bundle_id}", response_class=JSONResponse) -async def get_bundle( - *, - bundle_id: str, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Get bundle. - """ - result = requests.get(settings.R5_API_URL + "/bundle/" + bundle_id, headers=headers) - return result.json() - - -@router.post("/bundle", response_class=JSONResponse) -async def create_bundle( - *, - bundle_name: str = Form(...), - osm: UploadFile = File(...), - feed_group: UploadFile = File(...), - region_id: str = Form(...), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Create new bundle. 
- """ - response = requests.post( - settings.R5_API_URL + "/bundle", - files={ - "bundleName": bundle_name, - "osm": osm.file, - "feedGroup": feed_group.file, - "regionId": region_id, - }, - headers=headers, - ) - return response.json() - - -@router.delete("/bundle/{bundle_id}", response_class=JSONResponse) -async def delete_bundle( - *, - bundle_id: str, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Delete bundle. - """ - response = requests.delete(settings.R5_API_URL + "/bundle/" + bundle_id, headers=headers) - return response.json() diff --git a/src/endpoints/v1/roles.py b/src/endpoints/v1/roles.py deleted file mode 100644 index 7808bf2..0000000 --- a/src/endpoints/v1/roles.py +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Any, List - -from fastapi import APIRouter, Depends, HTTPException -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud -from src.db import models -from src.endpoints.legacy import deps - -router = APIRouter() - - -@router.get("", response_model=List[models.Role]) -async def read_roles( - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Retrieve all roles. - """ - is_superuser = crud.user.is_superuser(current_user) - if not is_superuser: - raise HTTPException(status_code=400, detail="The user doesn't have enough privileges") - - roles = await crud.role.get_multi(db) - return roles diff --git a/src/endpoints/v1/scenarios.py b/src/endpoints/v1/scenarios.py deleted file mode 100644 index 1697a96..0000000 --- a/src/endpoints/v1/scenarios.py +++ /dev/null @@ -1,317 +0,0 @@ -from typing import Any, List, Optional - -from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query -from fastapi.encoders import jsonable_encoder -from fastapi.responses import JSONResponse -from sqlalchemy import func, select, text -from sqlalchemy.ext.asyncio.session import AsyncSession - -from src import schemas -from src.crud.crud_scenario import scenario as crud_scenario -from src.db import models -from src.endpoints.legacy import deps -from src.resources.enums import ReturnType -from src.schemas.msg import Msg -from src.schemas.scenario import ScenarioWithBrokenField, request_examples -from src.utils import return_geojson_or_geobuf, to_feature_collection - -router = APIRouter() - -# --------------------------------Scenario Table--------------------------------------- -# ------------------------------------------------------------------------------------- - - -@router.post("", response_model=models.Scenario) -async def create_scenario( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - scenario_in: schemas.ScenarioCreate = Body(..., example=request_examples["create"]), -): - """ - Create scenario. - """ - cnt_scenario_user = await db.execute( - select(func.count(models.Scenario.id)).where(models.Scenario.user_id == current_user.id) - ) - cnt_scenario_user = cnt_scenario_user.fetchone()[0] - if cnt_scenario_user >= current_user.limit_scenarios: - raise HTTPException( - status_code=400, - detail="You reached the maximum number of %s scenarios." 
- % current_user.limit_scenarios, - ) - - obj_scenario = models.Scenario( - scenario_name=scenario_in.scenario_name, - user_id=current_user.id, - study_area_id=current_user.active_study_area_id, - ) - result = await crud_scenario.create(db=db, obj_in=obj_scenario) - return result - - -@router.get("", response_model=List[ScenarioWithBrokenField]) -async def get_scenarios( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -): - """ - Get all scenarios. - """ - # TODO: Check if the scenarios have outdated features in the table poi_modified, way_modified and building_modified - - result = await crud_scenario.get_by_multi_keys( - db=db, - keys={"user_id": current_user.id, "study_area_id": current_user.active_study_area_id}, - ) - for index, r in enumerate(result): - broken = await crud_scenario.is_scenario_broken(db, r.id) - r = ScenarioWithBrokenField.parse_obj(r) - r.broken = broken - result[index] = r - return result - - -@router.put("/{scenario_id}", response_model=models.Scenario) -async def update_scenario( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - scenario_id: int, - scenario_in: schemas.ScenarioUpdate = Body(..., example=request_examples["update"]), -): - """ - Update scenario. - """ - scenario = await crud_scenario.get_by_multi_keys( - db, keys={"id": scenario_id, "user_id": current_user.id} - ) - if len(scenario) > 0: - result = await crud_scenario.update(db=db, db_obj=scenario[0], obj_in=scenario_in) - return result - else: - raise HTTPException(status_code=400, detail="Scenario not found") - - -@router.delete("/") -async def delete_scenario( - *, - id: List[int] = Query(default=None, gt=0), - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -): - """ - Delete scenario. - """ - await crud_scenario.remove_multi_by_id_and_userid(db, ids=id, user_id=current_user.id) - return - - -# ------------------------Scenario Layers (_modified tables)--------------------------- -# ------------------------------------------------------------------------------------- - - -@router.get("/{scenario_id}/{layer_name}/features", response_class=JSONResponse) -async def read_scenario_features( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - scenario_id: int = Path( - ..., description="Scenario ID", example=request_examples["read_features"]["scenario_id"] - ), - layer_name: schemas.ScenarioLayersNoPoisEnum = Path( - ..., - description="Scenario layer name to read", - example=request_examples["read_features"]["layer_name"], - ), - intersect: Optional[str] = Query( - default=None, - description="WKT Geometry to intersect with layer. Geometry must be in EPSG:4326. If not specified, all features are returned (only for _modified tables).", - example=request_examples["read_features"]["intersect"], - ), - return_type: ReturnType = Query( - default=ReturnType.geojson, description="Return type of the response" - ), -) -> Any: - """ - Get features from scenario layers (default or modified). 
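    An illustrative request: `intersect` takes WKT in EPSG:4326 as the Query
    declaration below states, and "way_modified" is one of the _modified
    layers named in the TODO above; scenario id, host, and token are
    placeholders:

        import requests

        resp = requests.get(
            "https://example.com/api/v1/scenarios/1/way_modified/features",
            params={
                "intersect": "POLYGON((11.53 48.13, 11.58 48.13, 11.58 48.16, 11.53 48.13))",
                "return_type": "geojson",
            },
            headers={"Authorization": "Bearer <token>"},
        )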
- """ - scenario = await crud_scenario.get_by_multi_keys( - db, keys={"id": scenario_id, "user_id": current_user.id} - ) - if len(scenario) == 0: - raise HTTPException(status_code=400, detail="Scenario not found") - - result = await crud_scenario.read_scenario_features( - db, current_user, scenario_id, layer_name, intersect - ) - features = to_feature_collection( - result, exclude_properties=["coordinates_3857", "node_source", "node_target"] - ) - if return_type.value == ReturnType.geojson.value: - features = jsonable_encoder(features) - - return return_geojson_or_geobuf(features, return_type.value) - - -@router.delete("/{scenario_id}/{layer_name}/features-all", response_model=Msg) -async def delete_scenario_features( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - scenario_id: int = Path( - ..., description="Scenario ID", example=request_examples["delete_feature"]["scenario_id"] - ), - layer_name: schemas.ScenarioLayerFeatureEnum = Path( - ..., - description="Scenario layer name to delete feature from", - example=request_examples["delete_feature"]["layer_name"], - ), -) -> Any: - """ - Delete all features from scenario layers. This endpoint is used to delete features in "modified" tables. - """ - scenario = await crud_scenario.get_by_multi_keys( - db, keys={"id": scenario_id, "user_id": current_user.id} - ) - if len(scenario) == 0: - raise HTTPException(status_code=400, detail="Scenario not found") - - result = await crud_scenario.delete_scenario_features( - db, current_user, scenario_id, layer_name - ) - return result - - -@router.delete("/{scenario_id}/{layer_name}/features", response_model=Msg) -async def delete_selected_scenario_feature( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - scenario_id: int = Path( - ..., description="Scenario ID", example=request_examples["delete_feature"]["scenario_id"] - ), - layer_name: schemas.ScenarioLayerFeatureEnum = Path( - ..., - description="Scenario layer name to delete feature from", - example=request_examples["delete_feature"]["layer_name"], - ), - id: List[int] = Query( - ..., - description="Scenario feature ID to delete", - example=request_examples["delete_feature"]["feature_id"], - ), -): - """ - Delete specific features from scenario layer. This endpoint is used to delete feature in "modified" tables. - """ - scenario = await crud_scenario.get_by_multi_keys( - db, keys={"id": scenario_id, "user_id": current_user.id} - ) - if len(scenario) == 0: - raise HTTPException(status_code=400, detail="Scenario not found") - else: - result = await crud_scenario.delete_scenario_feature( - db, current_user, scenario_id, layer_name, id - ) - return result - - -@router.post( - "/{scenario_id}/{layer_name}/features", - response_class=JSONResponse, -) -async def create_scenario_features( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - scenario_id: int = Path( - ..., description="Scenario ID", example=request_examples["create_feature"]["scenario_id"] - ), - layer_name: schemas.ScenarioLayerFeatureEnum = Path( - ..., - description="Scenario layer name to create feature in", - example=request_examples["create_feature"]["layer_name"], - ), - features_in: schemas.ScenarioFeatureCreate = Body( - ..., - examples=request_examples["create_feature"]["payload"], - ), -): - """ - Create feature in scenario layer. 
This endpoint is used to create features in "modified" tables. - """ - scenario = await crud_scenario.get_by_multi_keys( - db, keys={"id": scenario_id, "user_id": current_user.id} - ) - if len(scenario) == 0: - raise HTTPException(status_code=400, detail="Scenario not found") - else: - result = await crud_scenario.create_scenario_features( - db, current_user, scenario_id, layer_name, features_in - ) - features = to_feature_collection( - result, exclude_properties=["coordinates_3857", "node_source", "node_target"] - ) - return jsonable_encoder(features) - - -@router.put( - "/{scenario_id}/{layer_name}/features", - response_class=JSONResponse, -) -async def update_scenario_features( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - scenario_id: int = Path( - ..., description="Scenario ID", example=request_examples["update_feature"]["scenario_id"] - ), - layer_name: schemas.ScenarioLayerFeatureEnum = Path( - ..., - description="Scenario layer name to update features in", - example=request_examples["update_feature"]["layer_name"], - ), - features_in: schemas.ScenarioFeatureUpdate = Body( - ..., - examples=request_examples["update_feature"]["payload"], - ), -): - """ - Update feature in scenario layer. This endpoint is used to update features in "modified" tables. - """ - scenario = await crud_scenario.get_by_multi_keys( - db, keys={"id": scenario_id, "user_id": current_user.id} - ) - if len(scenario) == 0: - raise HTTPException(status_code=400, detail="Scenario not found") - else: - result = await crud_scenario.update_scenario_features( - db, current_user, scenario_id, layer_name, features_in - ) - features = to_feature_collection( - result, exclude_properties=["coordinates_3857", "node_source", "node_target"] - ) - return jsonable_encoder(features) - - -@router.get("/population_modification/{scenario_id}", response_class=JSONResponse) -async def population_modification( - *, - db: AsyncSession = Depends(deps.get_db), - scenario_id: int = Path( - ..., description="Scenario ID", example=request_examples["update_feature"]["scenario_id"] - ), - current_user: models.User = Depends(deps.get_current_active_user), -): - scenario_id = await deps.check_user_owns_scenario( - db=db, current_user=current_user, scenario_id=scenario_id - ) - await db.execute( - text("SELECT * FROM basic.population_modification(:scenario_id);"), - {"scenario_id": scenario_id}, - ) - await db.commit() - return {"msg": "Successfully calculated population modification"} diff --git a/src/endpoints/v1/static_layers.py b/src/endpoints/v1/static_layers.py deleted file mode 100644 index e0c3d76..0000000 --- a/src/endpoints/v1/static_layers.py +++ /dev/null @@ -1,65 +0,0 @@ -from typing import Any - -from fastapi import APIRouter, Depends -from fastapi.encoders import jsonable_encoder -from sqlalchemy import select, text -from sqlalchemy.ext.asyncio import AsyncSession - -from src.db import models -from src.db.models.legacy.config_validation import * -from src.endpoints.legacy import deps -from src.resources.enums import ( - AllowedVectorTables, - ReturnType, - SQLReturnTypes, - StaticTableSQLActive, -) -from src.utils import return_geojson_or_geobuf - -router = APIRouter() - - -@router.get("/active-study-area/{layer_name}") -async def get_static_vector_layer_intersected_by_study_area( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - layer_name: AllowedVectorTables, - return_type: ReturnType, -): 
- """Return features from selected layer intersecting the active study area in different geoformats""" - _return_type = return_type.value - if return_type == ReturnType.geobuf.value: - _return_type = "db_geobuf" - sql_query = text( - SQLReturnTypes[_return_type].value % StaticTableSQLActive[layer_name.value].value - ) - result = await db.execute(sql_query, {"study_area_id": current_user.active_study_area_id}) - - return return_geojson_or_geobuf(result.fetchall()[0][0], _return_type) - - -@router.get("/all/{layer_name}", response_model=Any) -async def get_static_table_all_features( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), - layer_name: AllowedVectorTables, -): - """Return all features from selected layer with some selected columns""" - if layer_name.value == "sub_study_area": - results = await db.execute( - select( - models.SubStudyArea.id, - models.SubStudyArea.study_area_id, - models.SubStudyArea.name, - models.SubStudyArea.population, - ) - ) - elif layer_name.value == "study_area": - results = await db.execute( - select(models.StudyArea.id, models.StudyArea.name, models.StudyArea.population) - ) - - results = results.fetchall() - return jsonable_encoder(results) diff --git a/src/endpoints/v1/static_layers_extra.py b/src/endpoints/v1/static_layers_extra.py deleted file mode 100644 index 6b53820..0000000 --- a/src/endpoints/v1/static_layers_extra.py +++ /dev/null @@ -1,147 +0,0 @@ -from typing import List - -from fastapi import APIRouter, Depends, HTTPException, Query, UploadFile -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud -from src.db import models -from src.db.models.legacy.config_validation import * -from src.db.session import legacy_engine -from src.endpoints.legacy import deps -from src.schemas.data_frame import validate_data_frame -from src.utils import ( - convert_postgist_to_4326, - generate_static_layer_table_name, - geopandas_read_file, -) - -router = APIRouter() - - -@router.post("/static") -async def upload_static_layer( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), - upload_file: UploadFile, -): - try: - data_frame = geopandas_read_file(upload_file) - except HTTPException as e: - # It's HTTP exception, so raise it to the endpoint. 
- raise e - except Exception as e: - print(e) - raise HTTPException(status_code=400, detail="Could not parse the uploaded file.") - - validate_data_frame(data_frame) - convert_postgist_to_4326(data_frame) - assert data_frame.crs.srs == "epsg:4326" - - static_layer = models.StaticLayer( - user_id=current_user.id, - table_name=await crud.static_layer.uniquify_static_layer_name( - db, file_name=upload_file.filename - ), - ) - - # Save Data Frame to Database - data_frame.to_postgis( - name=static_layer.table_name, - con=legacy_engine.connect(), - schema="extra", - if_exists="fail", - index=True, - ) - # Create Static Layer DB Object - static_layer = await crud.static_layer.create(db, obj_in=static_layer) - return static_layer - - -@router.get("/static/", response_model=List[models.StaticLayer]) -async def list_static_layers( - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - current_user: models.User = Depends(deps.get_current_active_superuser), -): - static_layers = await crud.static_layer.get_multi(db, skip=skip, limit=limit) - if not static_layers: - raise HTTPException(status_code=404, detail="there is no (more) static layers.") - return static_layers - - -@router.get("/static/{layer_id:int}") -async def get_static_layer_data( - *, - layer_id: int, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - static_layer = await crud.static_layer.get(db, id=layer_id) - if not static_layer: - raise HTTPException(status_code=404, detail="static layer not found.") - - return static_layer - - -@router.put("/static/{layer_id:int}") -async def update_static_layer_data( - *, - layer_id: int, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), - upload_file: UploadFile, -): - static_layer = await crud.static_layer.get(db, id=layer_id) - if not static_layer: - raise HTTPException(status_code=404, detail="static layer not found.") - - try: - data_frame = geopandas_read_file(upload_file) - except HTTPException as e: - # It's HTTP exception, so raise it to the endpoint. - raise e - except Exception as e: - print(e) - raise HTTPException(status_code=400, detail="Could not parse the uploaded file.") - - validate_data_frame(data_frame) - convert_postgist_to_4326(data_frame) - assert data_frame.crs.srs == "epsg:4326" - - # Drop previous PostGIS table - await crud.static_layer.drop_postgis_table(db, static_layer.table_name) - static_layer.table_name = generate_static_layer_table_name(prefix=upload_file.filename) - await crud.static_layer.update(db, db_obj=static_layer, obj_in=static_layer) - # Create PostGIS table - data_frame.to_postgis( - name=static_layer.table_name, - con=legacy_engine.connect(), - schema="extra", - if_exists="fail", - index=True, - ) - - return static_layer - - -@router.delete("/static/") -async def delete_static_layer_data( - *, - id: List[int] = Query(default=None, gt=0), - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - """ - Delete multiple static layers at the same time. 
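    Illustrative call only: FastAPI fills List[int] = Query(...) from repeated
    `id` query parameters, which requests expresses as a list of tuples (host
    and token are placeholders):

        import requests

        requests.delete(
            "https://example.com/api/v1/layers/static/",  # assumed mount point
            params=[("id", 3), ("id", 4)],
            headers={"Authorization": "Bearer <token>"},
        )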
- """ - layer_ids = id - for layer_id in layer_ids: - static_layer = await crud.static_layer.get(db, id=layer_id) - if static_layer: - # Drop PostGIS table - await crud.static_layer.drop_postgis_table(db, static_layer.table_name) - - # Delete Objects - return await crud.static_layer.remove_multi(db, ids=layer_ids) diff --git a/src/endpoints/v1/study_area.py b/src/endpoints/v1/study_area.py deleted file mode 100644 index f5c9dbf..0000000 --- a/src/endpoints/v1/study_area.py +++ /dev/null @@ -1,63 +0,0 @@ -from typing import List - -from fastapi import APIRouter, Depends, HTTPException -from pydantic import ValidationError -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud -from src.db import models -from src.endpoints.legacy import deps -from src.resources.enums import LayerGroupsEnum -from src.schemas.study_area import pydantify_config - -router = APIRouter() - - -@router.get("/settings/{id}/{group_name}", response_model=List[str]) -async def get_group_layers_of_study_area_setting( - id: int, - group_name: LayerGroupsEnum, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - """ - Update layers to specified layer-group settings in specified study-area. - """ - study_area = await crud.study_area.get(db=db, id=id) - if not study_area: - raise HTTPException(status_code=404, detail="study area not found.") - - settings = pydantify_config(study_area.setting, validate=False) - return getattr(settings, group_name) - - -@router.put("/settings/{id}/{group_name}", response_model=List[str]) -async def update_group_layers_in_study_area_setting( - id: int, - group_name: LayerGroupsEnum, - layers: List[str], - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - """ - Get layers under the specified layer-group of specified study-area - """ - study_area = await crud.study_area.get(db=db, id=id) - if not study_area: - raise HTTPException(status_code=404, detail="study area not found.") - - setting = study_area.setting.copy() - layer_groups = pydantify_config(setting, validate=False) # Convert Settings to Pydantic - setattr(layer_groups, group_name, layers) # Set new settings - listed_layer_groups = layer_groups.listify_config() # Convert Back to list settings - setting["layer_groups"] = listed_layer_groups - study_area.setting = setting # To trigger update - - new_study_area = await crud.study_area.update(db=db, db_obj=study_area, obj_in=study_area) - try: - new_setting = pydantify_config(new_study_area.setting) - - except ValidationError as e: - raise HTTPException(status_code=400, detail=e.errors()) - - return getattr(new_setting, group_name) diff --git a/src/endpoints/v1/system.py b/src/endpoints/v1/system.py deleted file mode 100644 index 9ead74f..0000000 --- a/src/endpoints/v1/system.py +++ /dev/null @@ -1,23 +0,0 @@ -from fastapi import APIRouter, Depends -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud -from src.db import models -from src.endpoints.legacy import deps -from src.schemas.system_setting import SystemStatusModel - -router = APIRouter() - - -@router.put("/status", response_model=SystemStatusModel) -async def status_check( - status_in: SystemStatusModel, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -): - results = await crud.system.get_by_key(db, key="type", value="status") - system = results[0] - system.setting = status_in.dict() - 
system = await crud.system.update(db, db_obj=system, obj_in=system) - - return system.setting diff --git a/src/endpoints/v1/upload.py b/src/endpoints/v1/upload.py deleted file mode 100644 index 3587056..0000000 --- a/src/endpoints/v1/upload.py +++ /dev/null @@ -1,160 +0,0 @@ -import json -import os -import shutil -import uuid -from tempfile import NamedTemporaryFile -from typing import IO, Any - -from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.future import select -from starlette import status - -from src import crud, schemas -from src.crud.crud_customization import dynamic_customization -from src.db import models -from src.endpoints.legacy import deps -from src.resources.enums import MaxUploadFileSize -from src.schemas.upload import request_examples -from src.utils import delete_file - -router = APIRouter() - - -@router.get("/poi") -async def get_custom_pois( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """Get metadata for uploaded user data.""" - - upload_objs = await crud.data_upload.get_by_multi_keys( - db, keys={"user_id": current_user.id, "study_area_id": current_user.active_study_area_id} - ) - - response_objs = [] - for obj in upload_objs: - category = await db.execute( - select(models.PoiUser.category).where(models.PoiUser.data_upload_id == obj.id).limit(1) - ) - category = category.all() - if category != []: - if obj.id in current_user.active_data_upload_ids: - state = True - else: - state = False - - obj_dict = { - "id": obj.id, - "category": category[0][0], - "upload_size": obj.upload_size, - "creation_date": str(obj.creation_date), - "state": state, - "reached_poi_heatmap_computed": obj.reached_poi_heatmap_computed, - } - - response_objs.append(obj_dict) - - return json.loads(json.dumps(response_objs)) - - -@router.post("/poi") -async def upload_custom_pois( - *, - db: AsyncSession = Depends(deps.get_db), - file: UploadFile, - poi_category=Body(..., example=request_examples["poi_category"]), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """Handle uploaded custom pois.""" - defined_uuid = uuid.uuid4().hex - file_name = defined_uuid + os.path.splitext(file.filename)[1] - file_dir = f"/tmp/{file_name}" - - real_file_size = 0 - temp: IO = NamedTemporaryFile(delete=False) - for chunk in file.file: - real_file_size += len(chunk) - if real_file_size > MaxUploadFileSize.max_upload_poi_file_size.value: - temp.close() - delete_file(temp.name) - raise HTTPException( - status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, - detail="The uploaded file size is to big the largest allowd size is %s MB." 
- % round(MaxUploadFileSize.max_upload_poi_file_size / 1024.0**2, 2), - ) - - temp.write(chunk) - temp.close() - - # Write file to file system - - shutil.move(temp.name, file_dir) - await crud.upload.upload_custom_pois( - db=db, - poi_category=poi_category, - file=file, - file_dir=file_dir, - file_name=file_name, - current_user=current_user, - ) - - updated_settings = await dynamic_customization.build_main_setting_json( - db=db, current_user=current_user - ) - - return updated_settings - - -@router.delete("/poi/all") -async def delete_all_custom_pois( - *, - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """Delete custom pois.""" - data_upload_ids = await crud.data_upload.get_by_key(db, key="user_id", value=current_user.id) - - for obj in data_upload_ids: - await crud.upload.delete_custom_pois( - db=db, data_upload_id=obj.id, current_user=current_user - ) - - return {"msg": "Successfully deleted custom pois"} - - -@router.delete("/poi/{data_upload_id}") -async def delete_custom_pois( - *, - db: AsyncSession = Depends(deps.get_db), - data_upload_id: int, - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """Delete custom pois.""" - data_upload_obj = await crud.data_upload.get_by_multi_keys( - db, keys={"id": data_upload_id, "user_id": current_user.id} - ) - if data_upload_obj == []: - raise HTTPException(status_code=400, detail="Data Upload not found.") - - await crud.upload.delete_custom_pois( - db=db, data_upload_id=data_upload_id, current_user=current_user - ) - return {"msg": "Successfully deleted custom pois"} - - -@router.patch("/poi") -async def set_active_state_of_custom_poi( - *, - db: AsyncSession = Depends(deps.get_db), - custom_data_upload_state: schemas.CutomDataUploadState, - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """Set active state of a custom poi.""" - - await crud.upload.set_active_state_of_custom_poi( - db=db, current_user=current_user, obj_in=custom_data_upload_state - ) - - return {"msg": "Successfully updated active state of custom poi"} diff --git a/src/endpoints/v1/users.py b/src/endpoints/v1/users.py deleted file mode 100644 index 609285f..0000000 --- a/src/endpoints/v1/users.py +++ /dev/null @@ -1,341 +0,0 @@ -import datetime -import json -from typing import Any, List - -from fastapi import APIRouter, Body, Depends, HTTPException, Query, Response -from fastapi.encoders import jsonable_encoder -from fastapi.responses import JSONResponse -from sqlalchemy.ext.asyncio import AsyncSession - -from src import crud, schemas -from src.core.config import settings -from src.crud.base import CRUDBase -from src.db import models -from src.endpoints.legacy import deps -from src.schemas import Msg -from src.schemas.user import request_examples -from src.utils import generate_token, send_email, to_feature_collection, verify_token - -router = APIRouter() - - -@router.get("", response_model=List[models.User], response_model_exclude={"hashed_password"}) -async def read_users( - response: Response, - db: AsyncSession = Depends(deps.get_db), - skip: int = 0, - limit: int = 100, - ordering: str = None, - q: str = None, - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Retrieve users. 
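The size guard in `upload_custom_pois` above streams the upload chunk-by-chunk and aborts once the running total passes the cap, so an oversized file never lands on disk in full. The same pattern as a self-contained helper (assuming the cap is in bytes, as `MaxUploadFileSize` suggests):

from tempfile import NamedTemporaryFile

def spool_with_limit(fileobj, max_bytes: int) -> str:
    """Copy an iterable of byte chunks to a temp file, enforcing a size cap."""
    total = 0
    with NamedTemporaryFile(delete=False) as tmp:
        for chunk in fileobj:
            total += len(chunk)
            if total > max_bytes:
                raise ValueError(f"upload exceeds {max_bytes} bytes")
            tmp.write(chunk)
        return tmp.name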
- """ - is_superuser = crud.user.is_superuser(current_user) - total_count = await crud.user.count(db) - response.headers["X-Total-Count"] = str(total_count) - if not is_superuser: - raise HTTPException(status_code=400, detail="The user doesn't have enough privileges") - - users = await crud.user.get_multi(db, skip=skip, limit=limit, ordering=ordering, query=q) - return users - - -@router.get("/me", response_model=models.User, response_model_exclude={"hashed_password"}) -async def read_user_me( - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Get current user. - """ - return current_user - - -@router.get("/me/study-area", response_class=JSONResponse) -async def read_user_study_area( - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Get current user's active study area. - """ - study_area = await crud.user.get_active_study_area(db, current_user) - - features = to_feature_collection( - study_area, exclude_properties=["default_setting", "buffer_geom_heatmap"] - ) - return features - - -@router.get( - "/me/study-areas-list", - response_model=List[schemas.UserStudyAreaList], -) -async def read_user_study_areas( - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Get user study areas. - """ - user = await crud.user.get(db, id=current_user.id, extra_fields=[models.User.study_areas]) - study_area_list = [] - for study_area in user.study_areas: - study_area_list.append(schemas.UserStudyAreaList(id=study_area.id, name=study_area.name)) - return study_area_list - - -@router.put("/me/preference", response_model=models.User) -async def update_user_preference( - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), - preference: schemas.UserPreference = Body( - ..., examples=request_examples["update_user_preference"] - ), -) -> Any: - """ - Update user preference. - """ - if preference.active_study_area_id is not None: - owns_study_area = await CRUDBase(models.UserStudyArea).get_by_multi_keys( - db, keys={"user_id": current_user.id, "study_area_id": preference.active_study_area_id} - ) - if owns_study_area == []: - raise HTTPException(status_code=400, detail="The user doesn't own the study area") - - user = await crud.user.get(db, id=current_user.id) - if not user: - raise HTTPException( - status_code=400, - detail="User not found", - ) - user = await CRUDBase(models.User).update(db, db_obj=user, obj_in=preference) - return user - - -@router.post("") -async def create_user( - *, - db: AsyncSession = Depends(deps.get_db), - user_in: schemas.UserCreate = Body(..., example=request_examples["create"]), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Create new user. 
- """ - - user = await crud.user.get_by_key(db, key="email", value=user_in.email) - if user and len(user) > 0: - raise HTTPException( - status_code=400, - detail="The user with this email already exists in the system.", - ) - user = await crud.user.create(db, obj_in=user_in) - user = await crud.user.get( - db, id=user.id, extra_fields=[models.User.roles, models.User.study_areas] - ) - user_json = jsonable_encoder(user) - user_json["roles"] = [json.loads(role.json()) for role in user.roles] - user_json["study_areas"] = [study_area.id for study_area in user.study_areas] - del user_json["hashed_password"] - return user_json - - -@router.post("/demo", response_model=models.User, response_model_exclude={"hashed_password"}) -async def create_demo_user( - *, - db: AsyncSession = Depends(deps.get_db), - user_in: schemas.UserCreateDemo = Body(..., example=request_examples["create_demo_user"]), -) -> Any: - """ - Create new user. - """ - user = await crud.user.get_by_key(db, key="email", value=user_in.email) - if user and len(user) > 0: - raise HTTPException( - status_code=400, - detail="The user with this email already exists in the system.", - ) - organization_demo = await crud.organization.get_by_key(db, key="name", value="demo") - study_area_demo = await crud.study_area.get_by_key( - db, key="id", value=settings.DEMO_USER_STUDY_AREA_ID - ) - - if len(organization_demo) == 0 or len(study_area_demo) == 0: - raise HTTPException( - status_code=400, - detail="Can't create a demo user at this time. Please contact the administrator.", - ) - organization_demo = organization_demo[0] - study_area_demo = study_area_demo[0] - user_in = user_in.dict() - user_in.update( - { - "organization_id": organization_demo.id, - "roles": ["user"], - "active_study_area_id": study_area_demo.id, - "active_data_upload_ids": [], - "storage": 0, - "limit_scenarios": settings.DEMO_USER_SCENARIO_LIMIT, - "is_active": False, - } - ) - user_obj = schemas.UserCreate(**user_in) - user = await crud.user.create(db, obj_in=user_obj) - activate_token = generate_token(email=user.email) - if settings.EMAILS_ENABLED and user.email: - send_email( - type="activate_new_account", - email_to=user.email, - name=user.name, - surname=user.surname, - token=activate_token, - email_language=user.language_preference, - ) - return user - - -@router.post( - "/demo/activate", response_model=models.User, response_model_exclude={"hashed_password"} -) -async def activate_demo_user( - *, - db: AsyncSession = Depends(deps.get_db), - token: str = Query(None, description="Activation token"), -) -> Any: - """ - Activate a demo user. - """ - email = verify_token(token=token) - if not email: - raise HTTPException( - status_code=400, - detail="The activation token is invalid.", - ) - user = await crud.user.get_by_key(db, key="email", value=email) - if user and len(user) > 0 and user[0].is_active is False: - user = user[0] - user = await CRUDBase(models.User).update(db, db_obj=user, obj_in={"is_active": True}) - send_email( - type="account_trial_started", - email_to=user.email, - name=user.name, - surname=user.surname, - token="", - email_language=user.language_preference, - ) - return user - else: - raise HTTPException( - status_code=400, - detail="The user with this email doesn't exist in the system.", - ) - - -@router.get("/demo/deactivate", response_model=Msg) -async def deactivate_demo_users( - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_user), -) -> Any: - """ - Deactivate demo user. 
- """ - is_superuser = crud.user.is_superuser(current_user) - if not is_superuser: - raise HTTPException(status_code=400, detail="The user doesn't have enough privileges") - - organization_demo = await crud.organization.get_by_key(db, key="name", value="demo") - if len(organization_demo) == 0: - raise HTTPException( - status_code=400, - detail="Can't deactivate demo users at this time.", - ) - organization_demo = organization_demo[0] - users = await crud.user.get_by_key(db, key="organization_id", value=organization_demo.id) - for user in users: - time_diff = datetime.datetime.now() - user.creation_date - if (time_diff.days > settings.DEMO_USER_DEACTIVATION_DAYS) and user.is_active: - user = await CRUDBase(models.User).update(db, db_obj=user, obj_in={"is_active": False}) - if settings.EMAILS_ENABLED and user.email: - send_email( - type="account_expired", - email_to=user.email, - name=user.name, - surname=user.surname, - token="", - email_language=user.language_preference, - ) - elif (time_diff.days == settings.DEMO_USER_DEACTIVATION_DAYS - 3) and user.is_active: - if settings.EMAILS_ENABLED and user.email: - send_email( - type="account_expiring", - email_to=user.email, - name=user.name, - surname=user.surname, - token="", - email_language=user.language_preference, - ) - - return {"msg": "Demo users deactivated"} - - -@router.get("/{user_id}") -async def read_user_by_id( - user_id: int, - current_user: models.User = Depends(deps.get_current_active_superuser), - db: AsyncSession = Depends(deps.get_db), -) -> Any: - """ - Get a specific user by id. - """ - user = await crud.user.get( - db, id=user_id, extra_fields=[models.User.roles, models.User.study_areas] - ) - - user_json = jsonable_encoder(user) - user_json["roles"] = [json.loads(role.json()) for role in user.roles] - user_json["study_areas"] = [study_area.id for study_area in user.study_areas] - - del user_json["hashed_password"] - - return user_json - - -@router.delete("/") -async def delete_users( - *, - id: List[int] = Query(default=None, gt=0), - db: AsyncSession = Depends(deps.get_db), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Delete users. - """ - - return await crud.user.remove_multi(db, ids=id) - - -@router.put("/{user_id}", response_model=models.User, response_model_exclude={"hashed_password"}) -async def update_user( - *, - db: AsyncSession = Depends(deps.get_db), - user_id: int, - user_in: schemas.UserUpdate = Body(..., example=request_examples["update"]), - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Update a user. 
- """ - - user = await crud.user.get( - db, id=user_id, extra_fields=[models.User.study_areas, models.User.roles] - ) - if not user: - raise HTTPException( - status_code=404, - detail="The user with this username does not exist in the system", - ) - user = await crud.user.update(db, db_obj=user, obj_in=user_in) - return user diff --git a/src/endpoints/v1/utils.py b/src/endpoints/v1/utils.py deleted file mode 100644 index ce001e3..0000000 --- a/src/endpoints/v1/utils.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Any - -import requests -from fastapi import APIRouter, Depends, HTTPException, Query, Response -from pydantic.networks import EmailStr - -from src import schemas -from src.db import models -from src.endpoints.legacy import deps -from src.utils import send_test_email - -router = APIRouter() - - -@router.get("/reverse-proxy") -async def reverse_proxy( - *, - current_user: models.User = Depends(deps.get_current_active_user), - url: str = Query(..., description="URL to reverse proxy"), -) -> Response: - """ - Reverse proxy to another server. - """ - if not url: - raise HTTPException(status_code=400, detail="URL is required") - if ( - url.startswith("http://localhost") - or url.startswith("https://localhost") - or url.startswith("localhost") - ): - raise HTTPException(status_code=400, detail="URL should not start with localhost") - if not url.startswith("http://") and not url.startswith("https://"): - url = f"http://{url}" - response = requests.get(url) - return Response(content=response.content) - - -@router.post("/test-email", response_model=schemas.Msg, status_code=201) -def test_email( - email_to: EmailStr, - current_user: models.User = Depends(deps.get_current_active_superuser), -) -> Any: - """ - Test emails. - """ - send_test_email(email_to=email_to) - return {"msg": "Test email sent"} diff --git a/src/schemas/legacy/building.py b/src/schemas/legacy/building.py deleted file mode 100644 index 7d84a97..0000000 --- a/src/schemas/legacy/building.py +++ /dev/null @@ -1 +0,0 @@ -hidden_props_client = ["id", "gross_floor_area"] diff --git a/src/schemas/legacy/customization.py b/src/schemas/legacy/customization.py deleted file mode 100644 index c1478cf..0000000 --- a/src/schemas/legacy/customization.py +++ /dev/null @@ -1,16 +0,0 @@ - -""" -Body of the request -""" -request_examples = { - "create": { - "map": {"zoom": 13, "max_zoom": 21, "min_zoom": 10, "projection_code": "EPSG:3857"} - }, - "user_customization_insert": { - "poi": { - "summary": "Update POI setting", - "value": {"nursery": {"icon": "fa-solid fa-dumbbell", "color": ["#ff0000"]}}, - } - }, - "user_customization_delete": {"poi": {"summary": "Delete POI setting", "value": "nursery"}}, -} diff --git a/src/schemas/legacy/data_preparation.py b/src/schemas/legacy/data_preparation.py deleted file mode 100644 index ed735c0..0000000 --- a/src/schemas/legacy/data_preparation.py +++ /dev/null @@ -1,190 +0,0 @@ -from typing import Optional - -from pydantic import BaseModel, Field - -from .heatmap import HeatmapMode, HeatmapProfile -from .legacy.isochrone import ( - IsochroneAccessMode, - IsochroneDTO, - IsochroneMode, - IsochroneOutput, - IsochroneSettings, -) - - -class HeatmapIsochroneDTO(BaseModel): - mode: IsochroneMode = Field(IsochroneAccessMode.WALK, description="Isochrone Mode") - settings: IsochroneSettings = Field(..., description="Isochrone settings parameters") - output: Optional[IsochroneOutput] = Field(..., description="Isochrone output parameters") - - -class BulkIdParameters(BaseModel): - buffer_distance: int # 
in meters - study_area_ids: list[int] - - -class TravelTimeMatrixParameters(BaseModel): - bulk_id: list[str] - isochrone_dto: IsochroneDTO - calculation_resolution: int - s3_folder: Optional[str] = "" - - -class TravelTimeMatrixParametersSingleBulk(TravelTimeMatrixParameters): - bulk_id: str - - -class OpportunityMatrixParameters(TravelTimeMatrixParameters): - opportunity_types: Optional[list[str]] = ["population", "aoi", "poi"] - scenario_ids: Optional[list[int]] = [] - user_data_ids: Optional[list[int]] = [] - compute_base_data: Optional[bool] = True - - -class OpportunityMatrixParametersSingleBulk(OpportunityMatrixParameters): - bulk_id: str - - -class ConnectivityMatrixParameters(BaseModel): - mode: str - profile: str - bulk_id: list[str] - max_traveltime: int - s3_folder: Optional[str] = "" - - -ConnectivityMatrixExample = { - "mode": "walking", - "profile": "standard", - "bulk_id": ["861f8d55fffffff"], - "max_traveltime": 20, - "s3_folder": "test", -} - - -class ConnectivityHeatmapParameters(BaseModel): - mode: HeatmapMode - profile: HeatmapProfile - study_area_id: int - max_travel_time: int - - -ConnectivityHeatmapParametersExample = { - "mode": "walking", - "profile": "standard", - "study_area_id": 91620000, - "max_travel_time": 20, -} - - -BulkIdParametersExample = {"buffer_distance": 1000, "study_area_ids": [91620000]} - - -examples = { - "travel_time_matrix": { - "active_mobility": { - "summary": "Opportunity Matrix for Active Mobility", - "value": { - "bulk_id": ["861f8d55fffffff"], - "calculation_resolution": 10, - "isochrone_dto": { - "mode": "walking", - "settings": {"travel_time": 20, "speed": 5, "walking_profile": "standard"}, - "starting_point": { - "input": [{"lat": 48.1502132, "lon": 11.5696284}], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": {"type": "grid", "resolution": 12}, - }, - "s3_folder": "test", - }, - }, - "motorized_transport": { - "summary": "Opportunity Matrix for Motorized Transport (Public Transport or Car)", - "value": { - "bulk_id": ["861f8d55fffffff"], - "calculation_resolution": 9, - "isochrone_dto": { - "mode": "transit", - "settings": { - "travel_time": "60", - "transit_modes": ["bus", "tram", "subway", "rail"], - "weekday": "0", - "access_mode": "walk", - "egress_mode": "walk", - "bike_traffic_stress": 4, - "from_time": 25200, - "to_time": 39600, - "max_rides": 4, - "max_bike_time": 20, - "max_walk_time": 20, - "percentiles": [5, 25, 50, 75, 95], - "monte_carlo_draws": 200, - }, - "starting_point": { - "input": [{"lat": 48.1502132, "lon": 11.5696284}], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": {"type": "grid", "resolution": "9"}, - }, - "s3_folder": "test", - }, - }, - }, - "opportunity_matrix": { - "active_mobility": { - "summary": "Opportunity Matrix for Active Mobility", - "value": { - "bulk_id": ["861f8d55fffffff"], - "calculation_resolution": 10, - "isochrone_dto": { - "mode": "walking", - "settings": {"travel_time": 20, "speed": 5, "walking_profile": "standard"}, - "starting_point": { - "input": [{"lat": 48.1502132, "lon": 11.5696284}], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": {"type": "grid", "resolution": 12}, - }, - "scenario_ids": [], - "user_data_ids": [], - "compute_base_data": True, - "s3_folder": "test", - }, - }, - "motorized_transport": { - "summary": "Opportunity Matrix for Motorized Transport (Public Transport or Car)", - "value": { - "bulk_id": ["861f8d55fffffff"], - "calculation_resolution": 9, - "isochrone_dto": { - "mode": "transit", - "settings": { - "travel_time": 
"60", - "transit_modes": ["bus", "tram", "subway", "rail"], - "weekday": "0", - "access_mode": "walk", - "egress_mode": "walk", - "bike_traffic_stress": 4, - "from_time": 25200, - "to_time": 39600, - "max_rides": 4, - "max_bike_time": 20, - "max_walk_time": 20, - "percentiles": [5, 25, 50, 75, 95], - "monte_carlo_draws": 200, - }, - "starting_point": { - "input": [{"lat": 48.1502132, "lon": 11.5696284}], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": {"type": "grid", "resolution": "9"}, - }, - "scenario_ids": [], - "user_data_ids": [], - "compute_base_data": True, - "s3_folder": "test", - }, - }, - }, -} diff --git a/src/schemas/legacy/geostore.py b/src/schemas/legacy/geostore.py deleted file mode 100644 index d77bb5a..0000000 --- a/src/schemas/legacy/geostore.py +++ /dev/null @@ -1,57 +0,0 @@ -from src.db.models import Geostore -from pydantic import validator -import requests -from src.legacy.utils.utils import random_lower_string - - -class RequestExamples: - @property - def geostore(self): - return { - "configuration": { - "url": "mapURL", - "name": "Name", - "description": "LayerDescription", - "type": "geoadmin", - "legend": "legendUrl", - "attribution": "attribution", - "getcapabilities": "GetCapabilities", - }, - "name": "test_" + random_lower_string(), - "type": "geoadmin", - "url": "https://geoportal.freiburg.de/freigis/ressources/services-internet.json", - "attribution": "Stadt Freiburg im Breisgau", - "thumbnail_url": "https://s3.eu-central-1.amazonaws.com/goat-app-assets/geostore_thumbnails/freigis.png", - } - - -request_examples = RequestExamples() - - -class CreateGeostore(Geostore): - @validator("url", "thumbnail_url") - def url_returns_200(cls, field_value, values, field, config): - try: - result = requests.get(field_value) - assert result.status_code == 200 - except: - raise ValueError(f"Could not resolve url from '{field.name}' field.") - return field_value - - @validator("configuration") - def validate_configuration_keys(cls, field_value): - if field_value: - config_keys = request_examples.geostore["configuration"].keys() - if not set(config_keys).issuperset(set(field_value.keys())): - raise ValueError( - f'configuration keys should be subset of "{", ".join(list(config_keys))}".' 
- ) - return field_value - - @validator("configuration") - def remove_empty_configuration(cls, field_value): - if field_value: - return {k: v for k, v in field_value.items() if v} - - class Config: - schema_extra = {"example": request_examples.geostore} diff --git a/src/schemas/legacy/heatmap.py b/src/schemas/legacy/heatmap.py deleted file mode 100644 index 4fee5b3..0000000 --- a/src/schemas/legacy/heatmap.py +++ /dev/null @@ -1,553 +0,0 @@ -from enum import Enum -from typing import List, Optional - -from pydantic import BaseModel, Field, validator -from sqlmodel import SQLModel - -from src.schemas.legacy.isochrone import ( - CalculationTypes, - IsochroneCyclingProfile, - IsochroneScenario, - IsochroneWalkingProfile, -) - - -class ComputePoiUser(SQLModel): - data_upload_id: int - - -class HeatmapBulkResolution(int, Enum): - """H3 Resolution Bulk.""" - - active_mobility = 6 - motorized_transport = 6 - - -class HeatmapCalculationResolution(int, Enum): - """H3 Resolution Calculation.""" - - active_mobility = 10 - motorized_transport = 8 - - -class HeatmapMode(Enum): - walking = "walking" - cycling = "cycling" - transit = "transit" - - -class HeatmapProfile(Enum): - standard = "standard" - - -class AggregatingDataSource(Enum): - population = "population" - - -class HeatmapType(Enum): - modified_gaussian = "modified_gaussian" - combined_cumulative_modified_gaussian = "combined_cumulative_modified_gaussian" - connectivity = "connectivity" - cumulative = "cumulative" - closest_average = "closest_average" - aggregated_data = "aggregated_data" - modified_gaussian_population = "modified_gaussian_population" - - -class ReturnTypeHeatmap(Enum): - GEOJSON = "geojson" - CSV = "csv" - GEOBUF = "geobuf" - SHAPEFILE = "shapefile" - GEOPACKAGE = "geopackage" - KML = "kml" - XLSX = "xlsx" - - -class AnalysisUnit(Enum): - hexagon = "hexagon" - square = "square" - building = "building" - point = "point" - - -class HeatmapBaseSpeed(Enum): - """Speed in km/h""" - - walking = 5.0 - cycling = 15.0 - - -class HeatmapBase(BaseModel): - max_traveltime: int - weight: int - visible: bool = False - - -class HeatmapConfigGravity(HeatmapBase): - sensitivity: int - - -class HeatmapConfigCombinedGravity(HeatmapConfigGravity): - static_traveltime: int - - -class HeatmapClosestAverage(HeatmapBase): - max_count: int - - -class HeatmapConfigConnectivity(BaseModel): - max_traveltime: int = Field(None, le=60) - - -class HeatmapConfigAggregatedData(BaseModel): - source: AggregatingDataSource - - -class HeatmapSettingsBase(BaseModel): - study_area_ids: List[int] - resolution: int = Field(None, ge=6, le=10) - heatmap_type: HeatmapType - heatmap_config: dict - analysis_unit: AnalysisUnit - analysis_unit_size: Optional[int] = Field(10, description="Size of the analysis") - return_type: ReturnTypeHeatmap = "geojson" - scenario: Optional[IsochroneScenario] = Field( - { - "id": 1, - "modus": CalculationTypes.default, - }, - description="Isochrone scenario parameters. 
Only supported for POIs and Building scenario at the moment", - ) - - -class HeatmapSettingsAggregatedData(HeatmapSettingsBase): - heatmap_config: HeatmapConfigAggregatedData - - -class HeatmapSettings0(HeatmapSettingsBase): - """Setting for different heatmap types""" - - mode: HeatmapMode = Field(HeatmapMode.walking.value, description="Isochrone Mode") - walking_profile: Optional[IsochroneWalkingProfile] = Field( - IsochroneWalkingProfile.STANDARD.value, - description="Walking profile.", - ) - cycling_profile: Optional[IsochroneCyclingProfile] = Field( - IsochroneCyclingProfile.STANDARD.value, - description="Cycling profile.", - ) - - heatmap_type: HeatmapType = Field( - HeatmapType.modified_gaussian, description="Type of heatmap to compute" - ) - - @validator("heatmap_config") - def heatmap_config_schema_connectivity(cls, value, values): - if values["heatmap_type"] != HeatmapType.connectivity: - return value - else: - return HeatmapConfigConnectivity(**value) - - @validator("heatmap_config") - def heatmap_config_schema(cls, value, values): - """ - Validate each part of heatmap_config against validator class corresponding to heatmap_type - """ - if values["heatmap_type"] == HeatmapType.connectivity: - # This validator should not apply to connectivity heatmap - return value - - validator_classes = { - "modified_gaussian": HeatmapConfigGravity, - "combined_cumulative_modified_gaussian": HeatmapConfigCombinedGravity, - "modified_gaussian_population": HeatmapConfigCombinedGravity, - "closest_average": HeatmapClosestAverage, - } - - heatmap_type = values["heatmap_type"].value - if heatmap_type not in validator_classes.keys(): - raise ValueError(f"Validation for type {heatmap_type} not found.") - validator_class = validator_classes[heatmap_type] - heatmap_config = value - for opportunity in heatmap_config: - for category in heatmap_config[opportunity]: - category_settings = heatmap_config[opportunity][category] - validator_class(**category_settings) - - return value - - -class HeatmapSettings(BaseModel): - def __new__(cls, *args, **kwargs): - heatmap_type = kwargs.get("heatmap_type") - if ( - heatmap_type == HeatmapType.aggregated_data.value - or heatmap_type == HeatmapType.aggregated_data - ): - return HeatmapSettingsAggregatedData(*args, **kwargs) - else: - return HeatmapSettings0(*args, **kwargs) - - -class BulkTravelTime(BaseModel): - west: list[int] - north: list[int] - zoom: list[int] - width: list[int] - height: list[int] - grid_ids: list[int] - travel_times: list[list[int]] - - -""" -Body of the request -""" -request_examples_ = { - "compute_poi_user": {"data_upload_id": 1}, - "heatmap_configuration": """{"supermarket":{"sensitivity":250000,"weight":1}}""", -} - - -request_examples = { - "modified_gaussian_hexagon_10": { - "summary": "Gravity heatmap with hexagon resolution 10", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "modified_gaussian", - "analysis_unit": "hexagon", - "resolution": 10, - "return_type": "geojson", - "heatmap_config": { - "poi": { - "atm": {"weight": 1, "sensitivity": 250000, "max_traveltime": 20}, - "bar": {"weight": 1, "sensitivity": 250000, "max_traveltime": 20}, - "gym": {"weight": 1, "sensitivity": 350000, "max_traveltime": 20}, - }, - }, - }, - }, - "connectivity_heatmap_6_walking": { - "summary": "Connectivity heatmap with hexagon resolution 6 Walking", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - 
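In the `modified_gaussian` examples, each POI category carries a `weight`, `sensitivity`, and `max_traveltime`. A common reading of that kernel (an assumption here; the actual formula lives in the heatmap computation code, not this schema file) is a gaussian decay over travel time in seconds, which makes `sensitivity` ≈ 250000 behave like a (500 s)² scale:

import math

def modified_gaussian(traveltime_min: float, sensitivity: float,
                      weight: float, max_traveltime: float) -> float:
    if traveltime_min > max_traveltime:
        return 0.0
    t_sec = traveltime_min * 60.0
    return weight * math.exp(-(t_sec ** 2) / sensitivity)

# e.g. modified_gaussian(10, 250000, 1, 20) ≈ 0.24 — half-way opportunities
# still count, while those near the 20-minute cutoff contribute almost nothing.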
"walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "connectivity", - "analysis_unit": "hexagon", - "resolution": 6, - "return_type": "geojson", - "heatmap_config": { - "max_traveltime": 20, - }, - }, - }, - "connectivity_heatmap_6_transit": { - "summary": "Connectivity heatmap with hexagon resolution 6 Public Transport", - "value": { - "mode": "transit", - "study_area_ids": [91620000], - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "connectivity", - "analysis_unit": "hexagon", - "resolution": 6, - "return_type": "geojson", - "heatmap_config": { - "max_traveltime": 60, - }, - }, - }, - "modified_gaussian_hexagon_9": { - "summary": "Gravity heatmap with hexagon resolution 9", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "modified_gaussian", - "analysis_unit": "hexagon", - "resolution": 9, - "return_type": "geojson", - "heatmap_config": { - "poi": { - "atm": {"weight": 1, "sensitivity": 250000, "max_traveltime": 20}, - "bar": {"weight": 1, "sensitivity": 250000, "max_traveltime": 20}, - "gym": {"weight": 1, "sensitivity": 350000, "max_traveltime": 20}, - }, - }, - }, - }, - "modified_gaussian_hexagon_6": { - "summary": "Gravity heatmap with hexagon resolution 6", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "modified_gaussian", - "analysis_unit": "hexagon", - "resolution": 6, - "return_type": "geojson", - "heatmap_config": { - "poi": { - "atm": {"weight": 1, "sensitivity": 250000, "max_traveltime": 20}, - "bar": {"weight": 1, "sensitivity": 250000, "max_traveltime": 20}, - "gym": {"weight": 1, "sensitivity": 350000, "max_traveltime": 20}, - }, - }, - }, - }, - "combined_modified_gaussian_hexagon_6": { - "summary": "Combined Gravity heatmap with hexagon resolution 6", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "combined_cumulative_modified_gaussian", - "analysis_unit": "hexagon", - "resolution": 6, - "return_type": "geojson", - "heatmap_config": { - "poi": { - "atm": { - "weight": 1, - "sensitivity": 250000, - "max_traveltime": 20, - "static_traveltime": 5, - }, - "bar": { - "weight": 1, - "sensitivity": 250000, - "max_traveltime": 20, - "static_traveltime": 5, - }, - "gym": { - "weight": 1, - "sensitivity": 350000, - "max_traveltime": 20, - "static_traveltime": 5, - }, - }, - }, - }, - }, - "closest_average_hexagon_10": { - "summary": "Closest average heatmap with hexagon resolution 10", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "closest_average", - "analysis_unit": "hexagon", - "resolution": 10, - "return_type": "geojson", - "heatmap_config": { - "poi": { - "atm": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "bar": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "gym": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "pub": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "bank": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "cafe": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "fuel": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "park": {"weight": 1, "max_count": 1, 
"max_traveltime": 20}, - "yoga": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "hotel": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "bakery": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "cinema": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "forest": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "museum": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "butcher": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "dentist": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "nursery": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "bus_stop": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "pharmacy": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "post_box": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "fast_food": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "gymnasium": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "nightclub": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "recycling": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "tram_stop": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "playground": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "realschule": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "restaurant": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "car_sharing": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "convenience": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "grundschule": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "hypermarket": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "marketplace": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "post_office": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "supermarket": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "bike_sharing": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "discount_gym": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "kindergarten": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "rail_station": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "subway_entrance": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "charging_station": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "organic_supermarket": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "discount_supermarket": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "general_practitioner": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "swimming_pool_outdoor": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "hauptschule_mittelschule": { - "weight": 1, - "max_count": 1, - "max_traveltime": 20, - }, - }, - }, - }, - }, - "closest_average_hexagon_9": { - "summary": "Closest average hexagon with resolution 9", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "closest_average", - "analysis_unit": "hexagon", - "resolution": 9, - "return_type": "geojson", - "heatmap_config": { - "poi": { - "atm": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "bar": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "gym": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - }, - }, - }, - }, - "closest_average_hexagon_6": { - "summary": "Closest average hexagon with resolution 6", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - 
"heatmap_type": "closest_average", - "analysis_unit": "hexagon", - "resolution": 6, - "return_type": "geojson", - "heatmap_config": { - "poi": { - "atm": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "bar": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - "gym": {"weight": 1, "max_count": 1, "max_traveltime": 20}, - }, - }, - }, - }, - "connectivity_heatmap_10": { - "summary": "Connectivity heatmap with hexagon resolution 10", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "connectivity", - "analysis_unit": "hexagon", - "resolution": 10, - "return_type": "geojson", - "heatmap_config": {"max_traveltime": 10}, - }, - }, - "aggregated_data_heatmap_10": { - "summary": "Aggregated data with hexagon resolution 10", - "value": { - "study_area_ids": [91620000], - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "aggregated_data", - "analysis_unit": "hexagon", - "resolution": 10, - "heatmap_config": {"source": "population"}, - }, - }, - "modified_gaussian_population_6": { - "summary": "Modified gaussian population with hexagon resolution 6", - "value": { - "mode": "walking", - "study_area_ids": [91620000], - "walking_profile": "standard", - "scenario": { - "id": 1, - "name": "default", - }, - "heatmap_type": "modified_gaussian_population", - "analysis_unit": "hexagon", - "resolution": 6, - "return_type": "geojson", - "heatmap_config": { - "poi": { - "atm": { - "weight": 1, - "sensitivity": 250000, - "max_traveltime": 20, - "static_traveltime": 5, - }, - "bar": { - "weight": 1, - "sensitivity": 250000, - "max_traveltime": 20, - "static_traveltime": 5, - }, - "gym": { - "weight": 1, - "sensitivity": 350000, - "max_traveltime": 20, - "static_traveltime": 5, - }, - }, - }, - }, - }, -} - -# sort request_examples by resolution -try: - request_examples = {k: v for k, v in sorted(request_examples.items(), key=lambda item: item[1]['value']['resolution'])} -except Exception: - pass - -# add warning to request_examples with high resolution -for key, value in request_examples.items(): - if value["value"]["resolution"] > 6: - request_examples[key]["summary"] += " (Warning! 
May freeze browser.)" \ No newline at end of file diff --git a/src/schemas/legacy/isochrone.py b/src/schemas/legacy/isochrone.py deleted file mode 100644 index d86d0c8..0000000 --- a/src/schemas/legacy/isochrone.py +++ /dev/null @@ -1,737 +0,0 @@ -from enum import Enum -from typing import List, Optional, Union - -from pydantic import BaseModel, Field, root_validator - -""" -Body of the request -""" - - -class IsochroneTypeEnum(str, Enum): - single = "single_isochrone" - multi = "multi_isochrone" - heatmap = "heatmap" - - -class IsochroneMultiCountPois(BaseModel): - user_id: Optional[int] - scenario_id: Optional[int] = 0 - amenities: List[str] - minutes: int - modus: str - region: List[str] - region_type: str - speed: int - active_upload_ids: Optional[List[int]] = [0] - - -class CalculationTypes(str, Enum): - """Calculation types for isochrone.""" - - default = "default" - scenario = "scenario" - comparison = "comparison" - - -class IsochroneMode(Enum): - WALKING = "walking" - CYCLING = "cycling" - TRANSIT = "transit" - CAR = "car" - BUFFER = "buffer" - - -class IsochroneWalkingProfile(Enum): - STANDARD = "standard" - - -class IsochroneCyclingProfile(Enum): - STANDARD = "standard" - PEDELEC = "pedelec" - - -class IsochroneAccessMode(Enum): - WALK = "walk" - BICYCLE = "bicycle" - CAR = "car" - # CAR_PARK = "car_park" //TODO: not supported yet - - -class IsochroneTransitMode(Enum): - BUS = "bus" - TRAM = "tram" - RAIL = "rail" - SUBWAY = "subway" - FERRY = "ferry" - CABLE_CAR = "cable_car" - GONDOLA = "gondola" - FUNICULAR = "funicular" - - -class IsochroneEgressMode(Enum): - WALK = "walk" - # BICYCLE = "bicycle" //TODO: not supported yet - - -class IsochroneOutputType(Enum): - GRID = "grid" - GEOJSON = "geojson" - NETWORK = "network" - - CSV = "csv" - GEOBUF = "geobuf" - SHAPEFILE = "shapefile" - GEOPACKAGE = "geopackage" - KML = "kml" - XLSX = "xlsx" - - -class IsochroneDecayFunctionType(Enum): - LOGISTIC = "logistic" - LINEAR = "linear" - EXPONENTIAL = "exponential" - STEP = "step" - - -class IsochroneMultiRegionType(Enum): - STUDY_AREA = "study_area" - DRAW = "draw" - - -class IsochroneDecayFunction(BaseModel): - type: Optional[str] = Field( - IsochroneDecayFunctionType.LOGISTIC, description="Decay function type" - ) - standard_deviation_minutes: Optional[int] = Field( - 12, description="Standard deviation in minutes" - ) - width_minutes: Optional[int] = Field(10, description="Width in minutes") - - -class IsochroneSettings(BaseModel): - # === SETTINGS FOR WALKING AND CYCLING ===# - travel_time: Optional[int] = Field( - 10, - gt=0, - description="Travel time in **minutes**", - ) - buffer_distance: Optional[int] = Field( - 1000, - gt=50, - le=3000, - description="Buffer distance in **meters**", - ) - speed: Optional[float] = Field( - 5, - gt=0, - le=25, - description="Walking or Cycling speed in **km/h** **(Not considered for PT or CAR)**", - ) - walking_profile: Optional[IsochroneWalkingProfile] = Field( - IsochroneWalkingProfile.STANDARD.value, - description="Walking profile. **(Not considered for PT)**", - ) - cycling_profile: Optional[IsochroneCyclingProfile] = Field( - IsochroneCyclingProfile.STANDARD.value, - description="Cycling profile. **(Not considered for PT)**", - ) - # === SETTINGS FOR CAR AND PT ===# - weekday: Optional[int] = Field( - 0, ge=0, le=6, description="(PT) Departure weekday, 0=Monday, 6=Sunday" - ) - from_time: Optional[int] = Field( - 25200, gt=0, lt=86400, description="(PT) From time. 
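`from_time`/`to_time` are seconds since midnight, so the defaults 25200 and 39600 are 07:00 and 11:00 (the `# 9 AM` comment on the R5 payload template further down is off by two hours). A two-line converter makes such values readable:

def hhmm(seconds_since_midnight: int) -> str:
    h, rem = divmod(seconds_since_midnight, 3600)
    return f"{h:02d}:{rem // 60:02d}"

assert hhmm(25200) == "07:00" and hhmm(39600) == "11:00"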
Number of seconds since midnight" - ) - to_time: Optional[int] = Field( - 39600, gt=0, lt=86400, description="(PT) To time . Number of seconds since midnight" - ) - transit_modes: List[IsochroneTransitMode] = Field( - [ - IsochroneTransitMode.BUS.value, - IsochroneTransitMode.TRAM.value, - IsochroneTransitMode.SUBWAY.value, - IsochroneTransitMode.RAIL.value, - ], - description="Public Transport modes", - unique_items=True, - ) - access_mode: Optional[IsochroneAccessMode] = Field( - IsochroneAccessMode.WALK, description="(PT) Access mode" - ) - egress_mode: Optional[IsochroneEgressMode] = Field( - IsochroneEgressMode.WALK, description="(PT) Egress mode" - ) - bike_speed: Optional[float] = Field(15, gt=0, le=15, description="(PT) Bike speed") - walk_speed: Optional[float] = Field(5, gt=0, le=15, description="(PT) Walk speed") - bike_traffic_stress: Optional[int] = Field( - 4, ge=1, le=4, description="(PT) Bike traffic stress. 1: Low stress, 4: Very high stress" - ) - max_rides: Optional[int] = Field(4, description="(PT) Max number of rides") - max_bike_time: Optional[int] = Field( - 20, - description="(PT) Max bike time (in minutes) to access and egress the transit network, or to make transfers within the network.", - ) - max_walk_time: Optional[int] = Field( - 20, - description="(PT) The maximum walking time (in minutes) to access and egress the transit network, or to make transfers within the network. Defaults to no restrictions, as long as max_trip_duration is respected. The max time is considered separately for each leg (e.g. if you set max_walk_time to 20, you could potentially walk up to 20 minutes to reach transit, and up to another 20 minutes to reach the destination after leaving transit).", - ) - percentiles: Optional[List[int]] = Field( - [5, 25, 50, 75, 95], - description="(PT) Specifies the percentile to use when returning accessibility estimates within the given time window. Please note that this parameter is applied to the travel time estimates that generate the accessibility results, and not to the accessibility distribution itself (i.e. if the 25th percentile is specified, the accessibility is calculated from the 25th percentile travel time, which may or may not be equal to the 25th percentile of the accessibility distribution itself). Defaults to 50, returning the accessibility calculated from the median travel time. If a vector with length bigger than 1 is passed, the output contains an additional column that specifies the percentile of each accessibility estimate. Due to upstream restrictions, only 5 percentiles can be specified at a time. For more details, please see R5 documentation at https://docs.conveyal.com/analysis/methodology#accounting-for-variability.", - ) - monte_carlo_draws: Optional[int] = Field( - 200, - gt=0, - le=200, - description="(PT) The number of Monte Carlo draws to perform per time window minute when calculating travel time matrices and when estimating accessibility.", - ) - decay_function: Optional[IsochroneDecayFunction] = Field( - { - "type": "logistic", - "standard_deviation_minutes": 12, - "width_minutes": 10, - }, - description="(PT) A family of monotonically decreasing functions from travel times to weight factors in the range [0...1]. 
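The default decay function is logistic with `standard_deviation_minutes=12` and `width_minutes=10`. A plausible shape for that weighting, sketched here as an assumption (R5/Conveyal's exact parameterisation may differ): weight ≈ 1 well below the cutoff, 0.5 at the cutoff, ≈ 0 well beyond it.

import math

def logistic_decay(traveltime_min: float, cutoff_min: float,
                   standard_deviation_min: float = 12.0) -> float:
    return 1.0 / (1.0 + math.exp((traveltime_min - cutoff_min) / standard_deviation_min))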
This determines how much an opportunity at a given travel time is weighted when included in an accessibility value", - ) - - -class IsochroneScenario(BaseModel): - id: Optional[int] = Field(..., description="Scenario ID") - modus: Optional[CalculationTypes] = Field( - CalculationTypes.default, description="Scenario modus" - ) - - -class IsochroneStartingPointCoord(BaseModel): - lat: float = Field(..., gt=-90, lt=90) - lon: float = Field(..., gt=-180, lt=180) - - -class IsochroneStartingPoint(BaseModel): - region_type: Optional[IsochroneMultiRegionType] = Field( - IsochroneMultiRegionType.STUDY_AREA, - description="The type of region to use for the multi-isochrone calculation", - ) - region: Optional[List[str]] = Field( - [], - description="The region to use for the multi-isochrone calculation. If region_type is study_area, this is a list of study area IDs. If region_type is draw, this is a list of WKT polygons.", - ) - input: Union[List[str], List[IsochroneStartingPointCoord]] = Field( - ..., - description="The input to use for the multi-isochrone calculation. It can be a list of amenities, or a list of coordinates.", - ) - - -class IsochroneOutput(BaseModel): - type: Optional[IsochroneOutputType] = Field( - IsochroneOutputType.GRID, - description="The type of response isochrone is generated. If type is `grid`, the output is a grid of accessibility values on every cell. If type is `geojson`, the output is a geojson file with the accessibility distribution for every step.", - ) - steps: Optional[int] = Field(2, description="Number of isochrone steps for 'geojson' output") - resolution: Optional[int] = Field( - 9, - description="GRID Resolution for `grid` output type. Default (9 for PT Isochrone, 10 for Waking and Cycling Isochrone", - ) - - -class IsochroneDTO(BaseModel): - mode: IsochroneMode = Field(IsochroneAccessMode.WALK, description="Isochrone Mode") - settings: IsochroneSettings = Field(..., description="Isochrone settings parameters") - scenario: Optional[IsochroneScenario] = Field( - { - "id": 1, - "modus": CalculationTypes.default, - }, - description="Isochrone scenario parameters. Only supported for Walking and Cycling Isochrones", - ) - starting_point: Optional[IsochroneStartingPoint] = Field( - ..., - description="Isochrone starting points. If multiple starting points are specified, the isochrone is considered a multi-isochrone calculation. **Multi-Isochrone Only works for Walking and Cycling Isochrones**. 
Alternatively, amenities can be used to specify the starting points for multi-isochrones.", - ) - output: Optional[IsochroneOutput] = Field(..., description="Isochrone output parameters") - - class Config: - extra = "forbid" - - @root_validator - def validate_output(cls, values): - """Validate""" - - if not values.get("mode"): - raise ValueError("Isochrone mode is required") - - if not values.get("starting_point"): - raise ValueError("Isochrone starting point is required") - - # Validation check on grid resolution and number of steps for geojson for walking and cycling isochrones - if ( - values["output"].type.value == IsochroneOutputType.GRID.value - and values["output"].resolution not in [9, 10, 11, 12] - and values["mode"].value - in [ - IsochroneAccessMode.WALK.value, - IsochroneAccessMode.BICYCLE.value, - ] - ): - raise ValueError( - "Resolution must be between 9 and 14 for walking and cycling isochrones" - ) - - # validate to check if buffer_distance is provided for "BUFFER" mode - if ( - values["mode"].value == IsochroneMode.BUFFER.value - and not values["settings"].buffer_distance - ): - raise ValueError("Buffer distance is required for buffer catchment area") - - # Validation check on grid resolution and number of steps for geojson for public transport isochrones - if ( - values["output"].type.value == IsochroneOutputType.GRID.value - and values["output"].resolution not in [9, 10] - and values["mode"].value - in [ - IsochroneMode.TRANSIT.value, - IsochroneMode.CAR.value, - ] - ): - raise ValueError("Resolution must be between 9 or 10 for public transport isochrones") - - # Validation for geojso output type - if values["output"].type.value == IsochroneOutputType.GEOJSON.value and ( - values["output"].steps > 6 or values["output"].steps < 1 - ): - raise ValueError("Step must be between 1 and 6") - - # Don't allow multi-isochrone calculation for PT and Car Isochrone - if ( - values["starting_point"] - and len(values["starting_point"].input) > 1 - and values["mode"].value - in [ - IsochroneMode.TRANSIT.value, - IsochroneMode.CAR.value, - ] - ): - raise ValueError("Multi-Isochrone is not supported for Transit and Car") - - # For walking and cycling travel time maximumn should be 20 minutes and speed to m/s - if values["mode"].value in [IsochroneMode.WALKING.value, IsochroneMode.CYCLING.value]: - if values["settings"].travel_time > 25: - raise ValueError( - "Travel time maximum for walking and cycling should be less or equal to 25 minutes" - ) - # if values["settings"].speed: - # values["settings"].speed = values["settings"].speed / 3.6 - - # For PT and Car Isochrone starting point should be only lat lon coordinates and not amenities, travel time smaller than 120 minutes - if values["mode"].value in [ - IsochroneMode.TRANSIT.value, - IsochroneMode.CAR.value, - ]: - if values["output"].type.value in [ - IsochroneOutputType.GEOJSON.value, - IsochroneOutputType.NETWORK.value, - ]: - raise ValueError("Geojson output is not supported for PT and Car") - # travel time should be smaller than 120 minutes - if values["settings"].travel_time > 120: - raise ValueError("Travel time should be smaller than 120 minutes") - - if len(values["starting_point"].input) > 0: - for point in values["starting_point"].input: - if not isinstance(point, IsochroneStartingPointCoord): - raise ValueError("Starting point should be lat lon coordinates") - - # from_time should be smaller than to_time - if values["settings"].from_time > values["settings"].to_time: - raise ValueError("Start time should be smaller than end 
time") - - # # convert bike speed to m/s - # values["settings"].bike_speed = values["settings"].bike_speed / 3.6 - # # convert walk speed to m/s - # values["settings"].walk_speed = values["settings"].walk_speed / 3.6 - - # If starting-point input length is more than 1 then it should be multi-isochrone and region should be specified - if len(values["starting_point"].input) > 1 and len(values["starting_point"].region) == 0: - raise ValueError("Region is not specified for multi-isochrone") - - return values - - @property - def is_multi(self): - """Check if multi-isochrone""" - starting_point_type_is_coord = isinstance( - self.starting_point.input[0], IsochroneStartingPointCoord - ) - - if len(self.starting_point.input) > 1 and starting_point_type_is_coord: - return False - else: - return True - - @property - def is_single(self): - """Check if single-isochrone""" - return not self.is_multi - - -# R5 -R5AvailableDates = { - 0: "2022-05-16", - 1: "2022-05-17", - 2: "2022-05-18", - 3: "2022-05-19", - 4: "2022-05-20", - 5: "2022-05-21", - 6: "2022-05-22", -} - -R5ProjectID = "630c0014aad8682ef8461b44" -R5ProjectIDCarOnly = "64ad9dcf92f18428b858eb2e" - -R5TravelTimePayloadTemplate = { - "accessModes": "WALK", - "transitModes": "BUS,TRAM,SUBWAY,RAIL", - "bikeSpeed": 4.166666666666667, - "walkSpeed": 1.39, - "bikeTrafficStress": 4, - "date": "2022-05-16", - "fromTime": 25200, # 7 AM - "toTime": 39600, # 9 AM - "maxTripDurationMinutes": 120, - "decayFunction": { - "type": "logistic", - "standard_deviation_minutes": 12, - "width_minutes": 10, - }, - "destinationPointSetIds": [], - "bounds": { - "north": 48.27059464660387, - "south": 48.03915718648435, - "east": 11.327192290815145, - "west": 11.756388821971976, - }, - "directModes": "WALK", - "egressModes": "WALK", - "fromLat": 48.1502132, - "fromLon": 11.5696284, - "zoom": 9, - "maxBikeTime": 20, - "maxRides": 4, - "maxWalkTime": 20, - "monteCarloDraws": 200, - "percentiles": [5, 25, 50, 75, 95], - "variantIndex": -1, - "workerVersion": "v6.4", - "projectId": "630c0014aad8682ef8461b44", -} - - -request_examples = { - "isochrone": { - "single_walking_default": { - "summary": "Single Walking Isochrone with Default Profile", - "value": { - "mode": "walking", - "settings": { - "travel_time": "10", - "speed": "5", - "walking_profile": "standard", - }, - "starting_point": { - "input": [{"lat": 48.1502132, "lon": 11.5696284}], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": { - "type": "grid", - "resolution": "12", - }, - }, - }, - "single_cycling_default": { - "summary": "Single Cycling Isochrone with Default Profile", - "value": { - "mode": "cycling", - "settings": { - "travel_time": "15", - "speed": "10", - "cycling_profile": "standard", - }, - "starting_point": { - "input": [{"lat": 48.1502132, "lon": 11.5696284}], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": { - "type": "grid", - "resolution": "12", - }, - }, - }, - "pois_multi_isochrone": { - "summary": "Multi Isochrone with Pois", - "value": { - "mode": "walking", - "settings": { - "travel_time": "20", - "speed": "5", - "walking_profile": "standard", - }, - "starting_point": { - "input": ["nursery"], - "region_type": "study_area", - "region": [27, 144], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": { - "type": "grid", - "resolution": "11", - }, - }, - }, - "transit_single": { - "summary": "Single Transit Isochrone", - "value": { - "mode": "transit", - "settings": { - "travel_time": "60", - "transit_modes": ["bus", "tram", "subway", "rail"], - "weekday": 
"0", # 0 - Monday, 6 - Sunday - "access_mode": "walk", - "egress_mode": "walk", - "bike_traffic_stress": 4, - "from_time": 25200, - "to_time": 39600, - "max_rides": 4, - "max_bike_time": 20, - "max_walk_time": 20, - "percentiles": [5, 25, 50, 75, 95], - "monte_carlo_draws": 200, - }, - "starting_point": { - "input": [{"lat": 48.1502132, "lon": 11.5696284}], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": { - "type": "grid", - "resolution": "9", - }, - }, - }, - "single_buffer_catchment": { - "summary": "Single Buffer Catchment", - "value": { - "mode": "buffer", - "settings": {"buffer_distance": "2000"}, - "starting_point": { - "input": [{"lat": 48.1502132, "lon": 11.5696284}], - }, - "scenario": {"id": 0, "modus": "default"}, - "output": { - "type": "grid", - "resolution": "12", - }, - }, - }, - }, - "to_export": { - "single_isochrone": { - "summary": "Single isochrone", - "value": { - "type": "FeatureCollection", - "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, - "features": [ - { - "type": "Feature", - "properties": { - "id": 1, - "isochrone_calculation_id": 10, - "traveltime": 5, - "modus": "default", - "routing_profile": "walking_standard", - "reached_opportunities": { - "cafe": 1, - "park": {"cnt": 2, "area": 38613}, - "hotel": 4, - "nursery": 1, - "sum_pop": 2705, - "post_box": 1, - "fast_food": 1, - "recycling": 1, - "tram_stop": 2, - "playground": 6, - "restaurant": 3, - "grundschule": 1, - "bike_sharing": 2, - "kindergarten": 4, - "charging_station": 1, - "discount_supermarket": 1, - }, - }, - "geometry": { - "type": "MultiPolygon", - "coordinates": [ - [ - [ - [11.536658755874345, 48.144345996653797], - [11.537061254841161, 48.143442507346805], - [11.537139521158997, 48.143376435250516], - [11.537191279470155, 48.143343866212035], - [11.5374341, 48.143330799999987], - [11.538859290552271, 48.142221920365188], - [11.544110890465541, 48.142956875316386], - [11.544196172720932, 48.142909469682657], - [11.5444121, 48.1429149], - [11.5446788, 48.142898], - [11.5447, 48.142899], - [11.5447858, 48.1428654], - [11.5449574, 48.1428715], - [11.545375583217202, 48.142822278856279], - [11.545488790304747, 48.14283469276856], - [11.546166696757849, 48.143118735730042], - [11.546642751647143, 48.143376165954912], - [11.546642751647148, 48.143376165954912], - [11.547064889748599, 48.14377999679396], - [11.547015147649532, 48.143872500250502], - [11.546903388987284, 48.143993188934154], - [11.546833928779018, 48.144068918145287], - [11.5461186984767, 48.14500790348432], - [11.545899333743268, 48.145279629977736], - [11.545549163530334, 48.145155553656451], - [11.545461334376407, 48.145125205811034], - [11.543688217515243, 48.146641336661574], - [11.5436489, 48.146629099999977], - [11.54286095056653, 48.146706414261239], - [11.541162533849324, 48.14658405830393], - [11.541035374915147, 48.146570975389658], - [11.540111510317022, 48.146470587868492], - [11.540006178611856, 48.146453661269526], - [11.538424018197535, 48.145817122216336], - [11.538331825367305, 48.145765846095159], - [11.537397113751769, 48.14534096711494], - [11.536658755874345, 48.144345996653797], - ] - ] - ], - }, - } - ], - }, - }, - "multi_isochrones": { - "summary": "Multi isochrones", - "value": { - "type": "FeatureCollection", - "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}}, - "features": [ - { - "type": "Feature", - "properties": { - "id": 1, - "isochrone_calculation_id": 10, - "traveltime": 5, - "modus": "default", - "routing_profile": 
"walking_standard", - "reached_opportunities": { - "population": { - "study_area1": 1000, - "study_area2": 2000, - } - }, - }, - "geometry": { - "type": "MultiPolygon", - "coordinates": [ - [ - [ - [11.536658755874345, 48.144345996653797], - [11.537061254841161, 48.143442507346805], - [11.537139521158997, 48.143376435250516], - [11.537191279470155, 48.143343866212035], - [11.5374341, 48.143330799999987], - [11.538859290552271, 48.142221920365188], - [11.544110890465541, 48.142956875316386], - [11.544196172720932, 48.142909469682657], - [11.5444121, 48.1429149], - [11.5446788, 48.142898], - [11.5447, 48.142899], - [11.5447858, 48.1428654], - [11.5449574, 48.1428715], - [11.545375583217202, 48.142822278856279], - [11.545488790304747, 48.14283469276856], - [11.546166696757849, 48.143118735730042], - [11.546642751647143, 48.143376165954912], - [11.546642751647148, 48.143376165954912], - [11.547064889748599, 48.14377999679396], - [11.547015147649532, 48.143872500250502], - [11.546903388987284, 48.143993188934154], - [11.546833928779018, 48.144068918145287], - [11.5461186984767, 48.14500790348432], - [11.545899333743268, 48.145279629977736], - [11.545549163530334, 48.145155553656451], - [11.545461334376407, 48.145125205811034], - [11.543688217515243, 48.146641336661574], - [11.5436489, 48.146629099999977], - [11.54286095056653, 48.146706414261239], - [11.541162533849324, 48.14658405830393], - [11.541035374915147, 48.146570975389658], - [11.540111510317022, 48.146470587868492], - [11.540006178611856, 48.146453661269526], - [11.538424018197535, 48.145817122216336], - [11.538331825367305, 48.145765846095159], - [11.537397113751769, 48.14534096711494], - [11.536658755874345, 48.144345996653797], - ] - ] - ], - }, - } - ], - }, - }, - }, - "pois_multi_isochrone_count_pois": { - "draw": { - "summary": "Count pois with draw", - "value": { - "region_type": "draw", - "region": [ - "POLYGON((11.53605224646383 48.15855242757948,11.546141990292947 48.16035646918763,11.54836104048217 48.15434275044706,11.535497483916524 48.15080357881183,11.526586610500429 48.15300113241156,11.531302092152526 48.15799732509075,11.53605224646383 48.15855242757948))" - ], - "scenario_id": 0, - "modus": "default", - "routing_profile": "walking_standard", - "minutes": 10, - "speed": 5, - "amenities": [ - "kindergarten", - "grundschule", - "hauptschule_mittelschule", - "realschule", - "gymnasium", - "library", - ], - }, - }, - "study_area": { - "summary": "Count pois with study area", - "value": { - "region_type": "study_area", - "region": ["1", "2"], - "scenario_id": 0, - "modus": "default", - "routing_profile": "walking_standard", - "minutes": 10, - "speed": 5, - "amenities": [ - "kindergarten", - "grundschule", - "hauptschule_mittelschule", - "realschule", - "gymnasium", - "library", - ], - }, - }, - }, -} diff --git a/src/schemas/legacy/item.py b/src/schemas/legacy/item.py deleted file mode 100644 index ac992cf..0000000 --- a/src/schemas/legacy/item.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Optional - -from pydantic import BaseModel - - -# Shared properties -class ItemBase(BaseModel): - title: Optional[str] = None - description: Optional[str] = None - - -# Properties to receive on item creation -class ItemCreate(ItemBase): - title: str - - -# Properties to receive on item update -class ItemUpdate(ItemBase): - pass - - -# Properties shared by models stored in DB -class ItemInDBBase(ItemBase): - id: int - title: str - owner_id: int - - class Config: - orm_mode = True - - -# Properties to return to client -class 
Item(ItemInDBBase): - pass - - -# Properties properties stored in DB -class ItemInDB(ItemInDBBase): - pass diff --git a/src/schemas/legacy/layer_library.py b/src/schemas/legacy/layer_library.py deleted file mode 100644 index b7d6971..0000000 --- a/src/schemas/legacy/layer_library.py +++ /dev/null @@ -1,195 +0,0 @@ -from src.db import models -from pydantic import root_validator -from .utils import findkeys -from fastapi import HTTPException -from src.legacy.utils.utils import random_lower_string -from datetime import datetime - - -class RequestExamples: - @property - def single_layer_library(self): - return { - "url": "https://{a-c}.basemaps.somecdn.com/dark_all/{z}/{x}/{y}.png", - "legend_urls": [random_lower_string()], - "special_attribute": {"imagery_set": "Aerial"}, - "access_token": "some_token", - "map_attribution": "Attribution to the source", - "date": str(datetime.today().year), - "source": None, - "date_1": str(datetime.today().year), - "source_1": None, - "style_library_name": None, - "max_resolution": "0", - "min_resolution": "0", - "doc_url": "https://www.plan4better.de", - "name": random_lower_string(), - "type": "BING", - } - - @property - def single_style_library(self): - return { - "translation": { - "red": {"de": "stressig", "en": "stressful"}, - "grey": {"de": "Lücke im Radnetz", "en": "gap in the cycling network"}, - "black": {"de": "sehr stressig", "en": "very stressful"}, - "green": {"de": "komfortabel", "en": "comfortable"}, - "yellow": {"de": "durchschnittlich", "en": "average"}, - }, - "name": random_lower_string(), - "style": { - "name": "munichways", - "rules": [ - { - "name": "green", - "filter": ["==", "farbe", "grün"], - "symbolizers": [ - { - "cap": "square", - "join": "bevel", - "kind": "Line", - "color": "#609e72", - "width": 2, - } - ], - }, - { - "name": "yellow", - "filter": ["==", "farbe", "gelb"], - "symbolizers": [ - { - "cap": "square", - "join": "bevel", - "kind": "Line", - "color": "#edc937", - "width": 2, - } - ], - }, - { - "name": "red", - "filter": ["==", "farbe", "rot"], - "symbolizers": [ - { - "cap": "square", - "join": "bevel", - "kind": "Line", - "color": "#df6235", - "width": 2, - } - ], - }, - { - "name": "black", - "filter": ["==", "farbe", "schwarz"], - "symbolizers": [ - { - "cap": "square", - "join": "bevel", - "kind": "Line", - "color": "#000000", - "width": 2, - } - ], - }, - { - "name": "grey", - "filter": ["==", "farbe", "grau"], - "symbolizers": [ - { - "cap": "square", - "join": "bevel", - "kind": "Line", - "color": "#717070", - "width": 2, - } - ], - }, - ], - }, - } - - -request_examples = RequestExamples() - - -class CreateLayerLibrary(models.LayerLibrary): - @root_validator - def urls_emptiness_vs_type(cls, values): - # according to the issue: https://github.com/goat-community/goat/issues/1328 - url, the_type = values.get("url"), values.get("type") - url_mandatory_types = ("WMS", "OSM", "BING") - if the_type in url_mandatory_types and not url: - raise ValueError("url should not be empty for the layer type %s." % the_type) - - return values - - @root_validator - def legend_urls_emptiness_vs_type(cls, values): - # according to the issue: https://github.com/goat-community/goat/issues/1328 - legend_urls, the_type = values.get("legend_urls"), values.get("type") - legend_url_mandatory_types = ("WMS",) - if the_type in legend_url_mandatory_types and not legend_urls: - raise ValueError("legend_urls should not be empty for the layer type %s." 
% the_type) - - return values - - @root_validator - def style_library_name_emptiness_vs_type(cls, values): - # according to the issue: https://github.com/goat-community/goat/issues/1328 - style_library_name, the_type = values.get("style_library_name"), values.get("type") - legend_url_mandatory_types = ("MVT", "GEOBUF", "WFS") - if the_type in legend_url_mandatory_types and not style_library_name: - raise ValueError( - "style_library_name should not be empty for the layer type %s." % the_type - ) - - return values - - class Config: - schema_extra = {"example": request_examples.single_layer_library} - - -class CreateStyleLibrary(models.StyleLibrary): - @root_validator - def translations_present_for_all_names(cls, values): - style, translation = values.get("style"), values.get("translation") - rules = style.get("rules") - translation_keywords_set = set(findkeys(rules, "name")) - translation_set = set(translation.keys()) - warnings = {} - # Check if all keywords are present in translation - if translation_keywords_set != translation_set: - absent_translations = translation_keywords_set - translation_set - if absent_translations: - warnings["absent_translations"] = list(absent_translations) - - # We can find unneeded translations: - # unneeded_translations = translation_set - translation_keywords_set - # if unneeded_translations: - # warnings["unneeded_translations"] = list(unneeded_translations) - - # Check if all keywords have all translations - all_languages = set() - for key in translation.keys(): - # Collect all languages - all_languages = all_languages.union(set(translation[key].keys())) - - # Search for incomplete translations - incomplete_translations = [] - for key in translation.keys(): - # Is this keyword have translations for all detected languages? - if all_languages - set(translation[key].keys()): - incomplete_translations.append(key) - - if incomplete_translations: - warnings["incomplete_translations"] = incomplete_translations - - if warnings: - raise HTTPException(status_code=400, detail=warnings) - - return values - - class Config: - schema_extra = {"example": request_examples.single_style_library} diff --git a/src/schemas/legacy/mapbox.py b/src/schemas/legacy/mapbox.py deleted file mode 100644 index aa5c413..0000000 --- a/src/schemas/legacy/mapbox.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Tilejson response models.""" - -from enum import Enum -from typing import List, Optional, Tuple - -from pydantic import BaseModel, Field, root_validator - - -class SchemeEnum(str, Enum): - """TileJSON scheme choice.""" - - xyz = "xyz" - tms = "tms" - - -class TileJSON(BaseModel): - """ - TileJSON model. 
- - Based on https://github.com/mapbox/tilejson-spec/tree/master/2.2.0 - - """ - - tilejson: str = "2.2.0" - name: Optional[str] - description: Optional[str] - version: str = "1.0.0" - attribution: Optional[str] - template: Optional[str] - legend: Optional[str] - scheme: SchemeEnum = SchemeEnum.xyz - tiles: List[str] - grids: Optional[List[str]] - data: Optional[List[str]] - minzoom: int = Field(0, ge=0, le=30) - maxzoom: int = Field(30, ge=0, le=30) - bounds: List[float] = [-180, -90, 180, 90] - center: Optional[Tuple[float, float, int]] - - @root_validator - def compute_center(cls, values): - """Compute center if it does not exist.""" - bounds = values["bounds"] - if not values.get("center"): - values["center"] = ( - (bounds[0] + bounds[2]) / 2, - (bounds[1] + bounds[3]) / 2, - values["minzoom"], - ) - return values - - class Config: - """TileJSON model configuration.""" - - use_enum_values = True diff --git a/src/schemas/legacy/opportunity_config.py b/src/schemas/legacy/opportunity_config.py deleted file mode 100644 index d4104e1..0000000 --- a/src/schemas/legacy/opportunity_config.py +++ /dev/null @@ -1,47 +0,0 @@ -from src.db import models -from src.legacy.utils.utils import random_lower_string -from src.crud.crud_study_area import study_area as crud_study_area -from src.crud.crud_opportunity_config import opportunity_group as crud_opportunity_group - - -class RequestExample: - @property - def oportunity_study_area_config(self): - return { - "sensitivity": None, - "multiple_entrance": True, - "opportunity_group_id": 12, - "category": "test_" + random_lower_string(), - "icon": "fas fa-train-subway-tunnel", - "color": ["#E182A5"], - "study_area_id": 83110000, - "is_active": False, - } - - async def async_oportunity_study_area_config(self, db): - study_area = await crud_study_area.get_first(db=db) - if not study_area: - raise ValueError("There is no study area available") - opportunity_groups = await crud_opportunity_group.get_all(db=db) - if opportunity_groups: - opportunity_group = opportunity_groups[0] - else: - raise ValueError("There is no opportunity group available") - return { - "sensitivity": None, - "multiple_entrance": True, - "opportunity_group_id": opportunity_group.id, - "category": "test_" + random_lower_string(), - "icon": "fas fa-train-subway-tunnel", - "color": ["#E182A5"], - "study_area_id": study_area.id, - "is_active": False, - } - - -request_examples = RequestExample() - - -class CreateOpportunityStudyAreaConfig(models.OpportunityStudyAreaConfig): - class Config: - schema_extra = {"example": request_examples.oportunity_study_area_config} diff --git a/src/schemas/legacy/organization.py b/src/schemas/legacy/organization.py deleted file mode 100644 index 050a0e3..0000000 --- a/src/schemas/legacy/organization.py +++ /dev/null @@ -1,26 +0,0 @@ -from sqlmodel import SQLModel - - -class OrganizationBase(SQLModel): - name: str - - -class OrganizationCreate(OrganizationBase): - pass - - -class OrganizationUpdate(OrganizationBase): - pass - - -""" -Body of the request -""" -request_examples = { - "create": { - "name": "Plan4Better", - }, - "update": { - "name": "Plan4Better gmbh", - }, -} diff --git a/src/schemas/legacy/r5.py b/src/schemas/legacy/r5.py deleted file mode 100644 index f617832..0000000 --- a/src/schemas/legacy/r5.py +++ /dev/null @@ -1,173 +0,0 @@ -from datetime import datetime -from typing import List, Optional - -from bson import ObjectId -from pydantic import Field -from sqlmodel import SQLModel - -from src.resources.enums import R5DecayFunctionType - - 
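-# NOTE: a minimal usage sketch for the PyObjectId helper defined below. This is
-# illustrative only and not part of the original module; it assumes pydantic v1
-# with the bson package installed, and the model name DemoDocument is hypothetical.
-#
-#     class DemoDocument(SQLModel):
-#         id: Optional[PyObjectId] = Field(alias="_id")
-#
-#         class Config:
-#             allow_population_by_field_name = True
-#             arbitrary_types_allowed = True
-#             json_encoders = {ObjectId: str}  # render ObjectId as a plain string
-#
-#     doc = DemoDocument(_id="630c0014aad8682ef8461b44")  # validated by PyObjectId
-#     doc.json()  # -> '{"id": "630c0014aad8682ef8461b44"}'
-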
-class PyObjectId(ObjectId): - @classmethod - def __get_validators__(cls): - yield cls.validate - - @classmethod - def validate(cls, v): - if not ObjectId.is_valid(v): - raise ValueError("Invalid objectid") - return ObjectId(v) - - @classmethod - def __modify_schema__(cls, field_schema): - field_schema.update(type="string") - - -class R5Common(SQLModel): - id: Optional[PyObjectId] = Field(alias="_id") - accessGroup: Optional[str] = "local" - nonce: str - name: str - createdAt: datetime - createdBy: Optional[str] = "local" - updatedAt: datetime - updatedBy: Optional[str] = "local" - - class Config: - allow_population_by_field_name = True - arbitrary_types_allowed = True - json_encoders = {ObjectId: str} - - -class R5Bounds(SQLModel): - north: float - south: float - east: float - west: float - - -class R5RegionCreateDTO(SQLModel): - bounds: R5Bounds - name: str - description: str - - -class R5ProjectCreateDTO(SQLModel): - variants: List[str] - regionId: str - bundleId: str - name: str - - -class R5RegionUpdateDTO(SQLModel): - name: str - description: str - - -class R5ProjectUpdateDTO(SQLModel): - id: str - name: str - - -class R5RegionInDB(R5Common): - description: str - bounds: R5Bounds - - -class R5ProjectInDB(R5Common): - variants: List[str] - regionId: str - bundleId: str - - -class R5DecayFunction(SQLModel): - type: Optional[R5DecayFunctionType] = R5DecayFunctionType.logistic - standardDeviationMinutes: Optional[int] = 12 - widthMinutes: Optional[int] = 10 - - -class R5IsochroneAnalysisDTO(SQLModel): - accessModes: str - bikeSpeed: Optional[float] - bikeTrafficStress: Optional[int] - date: Optional[str] = "2022-05-31" - decayFunction: R5DecayFunction - destinationPointSetIds: Optional[List[str]] = [] - directModes: Optional[str] = "WALK" - egressModes: Optional[str] = "WALK" - fromLat: float - fromLon: float - fromTime: int - toTime: int - zoom: Optional[int] = 10 - maxBikeTime: Optional[int] = 20 # minutes - maxRides: Optional[int] = 4 - maxWalkTime: Optional[int] = 20 # minutes - walkSpeed: Optional[float] = 1.3888888888888888 # m/s - monteCarloDraws: Optional[int] = 200 - percentiles: Optional[List[int]] = [5, 25, 50, 75, 95] - transitModes: Optional[str] - variantIndex: Optional[int] = -1 - workerVersion: Optional[str] = "v6.4" - # projectId: "6294f0ae0cfee1c6747d696c" ===> IS SET FROM STUDY ARE ON THE FLY #TODO: - # bounds: R5Bounds ===> ARE SET FROM STUDY ARE ON THE FLY #TODO: - - -request_examples = { - "region": { - "create": { - "name": "Freiburg", - "description": "Demo - Freiburg Region for PT", - "bounds": {"north": 48.11293, "south": 47.87214, "east": 8.06671, "west": 7.66296}, - }, - "update": {"name": "Freiburg", "description": "Demo - Freiburg Region for PT (UPDATE)"}, - }, - "bundle": { - "create": { - "bundleName": "Demo Bundle", - "osm": "", - "feedGroup": "", # binary - "regionId": "5e8f8f8f8f8f8f8f8f8f8f8", - } - }, - "project": { - "create": { - "name": "Demo Project", - "bundleId": "5e8f8f8f8f8f8f8f8f8f8f8", - "regionId": "5e8f8f8f8f8f8f8f8f8f8f8", - "variants": ["Default"], - }, - "update": { - "name": "Demo Project (UPDATE)", - }, - }, - "analysis": { - "accessModes": "WALK", - "bikeSpeed": 4.166666666666667, - "bikeTrafficStress": 4, - "date": "2022-06-10", - "decayFunction": { - "type": "logistic", - "standardDeviationMinutes": 12, - "widthMinutes": 10, - }, - "destinationPointSetIds": [], - "directModes": "WALK", - "egressModes": "WALK", - "fromLat": 48.11724008376831, - "fromLon": 11.54651520171336, - "fromTime": 25200, - "toTime": 39600, - "zoom": 9, - 
"maxBikeTime": 20, - "maxRides": 4, - "maxWalkTime": 20, - "monteCarloDraws": 200, - "percentiles": [5, 25, 50, 75, 95], - "transitModes": "BUS,TRAM,SUBWAY,RAIL", - "variantIndex": -1, - "walkSpeed": 1.3888888888888888, - "workerVersion": "v6.4", - }, -} diff --git a/src/schemas/legacy/role.py b/src/schemas/legacy/role.py deleted file mode 100644 index c219018..0000000 --- a/src/schemas/legacy/role.py +++ /dev/null @@ -1,26 +0,0 @@ -from sqlmodel import SQLModel - - -class RoleBase(SQLModel): - name: str - - -class RoleCreate(RoleBase): - pass - - -class RoleUpdate(RoleBase): - pass - - -""" -Body of the request -""" -request_examples = { - "create": { - "name": "role_test", - }, - "update": { - "name": "role_update", - }, -} diff --git a/src/schemas/legacy/study_area.py b/src/schemas/legacy/study_area.py deleted file mode 100644 index 6848718..0000000 --- a/src/schemas/legacy/study_area.py +++ /dev/null @@ -1,116 +0,0 @@ -from pydantic import BaseModel, validator -from typing import Optional, List, Dict -from src import crud -from src.db.session import sync_session - - -groups_example_data = """ -{ - "layer_groups": [ - { - "buildings_landuse": [ - "building", - "landuse_atkis", - "landuse_osm" - ] - }, - { - "environmental_quality": [ - "bayern_laerm_ballungszentren_strassen_tag", - "bayern_laerm_ballungszentren_strassen_nacht", - "bayern_oekoflaechenkataster", - "bayern_schutzgebiete_naturschutz", - "bayern_biotop_kartierung" - ] - }, - { - "additional_data": [ - "accidents_pedestrians", - "accidents_cyclists" - ] - } - ] -} -""" - -GROUP_ORDER = [ - "buildings_landuse", - "street_level_quality", - "environmental_quality", - "additional_data", - "basemap", - "heatmap", - "indicator", -] - - -# Used for output, Doesn't fetch database -class LayerGroupBase(BaseModel): - buildings_landuse: Optional[List[str]] = [] - street_level_quality: Optional[List[str]] = [] - environmental_quality: Optional[List[str]] = [] - additional_data: Optional[List[str]] = [] - basemap: Optional[List[str]] = [] - heatmap: Optional[List[str]] = [] - indicator: Optional[List[str]] = [] - - def listify_config(self) -> Dict: - """ - Convert me to listing config to save into databse - """ - out_config = [] - for group in GROUP_ORDER: - value = getattr(self, group) - if value: - out_config.append({group: value}) - return out_config - - -# Used for output, Doesn't fetch database -class StudyAreaSettingsBase(BaseModel): - layer_groups: List[LayerGroupBase] - - -class LayerGroup(LayerGroupBase): - @validator("buildings_landuse") - def validate_layer_names(cls, v): - """ - Check if layers are in available layer libraries - """ - valid_layers = cls.get_valid_layers() - for layer_name in v: - if layer_name not in valid_layers: - raise ValueError(f"layer {layer_name} is not a valid layer") - - return v - - @classmethod - def get_valid_layers(cls): - """ - Fetch corrently available layer libraries - """ - if hasattr(cls, "valid_layers"): - return cls.valid_layers - else: - cls.valid_layers = crud.layer_library.get_all_layer_names(db=sync_session()) - return cls.valid_layers - - -class StudyAreaSettings(BaseModel): - layer_groups: List[LayerGroup] - - -def pydantify_config(in_config: Dict, validate: bool = True) -> LayerGroup: - """ - Convert database-saved config into Pydantic (Remove listing) - """ - temp_config = {} - for group in in_config.get("layer_groups"): - for key in group.keys(): - temp_config[key] = group[key] - - LayerGroup_ = LayerGroup - if not validate: - LayerGroup_ = LayerGroupBase - out_config = 
LayerGroup_(**temp_config) - return out_config diff --git a/src/schemas/legacy/style.py b/src/schemas/legacy/style.py deleted file mode 100644 index d2968bf..0000000 --- a/src/schemas/legacy/style.py +++ /dev/null @@ -1,22 +0,0 @@ -from uuid import UUID - -from pydantic import Field - -from src.db.models.style import Style, StyleBase - - -class StyleCreate(StyleBase): - pass - - -class StyleUpdate(StyleBase): - name: str | None = Field(None, description="Content name") - description: str | None = Field(None, description="Content description") - tags: list | None = Field(None, description="Content tags") - thumbnail_url: str | None = Field(None, description="Content thumbnail URL") - style: dict | None = Field(None, description="Style object in the geostyler format") - owner_id: UUID | None = Field(None, description="Content owner ID") - - -class StyleRead(Style): - pass diff --git a/src/schemas/legacy/system.py b/src/schemas/legacy/system.py deleted file mode 100644 index 49fb142..0000000 --- a/src/schemas/legacy/system.py +++ /dev/null @@ -1,7 +0,0 @@ -from pydantic import BaseModel - -from src.resources.enums import SystemStatus - - -class SystemStatusModel(BaseModel): - status: SystemStatus diff --git a/src/schemas/legacy/token.py b/src/schemas/legacy/token.py deleted file mode 100644 index ea85b46..0000000 --- a/src/schemas/legacy/token.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Optional - -from pydantic import BaseModel - - -class Token(BaseModel): - access_token: str - token_type: str - - -class TokenPayload(BaseModel): - sub: Optional[int] = None diff --git a/src/schemas/legacy/upload.py b/src/schemas/legacy/upload.py deleted file mode 100644 index ce5c739..0000000 --- a/src/schemas/legacy/upload.py +++ /dev/null @@ -1,9 +0,0 @@ -from pydantic import BaseModel - - -class CutomDataUploadState(BaseModel): - data_upload_id: int - state: bool - - -request_examples = {"poi_category": "french_bakery", "delete_upload": 1} diff --git a/src/schemas/legacy/user.py b/src/schemas/legacy/user.py deleted file mode 100644 index 5b6c15f..0000000 --- a/src/schemas/legacy/user.py +++ /dev/null @@ -1,129 +0,0 @@ -from typing import List, Optional - -from pydantic import BaseModel, EmailStr - -from src.db import models -from src.resources.enums import LanguageEnum - - -class UserBase(models.user.UserBase): - roles: List[str] = [] - study_areas: List[int] = [] - - -class UserCreate(UserBase): - password: str - is_active: bool = False - language_preference: Optional[LanguageEnum] = LanguageEnum.de - newsletter: bool = False - occupation: Optional[str] - domain: Optional[str] - - -class UserCreateDemo(BaseModel): - name: str - surname: str - email: EmailStr - password: str - newsletter: bool = False - occupation: str - domain: str - language_preference: Optional[LanguageEnum] = LanguageEnum.de - - class Config: - extra = "forbid" - - -class UserUpdate(UserBase): - name: Optional[str] = None - surname: Optional[str] = None - email: EmailStr = None - password: Optional[str] = None - organization_id: Optional[int] = None - roles: Optional[List[str]] = None - study_areas: Optional[List[int]] = None - is_active: bool = False - - -class UserStudyAreaList(BaseModel): - id: int - name: str - - -class UserPreference(BaseModel): - language_preference: Optional[LanguageEnum] - active_study_area_id: Optional[int] - - class Config: - extra = "forbid" - - -""" -Body of the request -""" -request_examples = { - "create": { - "name": "John", - "surname": "Doe", - "email": "john.doe@email.com", - "password": 
"secret", - "roles": ["user"], - "study_areas": [91620000], # muenchen - "active_study_area_id": 91620000, - "organization_id": 4, - "active_data_upload_ids": [], - "newsletter": False, - "occupation": "Student", - "domain": "Urban Planning", - "is_active": True, - "storage": 512000, - "limit_scenarios": 50, - "language_preference": "de", - }, - "update": { - "name": "Kevin", - "surname": "Cross", - "email": "kevin.cross@email.com", - "password": "secret", - "roles": ["user"], - "study_areas": [91620000], - "active_study_area_id": 91620000, - "organization_id": 4, - "active_data_upload_ids": [], - "is_active": True, - "storage": 512000, - "limit_scenarios": 50, - "language_preference": "de", - }, - "create_demo_user": { - "name": "John", - "surname": "Doe", - "email": "john.doe@email.com", - "password": "secret", - "newsletter": False, - "occupation": "Student", - "domain": "Urban Planning", - "language_preference": "de", - }, - "update_user_preference": { - "language_preference": { - "summary": "Update language preference", - "value": { - "language_preference": "en", - }, - }, - "study_area_preference": { - "summary": "Update study area preference", - "value": { - "active_study_area_id": 1, - }, - }, - "language_study_area_preference": { - "summary": "Both language and study area preferences", - "value": { - "language_preference": "en", - "active_study_area_id": 1, - }, - }, - }, -} diff --git a/src/schemas/legacy/vector_tile.py b/src/schemas/legacy/vector_tile.py deleted file mode 100644 index 42c57ec..0000000 --- a/src/schemas/legacy/vector_tile.py +++ /dev/null @@ -1,174 +0,0 @@ -# MIT License - -# Copyright (c) 2020 Development Seed -# Copyright (c) 2021 Plan4Better -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. - -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -import abc -from dataclasses import dataclass -from typing import Any, ClassVar, Dict, List, Optional - -from pydantic import BaseModel, Field -from pydantic.class_validators import root_validator -from pydantic.networks import AnyHttpUrl - -from src.core.config import settings -from src.resources.enums import MimeTypes - - -# TODO: Refactor this part to use with the new schemas further down -# =========================VECTOR TILE SCHEMAS========================= -class ExternalVectorTile(BaseModel, metaclass=abc.ABCMeta): - """Layer's Abstract BaseClass. - Attributes: - id (str): Layer's name. - bounds (list): Layer's bounds (left, bottom, right, top). - minzoom (int): Layer's min zoom level. - maxzoom (int): Layer's max zoom level. 
- tileurl (str, optional): Layer's tiles url. - - """ - - id: str - bounds: List[float] = [-180, -90, 180, 90] - minzoom: int = settings.DEFAULT_MINZOOM - maxzoom: int = settings.DEFAULT_MAXZOOM - tileurl: Optional[str] - - -class VectorTileTable(ExternalVectorTile): - """Table Reader. - Attributes: - id (str): Layer's name. - bounds (list): Layer's bounds (left, bottom, right, top). - minzoom (int): Layer's min zoom level. - maxzoom (int): Layer's max zoom level. - tileurl (str, optional): Layer's tiles url. - type (str): Layer's type. - schema (str): Table's database schema (e.g public). - geometry_type (str): Table's geometry type (e.g polygon). - geometry_column (str): Name of the geomtry column in the table. - properties (Dict): Properties available in the table. - """ - - type: str = "Table" - dbschema: str = Field(..., alias="schema") - table: str - geometry_type: str - geometry_column: str - properties: Dict[str, str] - - -class VectorTileFunction(VectorTileTable): - """Function Reader. - Attributes: - id (str): Layer's name. - bounds (list): Layer's bounds (left, bottom, right, top). - minzoom (int): Layer's min zoom level. - maxzoom (int): Layer's max zoom level. - tileurl (str, optional): Layer's tiles url. - type (str): Layer's type. - function_name (str): Nane of the SQL function to call. Defaults to `id`. - sql (str): Valid SQL function which returns VectorTile data. - options (list, optional): options available for the SQL function. - """ - - type: str = "Function" - sql: str - function_name: Optional[str] - options: Optional[List[Dict[str, Any]]] - - @root_validator - def function_name_default(cls, values): - """Define default function's name to be same as id.""" - function_name = values.get("function_name") - if function_name is None: - values["function_name"] = values.get("id") - return values - - @classmethod - def from_file(cls, id: str, infile: str, **kwargs: Any): - """load sql from file""" - with open(infile) as f: - sql = f.read() - - return cls(id=id, sql=sql, **kwargs) - - -class TileMatrixSetLink(BaseModel): - """ - TileMatrixSetLink model. - - Based on http://docs.opengeospatial.org/per/19-069.html#_tilematrixsets - - """ - - href: AnyHttpUrl - rel: str = "item" - type: MimeTypes = MimeTypes.json - - class Config: - """Config for model.""" - - use_enum_values = True - - -class TileMatrixSetRef(BaseModel): - """ - TileMatrixSetRef model. - - Based on http://docs.opengeospatial.org/per/19-069.html#_tilematrixsets - - """ - - id: str - title: str - links: List[TileMatrixSetLink] - - -class TileMatrixSetList(BaseModel): - """ - TileMatrixSetList model. 
- - Based on http://docs.opengeospatial.org/per/19-069.html#_tilematrixsets - - """ - - tileMatrixSets: List[TileMatrixSetRef] - - -@dataclass -class Registry: - """function registry""" - - funcs: ClassVar[Dict[str, VectorTileFunction]] = {} - - @classmethod - def get(cls, key: str): - """lookup function by name""" - return cls.funcs.get(key) - - @classmethod - def register(cls, *args: VectorTileFunction): - """register function(s)""" - for func in args: - cls.funcs[func.id] = func - - -registry = Registry() diff --git a/src/schemas/legacy/way.py b/src/schemas/legacy/way.py deleted file mode 100644 index c7581a5..0000000 --- a/src/schemas/legacy/way.py +++ /dev/null @@ -1 +0,0 @@ -hidden_props_client = ["id", "scenario_id", "foot", "bicycle", "lit", "edit_type"] From e9cf0ed3a34b45184cb52ed34696c7fdb829138a Mon Sep 17 00:00:00 2001 From: EPajares Date: Sun, 15 Sep 2024 14:22:20 +0000 Subject: [PATCH 6/8] Deleting legacy DB functions --- .../legacy/active_data_uploads_study_area.sql | 30 - .../functions/legacy/active_opportunities.sql | 58 -- .../legacy/active_opportunities_json.sql | 41 -- .../functions/legacy/coordinate_to_pixel.sql | 23 - .../legacy/count_pois_multi_isochrones.sql | 80 --- .../legacy/create_artificial_edges.sql | 56 -- .../legacy/create_intersection_line.sql | 10 - .../create_multiple_artificial_edges.sql | 188 ------ .../legacy/create_perpendicular_line.sql | 13 - .../legacy/create_snapped_split_line.sql | 50 -- src/db/sql/functions/legacy/extend_line.sql | 55 -- .../legacy/fetch_network_routing.sql | 46 -- .../legacy/fetch_network_routing_heatmap.sql | 86 --- .../legacy/fetch_network_routing_multi.sql | 40 -- .../legacy/fix_multiple_artificial_edges.sql | 34 -- .../legacy/get_reference_study_area.sql | 21 - .../legacy/heatmap_prepare_artificial.sql | 51 -- .../functions/legacy/modified_buildings.sql | 29 - .../sql/functions/legacy/modified_edges.sql | 29 - src/db/sql/functions/legacy/modified_pois.sql | 28 - .../functions/legacy/network_modification.sql | 562 ------------------ .../legacy/poi_aoi_visualization.sql | 115 ---- .../sql/functions/legacy/poi_categories.sql | 28 - .../legacy/poi_categories_data_uploads.sql | 40 -- .../legacy/population_modification.sql | 51 -- .../functions/legacy/query_edges_routing.sql | 96 --- .../legacy/reachable_population_polygon.sql | 66 -- .../reachable_population_study_area.sql | 59 -- .../legacy/reached_population_study_area.sql | 124 ---- .../functions/legacy/select_customization.sql | 31 - .../legacy/select_user_customization.sql | 17 - .../functions/legacy/split_by_drawn_lines.sql | 37 -- .../starting_points_multi_isochrones.sql | 72 --- .../functions/legacy/thematic_data_sum.sql | 180 ------ 34 files changed, 2446 deletions(-) delete mode 100644 src/db/sql/functions/legacy/active_data_uploads_study_area.sql delete mode 100644 src/db/sql/functions/legacy/active_opportunities.sql delete mode 100644 src/db/sql/functions/legacy/active_opportunities_json.sql delete mode 100644 src/db/sql/functions/legacy/coordinate_to_pixel.sql delete mode 100644 src/db/sql/functions/legacy/count_pois_multi_isochrones.sql delete mode 100644 src/db/sql/functions/legacy/create_artificial_edges.sql delete mode 100644 src/db/sql/functions/legacy/create_intersection_line.sql delete mode 100644 src/db/sql/functions/legacy/create_multiple_artificial_edges.sql delete mode 100644 src/db/sql/functions/legacy/create_perpendicular_line.sql delete mode 100644 src/db/sql/functions/legacy/create_snapped_split_line.sql delete mode 100644 
src/db/sql/functions/legacy/extend_line.sql delete mode 100644 src/db/sql/functions/legacy/fetch_network_routing.sql delete mode 100644 src/db/sql/functions/legacy/fetch_network_routing_heatmap.sql delete mode 100644 src/db/sql/functions/legacy/fetch_network_routing_multi.sql delete mode 100644 src/db/sql/functions/legacy/fix_multiple_artificial_edges.sql delete mode 100644 src/db/sql/functions/legacy/get_reference_study_area.sql delete mode 100644 src/db/sql/functions/legacy/heatmap_prepare_artificial.sql delete mode 100644 src/db/sql/functions/legacy/modified_buildings.sql delete mode 100644 src/db/sql/functions/legacy/modified_edges.sql delete mode 100644 src/db/sql/functions/legacy/modified_pois.sql delete mode 100644 src/db/sql/functions/legacy/network_modification.sql delete mode 100644 src/db/sql/functions/legacy/poi_aoi_visualization.sql delete mode 100644 src/db/sql/functions/legacy/poi_categories.sql delete mode 100644 src/db/sql/functions/legacy/poi_categories_data_uploads.sql delete mode 100644 src/db/sql/functions/legacy/population_modification.sql delete mode 100644 src/db/sql/functions/legacy/query_edges_routing.sql delete mode 100644 src/db/sql/functions/legacy/reachable_population_polygon.sql delete mode 100644 src/db/sql/functions/legacy/reachable_population_study_area.sql delete mode 100644 src/db/sql/functions/legacy/reached_population_study_area.sql delete mode 100644 src/db/sql/functions/legacy/select_customization.sql delete mode 100644 src/db/sql/functions/legacy/select_user_customization.sql delete mode 100644 src/db/sql/functions/legacy/split_by_drawn_lines.sql delete mode 100644 src/db/sql/functions/legacy/starting_points_multi_isochrones.sql delete mode 100644 src/db/sql/functions/legacy/thematic_data_sum.sql diff --git a/src/db/sql/functions/legacy/active_data_uploads_study_area.sql b/src/db/sql/functions/legacy/active_data_uploads_study_area.sql deleted file mode 100644 index 9a692b4..0000000 --- a/src/db/sql/functions/legacy/active_data_uploads_study_area.sql +++ /dev/null @@ -1,30 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.active_data_uploads_study_area(user_id_input integer) - RETURNS integer[] - LANGUAGE plpgsql -AS $function$ -DECLARE - upload_ids integer[]; - area_id integer; - upload_id integer; - poi_user_category TEXT; - valid_ids integer[]; -BEGIN - SELECT u.active_data_upload_ids, u.active_study_area_id - INTO upload_ids, area_id - FROM customer.USER u - WHERE u.id = user_id_input; - - SELECT ARRAY_AGG(d.id) - INTO valid_ids - FROM customer.data_upload d, (SELECT UNNEST(upload_ids) id) u - WHERE d.study_area_id = area_id - AND d.id = u.id; - - RETURN valid_ids; - -END ; -$function$ - -/* -SELECT * FROM basic.active_data_uploads_study_area(4) -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/active_opportunities.sql b/src/db/sql/functions/legacy/active_opportunities.sql deleted file mode 100644 index 79436f1..0000000 --- a/src/db/sql/functions/legacy/active_opportunities.sql +++ /dev/null @@ -1,58 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.active_opportunities(user_id_input integer, active_study_area_id integer) - RETURNS TABLE(category TEXT, category_group TEXT, icon TEXT, color TEXT[], multiple_entrance bool, data_upload_id integer, sensitivity integer) - LANGUAGE plpgsql -AS $function$ -DECLARE - active_data_upload_ids integer[] = (SELECT active_data_upload_ids FROM customer.USER WHERE id = user_id_input); -BEGIN - RETURN QUERY - WITH first_combination AS - ( - SELECT CASE WHEN u.category IS NULL THEN s.category ELSE 
u.category END AS category, - CASE WHEN u.GROUP IS NULL THEN s."group" ELSE u."group" END AS "group", - CASE WHEN u.icon IS NULL THEN s.icon ELSE u.icon END AS icon, - CASE WHEN u.color IS NULL THEN s.color ELSE u.color END AS color, s.is_active, u.data_upload_id, - CASE WHEN u.multiple_entrance IS NULL THEN s.multiple_entrance END AS multiple_entrance, - CASE WHEN u.sensitivity IS NULL THEN s.sensitivity END AS sensitivity - FROM ( - SELECT y.*, g."group" - FROM basic.opportunity_study_area_config y, basic.opportunity_group g - WHERE y.study_area_id = active_study_area_id - AND g.id = y.opportunity_group_id - ) s - FULL JOIN ( - SELECT x.*, g."group" - FROM customer.opportunity_user_config x, basic.opportunity_group g - WHERE x.user_id = user_id_input - AND x.study_area_id = active_study_area_id - AND (x.data_upload_id IN (SELECT UNNEST(ARRAY[active_data_upload_ids])) OR x.data_upload_id IS NULL) - AND g.id = x.opportunity_group_id - ) u - ON s.category = u.category - ), - second_combination AS - ( - SELECT CASE WHEN s.category IS NULL THEN o.category ELSE s.category END AS category, - CASE WHEN s."group" IS NULL THEN o."group" ELSE s."group" END AS "group", - CASE WHEN s.icon IS NULL THEN o.icon ELSE s.icon END AS icon, - CASE WHEN s.color IS NULL THEN o.color ELSE s.color END AS color, s.is_active, s.data_upload_id, - CASE WHEN s.multiple_entrance IS NULL THEN o.multiple_entrance END AS multiple_entrance, - CASE WHEN s.sensitivity IS NULL THEN o.sensitivity END AS sensitivity - FROM ( - SELECT x.*, g."group" - FROM basic.opportunity_default_config x, basic.opportunity_group g - WHERE g.id = x.opportunity_group_id - ) o - FULL JOIN first_combination s - ON o.category = s.category - ) - SELECT s.category, s."group", s.icon, s.color, s.multiple_entrance, s.data_upload_id, s.sensitivity - FROM second_combination s - WHERE (is_active IS NULL OR is_active IS TRUE); - -END ; -$function$ - -/* -SELECT * FROM basic.active_opportunities(40, 91620000) -*/ diff --git a/src/db/sql/functions/legacy/active_opportunities_json.sql b/src/db/sql/functions/legacy/active_opportunities_json.sql deleted file mode 100644 index 682fad8..0000000 --- a/src/db/sql/functions/legacy/active_opportunities_json.sql +++ /dev/null @@ -1,41 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.active_opportunities_json(opportunity_type text, user_id_input integer, active_study_area_id integer) - RETURNS json - LANGUAGE plpgsql -AS $function$ -DECLARE - settings json; -BEGIN - - WITH prepared_json AS - ( - SELECT category_group AS "group", category, json_build_object('icon', icon, 'color', color) AS icon_color, - CASE WHEN multiple_entrance IS NULL THEN '{}'::json ELSE json_build_object('multiple_entrance', multiple_entrance) END AS multiple_entrance, - CASE WHEN sensitivity IS NULL THEN '{}'::json ELSE json_build_object('sensitivity', sensitivity) END AS sensitivity - FROM basic.active_opportunities(user_id_input, active_study_area_id) - ), - grouped_categories AS - ( - SELECT "group", json_agg(json_build_object(category, (icon_color::jsonb || multiple_entrance::jsonb || sensitivity::jsonb)::json)) AS children - FROM prepared_json - GROUP BY "group" - ), - sorted_as_group AS - ( - SELECT json_build_object(o."group", json_build_object('icon', o.icon, 'color', o.color, 'children', g.children)) AS opportunity_groups - FROM basic.opportunity_group o, grouped_categories g - WHERE o."group" = g."group" - AND o.type = opportunity_type - ORDER BY o.id - ) - SELECT json_agg(opportunity_groups) - INTO settings - FROM sorted_as_group; - - 
RETURN settings; - -END ; -$function$ - -/* -SELECT basic.active_opportunities_json('poi', 40, 91620000) -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/coordinate_to_pixel.sql b/src/db/sql/functions/legacy/coordinate_to_pixel.sql deleted file mode 100644 index 0d72aaf..0000000 --- a/src/db/sql/functions/legacy/coordinate_to_pixel.sql +++ /dev/null @@ -1,23 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.coordinate_to_pixel(lat double precision, lon double precision, zoom integer) - RETURNS integer[] - LANGUAGE plpgsql - IMMUTABLE -AS $function$ -DECLARE - invCos float; - tan float; - ln float; - lon_pixel integer; - lat_pixel integer; - rds float; -BEGIN - rds = (lat * pi()) / 180; - invCos = 1 / cos(rds); - tan = tan(rds); - ln = ln(tan + invCos); - lat_pixel = ((1 - (ln / pi())) * pow(2, zoom - 1) * 256); - lon_pixel = ((lon + 180) / 360 * pow(2, zoom) * 256); - RETURN ARRAY[lat_pixel, lon_pixel]; -END -$function$ -; diff --git a/src/db/sql/functions/legacy/count_pois_multi_isochrones.sql b/src/db/sql/functions/legacy/count_pois_multi_isochrones.sql deleted file mode 100644 index 5488991..0000000 --- a/src/db/sql/functions/legacy/count_pois_multi_isochrones.sql +++ /dev/null @@ -1,80 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.count_pois_multi_isochrones(userid_input integer, modus text, minutes integer, speed_input numeric, region_type text, region text[], amenities text[], scenario_id_input integer DEFAULT 0, active_upload_ids integer[] DEFAULT '{}'::integer[]) - RETURNS TABLE(region_name text, count_pois integer, geom geometry) - LANGUAGE plpgsql -AS $function$ -DECLARE - buffer_geom geometry; - region_geom geometry; - region_name text; - excluded_pois_id text[] := ARRAY[]::text[]; - data_upload_poi_categories text[]; - detour_factor numeric = 0.8; -BEGIN - - data_upload_poi_categories = basic.poi_categories_data_uploads(userid_input); - - IF data_upload_poi_categories IS NULL THEN - data_upload_poi_categories = '{}'::text[]; - END IF; - - IF modus = 'scenario' THEN - excluded_pois_id = basic.modified_pois(scenario_id_input); - END IF; - - IF region_type = 'study_area' THEN - SELECT st_union(s.geom), array_agg(name)::TEXT - INTO region_geom, region_name - FROM basic.sub_study_area s - WHERE id IN (SELECT UNNEST(region::integer[])); - ELSEIF region_type = 'draw' THEN - SELECT ST_GeomFromText(region[1]) - INTO region_geom; - region_name = 'draw'; - ELSE - RAISE EXCEPTION 'Unknown region type: %', region_type; - END IF; - buffer_geom = ST_Buffer(region_geom::geography, speed_input * 60 * minutes * detour_factor)::geometry; - - RETURN query - WITH intersected_pois AS ( - SELECT p.id - FROM basic.poi p - WHERE ST_Intersects(buffer_geom, p.geom) - AND p.category IN (SELECT UNNEST(amenities)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories)) - UNION ALL - SELECT p.id - FROM customer.poi_user p - WHERE ST_Intersects(buffer_geom, p.geom) - AND p.category IN (SELECT UNNEST(amenities)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND p.data_upload_id IN (SELECT UNNEST(active_upload_ids)) - UNION ALL - SELECT p.id - FROM customer.poi_modified p - WHERE ST_Intersects(buffer_geom, p.geom) - AND p.category IN (SELECT UNNEST(amenities)) - AND p.scenario_id = scenario_id_input - ), - count_pois AS - ( - SELECT count(*) AS cnt - FROM intersected_pois - ) - SELECT region_name, c.cnt::integer, - region_geom - FROM count_pois c; -END -$function$ -; - -/* Example with starting point to find study_area -SELECT 
* FROM basic.count_pois_multi_isochrones(1,'scenario',10,1.33,'study_area', -'POINT(11.570115749093093 48.15360025891228)', ARRAY['bar','restaurant','pub','french_supermarket','fancy_market'], 1, ARRAY[3]); - - * Example with drawn polygon -SELECT * FROM basic.count_pois_multi_isochrones(1,'scenario',10,1.33,'draw', -'POLYGON ((11.570115749093093 48.15360025891228, 11.570274296106232 48.1518693270582, 11.572708788648153 48.15118483030911, 11.574984827528402 48.15223125586774, 11.574826384986741 48.15396220424526, 11.57239179909107 48.154646710542, 11.570115749093093 48.15360025891228))', -ARRAY['bar','restaurant','pub','french_supermarket','fancy_market'], 1, ARRAY[3]); - */ diff --git a/src/db/sql/functions/legacy/create_artificial_edges.sql b/src/db/sql/functions/legacy/create_artificial_edges.sql deleted file mode 100644 index 2514e62..0000000 --- a/src/db/sql/functions/legacy/create_artificial_edges.sql +++ /dev/null @@ -1,56 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.create_artificial_edges(sql_network text, point geometry, snap_distance integer, new_node_id integer, line_part1_id integer) - RETURNS TABLE(wid integer, id integer, cost double precision, reverse_cost double precision, length_m double precision, source integer, target integer, fraction double precision, geom geometry) - LANGUAGE plpgsql -AS $function$ -DECLARE - sql_start_vertices TEXT; - rec record; - line_part1 geometry; - line_part2 geometry; - length_part1 float; - length_part2 float; - line_part2_id integer; - buffer geometry; - total_length_m float; -BEGIN - - buffer = ST_Buffer(point::geography, snap_distance)::geometry; - sql_start_vertices = 'SELECT ST_LineLocatePoint(geom,$2) AS fraction, - w.geom AS w_geom, w.SOURCE, w.target, w.COST, w.reverse_cost, $3 AS vid, w.id AS wid - FROM - ( - %1$s - ) w - WHERE $1 && geom - ORDER BY ST_CLOSESTPOINT(geom,$2) <-> $2 - LIMIT 1;'; - - - EXECUTE format(sql_start_vertices, sql_network) USING buffer, point, new_node_id INTO rec; - - total_length_m = ST_LENGTH(rec.w_geom::geography); - line_part1 = ST_LINESUBSTRING(rec.w_geom,0,rec.fraction); - line_part2 = ST_LINESUBSTRING(rec.w_geom,rec.fraction,1); - length_part1 = ST_Length(line_part1::geography); - length_part2 = total_length_m - length_part1; - line_part2_id = line_part1_id - 1; - - RETURN query - WITH pair_artificial AS ( - SELECT rec.wid, line_part1_id AS id, - rec.COST * (length_part1 / total_length_m) AS COST, - rec.reverse_cost * (length_part1 / total_length_m) AS reverse_cost, - length_part1 AS length_m, rec.SOURCE, rec.vid AS target, rec.fraction AS fraction, line_part1 AS geom - UNION ALL - SELECT rec.wid, line_part2_id AS id, - rec.COST * (length_part2 / total_length_m) AS COST, - rec.reverse_cost * (length_part2 / total_length_m) AS reverse_cost, - length_part2, rec.vid AS SOURCE, rec.target, rec.fraction AS fraction, line_part2 AS geom - ) - SELECT p.wid, p.id, p.COST, p.reverse_cost, p.length_m, p.SOURCE, p.target, p.fraction, - CASE WHEN ST_Geometrytype(p.geom) = 'ST_Point' THEN ST_MAKELINE(p.geom, p.geom) ELSE p.geom END AS geom - FROM pair_artificial p; - --WHERE p.COST <> 0; -END -$function$ -; diff --git a/src/db/sql/functions/legacy/create_intersection_line.sql b/src/db/sql/functions/legacy/create_intersection_line.sql deleted file mode 100644 index 507b3a6..0000000 --- a/src/db/sql/functions/legacy/create_intersection_line.sql +++ /dev/null @@ -1,10 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.create_intersection_line(point_geom geometry, length_line double precision) - RETURNS SETOF geometry - LANGUAGE 
sql - IMMUTABLE -AS $function$ - SELECT ST_SETSRID(ST_MAKELINE( - ST_Translate(point_geom, length_line, length_line), - ST_Translate(point_geom, -length_line, -length_line) - ), 4326) -$function$; \ No newline at end of file diff --git a/src/db/sql/functions/legacy/create_multiple_artificial_edges.sql b/src/db/sql/functions/legacy/create_multiple_artificial_edges.sql deleted file mode 100644 index f34b004..0000000 --- a/src/db/sql/functions/legacy/create_multiple_artificial_edges.sql +++ /dev/null @@ -1,188 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.create_multiple_artificial_edges(x float[], y float[], max_cutoff float, speed float, modus text, scenario_id integer, -routing_profile text, grid_ids text[] DEFAULT NULL) - RETURNS VOID - LANGUAGE plpgsql -AS $function$ -DECLARE - buffer_starting_point geometry; - union_buffer_network geometry; - point geometry; - snap_distance_network integer; - cnt_starting_points integer := 0; - length_starting_points integer := array_length(x, 1); - max_new_node_id integer := 2147483647; - max_new_edge_id integer := 2147483647; - current_grid_id text; - setting_study_area_id integer; -BEGIN - /*Prepare temporary tables*/ - DROP TABLE IF EXISTS artificial_edges; - CREATE TEMP TABLE artificial_edges ( - wid integer, - id integer, - COST float, - reverse_cost float, - length_m float, - SOURCE integer, - target integer, - fraction float, - geom geometry, - vid integer, - point_geom geometry, - grid_id text - ); - - setting_study_area_id = basic.get_reference_study_area(ST_SETSRID(ST_MAKEPOINT(x[1], y[1]), 4326)); - snap_distance_network = basic.select_customization('snap_distance_network', setting_study_area_id)::integer; - - DROP TABLE IF EXISTS duplicated_artificial_edges; - CREATE TEMP TABLE duplicated_artificial_edges (LIKE artificial_edges); - - DROP TABLE IF EXISTS buffer_network; - CREATE TEMP TABLE buffer_network (id serial, geom geometry); - - /*Loop through starting points*/ - WHILE cnt_starting_points < length_starting_points - LOOP - cnt_starting_points = cnt_starting_points + 1; - IF grid_ids IS NOT NULL THEN - current_grid_id = grid_ids[cnt_starting_points]; - ELSE - current_grid_id = NULL; - END IF; - - point = ST_SETSRID(ST_POINT(x[cnt_starting_points],y[cnt_starting_points]), 4326); - - SELECT ST_SETSRID(ST_Buffer(point::geography,snap_distance_network)::geometry, 4326) - INTO buffer_starting_point; - - INSERT INTO artificial_edges - SELECT c.*, max_new_node_id, point AS point_geom, current_grid_id - FROM basic.create_artificial_edges(basic.query_edges_routing(ST_ASTEXT(buffer_starting_point),modus,scenario_id,speed,routing_profile,FALSE), - point, snap_distance_network, max_new_node_id, max_new_edge_id - ) c; - - INSERT INTO buffer_network(geom) - SELECT ST_Buffer(point::geography,max_cutoff * speed)::geometry; - - max_new_node_id = max_new_node_id - 1; - max_new_edge_id = max_new_edge_id - 2; - END LOOP; - - union_buffer_network = (SELECT ST_UNION(b.geom) FROM buffer_network b); - - DROP TABLE IF EXISTS starting_vertices; - CREATE TEMP TABLE starting_vertices (id integer, geom geometry, grid_id text); - - /*Identify duplicates and unique artificial edges */ - DROP TABLE IF EXISTS final_artificial_edges; - CREATE TEMP TABLE final_artificial_edges AS - WITH cnt_artificial_edges AS - ( - SELECT a.wid, count(*)::integer AS cnt - FROM artificial_edges a - GROUP BY a.wid - ), - not_duplicates AS - ( - SELECT a.wid, a.vid, a.id, a.COST, a.reverse_cost, a.length_m, a.SOURCE, a.target, a.geom, a.point_geom, a.grid_id - FROM artificial_edges a, 
cnt_artificial_edges c - WHERE a.wid = c.wid - AND c.cnt <= 2 - ), - insert_not_duplicates AS - ( - INSERT INTO starting_vertices - SELECT DISTINCT n.vid, n.point_geom, n.grid_id - FROM not_duplicates n - ), - insert_duplicates AS - ( - INSERT INTO duplicated_artificial_edges - SELECT a.* - FROM artificial_edges a, cnt_artificial_edges c - WHERE a.wid = c.wid - AND c.cnt > 2 - ) - SELECT n.wid, n.id, n.COST, n.reverse_cost, n.length_m, n.SOURCE, n.target, n.geom, n.point_geom, n.grid_id - FROM not_duplicates n; - - /*Handle duplicated artificial edges*/ - DROP TABLE IF EXISTS cleaned_duplicates; - CREATE TEMP TABLE cleaned_duplicates AS - WITH sum_costs AS - ( - SELECT d.vid, round(SUM(d.COST::numeric), 4) AS cost, round(SUM(d.reverse_cost::numeric), 4) AS reverse_cost - FROM duplicated_artificial_edges d - GROUP BY d.vid - ), - ordered AS - ( - SELECT DISTINCT d.wid, d.vid, d.fraction, s.COST, s.reverse_cost, d.point_geom, d.grid_id - FROM duplicated_artificial_edges d, sum_costs s - WHERE d.vid = s.vid - AND d.fraction NOT IN (0,1) - ORDER BY d.wid, d.fraction - ), - insert_distinct_starting_points AS - ( - INSERT INTO starting_vertices - SELECT o.vid, o.point_geom, o.grid_id - FROM ordered o - ), - grouped AS - ( - SELECT g.wid, array_agg(g.vid) vids, array_agg(g.fraction) fractions - FROM ordered g - GROUP BY g.wid - ), - distinct_costs AS - ( - SELECT DISTINCT o.wid, o.COST, o.reverse_cost - FROM ordered o - ), - distinct_duplicated_edges AS - ( - SELECT g.wid, o.COST, o.reverse_cost, g.vids, g.fractions, e.SOURCE, e.target, e.geom - FROM grouped g - LEFT JOIN distinct_costs o - ON g.wid = o.wid - LEFT JOIN basic.edge e - ON g.wid = e.id - ) - SELECT edge_id AS wid, (max_new_edge_id - ROW_NUMBER() OVER()) AS id, f.COST, f.reverse_cost, - ST_LENGTH(f.geom::geography) AS length_m, f.SOURCE, f.target, f.geom, NULL AS point_geom - FROM distinct_duplicated_edges d, - LATERAL basic.fix_multiple_artificial_edges(d.wid, d.SOURCE, d.target, d.COST, d.reverse_cost, d.geom, d.vids, d.fractions) f; - - UPDATE cleaned_duplicates d - SET point_geom = s.geom - FROM starting_vertices s - WHERE d.SOURCE = s.id; - - UPDATE cleaned_duplicates d - SET point_geom = s.geom - FROM starting_vertices s - WHERE d.target = s.id; - - INSERT INTO final_artificial_edges - SELECT * FROM cleaned_duplicates; - -END; -$function$; - -/* -WITH p AS -( - SELECT ST_CENTROID(geom) AS geom - FROM basic.grid_calculation - LIMIT 10 -), -agg AS -( - SELECT array_agg(ST_X(geom)) AS x, array_agg(ST_Y(geom)) AS y - FROM p -) -SELECT basic.create_multiple_artificial_edges(x, y, 1200., 1.33, 'default', 1, 'walking_standard') a -FROM agg -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/create_perpendicular_line.sql b/src/db/sql/functions/legacy/create_perpendicular_line.sql deleted file mode 100644 index 4c7b07a..0000000 --- a/src/db/sql/functions/legacy/create_perpendicular_line.sql +++ /dev/null @@ -1,13 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.create_perpendicular_line(line_geom geometry, point_geom geometry, length_line float) - RETURNS SETOF geometry AS -$func$ - - SELECT ST_SETSRID( - ST_MAKELINE( - ST_MAKEPOINT(x2+length_line,y2+(-1/((y2-y1)/(x2-x1)))*length_line), - ST_MAKEPOINT(x2-length_line,y2-(-1/((y2-y1)/(x2-x1)))*length_line) - ),4326) - FROM ( - SELECT ST_X(st_startpoint(line_geom)) x1, ST_Y(st_startpoint(line_geom)) y1, ST_X(point_geom) x2, ST_Y(point_geom) y2 - ) x_y -$func$ LANGUAGE sql IMMUTABLE; \ No newline at end of file diff --git 
a/src/db/sql/functions/legacy/create_snapped_split_line.sql b/src/db/sql/functions/legacy/create_snapped_split_line.sql deleted file mode 100644 index 5b1b996..0000000 --- a/src/db/sql/functions/legacy/create_snapped_split_line.sql +++ /dev/null @@ -1,50 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.create_snapped_split_line(geom geometry, extend_distance NUMERIC, point_to_extend text) -RETURNS geometry -LANGUAGE plpgsql -AS $function$ -DECLARE - start_geom geometry; - end_geom geometry; - azimuth_A float; - azimuth_B float; - length_A NUMERIC; - length_B NUMERIC; - newpoint_A geometry; - newpoint_B geometry; - new_line geometry; -BEGIN - - -- get the points A and B given a line L - start_geom = ST_STARTPOINT(geom); - end_geom = ST_ENDPOINT(geom); - - -- Start line section - azimuth_A = ST_AZIMUTH(ST_POINTN(geom,2),start_geom); - - -- End line section - azimuth_B = ST_AZIMUTH(ST_POINTN(geom,-2),end_geom); - - -- get the length of the line A --> B - length_A = ST_DISTANCE(ST_STARTPOINT(geom),ST_POINTN(geom,2)); - length_B = ST_DISTANCE(ST_ENDPOINT(geom),ST_POINTN(geom,-2)); - - newpoint_A = ST_TRANSLATE(start_geom, sin(azimuth_A) * extend_distance, cos(azimuth_A) * extend_distance); - newpoint_B = ST_TRANSLATE(end_geom, sin(azimuth_B) * extend_distance, cos(azimuth_B) * extend_distance); - - IF point_to_extend = 'start' THEN - new_line = ST_MAKELINE(start_geom,newpoint_A); - ELSEIF point_to_extend = 'end' THEN - new_line = ST_MAKELINE(end_geom,newpoint_B); - ELSE - RAISE EXCEPTION 'Please specify a valid point_to_extend type.'; - END IF; - - RETURN new_line; -END -$function$ -/*point_to_extend = 'start', 'end'*/ ---1 meter in Germany approx. 0.0000127048 -/* -SELECT basic.create_snapped_split_line(geom, 0.0000127048, 'start') -FROM customer.way_modified WHERE id = 112; -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/extend_line.sql b/src/db/sql/functions/legacy/extend_line.sql deleted file mode 100644 index 067d234..0000000 --- a/src/db/sql/functions/legacy/extend_line.sql +++ /dev/null @@ -1,55 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.extend_line(geom geometry, extend_distance NUMERIC, point_to_extend text) -RETURNS geometry -LANGUAGE plpgsql -AS $function$ -DECLARE - start_geom geometry; - end_geom geometry; - azimuth_A float; - azimuth_B float; - length_A NUMERIC; - length_B NUMERIC; - newpoint_A geometry; - newpoint_B geometry; - new_line geometry; -BEGIN - - -- get the points A and B given a line L - start_geom = ST_STARTPOINT(geom); - end_geom = ST_ENDPOINT(geom); - - -- Start line section - azimuth_A = ST_AZIMUTH(ST_POINTN(geom,2),start_geom); - - -- End line section - azimuth_B = ST_AZIMUTH(ST_POINTN(geom,-2),end_geom); - - -- get the length of the line A --> B - length_A = ST_DISTANCE(ST_STARTPOINT(geom),ST_POINTN(geom,2)); - length_B = ST_DISTANCE(ST_ENDPOINT(geom),ST_POINTN(geom,-2)); - - newpoint_A = ST_TRANSLATE(start_geom, sin(azimuth_A) * extend_distance, cos(azimuth_A) * extend_distance); - newpoint_B = ST_TRANSLATE(end_geom, sin(azimuth_B) * extend_distance, cos(azimuth_B) * extend_distance); - - IF point_to_extend = 'start' THEN - new_line = st_addpoint(geom, newpoint_a, 0); - ELSEIF point_to_extend = 'end' THEN - new_line = st_addpoint(geom, newpoint_b, -1); - ELSEIF point_to_extend = 'both' THEN - new_line = st_addpoint(st_addpoint(geom,newpoint_B), newpoint_A, 0); - ELSE - RAISE EXCEPTION 'Please specify a valid point_to_extend type.'; - END IF; - - If new_line IS NULL THEN - RAISE NOTICE 'The new line is NULL. 
Please check the input parameters.'; - new_line = geom; - END IF; - - RETURN new_line; -END -$function$ -/*point_to_extend = 'start', 'end', 'both'*/ ---1 meter in Germany approx. 0.0000127048 ---SELECT basic.extend_line(geom, 0.0127048, 'both') ---FROM customer.way_modified WHERE id = 112; diff --git a/src/db/sql/functions/legacy/fetch_network_routing.sql b/src/db/sql/functions/legacy/fetch_network_routing.sql deleted file mode 100644 index d5b1eda..0000000 --- a/src/db/sql/functions/legacy/fetch_network_routing.sql +++ /dev/null @@ -1,46 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.fetch_network_routing(x float[], y float[], max_cutoff float, speed float, modus text, scenario_id integer, routing_profile text) - RETURNS SETOF type_fetch_edges_routing - LANGUAGE plpgsql -AS $function$ -DECLARE - buffer_starting_point geometry; - buffer_network geometry; - point geometry := ST_SETSRID(ST_POINT(x[1],y[1]), 4326); - snap_distance_network integer; - max_new_node_id integer := 2147483647; - max_new_edge_id integer := 2147483647; - setting_study_area_id integer; -BEGIN - - setting_study_area_id = basic.get_reference_study_area(ST_SETSRID(ST_POINT(x[1], y[1]), 4326)); - snap_distance_network = basic.select_customization('snap_distance_network', setting_study_area_id)::integer; - - SELECT ST_Buffer(point::geography,snap_distance_network)::geometry - INTO buffer_starting_point; - - SELECT ST_Buffer(point::geography,max_cutoff * speed)::geometry - INTO buffer_network; - - DROP TABLE IF EXISTS artificial_edges; - CREATE TEMP TABLE artificial_edges AS - SELECT * - FROM basic.create_artificial_edges(basic.query_edges_routing(ST_ASTEXT(buffer_starting_point),modus,scenario_id,speed,routing_profile,FALSE),point, - snap_distance_network, max_new_node_id, max_new_edge_id - ); - - RETURN query EXECUTE - 'SELECT 0, 0, 0, 0, 0, 0, NULL,''[[1.1,1.1],[1.1,1.1]]''::json, $1, $2 - UNION ALL ' || - basic.query_edges_routing(ST_ASTEXT(buffer_network),modus,scenario_id,speed,routing_profile,True) || - ' AND id NOT IN (SELECT wid FROM artificial_edges) - UNION ALL - SELECT id, source, target, ST_LENGTH(ST_TRANSFORM(geom, 3857)) AS length_3857, cost, reverse_cost, NULL AS death_end, ST_AsGeoJSON(ST_Transform(geom,3857))::json->''coordinates'', NULL AS starting_ids, NULL AS starting_geoms - FROM artificial_edges' USING ARRAY[max_new_node_id]::integer[], ARRAY[ST_ASTEXT(point)]::TEXT[]; - -END; -$function$; - -/*Fetches the routing network -SELECT * -FROM basic.fetch_network_routing(ARRAY[11.543274],ARRAY[48.195524], 1200., 1.33, 'default', 1, 'walking_standard') -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/fetch_network_routing_heatmap.sql b/src/db/sql/functions/legacy/fetch_network_routing_heatmap.sql deleted file mode 100644 index 2eb295d..0000000 --- a/src/db/sql/functions/legacy/fetch_network_routing_heatmap.sql +++ /dev/null @@ -1,86 +0,0 @@ - -CREATE OR REPLACE FUNCTION basic.fetch_network_routing_heatmap( - x float[], y float[], max_cutoff float, speed float, modus text, - scenario_id integer, routing_profile TEXT, artificial_tables_prefix TEXT = 'worker1') - RETURNS SETOF type_fetch_edges_routing - LANGUAGE plpgsql -AS $function$ -DECLARE - union_buffer_network geometry; - cnt_starting_points integer := 0; - length_starting_points integer := array_length(x, 1); - point geometry; - artificial_table_name TEXT := artificial_tables_prefix || '_' || 'heatmap_edges_artificial'; - vertices_table_name TEXT := artificial_tables_prefix || '_' || 'heatmap_starting_vertices'; -BEGIN - - PERFORM 
basic.create_multiple_artificial_edges(x, y, max_cutoff, speed, modus, scenario_id, routing_profile); - - DROP TABLE IF EXISTS buffer_network; - CREATE TEMP TABLE buffer_network (id serial, geom geometry); - - DROP TABLE IF EXISTS buffer_starting_points; - CREATE TEMP TABLE buffer_starting_points(geom geometry); - - /*Loop through starting points*/ - WHILE cnt_starting_points < length_starting_points - LOOP - cnt_starting_points = cnt_starting_points + 1; - point = ST_SETSRID(ST_POINT(x[cnt_starting_points],y[cnt_starting_points]), 4326); - - INSERT INTO buffer_network(geom) - SELECT ST_Buffer(point::geography,max_cutoff * speed)::geometry; - - INSERT INTO buffer_starting_points(geom) - SELECT ST_BUFFER(point, 0.000000001); - END LOOP; - - CREATE INDEX ON buffer_starting_points USING GIST(geom); - - union_buffer_network = (SELECT ST_UNION(b.geom) FROM buffer_network b); - - DROP TABLE IF EXISTS batch_artificial_edges; - EXECUTE 'CREATE TEMP TABLE batch_artificial_edges AS - SELECT * - FROM temporal.'||artificial_table_name||' a - WHERE ST_Intersects(a.geom, $1);' USING union_buffer_network; - - DROP TABLE IF EXISTS batch_starting_vertices; - EXECUTE 'CREATE TEMP TABLE batch_starting_vertices AS - SELECT v.* - FROM temporal.'||vertices_table_name||' v, buffer_starting_points s - WHERE ST_Intersects(v.geom, s.geom);'; - - EXECUTE 'DROP TABLE IF EXISTS temporal.'||artificial_table_name; - EXECUTE 'DROP TABLE IF EXISTS temporal.'||vertices_table_name; - - /*Fetch Network*/ - RETURN query EXECUTE - 'SELECT 1, 1, 1, 1, 1, 1, NULL, ''[[1.1,1.1],[1.1,1.1]]''::json, $1, $2 - UNION ALL ' || - basic.query_edges_routing(ST_ASTEXT(union_buffer_network),modus,scenario_id,speed,routing_profile,True) || - ' AND id NOT IN (SELECT wid FROM batch_artificial_edges) - UNION ALL - SELECT id, source, target, ST_LENGTH(ST_TRANSFORM(geom, 3857)) AS length_3857, cost, reverse_cost, - NULL AS death_end, ST_AsGeoJSON(ST_Transform(geom,3857))::json->''coordinates'', NULL AS starting_ids, NULL AS starting_geoms - FROM batch_artificial_edges' USING (SELECT array_agg(s.id) FROM batch_starting_vertices s), (SELECT array_agg(ST_ASTEXT(s.geom)) FROM batch_starting_vertices s); - -END; -$function$; - -/* -WITH p AS -( - SELECT ST_CENTROID(geom) geom - FROM temporal.heatmap_grid_helper h - WHERE cid = 0 -), -agg AS -( - SELECT array_agg(ST_X(geom)) AS x, array_agg(ST_Y(geom)) AS y - FROM p -) -SELECT n.* -FROM agg, -LATERAL basic.fetch_network_routing_heatmap(x,y, 1200., 1.33, 'default', 1, 'walking_standard') n -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/fetch_network_routing_multi.sql b/src/db/sql/functions/legacy/fetch_network_routing_multi.sql deleted file mode 100644 index 8228644..0000000 --- a/src/db/sql/functions/legacy/fetch_network_routing_multi.sql +++ /dev/null @@ -1,40 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.fetch_network_routing_multi(x float[], y float[], max_cutoff float, speed float, modus text, scenario_id integer, routing_profile text) - RETURNS SETOF type_fetch_edges_routing - LANGUAGE plpgsql -AS $function$ -DECLARE - union_buffer_network geometry; -BEGIN - - PERFORM basic.create_multiple_artificial_edges(x, y, max_cutoff, speed, modus, scenario_id, routing_profile); - union_buffer_network = (SELECT ST_UNION(geom) FROM buffer_network); - - /*Fetch Network*/ - RETURN query EXECUTE - 'SELECT 1, 1, 1, 1, 1, 1, NULL, ''[[1.1,1.1],[1.1,1.1]]''::json, $1, $2 - UNION ALL ' || - basic.query_edges_routing(ST_ASTEXT(union_buffer_network),modus,scenario_id,speed,routing_profile,True) 
|| - ' AND id NOT IN (SELECT wid FROM artificial_edges) - UNION ALL - SELECT id, source, target, ST_LENGTH(ST_TRANSFORM(geom, 3857)) AS length_3857, cost, reverse_cost, NULL AS death_end, ST_AsGeoJSON(ST_Transform(geom,3857))::json->''coordinates'', NULL AS starting_ids, NULL AS starting_geoms - FROM artificial_edges' USING (SELECT array_agg(s.id) FROM starting_vertices s), (SELECT array_agg(ST_ASTEXT(s.geom)) FROM starting_vertices s); - -END; -$function$; - -/* -WITH p AS -( - SELECT geom - FROM basic.poi - LIMIT 100 -), -agg AS -( - SELECT array_agg(ST_X(geom)) AS x, array_agg(ST_Y(geom)) AS y - FROM p -) -SELECT n.* -FROM agg, -LATERAL basic.fetch_network_routing_multi(x,y, 1200., 1.33, 'default', 1, 'walking_standard') n -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/fix_multiple_artificial_edges.sql b/src/db/sql/functions/legacy/fix_multiple_artificial_edges.sql deleted file mode 100644 index aa3aa53..0000000 --- a/src/db/sql/functions/legacy/fix_multiple_artificial_edges.sql +++ /dev/null @@ -1,34 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.fix_multiple_artificial_edges(wid integer, s integer, t integer, c float, rc float, w_geom geometry, vids integer[],fractions float[]) - RETURNS TABLE (edge_id integer, COST float, reverse_cost float, SOURCE integer, target integer, geom geometry) - LANGUAGE plpgsql -AS $function$ -DECLARE - start_fraction float := 0; - source_id integer := s; - end_fraction float; - start_id integer := s; - cnt integer := 1; -BEGIN - fractions = array_append(fractions,1::float); - vids = array_append(vids,t); - - FOREACH end_fraction IN ARRAY fractions - LOOP - RETURN query - WITH parts AS - ( - SELECT wid, c*(end_fraction-start_fraction) AS cost,rc*(end_fraction-start_fraction) AS reverse_cost, - source_id AS source, vids[cnt] AS target, ST_LINESUBSTRING(w_geom,start_fraction,end_fraction) AS geom - ) - SELECT p.wid, p.cost, p.reverse_cost, p.source, p.target, p.geom - FROM parts p - WHERE p.cost <> 0; - - start_fraction = end_fraction; - source_id = vids[cnt]; - edge_id = edge_id - 1; - cnt = cnt + 1; - END LOOP; - RETURN; -END -$function$; diff --git a/src/db/sql/functions/legacy/get_reference_study_area.sql b/src/db/sql/functions/legacy/get_reference_study_area.sql deleted file mode 100644 index 8871016..0000000 --- a/src/db/sql/functions/legacy/get_reference_study_area.sql +++ /dev/null @@ -1,21 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.get_reference_study_area(point geometry) -RETURNS integer - LANGUAGE plpgsql -AS $function$ -DECLARE - setting_study_area_id integer; -BEGIN - - setting_study_area_id = ( - SELECT s.id - FROM basic.study_area s - ORDER BY ST_CLOSESTPOINT(s.geom, point) <-> point - LIMIT 1 - ); - RETURN setting_study_area_id; -END; -$function$ IMMUTABLE; -/* -SELECT * -FROM basic.get_reference_study_area(ST_SETSRID(ST_MAKEPOINT(8.5, 47.5), 4326)); -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/heatmap_prepare_artificial.sql b/src/db/sql/functions/legacy/heatmap_prepare_artificial.sql deleted file mode 100644 index 282bdbc..0000000 --- a/src/db/sql/functions/legacy/heatmap_prepare_artificial.sql +++ /dev/null @@ -1,51 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.heatmap_prepare_artificial( - x double precision[], y double precision[], max_cutoff double precision, - speed double precision, modus text, scenario_id integer, routing_profile TEXT, - drop_table BOOLEAN = True, grid_ids Text[] = NULL, artificial_tables_prefix TEXT = 'worker1') - RETURNS TABLE(starting_id integer, grid_id text) - LANGUAGE 
plpgsql -AS $function$ -DECLARE - artificial_table_name TEXT := artificial_tables_prefix || '_' || 'heatmap_edges_artificial'; - vertices_table_name TEXT := artificial_tables_prefix || '_' || 'heatmap_starting_vertices'; - -BEGIN - - PERFORM basic.create_multiple_artificial_edges(x, y, max_cutoff, speed, modus, scenario_id, routing_profile, grid_ids); - IF drop_table = TRUE THEN - EXECUTE 'DROP TABLE IF EXISTS temporal.'||artificial_table_name; - EXECUTE 'CREATE TABLE temporal.'||artificial_table_name||' AS - SELECT * FROM final_artificial_edges; - ALTER TABLE temporal.'||artificial_table_name||' ADD PRIMARY KEY(id); - CREATE INDEX ON temporal.'||artificial_table_name||' USING GIST(geom);'; - - EXECUTE 'DROP TABLE IF EXISTS temporal.'||vertices_table_name||';'; - - EXECUTE 'CREATE TABLE temporal.'||vertices_table_name||' AS - SELECT * FROM starting_vertices; - CREATE INDEX ON temporal.'||vertices_table_name||' USING GIST(geom);'; - - ELSE - EXECUTE 'INSERT INTO temporal.'||artificial_table_name||' - SELECT * FROM final_artificial_edges;'; - - EXECUTE 'INSERT INTO temporal.'||vertices_table_name||' - SELECT * FROM starting_vertices;'; - END IF; - RETURN query - SELECT s.id, s.grid_id FROM starting_vertices s; -END; -$function$; -/* - * SELECT basic.heatmap_prepare_artificial( - ARRAY[11.33151181658887, 11.329836927525742, 11.38984564558536, 11.363184248765355, 11.387008240434048, 11.403206099194247, 11.371122451727029, 11.354846800772224, 11.386274800277878, 11.319657275473947, 11.36398379345171, 11.36452378521658, 11.309681912582171, 11.372770944369712, 11.351968526673739, 11.314154239198311, 11.365373097681584, 11.364028196866364, 11.390881096623627, 11.354995580208708, 11.37967387556875, 11.367948748979948, 11.357973303242176, 11.383677995847302, 11.380168628705773, 11.383500077197363, 11.382751256622504, 11.348444933008688, 11.368102026211377, 11.383087677646568, 11.38010791600567, 11.32631222156514, 11.37808099732201, 11.388602169249861, 11.32378389338054, 11.339457707388398, 11.34014996707774, 11.338073992134811, 11.370432433014454, 11.373159806656542, 11.379613370527727, 11.36671317492067, 11.327397653429099, 11.367055779763074, 11.326861500743393, 11.32605630299858, 11.378732474110379, 11.328687845806298, 11.35092578271132, 11.358718335567124, 11.37312032452882, 11.34735143432613, 11.394213243525217, 11.367551167065502, 11.376935074378537, 11.384791636125582, 11.337627154647397, 11.399871932667164, 11.354697972028028, 11.392428203094237, 11.345071372476605, 11.355740148101743, 11.398932425104016, 11.317868515478992, 11.349535596127007, 11.368297507207453, 11.3901966285047, 11.334305678903867, 11.36234424523353, 11.317524627089968, 11.31315607701018, 11.364278415758655, 11.33976037546456, 11.38998865889582, 11.323234210274522, 11.363237711504988, 11.332314999632976, 11.386657682907964, 11.378177647964499, 11.337424935485847, 11.371771880145692, 11.390976311338793, 11.342882401891508, 11.361795387674686, 11.332365839280056, 11.391821045058254, 11.357923876535166, 11.343334652697273, 11.343279107521484, 11.348991478863363, 11.350676621316603, 11.346161120103988, 11.33200984379607, 11.322288654491855, 11.340502861088172, 11.34045246436408, 11.378865481852499, 11.35420159645185, 11.338318801211715, 11.31112776407307], - ARRAY[49.11782520618968, 49.07858153329658, 49.09992169511411, 49.08121612454567, 49.08309227558087, 49.107243888107575, 49.07003073647988, 49.089415464548054, 49.10849612312842, 49.09511947549542, 49.122603670116625, 49.07800006514745, 49.0954354732499, 
49.113336092377956, 49.07664037241907, 49.08472007394885, 49.115335465930116, 49.0831279891603, 49.08561117002683, 49.07724841896959, 49.116462038942245, 49.08159173483491, 49.08191228245964, 49.07138834246062, 49.111336128831134, 49.12303429016542, 49.11301508213677, 49.0811600637743, 49.104851588326504, 49.084629197093044, 49.079965867768756, 49.083100200350245, 49.10453076160659, 49.09502691531495, 49.077364119779915, 49.106648736754906, 49.12072175993353, 49.078495081405975, 49.09138262923572, 49.080895105940165, 49.08509390184752, 49.11212089254843, 49.100160268223235, 49.08373572875068, 49.07391705717722, 49.10337464215555, 49.122659073363074, 49.101000334742785, 49.09095100674898, 49.09193510294221, 49.12037273225729, 49.099524567403066, 49.097313024169246, 49.07860779861267, 49.09152578068004, 49.12387365075378, 49.07956694780051, 49.0955434987481, 49.10158057897265, 49.10160065262924, 49.07351195500605, 49.087271792059695, 49.10174270297546, 49.09940537749128, 49.09822118519638, 49.08863119431383, 49.10695976593394, 49.06786236391733, 49.11472772280409, 49.092367529946415, 49.09497308068801, 49.098277650648306, 49.08231904263751, 49.087755329429704, 49.086546770518154, 49.112584712224034, 49.08837278508696, 49.076052079666326, 49.09642077292062, 49.09578810299831, 49.08816688924514, 49.077498943474886, 49.11023982884998, 49.08848759665756, 49.084317474777926, 49.07941080803824, 49.08596796068206, 49.07374380794078, 49.11322248833463, 49.07197613192512, 49.111224534580316, 49.09057465711033, 49.112700140629734, 49.092745044321234, 49.0923415239494, 49.096396487296005, 49.07506987402358, 49.07128018784661, 49.09364483770577, 49.084111023567154], - 1200, - 1.3888888888888888, - 'default', - 1, - 'walking_standard', - FALSE - ) - */ - \ No newline at end of file diff --git a/src/db/sql/functions/legacy/modified_buildings.sql b/src/db/sql/functions/legacy/modified_buildings.sql deleted file mode 100644 index 5734cc0..0000000 --- a/src/db/sql/functions/legacy/modified_buildings.sql +++ /dev/null @@ -1,29 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.modified_buildings(scenario_id_input integer) - RETURNS INTEGER[] - LANGUAGE plpgsql -AS $function$ -DECLARE - modified_features integer[]; -BEGIN - - modified_features = - ( - WITH ids AS - ( - SELECT DISTINCT building_id - FROM customer.building_modified - WHERE scenario_id = scenario_id_input - AND building_id IS NOT NULL - AND edit_type IN ('d') - ) - SELECT COALESCE(ARRAY_AGG(building_id), array[]::integer[]) - FROM ids - ); - - RETURN modified_features; - -END; -$function$ -/* -SELECT * FROM basic.modified_buildings(1) -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/modified_edges.sql b/src/db/sql/functions/legacy/modified_edges.sql deleted file mode 100644 index 3b10d79..0000000 --- a/src/db/sql/functions/legacy/modified_edges.sql +++ /dev/null @@ -1,29 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.modified_edges(scenario_id_input integer) - RETURNS INTEGER[] - LANGUAGE plpgsql -AS $function$ -DECLARE - modified_features integer[]; -BEGIN - - modified_features = - ( - WITH ids AS - ( - SELECT DISTINCT way_id - FROM customer.way_modified - WHERE scenario_id = scenario_id_input - AND way_id IS NOT NULL - AND edit_type IN ('d', 'm') - ) - SELECT COALESCE(ARRAY_AGG(way_id), array[]::integer[]) - FROM ids - ); - - RETURN modified_features; - -END; -$function$ -/* -SELECT * FROM basic.modified_edges(1) -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/modified_pois.sql 
b/src/db/sql/functions/legacy/modified_pois.sql deleted file mode 100644 index 351e0e8..0000000 --- a/src/db/sql/functions/legacy/modified_pois.sql +++ /dev/null @@ -1,28 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.modified_pois(scenario_id_input integer) - RETURNS TEXT[] - LANGUAGE plpgsql -AS $function$ -DECLARE - modified_features text[]; -BEGIN - - modified_features = - ( - WITH ids AS - ( - SELECT uid - FROM customer.poi_modified p - WHERE scenario_id = scenario_id_input - AND edit_type IN ('d', 'm') - ) - SELECT COALESCE(ARRAY_AGG(uid), array[]::text[]) - FROM ids - ); - - RETURN modified_features; - -END; -$function$ -/* -SELECT * FROM basic.modified_pois(75) -*/ diff --git a/src/db/sql/functions/legacy/network_modification.sql b/src/db/sql/functions/legacy/network_modification.sql deleted file mode 100644 index 842bdd8..0000000 --- a/src/db/sql/functions/legacy/network_modification.sql +++ /dev/null @@ -1,562 +0,0 @@ - - -CREATE OR REPLACE FUNCTION basic.network_modification(scenario_id_input integer) - RETURNS SETOF integer - LANGUAGE plpgsql -AS $function$ -DECLARE - cnt integer; - rec record; - max_id integer; -BEGIN - --------------------------------------------------------------------------------------------------------------------- - --Prepare Table - --------------------------------------------------------------------------------------------------------------------- - - /*Assumption Translation Meters and Degree: 1m = 0.000009 degree */ - DELETE FROM basic.edge e - WHERE e.scenario_id = scenario_id_input; - DELETE FROM basic.node n - WHERE n.scenario_id = scenario_id_input; - DROP TABLE IF EXISTS split_drawn_features, drawn_features, existing_network, intersection_existing_network, drawn_features_union, new_network, delete_extended_part, vertices_to_assign; - - DROP TABLE IF EXISTS modified_attributes_only; - CREATE TEMP TABLE modified_attributes_only AS - SELECT w.* - FROM basic.edge e, customer.way_modified w - WHERE w.scenario_id = scenario_id_input - AND e.id = w.way_id - AND ST_ASTEXT(ST_ReducePrecision(w.geom,0.00001)) = ST_ASTEXT(ST_ReducePrecision(e.geom,0.00001)) - AND edit_type = 'm'; - CREATE INDEX ON modified_attributes_only USING GIST(geom); - - DROP TABLE IF EXISTS drawn_features; - CREATE TEMP TABLE drawn_features as - SELECT w.id, ST_RemoveRepeatedPoints(w.geom) AS geom, - w.way_type, w.surface, w.wheelchair, w.lit, w.foot, w.bicycle, w.scenario_id, w.way_id AS original_id - FROM customer.way_modified w - WHERE w.scenario_id = scenario_id_input - AND w.edit_type IN ('n', 'm'); - CREATE INDEX ON drawn_features USING GIST(geom); - - --------------------------------------------------------------------------------------------------------------------- - --Snap start and end points - --------------------------------------------------------------------------------------------------------------------- - /*Round start and end point for snapping*/ - DROP TABLE IF EXISTS snapped_drawn_features; - CREATE TEMP TABLE snapped_drawn_features AS - WITH start_end_point AS - ( - SELECT d.id AS did, st_startpoint(d.geom) geom, 's' AS point_type, FALSE AS snapped, NULL::integer AS node_id - FROM drawn_features d - UNION ALL - SELECT d.id AS did, st_endpoint(d.geom) geom, 'e' AS point_type, FALSE AS snapped, NULL AS node_id - FROM drawn_features d - ), - clusters AS - ( - SELECT did, geom, ST_ClusterDBSCAN(geom, eps := 0.00001, minpoints := 1) OVER() AS cid, point_type - FROM start_end_point - ), - grouped AS - ( - SELECT ARRAY_AGG(did) AS did, ST_CENTROID(ST_COLLECT(geom)) AS geom, 
ARRAY_AGG(point_type)::text[] AS point_types - FROM clusters - GROUP BY cid - ) - SELECT UNNEST(did) AS did, geom, UNNEST(point_types) AS point_type, FALSE AS snapped, NULL::integer AS node_id, ARRAY_LENGTH(point_types, 1) AS group_size - FROM grouped; - - ALTER TABLE snapped_drawn_features ADD COLUMN id serial; - CREATE INDEX ON snapped_drawn_features USING GIST(geom); - CREATE INDEX ON snapped_drawn_features (id); - - /*Snapping to existing Nodes*/ - DROP TABLE IF EXISTS snapped_to_node; - CREATE TEMP TABLE snapped_to_node AS - SELECT r.id, r.did, r.point_type, r.geom original_geom, s.geom node_geom, s.id AS node_id - FROM snapped_drawn_features r - CROSS JOIN LATERAL - ( - SELECT n.id, n.geom - FROM basic.node n - WHERE ST_Intersects(ST_BUFFER(r.geom,0.00001), n.geom) - AND n.scenario_id IS NULL - ORDER BY r.geom <-> n.geom - LIMIT 1 - ) s; - CREATE INDEX ON snapped_to_node USING GIST(node_geom); - - UPDATE snapped_drawn_features d - SET geom = node_geom, snapped = TRUE, node_id = s.node_id - FROM snapped_to_node s - WHERE s.did = d.did - AND d.point_type = s.point_type; - - /*Snapping to existing edges*/ - DROP TABLE IF EXISTS snapped_to_edge; - CREATE TEMP TABLE snapped_to_edge AS - SELECT r.id, r.did, r.point_type, r.geom original_geom, s.geom closest_point_edge - FROM snapped_drawn_features r - CROSS JOIN LATERAL - ( - SELECT n.id, ST_CLOSESTPOINT(n.geom, r.geom) AS geom - FROM basic.edge n - WHERE ST_Intersects(ST_BUFFER(r.geom,0.00001), n.geom) - AND n.scenario_id IS NULL - ORDER BY r.geom <-> ST_CLOSESTPOINT(n.geom, r.geom) - LIMIT 1 - ) s - WHERE r.snapped = False; - - /*Update based on snapped to new*/ - UPDATE snapped_drawn_features d - SET geom = closest_point_edge, snapped = True - FROM snapped_to_edge s - WHERE s.did = d.did - AND d.point_type = s.point_type; - - /*Snapping to each other*/ - DROP TABLE IF EXISTS snapped_to_each_other; - CREATE TEMP TABLE snapped_to_each_other AS - SELECT r.id, r.did, r.point_type, r.geom original_geom, s.geom closest_point_edge - FROM snapped_drawn_features r - CROSS JOIN LATERAL - ( - SELECT n.id, ST_CLOSESTPOINT(n.geom, r.geom) AS geom - FROM drawn_features n - WHERE ST_Intersects(ST_BUFFER(r.geom,0.00001), n.geom) - AND r.did <> n.id - ORDER BY r.geom <-> ST_CLOSESTPOINT(n.geom, r.geom) - LIMIT 1 - ) s - WHERE r.snapped = FALSE - AND r.group_size = 1 - UNION ALL - SELECT s.id, s.did, s.point_type, s.geom, s.geom - FROM snapped_drawn_features s - WHERE s.group_size > 1; - - /*Update based on snapped to each other*/ - UPDATE snapped_drawn_features d - SET geom = closest_point_edge, snapped = True - FROM snapped_to_each_other s - WHERE s.did = d.did - AND d.point_type = s.point_type; - - /*Update drawn features*/ - UPDATE drawn_features d - SET geom = st_setpoint(d.geom, 0, s.geom) - FROM snapped_drawn_features s - WHERE d.id = s.did - AND s.snapped = TRUE - AND s.point_type = 's'; - - UPDATE drawn_features d - SET geom = st_setpoint(d.geom, -1, s.geom) - FROM snapped_drawn_features s - WHERE d.id = s.did - AND s.snapped = TRUE - AND s.point_type = 'e'; - - UPDATE drawn_features d - SET geom = st_setpoint(d.geom, 0, s.geom) - FROM snapped_drawn_features s - WHERE s.snapped = FALSE - AND s.point_type = 's' - AND d.id = s.did; - - UPDATE drawn_features d - SET geom = st_setpoint(d.geom, -1, s.geom) - FROM snapped_drawn_features s - WHERE s.snapped = FALSE - AND s.point_type = 'e' - AND d.id = s.did; - - --------------------------------------------------------------------------------------------------------------------- - --Cut network - 
--------------------------------------------------------------------------------------------------------------------- - - /*Extend lines to cut network*/ - DROP TABLE IF EXISTS extended_lines; - CREATE TEMP TABLE extended_lines AS - WITH agg_snapped_nodes AS - ( - SELECT d.id, ARRAY_AGG(point_type) AS point_type - FROM snapped_to_node s, drawn_features d - WHERE d.id = s.did - GROUP BY d.id - ) - SELECT CASE WHEN ARRAY['e', 's'] && point_type THEN d.geom - WHEN ARRAY['s'] = point_type THEN basic.extend_line(d.geom, 0.00001, 'end') - WHEN ARRAY['e'] = point_type THEN basic.extend_line(d.geom, 0.00001, 'start') - END AS geom, d.original_id - FROM agg_snapped_nodes a, drawn_features d - WHERE a.id = d.id - AND (d.way_type = 'road' OR d.way_type IS NULL) - UNION ALL - SELECT basic.extend_line(d.geom, 0.00001, 'both'), d.original_id - FROM drawn_features d - LEFT JOIN snapped_to_node s - ON d.id = s.did - WHERE s.id IS NULL - AND (d.way_type = 'road' OR d.way_type IS NULL); - - /*Intersects drawn bridges*/ - DROP TABLE IF EXISTS start_end_bridges; - CREATE TEMP TABLE start_end_bridges AS - SELECT st_startpoint(geom) AS geom - FROM drawn_features - WHERE way_type = 'bridge' - UNION - SELECT ST_endpoint(geom) AS geom - FROM drawn_features - WHERE way_type = 'bridge'; - CREATE INDEX ON start_end_bridges USING GIST(geom); - - /*Intersect drawn ways with existing ways*/ - DROP TABLE IF EXISTS intersection_existing_network; - CREATE TEMP TABLE intersection_existing_network AS - WITH intersection_result AS - ( - SELECT (ST_DUMP(ST_Intersection(d.geom, w.geom))).geom AS geom, w.id - FROM extended_lines d, basic.edge w - WHERE ST_Intersects(ST_BUFFER(d.geom, 0.00001), w.geom) - AND w.scenario_id IS NULL - ) - SELECT i.* - FROM intersection_result i - LEFT JOIN extended_lines e - ON i.id = e.original_id - WHERE e.original_id IS NULL - AND st_geometrytype(i.geom) = 'ST_Point'; - - INSERT INTO intersection_existing_network - WITH to_add AS - ( - SELECT scenario_id_input AS scenario_id, x.closest_point AS geom - FROM start_end_bridges s - CROSS JOIN LATERAL - ( - SELECT ST_CLOSESTPOINT(w.geom,s.geom) AS closest_point, ST_LineLocatePoint(w.geom,s.geom) AS fraction - FROM basic.edge w - WHERE w.scenario_id IS NULL - AND ST_Intersects(St_buffer(s.geom,0.00001), w.geom) - ORDER BY ST_CLOSESTPOINT(geom,s.geom) <-> s.geom - LIMIT 1 - ) x - ) - SELECT a.geom - FROM to_add a - LEFT JOIN intersection_existing_network i - ON ST_Intersects(ST_BUFFER(a.geom, 0.00001), i.geom) - WHERE i.geom IS NULL; - - DROP TABLE IF EXISTS distinct_intersection_existing_network; - CREATE TABLE distinct_intersection_existing_network AS - SELECT DISTINCT geom - FROM intersection_existing_network i; - - CREATE INDEX ON distinct_intersection_existing_network USING GIST(geom); - ALTER TABLE distinct_intersection_existing_network ADD COLUMN id serial; - ALTER TABLE distinct_intersection_existing_network ADD PRIMARY key(id); - - /*Filter out snapped start or end point*/ - DELETE FROM intersection_existing_network h - USING - ( - SELECT h.geom - FROM snapped_to_node n, distinct_intersection_existing_network h - WHERE ST_Intersects(ST_BUFFER(n.node_geom,0.00001), h.geom) - ) d - WHERE h.geom = d.geom; - - DROP TABLE IF EXISTS split_drawn_features; - /*Split network with itself*/ - SELECT count(*) - INTO cnt - FROM drawn_features - WHERE (way_type IS NULL OR way_type <> 'bridge') - LIMIT 2; - - IF cnt <= 1 THEN - CREATE TEMP TABLE split_drawn_features as - SELECT id as did, geom, way_type, surface, wheelchair, lit, foot, bicycle, scenario_id, 
original_id - FROM drawn_features; - ELSE - CREATE TEMP TABLE split_drawn_features AS - SELECT id AS did, basic.split_by_drawn_lines(id::integer, geom) AS geom, way_type, surface, wheelchair, lit, foot, bicycle, scenario_id, original_id - FROM drawn_features x - WHERE (way_type IS NULL OR way_type <> 'bridge') - UNION ALL - SELECT id AS did, geom, way_type, surface, wheelchair, lit, foot, bicycle, scenario_id, original_id - FROM drawn_features - WHERE way_type = 'bridge'; - END IF; - CREATE INDEX ON split_drawn_features USING GIST(geom); - - /*Create perpendicular lines to split new network*/ - DROP TABLE IF EXISTS perpendicular_split_lines; - CREATE TEMP TABLE perpendicular_split_lines AS - SELECT basic.create_intersection_line(i.geom, 0.000001) AS geom - FROM intersection_existing_network i; - - DROP TABLE IF EXISTS union_perpendicular_split_lines; - CREATE TEMP TABLE union_perpendicular_split_lines AS - SELECT ST_Union(geom) AS geom - FROM perpendicular_split_lines p; - - /*Split new network with existing network*/ - DROP TABLE IF EXISTS new_network; - CREATE TEMP TABLE new_network AS - SELECT d.did, (dp.geom).geom, way_type, surface, wheelchair, lit, foot, bicycle, scenario_id, original_id, 'geom' AS edit_type - FROM split_drawn_features d, union_perpendicular_split_lines w, - LATERAL (SELECT ST_DUMP(ST_CollectionExtract(ST_SPLIT(d.geom,w.geom),2)) AS geom) dp - WHERE (d.way_type IS NULL OR d.way_type <> 'bridge'); - CREATE INDEX ON new_network USING GIST(geom); - - /*Delete extended part*/ - DELETE FROM new_network - WHERE st_length(geom) < 0.0000011; - - /*Inject drawn bridges*/ - INSERT INTO new_network(did, geom, way_type, surface, wheelchair, lit, foot, bicycle, scenario_id, original_id, edit_type) - SELECT id, geom, way_type, surface, wheelchair, lit, foot, bicycle, scenario_id, original_id, 'geom' - FROM drawn_features - WHERE way_type = 'bridge'; - - ALTER TABLE new_network ADD COLUMN id serial; - ALTER TABLE new_network ADD COLUMN source integer; - ALTER TABLE new_network ADD COLUMN target integer; - - --------------------------------------------------------------------------------------------------------------------- - --Prepare source and target - --------------------------------------------------------------------------------------------------------------------- - /*Existing network is split using perpendicular lines*/ - DROP TABLE IF EXISTS existing_network; - CREATE TEMP TABLE existing_network as - SELECT w.id AS original_id, w.class_id, w.surface, w.foot, w.bicycle, (dp.geom).geom, w.source, w.target, - w.lit_classified, w.wheelchair_classified, w.impedance_surface - FROM basic.edge w, union_perpendicular_split_lines p, - LATERAL (SELECT ST_DUMP(ST_CollectionExtract(ST_SPLIT(w.geom,p.geom),2)) AS geom) dp - WHERE ST_Intersects(w.geom, p.geom) - AND w.scenario_id IS NULL - AND w.id NOT IN (SELECT way_id FROM modified_attributes_only); - ALTER TABLE existing_network ADD COLUMN id serial; - ALTER TABLE existing_network ADD PRIMARY KEY(id); - CREATE INDEX ON existing_network USING GIST(geom); - - /*Assign vertices that were snapped to new features*/ - UPDATE new_network n - SET SOURCE = s.node_id - FROM snapped_drawn_features s - WHERE n.did = s.did - AND s.node_id IS NOT NULL - AND s.point_type = 's' - AND ST_ASTEXT(st_startpoint(n.geom)) = ST_ASTEXT(s.geom); - - UPDATE new_network n - SET target = s.node_id - FROM snapped_drawn_features s - WHERE n.did = s.did - AND s.node_id IS NOT NULL - AND ST_ASTEXT(st_endpoint(n.geom)) = ST_ASTEXT(s.geom); - - /*Create new vertices*/
- DROP TABLE IF EXISTS loop_vertices; - CREATE TEMP TABLE loop_vertices AS - WITH start_end_point AS - ( - SELECT e.id, st_startpoint(geom) geom, 's' AS point_type - FROM new_network e - WHERE SOURCE IS NULL - UNION ALL - SELECT e.id, st_endpoint(geom) geom, 'e' AS point_type - FROM new_network e - WHERE target IS NULL - ), - clusters AS - ( - SELECT s.id, s.geom, ST_ClusterDBSCAN(geom, eps := 0.000001, minpoints := 1) OVER() AS cid, point_type - FROM start_end_point s - ), - grouped AS - ( - SELECT ST_CENTROID(ST_COLLECT(geom)) AS geom, ARRAY_AGG(point_type)::text[] AS point_types, ARRAY_AGG(id)::integer[] new_network_ids - FROM clusters c - GROUP BY cid - ) - SELECT geom, point_types, new_network_ids - FROM grouped; - - DROP TABLE IF EXISTS new_vertices; - CREATE TEMP TABLE new_vertices - ( - node_id integer, - new_network_ids integer[], - point_types text[], - geom geometry - ); - /* - DO $$ - DECLARE - rec record; - max_id integer; - BEGIN*/ - FOR rec IN SELECT * FROM loop_vertices v - LOOP - max_id = (SELECT max(id) FROM basic.node); - WITH i AS - ( - INSERT INTO basic.node (id, scenario_id, geom) - VALUES(max_id + 1, scenario_id_input, rec.geom) - RETURNING id, geom - ) - INSERT INTO new_vertices(node_id, new_network_ids, point_types, geom) - SELECT i.id, rec.new_network_ids, rec.point_types, i.geom - FROM i; - END LOOP; - /*END $$;*/ - CREATE INDEX ON new_vertices USING GIST(geom); - - WITH unnest_to_update AS - ( - SELECT v.node_id, UNNEST(v.new_network_ids) new_network_id, UNNEST(v.point_types) point_type - FROM new_vertices v - ) - UPDATE new_network n - SET SOURCE = u.node_id - FROM unnest_to_update u - WHERE n.id = u.new_network_id - AND point_type = 's'; - - WITH unnest_to_update AS - ( - SELECT v.node_id, UNNEST(v.new_network_ids) new_network_id, UNNEST(v.point_types) point_type - FROM new_vertices v - ) - UPDATE new_network n - SET target = u.node_id - FROM unnest_to_update u - WHERE n.id = u.new_network_id - AND point_type = 'e'; - - DROP TABLE IF EXISTS new_source_target_existing; - CREATE TEMP TABLE new_source_target_existing AS - WITH start_and_end AS - ( - SELECT e.id, st_startpoint(geom) geom, 's' AS point_type - FROM existing_network e - UNION ALL - SELECT e.id, st_endpoint(geom) geom, 'e' AS point_type - FROM existing_network e - ) - SELECT v.id, point_type, c.node_id, v.geom - FROM start_and_end v - CROSS JOIN LATERAL - ( - SELECT n.node_id - FROM new_vertices n - WHERE ST_Intersects(ST_BUFFER(v.geom, 0.00001), n.geom) - ORDER BY n.geom <-> v.geom - LIMIT 1 - ) c; - - UPDATE existing_network e - SET SOURCE = n.node_id - FROM new_source_target_existing n - WHERE e.id = n.id - AND n.point_type = 's'; - - UPDATE existing_network e - SET target = n.node_id - FROM new_source_target_existing n - WHERE e.id = n.id - AND n.point_type = 'e'; - - - DROP TABLE IF EXISTS network_to_add; - CREATE TEMP TABLE network_to_add AS - SELECT original_id, class_id, surface, foot, bicycle, geom, SOURCE, target, lit_classified, wheelchair_classified, impedance_surface - FROM existing_network - UNION ALL - SELECT NULL, 100, surface, foot, bicycle, geom, SOURCE, target, lit, wheelchair, NULL - FROM new_network; - - CREATE INDEX ON network_to_add USING GIST(geom); - - ---Attach attributes to vertices - DROP TABLE IF EXISTS vertices_to_add; - CREATE TEMP TABLE vertices_to_add AS - SELECT vv.node_id, array_remove(array_agg(DISTINCT x.class_id),NULL) class_ids, - array_remove(array_agg(DISTINCT x.foot),NULL) AS foot, - array_remove(array_agg(DISTINCT x.bicycle),NULL) bicycle, - 
array_remove(array_agg(DISTINCT x.lit_classified),NULL) lit_classified, - array_remove(array_agg(DISTINCT x.wheelchair_classified),NULL) wheelchair_classified, - vv.geom - FROM new_vertices vv - LEFT JOIN - ( - SELECT v.node_id, w.class_id, w.foot, w.bicycle, w.lit_classified, w.wheelchair_classified - FROM new_vertices v, network_to_add w - WHERE st_intersects(ST_BUFFER(v.geom,0.00001),w.geom) - ) x - ON vv.node_id = x.node_id - GROUP BY vv.node_id, vv.geom; - CREATE INDEX ON vertices_to_add (node_id); - - ---------------------------------------------------------------------------------------------------------------------- - --INSERT NEW VERTICES AND WAYS INTO THE EXISTING TABLES - ---------------------------------------------------------------------------------------------------------------------- - - /*DO $$ - DECLARE - rec record; - max_id integer; - BEGIN*/ - FOR rec IN SELECT * FROM network_to_add v - LOOP - max_id = (SELECT max(id) FROM basic.edge); - - INSERT INTO basic.edge(id, class_id, source, target, foot, bicycle, wheelchair_classified, lit_classified, impedance_surface, geom, coordinates_3857, length_m, length_3857, scenario_id, edge_id) - SELECT max_id + 1, rec.class_id, rec.SOURCE, rec.target, rec.foot, rec.bicycle, rec.wheelchair_classified, - rec.lit_classified, rec.impedance_surface, rec.geom, (ST_AsGeoJSON(ST_Transform(rec.geom,3857))::json->'coordinates')::json, - ST_LENGTH(rec.geom::geography), ST_LENGTH(ST_TRANSFORM(rec.geom, 3857)), scenario_id_input, rec.original_id - ; - END LOOP; - /*END $$;*/ - - - /*Set impedances for existing but now split network - UPDATE ways_userinput ww - SET s_imp = w.s_imp, rs_imp = w.rs_imp - FROM ways_userinput w - WHERE ww.original_id = w.id - AND ww.scenario_id = 1 - AND w.scenario_id IS NULL; - - IF EXISTS - ( SELECT 1 - FROM information_schema.tables - WHERE table_schema = 'public' - AND table_name = 'dem' - ) - THEN - WITH impedances AS - ( - SELECT w.id, ci.imp, ci.rs_imp - FROM ways_userinput w, - LATERAL get_slope_profile(w.id, 10, 'ways_userinput') sp, LATERAL compute_impedances(sp.elevs, sp.linkLength, 10) ci - WHERE scenario_id = scenario_id_input - AND original_id IS NULL - ) - UPDATE ways_userinput w - SET s_imp = i.imp, rs_imp = i.rs_imp - FROM impedances i - WHERE w.id = i.id; - - END IF; - */ -END -$function$; - diff --git a/src/db/sql/functions/legacy/poi_aoi_visualization.sql b/src/db/sql/functions/legacy/poi_aoi_visualization.sql deleted file mode 100644 index f0d8ef1..0000000 --- a/src/db/sql/functions/legacy/poi_aoi_visualization.sql +++ /dev/null @@ -1,115 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.poi_aoi_visualization(user_id_input integer, scenario_id_input integer, active_upload_ids integer[], active_study_area_id integer, -grouped_multi_entrance boolean = FALSE) -RETURNS TABLE (id integer, uid TEXT, category TEXT, name TEXT, opening_hours TEXT, street TEXT, housenumber TEXT, zipcode TEXT, edit_type TEXT, geom geometry) -LANGUAGE plpgsql -AS $function$ -DECLARE - aoi_categories TEXT[]; - data_upload_poi_categories TEXT[] = '{}'::TEXT[]; - all_poi_categories text[]; - excluded_pois_id text[] := '{}'::text[]; - buffer_geom_study_area geometry; -BEGIN - data_upload_poi_categories = basic.poi_categories_data_uploads(user_id_input); - active_study_area_id = (SELECT u.active_study_area_id FROM customer.user u WHERE u.id = user_id_input); - - IF grouped_multi_entrance = TRUE THEN - /*Get combined poi categories*/ - SELECT array_agg(o.category) - INTO all_poi_categories - FROM basic.active_opportunities(user_id_input, 
active_study_area_id) o, basic.opportunity_group g - WHERE o.category_group = g.GROUP - AND g.TYPE = 'poi' - AND multiple_entrance = grouped_multi_entrance; - - /*Get aoi categories*/ - SELECT ARRAY_AGG(o.category) - INTO aoi_categories - FROM basic.active_opportunities(user_id_input, active_study_area_id) o, basic.opportunity_group g - WHERE o.category_group = g.GROUP - AND g.TYPE = 'aoi' - AND multiple_entrance = grouped_multi_entrance; - ELSE - SELECT array_agg(o.category) - INTO all_poi_categories - FROM basic.active_opportunities(user_id_input, active_study_area_id) o, basic.opportunity_group g - WHERE o.category_group = g.GROUP - AND g.TYPE = 'poi'; - - /*Get aoi categories*/ - SELECT ARRAY_AGG(o.category) - INTO aoi_categories - FROM basic.active_opportunities(user_id_input, active_study_area_id) o, basic.opportunity_group g - WHERE o.category_group = g.GROUP - AND g.TYPE = 'aoi'; - - END IF; - - /*Check if POI scenario*/ - IF scenario_id_input <> 0 THEN - excluded_pois_id = basic.modified_pois(scenario_id_input); - END IF; - /*Buffer study area to avoid border effects*/ - buffer_geom_study_area = (SELECT buffer_geom_heatmap AS geom FROM basic.study_area s WHERE s.id = active_study_area_id); - - RETURN query - SELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, - NULL AS edit_type, p.geom - FROM basic.poi p - WHERE p.category IN (SELECT UNNEST(all_poi_categories)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND ST_Intersects(p.geom, buffer_geom_study_area) - AND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories)); - - RETURN query - SELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, - NULL AS edit_type, p.geom - FROM customer.poi_user p - WHERE p.category IN (SELECT UNNEST(all_poi_categories)) - AND p.data_upload_id IN (SELECT UNNEST(active_upload_ids)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND ST_Intersects(p.geom, buffer_geom_study_area); - - RETURN query - /*No scenarios nor aoi_user is implemented at the moment*/ - SELECT p.id, NULL, p.category, p.name, p.opening_hours, NULL AS street, NULL AS housenumber, NULL AS zipcode, - NULL AS edit_type, p.geom - FROM basic.aoi p - WHERE p.category IN (SELECT UNNEST(aoi_categories)) - AND p.geom && buffer_geom_study_area; - - IF scenario_id_input <> 0 THEN - RETURN query - SELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, - p.edit_type, p.geom - FROM customer.poi_modified p - WHERE p.category IN (SELECT UNNEST(all_poi_categories)) - AND ST_Intersects(p.geom, buffer_geom_study_area) - AND p.scenario_id = scenario_id_input; - - RETURN query - SELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, - 'd' AS edit_type, p.geom - FROM basic.poi p - WHERE p.category IN (SELECT UNNEST(all_poi_categories)) - AND p.uid IN (SELECT UNNEST(excluded_pois_id)) - AND ST_Intersects(p.geom, buffer_geom_study_area) - AND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories)); - - RETURN query - SELECT p.id, p.uid, p.category, p.name, p.opening_hours, p.street, p.housenumber, p.zipcode, - 'd' AS edit_type, p.geom - FROM customer.poi_user p - WHERE p.category IN (SELECT UNNEST(all_poi_categories)) - AND p.data_upload_id IN (SELECT UNNEST(active_upload_ids)) - AND p.uid IN (SELECT UNNEST(excluded_pois_id)) - AND ST_Intersects(p.geom, buffer_geom_study_area); - END IF; -END ; -$function$; - -/* -Modus should be default, scenario, comparison -SELECT * -FROM 
basic.poi_aoi_visualization(4, 2, 'default', ARRAY[0], 1) -*/ diff --git a/src/db/sql/functions/legacy/poi_categories.sql b/src/db/sql/functions/legacy/poi_categories.sql deleted file mode 100644 index e09f831..0000000 --- a/src/db/sql/functions/legacy/poi_categories.sql +++ /dev/null @@ -1,28 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.poi_categories(user_id_input integer) - RETURNS jsonb - LANGUAGE plpgsql -AS $function$ -DECLARE - active_study_area_id integer := (SELECT active_study_area_id FROM customer.USER WHERE id = user_id_input); - classified_pois jsonb; -BEGIN - - classified_pois = ( - WITH poi_categories AS - ( - SELECT o.multiple_entrance, array_agg(o.category) arr_categories - FROM basic.active_opportunities(user_id_input, active_study_area_id) o, basic.opportunity_group g - WHERE g.TYPE = 'poi' - AND o.category_group = g.GROUP - GROUP BY o.multiple_entrance - ) - SELECT jsonb_object_agg(COALESCE(multiple_entrance, FALSE), arr_categories) - FROM poi_categories - ); - RETURN classified_pois; -END ; -$function$ - -/* Function that returns the default and user poi categories grouped into multiple_entrance and single_entrance: -SELECT basic.poi_categories(1) -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/poi_categories_data_uploads.sql b/src/db/sql/functions/legacy/poi_categories_data_uploads.sql deleted file mode 100644 index 35220ed..0000000 --- a/src/db/sql/functions/legacy/poi_categories_data_uploads.sql +++ /dev/null @@ -1,40 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.poi_categories_data_uploads(user_id_input integer) - RETURNS text[] - LANGUAGE plpgsql -AS $function$ -DECLARE - upload_ids integer[]; - area_id integer; - upload_id integer; - poi_user_category TEXT; - categories text[] := '{}'::TEXT[]; -BEGIN - SELECT u.active_data_upload_ids, u.active_study_area_id - INTO upload_ids, area_id - FROM customer.USER u - WHERE u.id = user_id_input; - - FOREACH upload_id IN ARRAY upload_ids - LOOP - SELECT category - INTO poi_user_category - FROM customer.poi_user p, customer.data_upload d - WHERE p.data_upload_id = upload_id - AND p.data_upload_id = d.id - AND d.study_area_id = area_id - LIMIT 1; - - IF poi_user_category IS NOT NULL THEN - categories = array_append(categories, poi_user_category ); - END IF; - - END LOOP; - - RETURN COALESCE(categories, '{}'::TEXT[]); - -END ; -$function$ - -/* -SELECT * FROM basic.poi_categories_data_uploads(4) -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/population_modification.sql b/src/db/sql/functions/legacy/population_modification.sql deleted file mode 100644 index 9f8d002..0000000 --- a/src/db/sql/functions/legacy/population_modification.sql +++ /dev/null @@ -1,51 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.population_modification(scenario_id_input integer) -RETURNS SETOF void -LANGUAGE plpgsql -AS $function$ -DECLARE - setting_study_area_id integer; - average_gross_living_area integer; -BEGIN - - setting_study_area_id = ( - SELECT basic.get_reference_study_area(ST_CENTROID(geom)) - FROM customer.building_modified - WHERE scenario_id = scenario_id_input - LIMIT 1 - ); - average_gross_living_area = basic.select_customization('average_gross_living_area', setting_study_area_id); - - UPDATE customer.building_modified b - SET area = ST_AREA(geom::geography), - population = ( - CASE WHEN population IS NULL THEN (b.building_levels_residential * ST_AREA(b.geom::geography) / average_gross_living_area) - ELSE population END - ) - WHERE scenario_id = scenario_id_input; - - WITH count_pop AS ( - SELECT 
count(*) AS count_points, building_modified_id - FROM customer.population_modified - WHERE scenario_id = scenario_id_input - GROUP BY building_modified_id - ), - points_to_update AS - ( - SELECT p.id, b.id AS building_modified_id, b.population / c.count_points AS population - FROM customer.building_modified b, customer.population_modified p, count_pop c - WHERE b.id = p.building_modified_id - AND b.id = c.building_modified_id - AND b.scenario_id = scenario_id_input - AND p.scenario_id = scenario_id_input - AND b.building_type = 'residential' - ) - UPDATE customer.population_modified p - SET population = u.population - FROM points_to_update u - WHERE p.id = u.id; - -END -$function$; -/* -SELECT basic.population_modification(13) -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/query_edges_routing.sql b/src/db/sql/functions/legacy/query_edges_routing.sql deleted file mode 100644 index ca12795..0000000 --- a/src/db/sql/functions/legacy/query_edges_routing.sql +++ /dev/null @@ -1,96 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.query_edges_routing(buffer_geom text, modus_input text, scenario_id_input integer, speed_input float, routing_profile TEXT, coordinates_only BOOLEAN) - RETURNS text - LANGUAGE plpgsql -AS $function$ -DECLARE - sql_ways_ids text := ''; - sql_scenario_id text := ''; - sql_routing_profile text := ''; - sql_geom text := format(' AND ST_Intersects(geom, ST_SETSRID(ST_GEOMFROMTEXT(''%1$s''), 4326))', buffer_geom); - excluded_class_id text; - filter_categories text; - transport_mode TEXT := split_part(routing_profile,'_',1); - cost_function TEXT; - category text := jsonb_build_object('walking','foot','cycling','bicycle') ->> transport_mode; - sql_select_ways text; - sql_cost TEXT; - time_loss_intersections jsonb := '{}'::jsonb; - geom_column TEXT = 'geom'; - setting_study_area_id integer; - -BEGIN - IF modus_input = 'default' THEN - scenario_id_input = 0; - END IF; - - setting_study_area_id = basic.get_reference_study_area(ST_SETSRID(ST_CENTROID(buffer_geom), 4326)); - - excluded_class_id = (basic.select_customization('excluded_class_id_' || transport_mode, setting_study_area_id))::text; - excluded_class_id = substr(excluded_class_id, 2, length(excluded_class_id) - 2); - - filter_categories = replace(basic.select_customization('categories_no_' || category, setting_study_area_id)::TEXT, '"', ''''); - filter_categories = substr(filter_categories, 2, length(filter_categories) - 2); - - IF transport_mode IN ('cycling','ebike') THEN - time_loss_intersections = basic.select_customization('cycling_crossings_delay', setting_study_area_id); - END IF; - - IF routing_profile = 'cycling_pedelec' THEN - cost_function = 'ebike'; - ELSE - cost_function = transport_mode; - END IF; - - sql_cost = jsonb_build_object( - 'cycling','CASE WHEN crossing IS NOT NULL THEN (''%2$s''::jsonb ->> (''delay_'' || crossing_delay_category))::integer + ((length_m*(1+COALESCE(s_imp,0)+COALESCE(impedance_surface,0))::float)/%1$s) - ELSE (length_m*(1+COALESCE(s_imp,0)+COALESCE(impedance_surface,0))::float)/ - CASE WHEN bicycle IN (''no'', ''dismount'') THEN 1.33 ELSE %1$s END END AS cost, - CASE WHEN crossing IS NOT NULL THEN ( ''%2$s''::jsonb ->> (''delay_'' || crossing_delay_category))::integer + ((length_m*(1+COALESCE(rs_imp,0)+COALESCE(impedance_surface,0))::float)/%1$s) - ELSE (length_m*(1+COALESCE(rs_imp,0)+COALESCE(impedance_surface,0))::float)/ - CASE WHEN bicycle IN (''no'', ''dismount'') THEN 1.33 ELSE %1$s END END AS reverse_cost', - 'walking', 'length_m/%1$s as cost, length_m/%1$s 
as reverse_cost', - 'ebike', 'CASE WHEN crossing IS NOT NULL THEN (''%2$s''::jsonb ->> (''delay_'' || crossing_delay_category))::integer + ((length_m*(1+-greatest(-COALESCE(s_imp,0),0)+COALESCE(impedance_surface,0))::float)/%1$s) - ELSE (length_m*(1+COALESCE(impedance_surface,0))::float)/ - CASE WHEN bicycle IN (''no'', ''dismount'') THEN 1.33 ELSE %1$s END END AS cost, - CASE WHEN crossing IS NOT NULL THEN (''%2$s''::jsonb ->> (''delay_'' || crossing_delay_category))::integer + ((length_m*(1+-greatest(-COALESCE(rs_imp,0),0)+COALESCE(impedance_surface,0))::float)/%1$s) - ELSE (length_m*(1+COALESCE(impedance_surface,0))::float)/ - CASE WHEN bicycle IN (''no'', ''dismount'') THEN 1.33 ELSE %1$s END END AS reverse_cost' - ) ->> cost_function; - - sql_cost = format(sql_cost, speed_input, time_loss_intersections::text); - - - sql_scenario_id = ' AND (scenario_id IS NULL OR scenario_id='||scenario_id_input||')'; - - IF modus_input = 'scenario' THEN - sql_ways_ids = ' AND NOT id::int4 = any('''|| basic.modified_edges(scenario_id_input)::text ||''') '; - END IF; - - IF routing_profile = 'walking_safe_night' THEN - sql_routing_profile = 'AND (lit_classified = ''yes'' OR lit_classified = ''unclassified'')'; - ELSEIF routing_profile = 'walking_wheelchair' THEN - sql_routing_profile = 'AND ((wheelchair_classified = ''yes'') OR wheelchair_classified = ''limited'' - OR wheelchair_classified = ''unclassified'')'; - END IF; - - IF coordinates_only = TRUE THEN - geom_column = 'coordinates_3857'; - END IF; - - - sql_select_ways = - 'SELECT id::integer, source, target, length_3857,'||sql_cost||',death_end,'||quote_ident(geom_column)||', NULL AS starting_ids, NULL AS starting_geoms - FROM basic.edge - WHERE class_id NOT IN ('||excluded_class_id||') - AND ('||quote_ident(category)||' NOT IN ('||filter_categories||') - OR '||quote_ident(category)||' IS NULL) - '||sql_geom||sql_scenario_id||sql_ways_ids||sql_routing_profile; - return sql_select_ways; -END; -$function$; - -/*Produces the sql query as text to fetch the network*/ -/* -SELECT basic.query_edges_routing(ST_ASTEXT(ST_BUFFER(ST_POINT(11.543274,48.195524),0.0018)),'default',0,1.33,'walking_standard',true) -*/ - diff --git a/src/db/sql/functions/legacy/reachable_population_polygon.sql b/src/db/sql/functions/legacy/reachable_population_polygon.sql deleted file mode 100644 index b920328..0000000 --- a/src/db/sql/functions/legacy/reachable_population_polygon.sql +++ /dev/null @@ -1,66 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.reachable_population_polygon(scenario_id_input integer, modus TEXT, region TEXT) -RETURNS TABLE (name TEXT, population integer) -AS $function$ -DECLARE - excluded_buildings_id integer[]; - region_geom geometry := ST_SETSRID(ST_GeomFromText(region), 4326); - reachable_population integer; - reachable_population_default integer; - reachable_population_scenario integer; -BEGIN - - /*Check if invalid modus*/ - IF modus NOT IN ('default','scenario') THEN - RAISE EXCEPTION 'Unknown modus: %', modus; - END IF; - /*Get reachable population*/ - IF modus IN ('default','scenario') THEN - WITH pop AS - ( - SELECT COALESCE(SUM(p.population), 0) population - FROM basic.population p - WHERE ST_Intersects(p.geom, region_geom) - ) - SELECT p.population - INTO reachable_population_default - FROM pop p; - - END IF; - - IF modus = 'default' THEN - reachable_population = reachable_population_default::integer; - END IF; - - IF modus = 'scenario' THEN - excluded_buildings_id = basic.modified_buildings(scenario_id_input); - - WITH prepared_scenario AS - ( - 
SELECT COALESCE(-sum(p.population), 0) AS population - FROM basic.population p - WHERE ST_Intersects(p.geom, region_geom) - AND p.building_id IN (SELECT UNNEST(excluded_buildings_id)) - UNION ALL - SELECT COALESCE(sum(p.population), 0) AS population - FROM customer.population_modified p - WHERE ST_Intersects(p.geom, region_geom) - AND p.scenario_id = scenario_id_input - ) - SELECT COALESCE(sum(p.population), 0)::integer - INTO reachable_population_scenario - FROM prepared_scenario p; - reachable_population = (reachable_population_default + reachable_population_scenario)::integer; - END IF; - - - RETURN query - SELECT 'polygon' AS name, floor((reachable_population / 5)*5)::integer AS population; - -END; -$function$ LANGUAGE plpgsql; - -/* -SELECT * -FROM basic.reachable_population_polygon(2, 'default', -'POLYGON ((11.570115749093093 48.15360025891228, 11.570274296106232 48.1518693270582, 11.572708788648153 48.15118483030911, 11.574984827528402 48.15223125586774, 11.574826384986741 48.15396220424526, 11.57239179909107 48.154646710542, 11.570115749093093 48.15360025891228))') -*/ diff --git a/src/db/sql/functions/legacy/reachable_population_study_area.sql b/src/db/sql/functions/legacy/reachable_population_study_area.sql deleted file mode 100644 index ed0cd85..0000000 --- a/src/db/sql/functions/legacy/reachable_population_study_area.sql +++ /dev/null @@ -1,59 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.reachable_population_study_area(scenario_id_input integer, modus text, study_area_ids integer[]) - RETURNS TABLE(sub_study_area_id integer, name TEXT, population integer) - LANGUAGE plpgsql -AS $function$ -DECLARE - excluded_buildings_id integer[] := '{}'::integer[]; -BEGIN - - DROP TABLE IF EXISTS reachable_population; - - /*Get reachable population*/ - IF modus = 'default' THEN - RETURN query - SELECT s.id AS sub_study_area_id, s.name, s.population - FROM basic.sub_study_area s - WHERE s.id IN (SELECT UNNEST(study_area_ids)); - - ELSEIF modus = 'scenario' THEN - excluded_buildings_id = basic.modified_buildings(scenario_id_input); - - RETURN query - WITH prepared_scenario AS - ( - SELECT p.sub_study_area_id, -sum(p.population) AS population - FROM basic.population p - WHERE p.sub_study_area_id IN (SELECT UNNEST(study_area_ids)) - AND p.building_id IN (SELECT UNNEST(excluded_buildings_id)) - GROUP BY p.sub_study_area_id - UNION ALL - SELECT p.sub_study_area_id, sum(p.population) AS population - FROM customer.population_modified p - WHERE p.sub_study_area_id IN (SELECT UNNEST(study_area_ids)) - AND p.scenario_id = scenario_id_input - GROUP BY p.sub_study_area_id - ), - scenario_population AS - ( - SELECT p.sub_study_area_id, sum(p.population) population - FROM prepared_scenario p - GROUP BY p.sub_study_area_id - ) - SELECT s.id AS sub_study_area_id, s.name, (s.population + COALESCE(sp.population, 0))::integer AS population - FROM basic.sub_study_area s - LEFT JOIN scenario_population sp - ON s.id = sp.sub_study_area_id - WHERE s.id IN (SELECT UNNEST(study_area_ids)); - - ELSE - RAISE EXCEPTION 'Unknown modus: %', modus; - END IF; - - -END; -$function$; - -/* -SELECT * -FROM basic.reachable_population_study_area(2,'default', ARRAY[17,24,26]) -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/reached_population_study_area.sql b/src/db/sql/functions/legacy/reached_population_study_area.sql deleted file mode 100644 index c6b0ccb..0000000 --- a/src/db/sql/functions/legacy/reached_population_study_area.sql +++ /dev/null @@ -1,124 +0,0 @@ -CREATE OR REPLACE FUNCTION 
basic.reached_population_study_area(ischrone_calculation_id_input integer, scenario_id_input integer, modus text, study_area_ids integer[]) - RETURNS TABLE(id integer, step integer, reached_opportunities jsonb) - LANGUAGE plpgsql -AS $function$ -DECLARE - excluded_buildings_id integer[] := '{}'::integer[]; -BEGIN - - DROP TABLE IF EXISTS reachable_population; - - /*Get reachable population*/ - IF modus = 'default' THEN - CREATE TEMP TABLE reachable_population AS - SELECT i.id AS isochrone_feature_id, s.id AS sub_study_area_id, s.name, s.population - FROM basic.sub_study_area s, customer.isochrone_feature i - WHERE s.id IN (SELECT UNNEST(study_area_ids)) - AND i.isochrone_calculation_id = ischrone_calculation_id_input; - - ELSEIF modus = 'scenario' THEN - excluded_buildings_id = basic.modified_buildings(scenario_id_input); - - CREATE TEMP TABLE reachable_population AS - WITH prepared_scenario AS - ( - SELECT p.sub_study_area_id, -sum(p.population) AS population - FROM basic.population p - WHERE p.sub_study_area_id IN (SELECT UNNEST(study_area_ids)) - AND p.building_id IN (SELECT UNNEST(excluded_buildings_id)) - GROUP BY p.sub_study_area_id - UNION ALL - SELECT p.sub_study_area_id, sum(p.population) AS population - FROM customer.population_modified p - WHERE p.sub_study_area_id IN (SELECT UNNEST(study_area_ids)) - AND p.scenario_id = scenario_id_input - GROUP BY p.sub_study_area_id - ), - scenario_population AS - ( - SELECT p.sub_study_area_id, sum(population) population - FROM prepared_scenario p - GROUP BY p.sub_study_area_id - ), - combined_population AS - ( - SELECT s.id AS sub_study_area_id, s.name, (s.population + COALESCE(sp.population, 0)) AS population - FROM basic.sub_study_area s - LEFT JOIN scenario_population sp - ON s.id = sp.sub_study_area_id - WHERE s.id IN (SELECT UNNEST(study_area_ids)) - ) - SELECT i.id AS isochrone_feature_id, c.* - FROM customer.isochrone_feature i, combined_population c - WHERE i.isochrone_calculation_id = ischrone_calculation_id_input; - ELSE - RAISE EXCEPTION 'Unknown modus: %', modus; - END IF; - - /*Get reached population*/ - DROP TABLE IF EXISTS reached_population; - CREATE TEMP TABLE reached_population AS - WITH to_group AS - ( - SELECT i.id, s.population, s.sub_study_area_id - FROM customer.isochrone_feature i - CROSS JOIN LATERAL - ( - SELECT p.sub_study_area_id, sum(p.population) AS population - FROM basic.population p - WHERE st_intersects(i.geom,p.geom) - AND p.building_id NOT IN (SELECT UNNEST(excluded_buildings_id)) - AND p.sub_study_area_id IN (SELECT UNNEST(study_area_ids)) - GROUP BY p.sub_study_area_id - ) s - WHERE i.isochrone_calculation_id = ischrone_calculation_id_input - UNION ALL - SELECT i.id, s.population, s.sub_study_area_id - FROM customer.isochrone_feature i - CROSS JOIN LATERAL - ( - SELECT p.sub_study_area_id, sum(p.population) AS population - FROM customer.population_modified p - WHERE st_intersects(i.geom,p.geom) - AND p.sub_study_area_id IN (SELECT UNNEST(study_area_ids)) - AND p.scenario_id = scenario_id_input - GROUP BY p.sub_study_area_id - ) s - WHERE i.isochrone_calculation_id = ischrone_calculation_id_input - ) - SELECT g.id, (floor(COALESCE(sum(g.population)::integer,0)/5)*5) AS population, g.sub_study_area_id - FROM to_group g - GROUP BY g.id, g.sub_study_area_id; - - /*Combine and return results*/ - RETURN query - WITH combined AS - ( - SELECT a.isochrone_feature_id, a.sub_study_area_id, a.name, - CASE WHEN COALESCE(r.population, 0) > a.population THEN a.population - ELSE COALESCE(r.population, 0) END AS 
reached_population, a.population AS total_population - FROM reachable_population a - LEFT JOIN reached_population r - ON a.isochrone_feature_id = r.id - AND a.sub_study_area_id = r.sub_study_area_id - ), - as_object AS - ( - SELECT c.isochrone_feature_id, jsonb_object_agg(c.sub_study_area_id, - jsonb_build_object('name', c.name, 'reached_population', c.reached_population::integer, 'total_population', c.total_population::integer)) AS population - FROM combined c - GROUP BY c.isochrone_feature_id - ) - UPDATE customer.isochrone_feature i - SET reached_opportunities = o.population - FROM as_object o - WHERE o.isochrone_feature_id = i.id - RETURNING i.id, i.step, i.reached_opportunities; - -END; -$function$; - -/* -SELECT * -FROM basic.reached_population_study_area(39, 2,'default', ARRAY[17,24,26]) -*/ diff --git a/src/db/sql/functions/legacy/select_customization.sql b/src/db/sql/functions/legacy/select_customization.sql deleted file mode 100644 index 34a6b91..0000000 --- a/src/db/sql/functions/legacy/select_customization.sql +++ /dev/null @@ -1,31 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.select_customization(setting_type text, study_area_id integer) -RETURNS jsonb - LANGUAGE plpgsql -AS $function$ -DECLARE - setting_study_area jsonb; - setting_standard jsonb; -BEGIN - - setting_study_area = ( - SELECT (setting -> setting_type) AS customization - FROM basic.study_area sa - WHERE id = study_area_id - ); - - IF setting_study_area IS NOT NULL THEN - RETURN setting_study_area; - ELSE - setting_standard = ( - SELECT setting -> c.type - FROM customer.customization c - WHERE c.type = setting_type - ); - RETURN setting_standard; - END IF; -END; -$function$ IMMUTABLE; -/* -SELECT * -FROM basic.select_customization('excluded_class_id_walking', 11000009); -*/ \ No newline at end of file diff --git a/src/db/sql/functions/legacy/select_user_customization.sql b/src/db/sql/functions/legacy/select_user_customization.sql deleted file mode 100644 index f5d1002..0000000 --- a/src/db/sql/functions/legacy/select_user_customization.sql +++ /dev/null @@ -1,17 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.select_user_customization(user_id_input integer, setting_type text) -RETURNS jsonb - LANGUAGE sql -AS $function$ - - SELECT u.setting -> c.type - FROM customer.customization c, customer.user_customization u, customer.user u2 - WHERE u2.id = user_id_input - AND u2.active_study_area_id = u.study_area_id - AND c.type = setting_type - AND u.customization_id = c.id - AND u.user_id = user_id_input; - -$function$ IMMUTABLE; -/* -SELECT select_user_customization(1, 'poi_groups'); -*/ diff --git a/src/db/sql/functions/legacy/split_by_drawn_lines.sql b/src/db/sql/functions/legacy/split_by_drawn_lines.sql deleted file mode 100644 index ce43926..0000000 --- a/src/db/sql/functions/legacy/split_by_drawn_lines.sql +++ /dev/null @@ -1,37 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.split_by_drawn_lines(id_input integer, input_geom geometry) - RETURNS SETOF geometry - LANGUAGE plpgsql - AS $function$ - DECLARE - union_geom geometry; - does_intersect boolean := FALSE; - BEGIN - - does_intersect = ( - SELECT TRUE - FROM drawn_features d - WHERE ST_Intersects(basic.extend_line(d.geom, 0.00001, 'both'), - (SELECT geom FROM drawn_features WHERE id = id_input)) - LIMIT 1 - ); - - IF does_intersect = TRUE THEN - union_geom = - ( - SELECT ST_UNION(basic.extend_line(geom, 0.00001, 'both')) AS geom - FROM drawn_features - WHERE id <> id_input - AND ST_Intersects(input_geom, basic.extend_line(geom, 0.00001, 'both')) - AND (way_type IS NULL OR 
way_type <> 'bridge') - ); - END IF; - - IF union_geom IS NOT NULL THEN - RETURN query - SELECT (dump).geom - FROM (SELECT ST_DUMP(ST_CollectionExtract(ST_SPLIT(input_geom, union_geom),2)) AS dump) d; - ELSE - RETURN query SELECT input_geom; - END IF; - END -$function$; diff --git a/src/db/sql/functions/legacy/starting_points_multi_isochrones.sql b/src/db/sql/functions/legacy/starting_points_multi_isochrones.sql deleted file mode 100644 index 9ee0ad5..0000000 --- a/src/db/sql/functions/legacy/starting_points_multi_isochrones.sql +++ /dev/null @@ -1,72 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.starting_points_multi_isochrones(user_id_input integer, modus text, minutes integer, speed_input numeric, -amenities text[], scenario_id_input integer DEFAULT 0, active_upload_ids integer[] DEFAULT '{}'::integer[], region TEXT DEFAULT NULL, study_area_ids integer[] DEFAULT NULL) -RETURNS TABLE (x float[], y float[]) -AS $function$ - -DECLARE - excluded_pois_id text[] := ARRAY[]::text[]; - region_geom geometry; - buffer_geom geometry; - data_upload_poi_categories TEXT[] = '{}'::TEXT[]; - detour_factor numeric = 0.8; -BEGIN - data_upload_poi_categories = basic.poi_categories_data_uploads(user_id_input); - - IF region IS NULL AND study_area_ids IS NOT NULL THEN - SELECT ST_UNION(s.geom) - INTO region_geom - FROM basic.sub_study_area s - WHERE s.id IN (SELECT UNNEST(study_area_ids)); - ELSEIF region IS NOT NULL AND study_area_ids IS NULL THEN - SELECT ST_GeomFromText(region) AS geom - INTO region_geom; - ELSE - RAISE EXCEPTION 'Please specify either region or study_area_ids but not both.'; - END IF; - - buffer_geom = ST_Buffer(region_geom::geography, speed_input * 60 * minutes * detour_factor)::geometry; - - IF modus = 'scenario' THEN - excluded_pois_id = basic.modified_pois(scenario_id_input); - ELSEIF modus = 'default' THEN - scenario_id_input = 0; - END IF; - - - RETURN QUERY - WITH relevant_pois AS - ( - SELECT ST_X(p.geom) x, ST_Y(p.geom) y - FROM basic.poi p - WHERE ST_Intersects(buffer_geom, p.geom) - AND p.category IN (SELECT UNNEST(amenities)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories)) - UNION ALL - SELECT ST_X(p.geom) x, ST_Y(p.geom) y - FROM customer.poi_user p - WHERE ST_Intersects(buffer_geom, p.geom) - AND p.category IN (SELECT UNNEST(amenities)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND p.data_upload_id IN (SELECT UNNEST(active_upload_ids)) - UNION ALL - SELECT ST_X(p.geom) x, ST_Y(p.geom) y - FROM customer.poi_modified p - WHERE ST_Intersects(buffer_geom, p.geom) - AND p.category IN (SELECT UNNEST(amenities)) - AND p.scenario_id = scenario_id_input - AND (p.data_upload_id IN (SELECT UNNEST(active_upload_ids)) OR p.data_upload_id IS NULL) - AND p.edit_type <> 'd' - ) - SELECT ARRAY_AGG(r.x) AS x, ARRAY_AGG(r.y) AS y - FROM relevant_pois r; - -END; -$function$ -LANGUAGE plpgsql; - - -/* -SELECT * -FROM basic.starting_points_multi_isochrones('default', 10, 1.33, ARRAY['bar','restaurant','pub','french_supermarket','fancy_market'], 0, ARRAY[0], NULL, ARRAY[1,2,3,4]) - */ diff --git a/src/db/sql/functions/legacy/thematic_data_sum.sql b/src/db/sql/functions/legacy/thematic_data_sum.sql deleted file mode 100644 index 6548c80..0000000 --- a/src/db/sql/functions/legacy/thematic_data_sum.sql +++ /dev/null @@ -1,180 +0,0 @@ -CREATE OR REPLACE FUNCTION basic.thematic_data_sum(user_id_input integer, input_isochrone_calculation_id integer, modus text, scenario_id_input integer DEFAULT 0, active_upload_ids 
integer[] DEFAULT '{}'::integer[]) - RETURNS TABLE(isochrone_feature_id integer, isochrone_feature_step integer, isochrone_feature_reached_opportunities jsonb) - LANGUAGE plpgsql -AS $function$ -DECLARE - poi_categories jsonb = basic.poi_categories(user_id_input); - pois_one_entrance jsonb = poi_categories -> 'false'; - pois_more_entrance jsonb = poi_categories -> 'true'; - excluded_pois_id text[] := ARRAY[]::text[]; - excluded_buildings_id integer[] := ARRAY[]::integer[]; - data_upload_poi_categories text[]; -BEGIN - - IF modus = 'scenario' THEN - excluded_pois_id = basic.modified_pois(scenario_id_input); - excluded_buildings_id = basic.modified_buildings(scenario_id_input); - ELSE - scenario_id_input = 0; - END IF; - - data_upload_poi_categories = basic.poi_categories_data_uploads(user_id_input); - - IF data_upload_poi_categories IS NULL THEN - data_upload_poi_categories = '{}'::text[]; - END IF; - --Calculate reached AOIs - DROP TABLE IF EXISTS reached_aois; - CREATE TEMP TABLE reached_aois AS - WITH area_cnt AS - ( - SELECT i.id, a.category, count(*) as cnt, intersec.area - FROM customer.isochrone_feature i, basic.aoi a, - LATERAL (SELECT ST_Area(st_intersection(i.geom,a.geom)::geography)::integer area) AS intersec - WHERE isochrone_calculation_id = input_isochrone_calculation_id - AND st_intersects(i.geom,a.geom) - GROUP BY i.id, category, name, intersec.area - ), - json_area_cnt AS - ( - SELECT p.id, p.category, jsonb_build_object('cnt',sum(cnt),'area',sum(area)) AS aois_json - FROM area_cnt p - GROUP BY p.id, p.category - ) - SELECT j.id, jsonb_object_agg(category, aois_json) aois_json_agg - FROM json_area_cnt j - GROUP BY j.id; - - - DROP TABLE IF EXISTS reached_opportunities; - CREATE TEMP TABLE reached_opportunities - ( - id integer, - opportunity_type TEXT, - cnt integer - ); - - --Calculate reached population - INSERT INTO reached_opportunities - WITH temp_sum AS - ( - SELECT s.population,i.id - FROM customer.isochrone_feature i - CROSS JOIN LATERAL - ( - SELECT sum(p.population) AS population - FROM basic.population p - WHERE st_intersects(i.geom,p.geom) - AND p.building_id NOT IN (SELECT UNNEST(excluded_buildings_id)) - ) s - WHERE i.isochrone_calculation_id = input_isochrone_calculation_id - UNION ALL - SELECT s.population,i.id - FROM customer.isochrone_feature i - CROSS JOIN LATERAL - ( - SELECT sum(p.population) AS population - FROM customer.population_modified p - WHERE st_intersects(i.geom,p.geom) - AND p.scenario_id = scenario_id_input - ) s - WHERE i.isochrone_calculation_id = input_isochrone_calculation_id - ) - SELECT s.id, 'sum_pop', sum(s.population)::integer+(5-(sum(s.population)::integer % 5)) as sum_pop - FROM temp_sum s - GROUP BY s.id; - - --Calculate reached POIs one entrance - INSERT INTO reached_opportunities - SELECT i.id, s.category, count(*) - FROM customer.isochrone_feature i - CROSS JOIN LATERAL - ( - SELECT p.category, i.id - FROM basic.poi p - WHERE ST_Intersects(i.geom, p.geom) - AND p.category IN (SELECT jsonb_array_elements_text(pois_one_entrance)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND p.category NOT IN (SELECT UNNEST(data_upload_poi_categories)) - UNION ALL - SELECT p.category, i.id - FROM customer.poi_user p - WHERE ST_Intersects(i.geom, p.geom) - AND p.category IN (SELECT jsonb_array_elements_text(pois_one_entrance)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND p.data_upload_id IN (SELECT UNNEST(active_upload_ids)) - UNION ALL - SELECT p.category, i.id - FROM customer.poi_modified p - WHERE 
ST_Intersects(i.geom, p.geom) - AND p.category IN (SELECT jsonb_array_elements_text(pois_one_entrance)) - AND p.scenario_id = scenario_id_input - ) s - WHERE i.isochrone_calculation_id = input_isochrone_calculation_id - GROUP BY category, i.id; - - --Calculate reached POIs more entrances - INSERT INTO reached_opportunities - WITH more_entrances AS - ( - SELECT s.category, i.id - FROM customer.isochrone_feature i - CROSS JOIN LATERAL - ( - SELECT p.category, p.name, i.id - FROM basic.poi p - WHERE ST_Intersects(i.geom, p.geom) - AND p.category IN (SELECT jsonb_array_elements_text(pois_more_entrance)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - UNION ALL - SELECT p.category, p.name, i.id - FROM customer.poi_user p - WHERE ST_Intersects(i.geom, p.geom) - AND p.category IN (SELECT jsonb_array_elements_text(pois_more_entrance)) - AND p.uid NOT IN (SELECT UNNEST(excluded_pois_id)) - AND p.data_upload_id IN (SELECT UNNEST(active_upload_ids)) - UNION ALL - SELECT p.category, p.name, i.id - FROM customer.poi_modified p - WHERE ST_Intersects(i.geom, p.geom) - AND p.category IN (SELECT jsonb_array_elements_text(pois_more_entrance)) - AND p.scenario_id = scenario_id_input - ) s - WHERE i.isochrone_calculation_id = input_isochrone_calculation_id - GROUP BY name, category, i.id - ) - SELECT m.id, m.category, count(*) - FROM more_entrances m - GROUP BY m.category, m.id; - - RETURN QUERY - WITH group_reached_opportunities AS - ( - SELECT r.id, jsonb_object_agg(opportunity_type, cnt) reached_opportunities - FROM reached_opportunities r - GROUP BY id - ), - combined_opportunities AS - ( - SELECT COALESCE(g.id, r.id) AS id, COALESCE(reached_opportunities, '{}'::jsonb) || COALESCE(aois_json_agg, '{}'::jsonb) AS reached_opportunities - FROM group_reached_opportunities g - FULL JOIN reached_aois r - ON r.id = g.id - ) - UPDATE customer.isochrone_feature i - SET reached_opportunities = c.reached_opportunities - FROM combined_opportunities c - WHERE i.id = c.id - RETURNING i.id, i.step, i.reached_opportunities; -END ; -$function$; - -/* Example with starting point to find study_area -SELECT * FROM basic.count_pois_multi_isochrones(1,'scenario',10,1.33,'study_area', -'POINT(11.570115749093093 48.15360025891228)', ARRAY['bar','restaurant','pub','french_supermarket','fancy_market'], 1, ARRAY[3]); - - * Example with drawn polygon -SELECT * FROM basic.count_pois_multi_isochrones(1,'scenario',10,1.33,'draw', -'POLYGON ((11.570115749093093 48.15360025891228, 11.570274296106232 48.1518693270582, 11.572708788648153 48.15118483030911, 11.574984827528402 48.15223125586774, 11.574826384986741 48.15396220424526, 11.57239179909107 48.154646710542, 11.570115749093093 48.15360025891228))', -ARRAY['bar','restaurant','pub','french_supermarket','fancy_market'], 1, ARRAY[3]); - */ From 83a0ff59ed1b70bdfb45e7e3e64c0bcd3f554b56 Mon Sep 17 00:00:00 2001 From: EPajares Date: Sun, 15 Sep 2024 14:23:14 +0000 Subject: [PATCH 7/8] Remove user table from customer and added owned_by to content --- src/core/content.py | 282 ++++++++++++++++++++++-------- src/crud/crud_layer.py | 5 +- src/crud/crud_project.py | 6 +- src/db/models/_base_class.py | 30 ++-- src/db/models/_link_model.py | 36 ++-- src/db/models/data_store.py | 12 +- src/db/models/folder.py | 34 ++-- src/db/models/job.py | 23 ++- src/db/models/layer.py | 14 +- src/db/models/project.py | 15 +- src/db/models/scenario.py | 8 +- src/db/models/scenario_feature.py | 8 +- src/db/models/system_setting.py | 7 +- src/db/models/system_task.py | 6 +- src/db/models/user.py | 10 +- 
src/endpoints/v2/layer.py | 1 - src/endpoints/v2/user.py | 65 ++----- src/schemas/layer.py | 2 + src/schemas/project.py | 1 + tests/api/test_user.py | 25 +-- tests/conftest.py | 45 ++++- 21 files changed, 393 insertions(+), 242 deletions(-) diff --git a/src/core/content.py b/src/core/content.py index 4b7dfb1..8b442fd 100644 --- a/src/core/content.py +++ b/src/core/content.py @@ -7,8 +7,9 @@ from sqlmodel import SQLModel from src.db.session import AsyncSession from src.schemas.common import ContentIdList -from sqlalchemy import select, and_ +from sqlalchemy import select, and_, or_ from sqlalchemy.orm import contains_eager, selectinload +from src.db.models import User ### Generic helper functions for content @@ -106,6 +107,111 @@ async def delete_content_by_id( return +# def create_query_shared_content( +# model, +# team_link_model, +# organization_link_model, +# team_model, +# organization_model, +# role_model, +# filters, +# team_id=None, +# organization_id=None, +# ): +# """ +# Creates a dynamic query for a given model (Layer or Project) and its associated team, organization, and owner user. + +# :param model: The main model (Layer or Project) +# :param team_link_model: The model linking the main model with teams (LayerTeamLink or ProjectTeamLink) +# :param organization_link_model: The model linking the main model with organizations (LayerOrganizationLink or ProjectOrganizationLink) +# :param team_model: The Team model +# :param organization_model: The Organization model +# :param role_model: The Role model +# :param filters: Additional filters to apply +# :param team_id: ID of the team (optional) +# :param organization_id: ID of the organization (optional) +# :return: A SQLAlchemy query object +# """ + +# # Determine the link field based on the model +# link_field = f"{model.__tablename__}_id" + +# # Basic query to join the User who owns the Layer or Project +# base_query = select( +# model, +# role_model.id.label("valid_role_id"), +# team_model.name, +# team_model.id, +# team_model.avatar, +# User.firstname.label("user_firstname"), +# User.lastname.label("user_lastname"), +# User.avatar.label("user_avatar"), +# ).join( +# User, model.user_id == User.id # Join on owner_id field with User model +# ) + +# if team_id: +# query = ( +# base_query +# .join( +# team_link_model, getattr(team_link_model, link_field) == model.id +# ) # Dynamically replace `layer_id` or `project_id` +# .join(role_model, team_link_model.role_id == role_model.id) +# .join(team_model, team_link_model.team_id == team_model.id) +# .where( +# and_( +# team_link_model.team_id == team_id, +# *filters, +# ) +# ) +# .options( +# contains_eager(getattr(model, "team_links")) +# ) # Adjust field as needed for relationships +# ) +# elif organization_id: +# query = ( +# base_query +# .join( +# organization_link_model, +# getattr(organization_link_model, link_field) == model.id, +# ) # Dynamically replace `layer_id` or `project_id` +# .join(role_model, organization_link_model.role_id == role_model.id) +# .join( +# organization_model, +# organization_link_model.organization_id == organization_model.id, +# ) +# .where( +# and_( +# organization_link_model.organization_id == organization_id, +# *filters, +# ) +# ) +# .options( +# contains_eager(getattr(model, "organization_links")) +# ) # Adjust field as needed for relationships +# ) +# else: +# query = ( +# base_query +# .outerjoin( +# team_link_model, getattr(team_link_model, link_field) == model.id +# ) # Dynamically replace `layer_id` or `project_id` +# .outerjoin( +# 
organization_link_model, +# getattr(organization_link_model, link_field) == model.id, +# ) # Dynamically replace `layer_id` or `project_id` +# .where(and_(*filters)) +# .options( +# selectinload(getattr(model, "team_links")).selectinload( +# getattr(team_link_model, "team") +# ), # Adjust fields as needed +# selectinload(getattr(model, "organization_links")).selectinload( +# getattr(organization_link_model, "organization") +# ), # Adjust fields as needed +# ) +# ) + + def create_query_shared_content( model, team_link_model, @@ -118,7 +224,7 @@ def create_query_shared_content( organization_id=None, ): """ - Creates a dynamic query for a given model (Layer or Project) and its associated team and organization links. + Creates a dynamic query for a given model (Layer or Project) and its associated team, organization, and owner user. :param model: The main model (Layer or Project) :param team_link_model: The model linking the main model with teams (LayerTeamLink or ProjectTeamLink) @@ -135,16 +241,38 @@ def create_query_shared_content( # Determine the link field based on the model link_field = f"{model.__tablename__}_id" + # Get team or layer model + if team_id: + read_column = [ + team_model.name.label("team_name"), + team_model.id, + team_model.avatar.label("team_avatar"), + ] + elif organization_id: + read_column = [ + organization_model.name.label("team_name"), + organization_model.id, + organization_model.avatar.label("team_avatar"), + ] + else: + read_column = [] + + # Basic query to join the User who owns the Layer or Project + base_query = select( + model, + role_model.id.label("valid_role_id"), + User.id.label("valid_user_id"), + User.firstname.label("user_firstname"), + User.lastname.label("user_lastname"), + User.avatar.label("user_avatar"), + *read_column, + ).join( + User, model.user_id == User.id # Join on owner_id field with User model + ) + if team_id: query = ( - select( - model, - role_model.name, - team_model.name, - team_model.id, - team_model.avatar, - ) - .join( + base_query.join( team_link_model, getattr(team_link_model, link_field) == model.id ) # Dynamically replace `layer_id` or `project_id` .join(role_model, team_link_model.role_id == role_model.id) @@ -161,14 +289,7 @@ def create_query_shared_content( ) elif organization_id: query = ( - select( - model, - role_model.name, - organization_model.name, - organization_model.id, - organization_model.avatar, - ) - .join( + base_query.join( organization_link_model, getattr(organization_link_model, link_field) == model.id, ) # Dynamically replace `layer_id` or `project_id` @@ -188,29 +309,38 @@ def create_query_shared_content( ) # Adjust field as needed for relationships ) else: + # Query for the case with no team_id or organization_id query = ( - select(model) - .outerjoin( + base_query.outerjoin( team_link_model, getattr(team_link_model, link_field) == model.id - ) # Dynamically replace `layer_id` or `project_id` + ) # Outer join for team_link_model + .outerjoin( + team_model, team_link_model.team_id == team_model.id + ) # Outer join for team_model + .outerjoin( + role_model, team_link_model.role_id == role_model.id + ) # Outer join for role_model .outerjoin( organization_link_model, getattr(organization_link_model, link_field) == model.id, - ) # Dynamically replace `layer_id` or `project_id` + ) # Outer join for organization_link_model + .outerjoin( + organization_model, + organization_link_model.organization_id == organization_model.id, + ) # Outer join for organization_model .where(and_(*filters)) .options( 
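+            # Preload both link collections; selectinload emits separate SELECTs per relationship, which avoids implicit lazy loads on the async session.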
selectinload(getattr(model, "team_links")).selectinload( - getattr(team_link_model, "team") - ), # Adjust fields as needed + team_link_model.team + ), # Preload team links and corresponding teams selectinload(getattr(model, "organization_links")).selectinload( - getattr(organization_link_model, "organization") - ), # Adjust fields as needed + organization_link_model.organization + ), # Preload organization links and corresponding organizations ) ) - return query - +#TODO: Make a pydantic schema for shared_with and owned_by def build_shared_with_object( items, role_mapping, @@ -232,60 +362,64 @@ def build_shared_with_object( :param organization_id: Optional ID for organization-specific sharing :return: A list of dictionaries containing the model and the shared_with data """ - result_arr = [] - # Determine shared_with key - if team_id: - shared_with_key = "teams" - elif organization_id: - shared_with_key = "organizations" + def get_owned_by(item): + """Helper function to build the 'owned_by' dictionary.""" + return { + "id": item[2], + "firstname": item[3], + "lastname": item[4], + "avatar": item[5], + } - # Case where shared_with is for a specific team or organization - if team_id or organization_id: - for item in items: - shared_with = {shared_with_key: []} - shared_with[shared_with_key].append( + def process_links(item, link_key, link_type): + """Helper function to process either team or organization links.""" + shared_with = [] + links = getattr(item, link_key, None) + if not links: + return shared_with + for link in links: + shared_with.append( { - "role": item[1], # Role name - "id": item[3], # Team or Organization ID - "name": item[2], # Team or Organization name - "avatar": item[4], # Team or Organization avatar + "role": role_mapping[link.role_id], # Role based on role_mapping + "id": getattr(link, link_type).id, + "name": getattr(link, link_type).name, + "avatar": getattr(link, link_type).avatar, } ) - result_arr.append({**item[0].dict(), "shared_with": shared_with}) - else: - # Case where shared_with includes both teams and organizations - for item in items: - shared_with = {"teams": [], "organizations": []} - - # Process team links - team_links = getattr(item, team_key) - for team_link in team_links: - shared_with["teams"].append( - { - "role": role_mapping[ - team_link.role_id - ], # Role based on role_mapping - "id": team_link.team.id, - "name": team_link.team.name, - "avatar": team_link.team.avatar, - } - ) + return shared_with + + result_arr = [] - # Process organization links - organization_links = getattr(item, org_key) - for organization_link in organization_links: - shared_with["organizations"].append( + # Determine shared_with key + shared_with_key = ( + "teams" if team_id else "organizations" if organization_id else None + ) + + for item in items: + if team_id or organization_id: + # Case where shared_with is for a specific team or organization + shared_with = { + shared_with_key: [ { - "role": role_mapping[ - organization_link.role_id - ], # Role based on role_mapping - "id": organization_link.organization.id, - "name": organization_link.organization.name, - "avatar": organization_link.organization.avatar, + "role": role_mapping[item[1]], # Role name + "id": item[3], # Team or Organization ID + "name": item[2], # Team or Organization name + "avatar": item[4], # Team or Organization avatar } - ) + ] + } + else: + # Case where shared_with includes both teams and organizations + shared_with = { + "teams": process_links(item[0], team_key, "team"), + "organizations": 
process_links(item[0], org_key, "organization"), + } - result_arr.append({**item.dict(), "shared_with": shared_with}) + # Add owned_by information + owned_by = get_owned_by(item) + result_arr.append( + {**item[0].dict(), "shared_with": shared_with, "owned_by": owned_by} + ) return result_arr diff --git a/src/crud/crud_layer.py b/src/crud/crud_layer.py index 0a29e46..cb83160 100644 --- a/src/crud/crud_layer.py +++ b/src/crud/crud_layer.py @@ -635,6 +635,7 @@ async def get_layers_with_filter( ) role_mapping = {role.id: role.name for role in roles} + # Build query query = create_query_shared_content( Layer, LayerTeamLink, @@ -663,8 +664,8 @@ async def get_layers_with_filter( **params, ) layers_arr = build_shared_with_object( - layers.items, - role_mapping, + items=layers.items, + role_mapping=role_mapping, team_key="team_links", org_key="organization_links", model_name="layer", diff --git a/src/crud/crud_project.py b/src/crud/crud_project.py index 52551e2..dc3dc91 100644 --- a/src/crud/crud_project.py +++ b/src/crud/crud_project.py @@ -92,7 +92,7 @@ async def get_projects( ) -> Page[IProjectRead]: """Get projects for a user and folder""" - # If ids are provided apply filter by ids, otherwise apply filter by folder_id and user_id + # Build query and filters if team_id or organization_id: filters = [] elif folder_id: @@ -134,8 +134,8 @@ async def get_projects( order=order, ) projects.items = build_shared_with_object( - projects.items, - role_mapping, + items=projects.items, + role_mapping=role_mapping, team_key="team_links", org_key="organization_links", model_name="project", diff --git a/src/db/models/_base_class.py b/src/db/models/_base_class.py index 935046e..c492ca9 100644 --- a/src/db/models/_base_class.py +++ b/src/db/models/_base_class.py @@ -1,24 +1,12 @@ -from sqlmodel import Field, SQLModel, Column, DateTime, text from datetime import datetime, timezone -from typing import Optional, Any, List -from sqlalchemy.ext.declarative import as_declarative, declared_attr -from sqlalchemy.dialects.postgresql import UUID as UUID_PG -from sqlalchemy import ForeignKey, Text, ARRAY +from typing import Any, List, Optional from uuid import UUID - -# class DateTimeBase(SQLModel): -# """Base class for models with created_at and updated_at fields.""" - -# updated_at: Optional[datetime] = Field( -# sa_column=Column(DateTime(timezone=True), nullable=False, onupdate=datetime.utcnow), -# default_factory=datetime.utcnow, -# ) -# created_at: Optional[datetime] = Field( -# sa_column=Column( -# DateTime(timezone=True), nullable=False, server_default=text("timezone('UTC', now())") -# ) -# ) +from sqlalchemy import ARRAY, ForeignKey, Text +from sqlalchemy.dialects.postgresql import UUID as UUID_PG +from sqlalchemy.ext.declarative import as_declarative, declared_attr +from sqlmodel import Column, DateTime, Field, SQLModel, text +from src.core.config import settings class DateTimeBase(SQLModel): @@ -34,7 +22,9 @@ class DateTimeBase(SQLModel): sa_column=Column( DateTime(timezone=True), nullable=False, - server_default=text("""to_char(CURRENT_TIMESTAMP AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SSOF')::timestamptz"""), + server_default=text( + """to_char(CURRENT_TIMESTAMP AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SSOF')::timestamptz""" + ), ) ) @@ -45,7 +35,7 @@ class ContentBaseAttributes(SQLModel): folder_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.folder.id", ondelete="CASCADE"), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.folder.id", ondelete="CASCADE"), nullable=False, ), 
description="Layer folder ID", diff --git a/src/db/models/_link_model.py b/src/db/models/_link_model.py index 571f49c..613a85c 100644 --- a/src/db/models/_link_model.py +++ b/src/db/models/_link_model.py @@ -9,29 +9,29 @@ ForeignKey, Integer, Relationship, + SQLModel, Text, UniqueConstraint, ) -from src.db.models._base_class import DateTimeBase from src.core.config import settings -from sqlmodel import SQLModel +from src.db.models._base_class import DateTimeBase from src.db.models.organization import Organization if TYPE_CHECKING: + from src.db.models.organization import Organization + from .layer import Layer from .project import Project from .scenario import Scenario from .scenario_feature import ScenarioFeature - from .user import User from .team import Team - from .role import Role - from src.db.models.organization import Organization + from .user import User class LayerProjectLink(DateTimeBase, table=True): __tablename__ = "layer_project" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: int | None = Field( sa_column=Column(Integer, primary_key=True, autoincrement=True) @@ -43,13 +43,15 @@ class LayerProjectLink(DateTimeBase, table=True): ) layer_id: UUID = Field( sa_column=Column( - UUID_PG(as_uuid=True), ForeignKey("customer.layer.id", ondelete="CASCADE") + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.layer.id", ondelete="CASCADE"), ), description="Layer ID", ) project_id: UUID = Field( sa_column=Column( - UUID_PG(as_uuid=True), ForeignKey("customer.project.id", ondelete="CASCADE") + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.project.id", ondelete="CASCADE"), ), description="Project ID", ) @@ -84,7 +86,7 @@ class LayerProjectLink(DateTimeBase, table=True): class ScenarioScenarioFeatureLink(DateTimeBase, table=True): __tablename__ = "scenario_scenario_feature" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: int | None = Field( sa_column=Column(Integer, primary_key=True, autoincrement=True) @@ -92,7 +94,7 @@ class ScenarioScenarioFeatureLink(DateTimeBase, table=True): scenario_id: UUID | None = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.scenario.id", ondelete="CASCADE"), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.scenario.id", ondelete="CASCADE"), primary_key=True, nullable=False, ), @@ -101,7 +103,9 @@ class ScenarioScenarioFeatureLink(DateTimeBase, table=True): scenario_feature_id: UUID | None = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.scenario_feature.id", ondelete="CASCADE"), + ForeignKey( + f"{settings.CUSTOMER_SCHEMA}.scenario_feature.id", ondelete="CASCADE" + ), primary_key=True, nullable=False, ), @@ -114,20 +118,22 @@ class ScenarioScenarioFeatureLink(DateTimeBase, table=True): class UserProjectLink(DateTimeBase, table=True): __tablename__ = "user_project" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: int | None = Field( sa_column=Column(Integer, primary_key=True, autoincrement=True) ) user_id: UUID = Field( sa_column=Column( - UUID_PG(as_uuid=True), ForeignKey("customer.user.id", ondelete="CASCADE") + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"), ), description="User ID", ) project_id: UUID = Field( sa_column=Column( - UUID_PG(as_uuid=True), ForeignKey("customer.project.id", ondelete="CASCADE") + UUID_PG(as_uuid=True), + 
ForeignKey(f"{settings.CUSTOMER_SCHEMA}.project.id", ondelete="CASCADE"), ), description="Project ID", ) @@ -171,7 +177,7 @@ class UserTeamLink(SQLModel, table=True): user_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey(f"{settings.CUSTOMER_SCHEMA}.user.id", ondelete="CASCADE"), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"), nullable=False, ) ) diff --git a/src/db/models/data_store.py b/src/db/models/data_store.py index 0c77eaf..44922b8 100644 --- a/src/db/models/data_store.py +++ b/src/db/models/data_store.py @@ -1,6 +1,7 @@ from typing import TYPE_CHECKING, List from uuid import UUID +from sqlalchemy.dialects.postgresql import UUID as UUID_PG from sqlmodel import ( Column, Field, @@ -9,14 +10,14 @@ Text, text, ) - from ._base_class import DateTimeBase -from sqlalchemy.dialects.postgresql import UUID as UUID_PG +from src.core.config import settings if TYPE_CHECKING: from src.schemas.data_store import DataStoreType from .layer import Layer + # TODO: Add further attributes for the different data store types class DataStoreBase(SQLModel): type: "DataStoreType" = Field( @@ -28,11 +29,14 @@ class DataStore(DataStoreBase, DateTimeBase, table=True): """Data store model.""" __tablename__ = "data_store" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: UUID | None = Field( sa_column=Column( - UUID_PG(as_uuid=True), primary_key=True, nullable=False, server_default=text("uuid_generate_v4()") + UUID_PG(as_uuid=True), + primary_key=True, + nullable=False, + server_default=text("uuid_generate_v4()"), ) ) # Relationships diff --git a/src/db/models/folder.py b/src/db/models/folder.py index c002c6b..9c093da 100644 --- a/src/db/models/folder.py +++ b/src/db/models/folder.py @@ -1,22 +1,18 @@ +from typing import List from uuid import UUID + from sqlalchemy import ForeignKey from sqlalchemy.dialects.postgresql import UUID as UUID_PG -from sqlmodel import ( - Column, - Field, - Relationship, - Text, - text, - UniqueConstraint -) -from typing import List +from sqlmodel import Column, Field, Relationship, Text, UniqueConstraint, text + from src.db.models._base_class import DateTimeBase -from src.db.models.user import User from src.db.models.layer import Layer +from src.db.models.user import User +from src.core.config import settings class Folder(DateTimeBase, table=True): __tablename__ = "folder" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: UUID | None = Field( sa_column=Column( @@ -29,16 +25,24 @@ class Folder(DateTimeBase, table=True): user_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.user.id", ondelete="CASCADE"), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"), nullable=False, ), description="Folder owner ID", ) - name: str = Field(sa_column=Column(Text, nullable=False), description="Folder name", max_length=255) + name: str = Field( + sa_column=Column(Text, nullable=False), + description="Folder name", + max_length=255, + ) # Relationships user: "User" = Relationship(back_populates="folders") - layers: List["Layer"] = Relationship(back_populates="folder", sa_relationship_kwargs={"cascade": "all, delete-orphan"}) + layers: List["Layer"] = Relationship( + back_populates="folder", + sa_relationship_kwargs={"cascade": "all, delete-orphan"}, + ) + # Constraints -UniqueConstraint(Folder.__table__.c.user_id, Folder.__table__.c.name) \ No newline at end of file 
+UniqueConstraint(Folder.__table__.c.user_id, Folder.__table__.c.name) diff --git a/src/db/models/job.py b/src/db/models/job.py index 04463d5..ed6a4e6 100644 --- a/src/db/models/job.py +++ b/src/db/models/job.py @@ -1,22 +1,22 @@ -from typing import List, TYPE_CHECKING +from typing import TYPE_CHECKING, List from uuid import UUID from sqlalchemy.dialects.postgresql import JSONB -from sqlmodel import Column, Field, Text, text, ARRAY, Boolean, ForeignKey, Relationship from sqlalchemy.dialects.postgresql import UUID as UUID_PG +from sqlmodel import ARRAY, Boolean, Column, Field, ForeignKey, Relationship, Text, text +from src.schemas.job import JobStatusType, JobType from ._base_class import DateTimeBase -from src.schemas.job import JobType, JobStatusType +from src.core.config import settings if TYPE_CHECKING: from .user import User - from .project import Project class Job(DateTimeBase, table=True): """Analysis Request model.""" __tablename__ = "job" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: UUID | None = Field( sa_column=Column( @@ -29,7 +29,7 @@ class Job(DateTimeBase, table=True): user_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.user.id", ondelete="CASCADE"), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"), nullable=False, ), description="User ID of the user who created the job", @@ -38,7 +38,9 @@ class Job(DateTimeBase, table=True): sa_column=Column(UUID_PG(as_uuid=True), nullable=True), description="Project ID of the project the job belongs to", ) - type: JobType = Field(sa_column=Column(Text, nullable=False), description="Type of the job") + type: JobType = Field( + sa_column=Column(Text, nullable=False), description="Type of the job" + ) layer_ids: List[UUID] | None = Field( sa_column=Column( ARRAY(UUID_PG()), @@ -47,9 +49,12 @@ class Job(DateTimeBase, table=True): ), description="Layer IDs that are produced by the job", ) - status: dict = Field(sa_column=Column(JSONB, nullable=False), description="Status of the job") + status: dict = Field( + sa_column=Column(JSONB, nullable=False), description="Status of the job" + ) status_simple: JobStatusType = Field( - sa_column=Column(Text, nullable=False, index=True), description="Simple status of the job" + sa_column=Column(Text, nullable=False, index=True), + description="Simple status of the job", ) msg_simple: str | None = Field( sa_column=Column(Text, nullable=True), description="Simple message of the job" diff --git a/src/db/models/layer.py b/src/db/models/layer.py index 5c2d37e..ee6f10d 100644 --- a/src/db/models/layer.py +++ b/src/db/models/layer.py @@ -26,7 +26,8 @@ if TYPE_CHECKING: from src.db.models.folder import Folder - from ._link_model import LayerProjectLink, LayerOrganizationLink, LayerTeamLink + + from ._link_model import LayerOrganizationLink, LayerProjectLink, LayerTeamLink from .data_store import DataStore @@ -352,7 +353,7 @@ class Layer(LayerBase, GeospatialAttributes, DateTimeBase, table=True): """Layer model.""" __tablename__ = "layer" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: UUID | None = Field( sa_column=Column( @@ -366,7 +367,7 @@ class Layer(LayerBase, GeospatialAttributes, DateTimeBase, table=True): user_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.user.id", ondelete="CASCADE"), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"), nullable=False, ), description="Layer owner ID", 
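Note: the hunks in this patch repeat one pattern: hardcoded "customer." schema prefixes in ForeignKey targets are replaced by the CUSTOMER_SCHEMA and ACCOUNTS_SCHEMA settings, so the user tables can move to a separate accounts schema. A minimal sketch of the pattern, illustrative only (the surrounding field and model context are assumed, not copied from the patch):

    from uuid import UUID
    from sqlalchemy import Column, ForeignKey
    from sqlalchemy.dialects.postgresql import UUID as UUID_PG
    from sqlmodel import Field
    from src.core.config import settings

    # FK target assembled from settings rather than a hardcoded schema name,
    # e.g. "accounts.user.id" when settings.ACCOUNTS_SCHEMA == "accounts".
    user_id: UUID = Field(
        sa_column=Column(
            UUID_PG(as_uuid=True),
            ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"),
            nullable=False,
        )
    )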
@@ -374,7 +375,7 @@ class Layer(LayerBase, GeospatialAttributes, DateTimeBase, table=True): folder_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.folder.id", ondelete="CASCADE"), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.folder.id", ondelete="CASCADE"), nullable=False, ), description="Layer folder ID", @@ -383,7 +384,10 @@ class Layer(LayerBase, GeospatialAttributes, DateTimeBase, table=True): sa_column=Column(Text, nullable=False), description="Layer type" ) data_store_id: UUID | None = Field( - sa_column=Column(UUID_PG(as_uuid=True), ForeignKey("customer.data_store.id")), + sa_column=Column( + UUID_PG(as_uuid=True), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.data_store.id"), + ), description="Data store ID of the layer", ) extent: str | None = Field( diff --git a/src/db/models/project.py b/src/db/models/project.py index 9230ff8..37939f6 100644 --- a/src/db/models/project.py +++ b/src/db/models/project.py @@ -20,13 +20,19 @@ from src.db.models.layer import ContentBaseAttributes if TYPE_CHECKING: - from _link_model import LayerProjectLink, UserProjectLink, ProjectTeamLink, ProjectOrganizationLink + from _link_model import ( + LayerProjectLink, + ProjectOrganizationLink, + ProjectTeamLink, + UserProjectLink, + ) + from .scenario import Scenario class Project(ContentBaseAttributes, DateTimeBase, table=True): __tablename__ = "project" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: UUID | None = Field( sa_column=Column( @@ -40,7 +46,7 @@ class Project(ContentBaseAttributes, DateTimeBase, table=True): user_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.user.id", ondelete="CASCADE"), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"), nullable=False, ), description="Project owner ID", @@ -48,7 +54,7 @@ class Project(ContentBaseAttributes, DateTimeBase, table=True): folder_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.folder.id", ondelete="CASCADE"), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.folder.id", ondelete="CASCADE"), nullable=False, ), description="Project folder ID", @@ -94,4 +100,5 @@ class Project(ContentBaseAttributes, DateTimeBase, table=True): sa_relationship_kwargs={"cascade": "all, delete-orphan"}, ) + UniqueConstraint(Project.__table__.c.folder_id, Project.__table__.c.name) diff --git a/src/db/models/scenario.py b/src/db/models/scenario.py index 2c1e9b4..dd933c9 100644 --- a/src/db/models/scenario.py +++ b/src/db/models/scenario.py @@ -10,8 +10,8 @@ Text, text, ) - from ._base_class import DateTimeBase +from src.core.config import settings if TYPE_CHECKING: from ._link_model import ScenarioScenarioFeatureLink @@ -21,7 +21,7 @@ class Scenario(DateTimeBase, table=True): __tablename__ = "scenario" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: UUID | None = Field( sa_column=Column( @@ -35,7 +35,7 @@ class Scenario(DateTimeBase, table=True): project_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.project.id", ondelete="CASCADE"), + ForeignKey(f"{settings.CUSTOMER_SCHEMA}.project.id", ondelete="CASCADE"), nullable=False, ), ) @@ -43,7 +43,7 @@ class Scenario(DateTimeBase, table=True): default=None, sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.user.id", ondelete="CASCADE"), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"), nullable=False, ), ) diff --git 
a/src/db/models/scenario_feature.py b/src/db/models/scenario_feature.py index f2def29..c18c861 100644 --- a/src/db/models/scenario_feature.py +++ b/src/db/models/scenario_feature.py @@ -23,8 +23,8 @@ Text, text, ) - from ._base_class import DateTimeBase +from src.core.config import settings if TYPE_CHECKING: from ._link_model import LayerProjectLink, ScenarioScenarioFeatureLink @@ -108,7 +108,7 @@ class ScenarioFeature(DateTimeBase, UserData, table=True): """Layer model.""" __tablename__ = "scenario_feature" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: UUID | None = Field( sa_column=Column( @@ -125,7 +125,9 @@ class ScenarioFeature(DateTimeBase, UserData, table=True): layer_project_id: int | None = Field( sa_column=Column( Integer, - ForeignKey("customer.layer_project.id", ondelete="CASCADE"), + ForeignKey( + f"{settings.CUSTOMER_SCHEMA}.layer_project.id", ondelete="CASCADE" + ), nullable=False, ), description="Project layer ID", diff --git a/src/db/models/system_setting.py b/src/db/models/system_setting.py index cf27e73..c03b386 100644 --- a/src/db/models/system_setting.py +++ b/src/db/models/system_setting.py @@ -4,10 +4,9 @@ from sqlalchemy import ForeignKey, text from sqlalchemy.dialects.postgresql import UUID as UUID_PG from sqlmodel import Column, Field, Relationship, SQLModel, Text - from src.db.models._base_class import DateTimeBase from src.db.models.user import User - +from src.core.config import settings class ClientThemeType(str, Enum): """Layer types that are supported.""" @@ -42,7 +41,7 @@ class SystemSettingBase(SQLModel): class SystemSetting(SystemSettingBase, DateTimeBase, table=True): __tablename__ = "system_setting" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} id: UUID | None = Field( sa_column=Column( @@ -56,7 +55,7 @@ class SystemSetting(SystemSettingBase, DateTimeBase, table=True): user_id: UUID = Field( sa_column=Column( UUID_PG(as_uuid=True), - ForeignKey("customer.user.id", ondelete="CASCADE"), + ForeignKey(f"{settings.ACCOUNTS_SCHEMA}.user.id", ondelete="CASCADE"), nullable=False, ), description="System Setting owner ID", diff --git a/src/db/models/system_task.py b/src/db/models/system_task.py index b042b0a..a3813f9 100644 --- a/src/db/models/system_task.py +++ b/src/db/models/system_task.py @@ -1,5 +1,6 @@ -from sqlmodel import Field, SQLModel, Column, DateTime, Text from datetime import datetime +from sqlmodel import Column, DateTime, Field, SQLModel, Text +from src.core.config import settings class SystemTaskBase(SQLModel): """Base class for system tasks requiring a last run timestamp.""" @@ -18,8 +19,9 @@ class SystemTaskBase(SQLModel): ), ) + class SystemTask(SystemTaskBase, table=True): """Table class for system tasks requiring a last run timestamp.""" __tablename__ = "system_task" - __table_args__ = {"schema": "customer"} + __table_args__ = {"schema": settings.CUSTOMER_SCHEMA} diff --git a/src/db/models/user.py b/src/db/models/user.py index bb7d85d..fb57121 100644 --- a/src/db/models/user.py +++ b/src/db/models/user.py @@ -7,23 +7,29 @@ Field, Relationship, SQLModel, + Text, ) +from src.core.config import settings if TYPE_CHECKING: + from ._link_model import UserTeamLink from .folder import Folder from .job import Job from .scenario import Scenario from .system_setting import SystemSetting - from ._link_model import UserTeamLink class User(SQLModel, table=True): __tablename__ = "user" - __table_args__ = {"schema": "customer"} + __table_args__ = 
{"schema": settings.ACCOUNTS_SCHEMA} id: UUID = Field( sa_column=Column(UUID_PG(as_uuid=True), primary_key=True, nullable=False) ) + firstname: str = Field(sa_column=Column(Text, nullable=True)) + lastname: str = Field(sa_column=Column(Text, nullable=True)) + avatar: str = Field(sa_column=Column(Text, nullable=True)) + # Relationships scenarios: List["Scenario"] = Relationship( back_populates="user", sa_relationship_kwargs={"cascade": "all, delete-orphan"} diff --git a/src/endpoints/v2/layer.py b/src/endpoints/v2/layer.py index 91ba66c..7b10c58 100644 --- a/src/endpoints/v2/layer.py +++ b/src/endpoints/v2/layer.py @@ -55,7 +55,6 @@ ILayerFromDatasetCreate, ILayerGet, ILayerRead, - ILayerReadShared, IMetadataAggregate, IMetadataAggregateRead, IRasterCreate, diff --git a/src/endpoints/v2/user.py b/src/endpoints/v2/user.py index 60594c0..a0c1c3f 100644 --- a/src/endpoints/v2/user.py +++ b/src/endpoints/v2/user.py @@ -10,61 +10,36 @@ router = APIRouter() @router.post( - "", - response_model=User, - summary="Create a user.", + "/data-schema", + response_model=None, + summary="Create data base schemas for the user.", status_code=201, ) -async def create_user( +async def create_user_base_data( *, async_session: AsyncSession = Depends(get_db), user_id: UUID4 = Depends(get_user_id), ): """Create a user. This will read the user ID from the JWT token or use the pre-defined user_id if running without authentication.""" - # Check if user already exists - user = await crud_user.get(async_session, id=user_id) - if user: - raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="User already exists") - else: - # Create user tables - await crud_user.create_user_data_tables(async_session, user_id=user_id) - try: - # Create user - user = await crud_user.create(async_session, obj_in=User(id=user_id)) - # Create home folder - folder = Folder(name="home", user_id=user_id) - await crud_folder.create( - async_session, - obj_in=folder, - ) - return user - except Exception as e: - await crud_user.delete_user_data_tables(async_session, user_id) - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) - -@router.get( - "", - response_model=User, - summary="Get a user.", - status_code=200, -) -async def get_user( - *, - async_session: AsyncSession = Depends(get_db), - user_id: UUID4 = Depends(get_user_id), -): - """Get a user. 
This will read the user ID saved in the GOAT DB.""" - user = await crud_user.get(async_session, id=user_id) - if user: - return user - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") + # Create user tables + await crud_user.create_user_data_tables(async_session, user_id=user_id) + try: + # Create home folder + folder = Folder(name="home", user_id=user_id) + await crud_folder.create( + async_session, + obj_in=folder, + ) + except Exception as e: + await crud_user.delete_user_data_tables(async_session, user_id) + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) + return @router.delete( - "", + "/data-schema", response_model=None, - summary="Delete a user and all of the related contents.", + summary="Delete all user related contents.", status_code=204, ) async def delete_user( @@ -76,8 +51,6 @@ async def delete_user( user = await crud_user.get(async_session, id=user_id) if user: - await crud_user.remove(async_session, id=user_id) - # Delete user tables await crud_user.delete_user_data_tables(async_session, user_id=user.id) else: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") diff --git a/src/schemas/layer.py b/src/schemas/layer.py index ea68ddc..972f883 100644 --- a/src/schemas/layer.py +++ b/src/schemas/layer.py @@ -132,6 +132,7 @@ class LayerReadBaseAttributes(BaseModel): shared_with: dict | None = Field( None, description="List of user IDs the layer is shared with" ) + owned_by: dict | None = Field(None, description="User ID of the owner") class LayerProperties(BaseModel): @@ -507,6 +508,7 @@ def __new__(cls, *args, **kwargs): ) return layer_read_class(**kwargs) + class ILayerReadShared(BaseModel): def __new__(cls, *args, **kwargs): layer = kwargs["layer"] diff --git a/src/schemas/project.py b/src/schemas/project.py index 6bba18a..59f53f6 100644 --- a/src/schemas/project.py +++ b/src/schemas/project.py @@ -72,6 +72,7 @@ class IProjectRead(ContentBaseAttributes, DateTimeBase): thumbnail_url: HttpUrl | None = Field(description="Project thumbnail URL") active_scenario_id: UUID | None = Field(None, description="Active scenario ID") shared_with: dict | None = Field(None, description="Shared with") + owned_by: dict | None = Field(None, description="Owned by") @optional diff --git a/tests/api/test_user.py b/tests/api/test_user.py index 723a080..ae14858 100644 --- a/tests/api/test_user.py +++ b/tests/api/test_user.py @@ -1,33 +1,18 @@ import pytest from httpx import AsyncClient - from src.core.config import settings - -async def test_get_user(client: AsyncClient, fixture_create_user): - response = await client.get( - f"{settings.API_V2_STR}/user", - ) - assert response.status_code == 200 - assert response.json()["id"] == fixture_create_user["id"] - @pytest.mark.asyncio -async def test_create_user(client: AsyncClient, fixture_create_user): - assert fixture_create_user["id"] is not None +async def test_create_user_data_schema(client: AsyncClient, fixture_create_user): + assert fixture_create_user is not None @pytest.mark.asyncio -async def test_delete_user( +async def test_delete_user_data_schema( client: AsyncClient, fixture_create_user, ): response = await client.delete( - f"{settings.API_V2_STR}/user", - ) - assert response.status_code == 204 - - # Check if user is deleted - response = await client.get( - f"{settings.API_V2_STR}/user", + f"{settings.API_V2_STR}/user/data-schema", ) - assert response.status_code == 404 + assert response.status_code == 204 \ No newline at
end of file diff --git a/tests/conftest.py b/tests/conftest.py index 63f1bc7..d14eb15 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,10 +8,19 @@ import pytest_asyncio from httpx import AsyncClient from sqlalchemy import text +from jose import jwt # Local application imports from src.core.config import settings -from src.db.models import LayerTeamLink, LayerOrganizationLink, Role, Team, Organization, ProjectTeamLink, ProjectOrganizationLink +from src.db.models import ( + LayerOrganizationLink, + LayerTeamLink, + Organization, + ProjectTeamLink, + Role, + Team, + User, +) from src.db.models.layer import LayerType from src.endpoints.deps import get_db, session_manager from src.main import app @@ -26,6 +35,7 @@ from src.schemas.project import ( request_examples as project_request_examples, ) +from src.crud.base import CRUDBase from tests.utils import ( check_job_status, check_user_data_deleted, @@ -108,13 +118,29 @@ async def db_session(): @pytest.fixture -async def fixture_create_user(client: AsyncClient): - # Setup: Create the user - response = await client.post(f"{settings.API_V2_STR}/user") - user = response.json() - yield user +async def fixture_create_user(client: AsyncClient, db_session): + # Get base user_id + scheme, _, token = settings.SAMPLE_AUTHORIZATION.partition(" ") + user_id = jwt.get_unverified_claims(token)["sub"] + + # Create a user + user = User( + id=user_id, + firstname="Green", + lastname="GOAT", + avatar="https://assets.plan4better.de/img/goat_app_subscription_professional.jpg", + ) + db_session.add(user) + await db_session.commit() + await db_session.refresh(user) + + # Setup: Create user data schemas + result = await client.post(f"{settings.API_V2_STR}/user/data-schema") + assert result.status_code == 201 + yield user.id # Teardown: Delete the user after the test - await client.delete(f"{settings.API_V2_STR}/user") + await CRUDBase(User).delete(db_session, id=user_id) + await client.delete(f"{settings.API_V2_STR}/user/data-schema") @pytest.fixture @@ -1094,7 +1120,7 @@ async def fixture_create_shared_team_layers( team2 = Team(name="test_team2", avatar="https://www.plan4better.de/logo.png") # Create role - role = Role(name="team_member") + role = Role(name="team-member") db_session.add(role) await db_session.commit() await db_session.refresh(role) @@ -1167,6 +1193,7 @@ async def fixture_create_shared_organization_layers( return {"organizations": [organization1, organization2], "layers": layers} + @pytest.fixture async def fixture_create_shared_team_projects( client: AsyncClient, fixture_create_folder, fixture_create_projects, db_session @@ -1180,7 +1207,7 @@ async def fixture_create_shared_team_projects( team2 = Team(name="test_team2", avatar="https://www.plan4better.de/logo.png") # Create role - role = Role(name="team_member") + role = Role(name="team-member") db_session.add(role) await db_session.commit() await db_session.refresh(role) From 76500b96f6845fdc9c6fdef4eed94a1c12d9b1ef Mon Sep 17 00:00:00 2001 From: majkshkurti Date: Tue, 17 Sep 2024 18:36:14 +0200 Subject: [PATCH 8/8] fix: authz --- pyproject.toml | 2 +- src/core/config.py | 2 + src/deps/auth.py | 105 +++++++++++++++++++++++++ src/endpoints/v2/active_mobility.py | 15 +++- src/endpoints/v2/folder.py | 27 ++++++- src/endpoints/v2/job.py | 5 ++ src/endpoints/v2/layer.py | 22 +++++- src/endpoints/v2/motorized_mobility.py | 55 +++++++------ src/endpoints/v2/project.py | 25 +++++- src/endpoints/v2/system.py | 11 ++- src/endpoints/v2/tool.py | 21 ++++- src/endpoints/v2/user.py | 25 ++++--
From 76500b96f6845fdc9c6fdef4eed94a1c12d9b1ef Mon Sep 17 00:00:00 2001
From: majkshkurti
Date: Tue, 17 Sep 2024 18:36:14 +0200
Subject: [PATCH 8/8] fix: authz

---
 pyproject.toml                         |   2 +-
 src/core/config.py                     |   2 +
 src/deps/auth.py                       | 105 +++++++++++++++++++++++++
 src/endpoints/v2/active_mobility.py    |  15 +++-
 src/endpoints/v2/folder.py             |  27 ++++++-
 src/endpoints/v2/job.py                |   5 ++
 src/endpoints/v2/layer.py              |  22 +++++-
 src/endpoints/v2/motorized_mobility.py |  55 +++++++------
 src/endpoints/v2/project.py            |  25 +++++-
 src/endpoints/v2/system.py             |  11 ++-
 src/endpoints/v2/tool.py               |  21 ++++-
 src/endpoints/v2/user.py               |  25 ++++--
 src/main.py                            |  27 ++-----
 13 files changed, 273 insertions(+), 69 deletions(-)
 create mode 100644 src/deps/auth.py

diff --git a/pyproject.toml b/pyproject.toml
index 510554a..3586361 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -65,7 +65,6 @@ morecantile = "^2.1.4"
 cppimport = "^21.3.7"
 pandas = "^2.0.2"
 pytest = "^7.1.1"
-sentry-sdk = "^1.5.0"
 alembic_utils = "^0.7.4"
 rich = "^11.0.0"
 sqlmodel = "^0.0.8"
@@ -96,6 +95,7 @@ pymgl = [
 
 matplotlib = "^3.8.2"
 cairosvg = "^2.7.1"
+sentry-sdk = {extras = ["fastapi"], version = "^2.14.0"}
 
 [tool.poetry.group.dev.dependencies]
 sqlalchemy-stubs = "^0.3"
 debugpy = "^1.4.1"
diff --git a/src/core/config.py b/src/core/config.py
index 2c3d9d1..a4f7c6e 100644
--- a/src/core/config.py
+++ b/src/core/config.py
@@ -119,6 +119,8 @@ def goat_routing_authorization(
         return None
 
     SAMPLE_AUTHORIZATION = "Bearer eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICI0OG80Z1JXelh3YXBTY3NTdHdTMXZvREFJRlNOa0NtSVFpaDhzcEJTc2kwIn0.eyJleHAiOjE2OTEwMDQ1NTYsImlhdCI6MTY5MTAwNDQ5NiwiYXV0aF90aW1lIjoxNjkxMDAyNjIzLCJqdGkiOiI1MjBiN2RhNC0xYmM0LTRiM2QtODY2ZC00NDU0ODY2YThiYjIiLCJpc3MiOiJodHRwczovL2Rldi5hdXRoLnBsYW40YmV0dGVyLmRlL3JlYWxtcy9tYXN0ZXIiLCJzdWIiOiI3NDRlNGZkMS02ODVjLTQ5NWMtOGIwMi1lZmViY2U4NzUzNTkiLCJ0eXAiOiJCZWFyZXIiLCJhenAiOiJzZWN1cml0eS1hZG1pbi1jb25zb2xlIiwibm9uY2UiOiJjNGIzMDQ3Yi0xODVmLTQyOWEtOGZlNS1lNDliNTVhMzE3MzIiLCJzZXNzaW9uX3N0YXRlIjoiMzk5ZTc2NWMtYjM1MC00NDEwLTg4YTMtYjU5NDIyMmJkZDlhIiwiYWNyIjoiMCIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwczovL2Rldi5hdXRoLnBsYW40YmV0dGVyLmRlIl0sInNjb3BlIjoib3BlbmlkIGVtYWlsIHByb2ZpbGUiLCJzaWQiOiIzOTllNzY1Yy1iMzUwLTQ0MTAtODhhMy1iNTk0MjIyYmRkOWEiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsInByZWZlcnJlZF91c2VybmFtZSI6InA0YiJ9.mjywr9Dv19egsXwM1fK6g3sZ0trk87X0tEfK7oOizuBuCdkr6PZN1Eg58FCdjIgEBXqjltOWV43UIkXde4iPVa-KU5Q34Qjv6w0STa3Aq9vFbaUfSm_690qCdr8XSKMJUWQXWYwD2cjck5UCqf7-QqsF2Ab56i40_CJLZkJOi25WKIC855qPDi8BkJgh5eWoxobdyCbwJMEeoM-3QnxY5ikib5a2_AASEN3_5MYmT6-fvpW2t-MS6u4vtcG-WfqriK8YNoGPS2a1pFjLqQLHkM__j0O_t4wXP56x9yjkUdHCXqVcSlDvZYNWrv5CLqecqjOoliNMs6RTu9gV0Gr-cA"
+    KEYCLOAK_SERVER_URL: Optional[str] = "http://auth-keycloak:8080"
+    REALM_NAME: Optional[str] = "p4b"
     CELERY_TASK_TIME_LIMIT: Optional[int] = 60  # seconds
     RUN_AS_BACKGROUND_TASK: Optional[bool] = True
     MAX_NUMBER_PARALLEL_JOBS: Optional[int] = 6
diff --git a/src/deps/auth.py b/src/deps/auth.py
new file mode 100644
index 0000000..3f59572
--- /dev/null
+++ b/src/deps/auth.py
@@ -0,0 +1,112 @@
+import requests
+from fastapi import Depends, HTTPException, Request, status
+from fastapi.security import OAuth2PasswordBearer
+from jose import JOSEError, jwt
+from sqlalchemy import text
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from src.core.config import settings
+from src.endpoints.deps import get_db
+
+auth_key = None
+try:
+    ISSUER_URL = f"{settings.KEYCLOAK_SERVER_URL}/realms/{settings.REALM_NAME}"
+
+    _auth_server_public_key = requests.get(ISSUER_URL).json().get("public_key")
+    auth_key = (
+        "-----BEGIN PUBLIC KEY-----\n"
+        + _auth_server_public_key
+        + "\n-----END PUBLIC KEY-----"
+    )  # noqa: E501
+except Exception:
+    print("Error getting public key from Keycloak")
+
+oauth2_scheme = OAuth2PasswordBearer(
+    tokenUrl=f"{settings.API_V2_STR}/auth/access-token",
+)
+
+
+def decode_token(token: str):
+    """
+    Decodes a JWT token.
+    """
+    user_token = jwt.decode(
+        token,
+        key=auth_key,
+        issuer=ISSUER_URL,
+        options={
+            "verify_signature": True,
+            "verify_aud": False,
+            "verify_iss": True,
+        },
+    )
+
+    return user_token
+
+
+async def auth(token: str = Depends(oauth2_scheme)) -> str:
+    try:
+        decode_token(token)
+    except JOSEError as e:
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(e))
+
+    return token
+
+
+def user_token(token: str = Depends(auth)) -> dict:
+    payload = decode_token(token)
+    return payload
+
+
+def is_superuser(user_token: dict = Depends(user_token), throw_error: bool = True):
+    is_superuser = False
+    if user_token["realm_access"] and user_token["realm_access"]["roles"]:
+        is_superuser = "superuser" in user_token["realm_access"]["roles"]
+
+    if not is_superuser and throw_error:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED, detail="Unauthorized"
+        )
+
+    return is_superuser
+
+
+def clean_path(path: str) -> str:
+    return path.replace(settings.API_V2_STR + "/", "")
+
+
+async def auth_z(
+    request: Request,
+    user_token: dict = Depends(user_token),
+    async_session: AsyncSession = Depends(get_db),
+) -> bool:
+    try:
+        user_id = user_token["sub"]
+        path = request.scope.get("path")
+        route = request.scope.get("route")
+        method = request.scope.get("method")
+        if path and route and method and user_id:
+            cleaned_path = clean_path(
+                path
+            )  # e.g /organizations/b65e040a-f8f0-453f-9888-baa2b9342cce
+            cleaned_route_path = clean_path(
+                route.path
+            )  # e.g /organizations/{organization_id}
+            authz_query = text(
+                f"SELECT * FROM {settings.ACCOUNTS_SCHEMA}.authorization(:user_id, :route, :path, :method);"
+            )
+            response = await async_session.execute(
+                authz_query,
+                {
+                    "user_id": user_id,
+                    "route": cleaned_route_path,
+                    "path": cleaned_path,
+                    "method": method,
+                },
+            )
+            state = response.scalars().all()
+            if not state or not len(state) or state[0] is False:
+                raise ValueError("Unauthorized")
+            return True
+        else:
+            raise ValueError("Missing path, route, or method in request scope")
+    except Exception as e:
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(e))
diff --git a/src/endpoints/v2/active_mobility.py b/src/endpoints/v2/active_mobility.py
index 1b19b21..2c9b380 100644
--- a/src/endpoints/v2/active_mobility.py
+++ b/src/endpoints/v2/active_mobility.py
@@ -2,19 +2,22 @@
 from src.core.tool import start_calculation
 from src.crud.crud_catchment_area import CRUDCatchmentAreaActiveMobility
+from src.crud.crud_heatmap_closest_average import CRUDHeatmapClosestAverage
+from src.crud.crud_heatmap_connectivity import CRUDHeatmapConnectivity
+from src.crud.crud_heatmap_gravity import CRUDHeatmapGravity
+from src.deps.auth import auth_z
 from src.endpoints.deps import get_http_client
 from src.schemas.catchment_area import (
     ICatchmentAreaActiveMobility,
+)
+from src.schemas.catchment_area import (
     request_examples_catchment_area_active_mobility as active_mobility_request_examples,
 )
 from src.schemas.heatmap import (
-    IHeatmapGravityActive,
     IHeatmapClosestAverageActive,
     IHeatmapConnectivityActive,
+    IHeatmapGravityActive,
 )
-from src.crud.crud_heatmap_gravity import CRUDHeatmapGravity
-from src.crud.crud_heatmap_closest_average import CRUDHeatmapClosestAverage
-from src.crud.crud_heatmap_connectivity import CRUDHeatmapConnectivity
 from src.schemas.job import JobType
 from src.schemas.toolbox_base import CommonToolParams, IToolResponse
 
@@ -26,6 +29,7 @@
     summary="Compute catchment areas for active mobility",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_active_mobility_catchment_area(
     *,
@@ -56,6 +60,7 @@ async def compute_active_mobility_catchment_area(
     summary="Compute heatmap gravity for active mobility",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_active_mobility_heatmap_gravity(
     *,
@@ -85,6 +90,7 @@ async def compute_active_mobility_heatmap_gravity(
     summary="Compute heatmap closest-average for active mobility",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_active_mobility_heatmap_closest_average(
     *,
@@ -114,6 +120,7 @@ async def compute_active_mobility_heatmap_closest_average(
     summary="Compute heatmap connectivity for active mobility",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_active_mobility_heatmap_connectivity(
     *,
diff --git a/src/endpoints/v2/folder.py b/src/endpoints/v2/folder.py
index d5e1744..00fe5e4 100644
--- a/src/endpoints/v2/folder.py
+++ b/src/endpoints/v2/folder.py
@@ -1,10 +1,23 @@
 from typing import List
-from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, status, BackgroundTasks
+
+from fastapi import (
+    APIRouter,
+    BackgroundTasks,
+    Body,
+    Depends,
+    HTTPException,
+    Path,
+    Query,
+    status,
+)
 from pydantic import UUID4
-from sqlalchemy import select, func
+from sqlalchemy import func, select
+
+from src.core.config import settings
 from src.crud.crud_folder import folder as crud_folder
 from src.db.models.folder import Folder
 from src.db.session import AsyncSession
+from src.deps.auth import auth_z
 from src.endpoints.deps import get_db, get_user_id
 from src.schemas.common import OrderEnum
 from src.schemas.folder import (
@@ -15,7 +28,6 @@
 from src.schemas.folder import (
     request_examples as folder_request_examples,
 )
-from src.core.config import settings
 
 router = APIRouter()
 
@@ -26,6 +38,7 @@
     summary="Create a new folder",
     response_model=FolderRead,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def create_folder(
     *,
@@ -57,6 +70,7 @@ async def create_folder(
     summary="Retrieve a folder by its ID",
     response_model=FolderRead,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_folder(
     *,
@@ -87,6 +101,7 @@ async def read_folder(
     response_model=List[FolderRead],
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_folders(
     *,
@@ -123,6 +138,7 @@ async def read_folders(
     summary="Update a folder with new data",
     response_model=FolderUpdate,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def update_folder(
     *,
@@ -154,6 +170,7 @@ async def update_folder(
     summary="Delete a folder and all its contents",
     response_model=None,
     status_code=204,
+    dependencies=[Depends(auth_z)],
 )
 async def delete_folder(
     *,
@@ -168,5 +185,7 @@ async def delete_folder(
 ):
     """Delete a folder and all its contents"""
 
-    await crud_folder.delete(async_session, background_tasks=background_tasks, id=folder_id, user_id=user_id)
+    await crud_folder.delete(
+        async_session, background_tasks=background_tasks, id=folder_id, user_id=user_id
+    )
     return
diff --git a/src/endpoints/v2/job.py b/src/endpoints/v2/job.py
index e4eca8e..8407dff 100644
--- a/src/endpoints/v2/job.py
+++ b/src/endpoints/v2/job.py
@@ -9,6 +9,7 @@
 from src.crud.crud_job import job as crud_job
 from src.db.models.job import Job
+from src.deps.auth import auth_z
 from src.endpoints.deps import get_db, get_user_id
 from src.schemas.common import OrderEnum
 from src.schemas.job import JobStatusType, JobType
@@ -22,6 +23,7 @@
     response_model_exclude_none=True,
     status_code=200,
     summary="Get a job by its ID.",
+    dependencies=[Depends(auth_z)],
 )
 async def get_job(
     async_session: AsyncSession = Depends(get_db),
@@ -49,6 +51,7 @@ async def get_job(
     response_model_exclude_none=True,
     status_code=200,
     summary="Retrieve a list of jobs using different filters.",
+    dependencies=[Depends(auth_z)],
 )
 async def read_jobs(
     async_session: AsyncSession = Depends(get_db),
@@ -107,6 +110,7 @@ async def read_jobs(
     response_model_exclude_none=True,
     status_code=200,
     summary="Mark jobs as read.",
+    dependencies=[Depends(auth_z)],
 )
 async def mark_jobs_as_read(
     async_session: AsyncSession = Depends(get_db),
@@ -132,6 +136,7 @@ async def mark_jobs_as_read(
     response_model_exclude_none=True,
     status_code=200,
     summary="Kill a job.",
+    dependencies=[Depends(auth_z)],
 )
 async def kill_job(
     async_session: AsyncSession = Depends(get_db),
diff --git a/src/endpoints/v2/layer.py b/src/endpoints/v2/layer.py
index 7b10c58..40bd1fe 100644
--- a/src/endpoints/v2/layer.py
+++ b/src/endpoints/v2/layer.py
@@ -27,7 +27,6 @@
 # Local application imports
 from src.core.content import (
     read_content_by_id,
-    read_contents_by_ids,
 )
 from src.crud.crud_job import job as crud_job
 from src.crud.crud_layer import CRUDLayerDatasetUpdate, CRUDLayerExport, CRUDLayerImport
@@ -41,8 +40,9 @@
     TableUploadType,
 )
 from src.db.session import AsyncSession
+from src.deps.auth import auth_z
 from src.endpoints.deps import get_db, get_user_id
-from src.schemas.common import ContentIdList, OrderEnum
+from src.schemas.common import OrderEnum
 from src.schemas.error import HTTPErrorHandler
 from src.schemas.job import JobType
 from src.schemas.layer import (
@@ -75,6 +75,7 @@
     summary="Upload file to server and validate",
     response_model=IFileUploadMetadata,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def file_upload(
     *,
@@ -122,6 +123,7 @@ async def file_upload(
     summary="Fetch data from external service into a file, upload file to server and validate",
     response_model=IFileUploadMetadata,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def file_upload_external_service(
     *,
@@ -230,6 +232,7 @@ async def _create_layer_from_dataset(
     response_class=JSONResponse,
     status_code=201,
     description="Generate a new layer from a file that was previously uploaded using the file-upload endpoint.",
+    dependencies=[Depends(auth_z)],
 )
 async def create_layer_feature_standard(
     background_tasks: BackgroundTasks,
@@ -263,6 +266,7 @@ async def create_layer_feature_standard(
     response_model=IRasterRead,
     status_code=201,
     description="Generate a new layer based on a URL for a raster service hosted externally.",
+    dependencies=[Depends(auth_z)],
 )
 async def create_layer_raster(
     async_session: AsyncSession = Depends(get_db),
@@ -286,6 +290,7 @@ async def create_layer_raster(
     response_class=JSONResponse,
     status_code=201,
     description="Generate a new layer from a file that was previously uploaded using the file-upload endpoint.",
+    dependencies=[Depends(auth_z)],
 )
 async def create_layer_table(
     background_tasks: BackgroundTasks,
@@ -319,6 +324,7 @@ async def create_layer_table(
     response_class=FileResponse,
     status_code=201,
     description="Export a layer to a zip file.",
+    dependencies=[Depends(auth_z)],
 )
 async def export_layer(
     async_session: AsyncSession = Depends(get_db),
@@ -353,6 +359,7 @@ async def export_layer(
     response_model=ILayerRead,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_layer(
     async_session: AsyncSession = Depends(get_db),
@@ -367,12 +374,14 @@ async def read_layer(
         async_session=async_session, id=layer_id, model=Layer, crud_content=crud_layer
     )
 
+
 @router.post(
     "",
     response_model=Page[ILayerRead],
     response_model_exclude_none=True,
     status_code=200,
     summary="Retrieve a list of layers using different filters including a spatial filter. If no filter is specified, all layers will be returned.",
+    dependencies=[Depends(auth_z)],
 )
 async def read_layers(
     async_session: AsyncSession = Depends(get_db),
@@ -431,6 +440,7 @@ async def read_layers(
     response_model_exclude_none=True,
     status_code=200,
     summary="Retrieve a list of layers using different filters including a spatial filter. If no filter is specified, all layers will be returned.",
+    dependencies=[Depends(auth_z)],
 )
 async def read_catalog_layers(
     async_session: AsyncSession = Depends(get_db),
@@ -472,6 +482,7 @@ async def read_catalog_layers(
     response_model=ILayerRead,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def update_layer(
     async_session: AsyncSession = Depends(get_db),
@@ -496,6 +507,7 @@ async def update_layer(
     "/{layer_id}/dataset",
     response_class=JSONResponse,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def update_layer_dataset(
     background_tasks: BackgroundTasks,
@@ -564,6 +576,7 @@ async def update_layer_dataset(
     response_model=None,
     summary="Delete a layer and its data in case of an internal layer.",
     status_code=204,
+    dependencies=[Depends(auth_z)],
 )
 async def delete_layer(
     async_session: AsyncSession = Depends(get_db),
@@ -588,6 +601,7 @@ async def delete_layer(
     summary="Get feature count",
     response_class=JSONResponse,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def get_feature_count(
     async_session: AsyncSession = Depends(get_db),
@@ -628,6 +642,7 @@ async def get_feature_count(
     summary="Get area statistics of a layer",
     response_class=JSONResponse,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def get_area_statistics(
     async_session: AsyncSession = Depends(get_db),
@@ -666,6 +681,7 @@ async def get_area_statistics(
     summary="Get unique values of a column",
     response_model=Page[IUniqueValue],
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def get_unique_values(
     async_session: AsyncSession = Depends(get_db),
@@ -712,6 +728,7 @@ async def get_unique_values(
     summary="Get statistics of a column",
     response_class=JSONResponse,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def class_breaks(
     async_session: AsyncSession = Depends(get_db),
@@ -768,6 +785,7 @@ async def class_breaks(
     summary="Return the count of layers for different metadata values acting as filters",
     response_model=IMetadataAggregateRead,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def metadata_aggregate(
     async_session: AsyncSession = Depends(get_db),
diff --git a/src/endpoints/v2/motorized_mobility.py b/src/endpoints/v2/motorized_mobility.py
index a7216a2..ed9b1ab 100644
--- a/src/endpoints/v2/motorized_mobility.py
+++ b/src/endpoints/v2/motorized_mobility.py
@@ -1,40 +1,40 @@
 from fastapi import APIRouter, Body, Depends
+
 from src.core.tool import start_calculation
 from src.crud.crud_catchment_area import (
-    CRUDCatchmentAreaPT,
     CRUDCatchmentAreaCar,
+    CRUDCatchmentAreaPT,
 )
-from src.crud.crud_trip_count_station import CRUDTripCountStation
-from src.crud.crud_oev_gueteklasse import CRUDOevGueteklasse
+from src.crud.crud_heatmap_closest_average import CRUDHeatmapClosestAverage
+from src.crud.crud_heatmap_connectivity import CRUDHeatmapConnectivity
+from src.crud.crud_heatmap_gravity import CRUDHeatmapGravity
 from src.crud.crud_nearby_station_access import CRUDNearbyStationAccess
+from src.crud.crud_oev_gueteklasse import CRUDOevGueteklasse
+from src.crud.crud_trip_count_station import CRUDTripCountStation
+from src.deps.auth import auth_z
 from src.endpoints.deps import get_http_client
-from src.schemas.job import JobType
 from src.schemas.catchment_area import (
-    ICatchmentAreaPT,
     ICatchmentAreaCar,
-    request_examples_catchment_area_pt,
+    ICatchmentAreaPT,
     request_examples_catchment_area_car,
+    request_examples_catchment_area_pt,
 )
-from src.schemas.oev_gueteklasse import (
-    IOevGueteklasse,
-    request_example_oev_gueteklasse,
+from src.schemas.heatmap import (
+    IHeatmapClosestAverageMotorized,
+    IHeatmapConnectivityMotorized,
+    IHeatmapGravityMotorized,
 )
-from src.schemas.trip_count_station import ITripCountStation
+from src.schemas.job import JobType
 from src.schemas.nearby_station_access import (
     INearbyStationAccess,
     request_example_nearby_station_access,
 )
-from src.schemas.heatmap import (
-    IHeatmapGravityMotorized,
-    IHeatmapClosestAverageMotorized,
-    IHeatmapConnectivityMotorized,
+from src.schemas.oev_gueteklasse import (
+    IOevGueteklasse,
+    request_example_oev_gueteklasse,
 )
-from src.crud.crud_heatmap_gravity import CRUDHeatmapGravity
-from src.crud.crud_heatmap_closest_average import CRUDHeatmapClosestAverage
-from src.crud.crud_heatmap_connectivity import CRUDHeatmapConnectivity
-from src.schemas.toolbox_base import IToolResponse
-from src.schemas.toolbox_base import CommonToolParams
-
+from src.schemas.toolbox_base import CommonToolParams, IToolResponse
+from src.schemas.trip_count_station import ITripCountStation
 
 router = APIRouter()
 
@@ -44,6 +44,7 @@
     summary="Compute catchment areas for public transport.",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_pt_catchment_area(
     *,
@@ -73,6 +74,7 @@ async def compute_pt_catchment_area(
     summary="Compute catchment areas for car.",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_car_catchment_area(
     *,
@@ -102,6 +104,7 @@ async def compute_car_catchment_area(
     summary="Calculate ÖV-Güteklassen.",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_oev_gueteklassen(
     *,
@@ -125,18 +128,18 @@ async def compute_oev_gueteklassen(
         params=params,
     )
 
+
 @router.post(
     "/trip-count-station",
     summary="Calculate trip count per station.",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_trip_count_station(
     *,
     common: CommonToolParams = Depends(),
-    params: ITripCountStation = Body(
-        ..., examples=request_example_oev_gueteklasse
-    ),
+    params: ITripCountStation = Body(..., examples=request_example_oev_gueteklasse),
 ):
     """Calculates the number of trips per station and public transport mode."""
 
@@ -151,11 +154,13 @@ async def compute_trip_count_station(
         params=params,
     )
 
+
 @router.post(
     "/nearby-station-access",
     summary="Get public transport stops and their trips that are accessible by walking/cycling.",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_nearby_station_access(
     *,
@@ -178,11 +183,13 @@ async def compute_nearby_station_access(
         params=params,
     )
 
+
 @router.post(
     "/heatmap-gravity",
     summary="Compute heatmap gravity for motorized mobility",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_motorized_mobility_heatmap_gravity(
     *,
@@ -212,6 +219,7 @@ async def compute_motorized_mobility_heatmap_gravity(
     summary="Compute heatmap closest-average for motorized mobility",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_motorized_mobility_heatmap_closest_average(
     *,
@@ -241,6 +249,7 @@ async def compute_motorized_mobility_heatmap_closest_average(
     summary="Compute heatmap connectivity for motorized mobility",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def compute_motorized_mobility_heatmap_connectivity(
     *,
diff --git a/src/endpoints/v2/project.py b/src/endpoints/v2/project.py
index c4e99c2..58c21ab 100644
--- a/src/endpoints/v2/project.py
+++ b/src/endpoints/v2/project.py
@@ -18,8 +18,9 @@ from src.db.models.scenario import Scenario
 from src.db.models.scenario_feature import ScenarioFeature
 from src.db.session import AsyncSession
+from src.deps.auth import auth_z
 from src.endpoints.deps import get_db, get_scenario, get_user_id
-from src.schemas.common import ContentIdList, OrderEnum
+from src.schemas.common import OrderEnum
 from src.schemas.error import HTTPErrorHandler
 from src.schemas.project import (
     IFeatureStandardProjectRead,
@@ -55,6 +56,7 @@
     response_model=IProjectRead,
     response_model_exclude_none=True,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def create_project(
     async_session: AsyncSession = Depends(get_db),
@@ -80,6 +82,7 @@ async def create_project(
     response_model=IProjectRead,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_project(
     async_session: AsyncSession = Depends(get_db),
@@ -107,6 +110,7 @@ async def read_project(
     response_model=Page[IProjectRead],
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_projects(
     async_session: AsyncSession = Depends(get_db),
@@ -151,11 +155,13 @@ async def read_projects(
 
     return projects
 
+
 @router.put(
     "/{project_id}",
     response_model=IProjectRead,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def update_project(
     async_session: AsyncSession = Depends(get_db),
@@ -183,6 +189,7 @@ async def update_project(
     "/{project_id}",
     response_model=None,
     status_code=204,
+    dependencies=[Depends(auth_z)],
 )
 async def delete_project(
     async_session: AsyncSession = Depends(get_db),
@@ -211,6 +218,7 @@ async def delete_project(
     response_model=InitialViewState,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_project_initial_view_state(
     async_session: AsyncSession = Depends(get_db),
@@ -235,6 +243,7 @@ async def read_project_initial_view_state(
     response_model=InitialViewState,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def update_project_initial_view_state(
     async_session: AsyncSession = Depends(get_db),
@@ -277,6 +286,7 @@ async def update_project_initial_view_state(
     ],
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def add_layers_to_project(
     async_session: AsyncSession = Depends(get_db),
@@ -313,6 +323,7 @@ async def add_layers_to_project(
     ],
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def get_layers_from_project(
     async_session: AsyncSession = Depends(get_db),
@@ -340,6 +351,7 @@ async def get_layers_from_project(
     | IRasterProjectRead,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def get_layer_from_project(
     async_session: AsyncSession = Depends(get_db),
@@ -368,6 +380,7 @@ async def get_layer_from_project(
     | IRasterProjectRead,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def update_layer_in_project(
     async_session: AsyncSession = Depends(get_db),
@@ -416,6 +429,7 @@ async def update_layer_in_project(
     "/{project_id}/layer",
     response_model=None,
     status_code=204,
+    dependencies=[Depends(auth_z)],
 )
 async def delete_layer_from_project(
     async_session: AsyncSession = Depends(get_db),
@@ -465,6 +479,7 @@ async def delete_layer_from_project(
     response_model=dict,
     response_model_exclude_none=True,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def get_chart_data(
     async_session: AsyncSession = Depends(get_db),
@@ -506,6 +521,7 @@ async def get_chart_data(
     summary="Retrieve a list of scenarios",
     response_model=Page[Scenario],
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_scenarios(
     async_session: AsyncSession = Depends(get_db),
@@ -547,6 +563,7 @@ async def read_scenarios(
     status_code=201,
     response_model=Scenario,
     response_model_exclude_none=True,
+    dependencies=[Depends(auth_z)],
 )
 async def create_scenario(
     async_session: AsyncSession = Depends(get_db),
@@ -578,6 +595,7 @@ async def create_scenario(
     "/{project_id}/scenario/{scenario_id}",
     summary="Update scenario",
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def update_scenario(
     async_session: AsyncSession = Depends(get_db),
@@ -601,6 +619,7 @@ async def update_scenario(
     "/{project_id}/scenario/{scenario_id}",
     summary="Delete scenario",
     status_code=204,
+    dependencies=[Depends(auth_z)],
 )
 async def delete_scenario(
     async_session: AsyncSession = Depends(get_db),
@@ -626,6 +645,7 @@ async def delete_scenario(
     summary="Retrieve a list of scenario features",
     response_class=JSONResponse,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_scenario_features(
     async_session: AsyncSession = Depends(get_db),
@@ -648,6 +668,7 @@ async def read_scenario_features(
     summary="Create scenario features",
     response_class=JSONResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def create_scenario_features(
     async_session: AsyncSession = Depends(get_db),
@@ -674,6 +695,7 @@ async def create_scenario_features(
     "/{project_id}/layer/{layer_project_id}/scenario/{scenario_id}/features",
     summary="Update scenario features",
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def update_scenario_feature(
     async_session: AsyncSession = Depends(get_db),
@@ -716,6 +738,7 @@ async def update_scenario_feature(
     "/{project_id}/layer/{layer_project_id}/scenario/{scenario_id}/features/{feature_id}",
     summary="Delete scenario feature",
     status_code=204,
+    dependencies=[Depends(auth_z)],
 )
 async def delete_scenario_features(
     async_session: AsyncSession = Depends(get_db),
diff --git a/src/endpoints/v2/system.py b/src/endpoints/v2/system.py
index e988bdd..e01ebc6 100644
--- a/src/endpoints/v2/system.py
+++ b/src/endpoints/v2/system.py
@@ -3,6 +3,7 @@
 from sqlalchemy.ext.asyncio import AsyncSession
 
 from src.crud.crud_system_setting import system_setting as crud_system_setting
+from src.deps.auth import auth_z
 from src.endpoints.deps import get_db, get_user_id
 from src.schemas.system_setting import (
     SystemSettingsCreate,
@@ -22,16 +23,21 @@
     summary="Retrieve system settings",
     response_model=SystemSettingsRead,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def read_system_settings(
-    *, async_session: AsyncSession = Depends(get_db), user_id: UUID4 = Depends(get_user_id)
+    *,
+    async_session: AsyncSession = Depends(get_db),
+    user_id: UUID4 = Depends(get_user_id),
 ):
     """Retrieve system settings"""
     system_settings = await crud_system_setting.get_by_multi_keys(
         async_session, keys={"user_id": user_id}
     )
     if not system_settings or len(system_settings) == 0:
-        default_system_settings_obj_in = SystemSettingsCreate(**default_system_settings.dict())
+        default_system_settings_obj_in = SystemSettingsCreate(
+            **default_system_settings.dict()
+        )
         default_system_settings_obj_in.user_id = user_id
         system_settings = await crud_system_setting.create(
             async_session, obj_in=default_system_settings_obj_in
@@ -45,6 +51,7 @@ async def read_system_settings(
     summary="Update system settings",
     response_model=SystemSettingsRead,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def update_system_settings(
     *,
diff --git a/src/endpoints/v2/tool.py b/src/endpoints/v2/tool.py
index c5c4217..4368744 100644
--- a/src/endpoints/v2/tool.py
+++ b/src/endpoints/v2/tool.py
@@ -1,20 +1,26 @@
 from fastapi import APIRouter, Body, Depends
+
 from src.core.tool import CRUDToolBase, start_calculation
 from src.crud.crud_data_management import CRUDJoin
-from src.crud.crud_geoanalysis import CRUDAggregatePoint, CRUDAggregatePolygon, CRUDOriginDestination
+from src.crud.crud_geoanalysis import (
+    CRUDAggregatePoint,
+    CRUDAggregatePolygon,
+    CRUDOriginDestination,
+)
 from src.crud.crud_geoprocessing import CRUDBuffer
+from src.deps.auth import auth_z
 from src.schemas.error import http_error_handler
 from src.schemas.job import JobType, Msg
 from src.schemas.tool import (
     IAggregationPoint,
     IAggregationPolygon,
-    IJoin,
     IBuffer,
+    IJoin,
     IOriginDestination,
+    request_example_buffer,
     request_examples_aggregation_point,
     request_examples_aggregation_polygon,
     request_examples_join,
-    request_example_buffer,
 )
 from src.schemas.toolbox_base import (
     CommonToolParams,
@@ -30,6 +36,7 @@
     summary="Check reference area",
     response_model=Msg,
     status_code=200,
+    dependencies=[Depends(auth_z)],
 )
 async def check_reference_area(
     common: CommonToolParams = Depends(),
@@ -67,6 +74,7 @@ async def check_reference_area(
     summary="Join two layers.",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def join(
     common: CommonToolParams = Depends(),
@@ -95,6 +103,7 @@ async def join(
     summary="Aggregate points",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def aggregate_points(
     *,
@@ -123,6 +132,7 @@ async def aggregate_points(
     summary="Aggregate polygons",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def aggregate_polygons(
     *,
@@ -151,6 +161,7 @@ async def aggregate_polygons(
     summary="Buffer",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def buffer(
     *,
@@ -173,11 +184,13 @@ async def buffer(
         params=params,
     )
 
+
 @router.post(
     "/origin-destination",
     summary="Origin Destination",
     response_model=IToolResponse,
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def origin_destination(
     *,
@@ -197,4 +210,4 @@ async def origin_destination(
         background_tasks=common.background_tasks,
         project_id=common.project_id,
         params=params,
-    )
\ No newline at end of file
+    )
diff --git a/src/endpoints/v2/user.py b/src/endpoints/v2/user.py
index a0c1c3f..39d7bc2 100644
--- a/src/endpoints/v2/user.py
+++ b/src/endpoints/v2/user.py
@@ -1,19 +1,22 @@
-from fastapi import APIRouter, Depends, HTTPException
-from fastapi import status
-from src.db.models.user import User
+from fastapi import APIRouter, Depends, HTTPException, status
+from pydantic import UUID4
+
+from src.crud.crud_folder import folder as crud_folder
+from src.crud.crud_user import user as crud_user
 from src.db.models.folder import Folder
 from src.db.session import AsyncSession
-from pydantic import UUID4
+from src.deps.auth import auth_z
 from src.endpoints.deps import get_db, get_user_id
-from src.crud.crud_user import user as crud_user
-from src.crud.crud_folder import folder as crud_folder
+
 
 router = APIRouter()
 
+
 @router.post(
     "/data-schema",
     response_model=None,
     summary="Create database schemas for the user.",
     status_code=201,
+    dependencies=[Depends(auth_z)],
 )
 async def create_user_base_data(
     *,
@@ -33,14 +36,18 @@ async def create_user_base_data(
         )
     except Exception as e:
         await crud_user.delete_user_data_tables(async_session, user_id)
-        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
+        )
     return
 
+
 @router.delete(
     "/data-schema",
     response_model=None,
     summary="Delete all user-related contents.",
     status_code=204,
+    dependencies=[Depends(auth_z)],
 )
 async def delete_user(
     *,
@@ -53,5 +60,7 @@ async def delete_user(
     if user:
         await crud_user.delete_user_data_tables(async_session, user_id=user.id)
     else:
-        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND, detail="User not found"
+        )
     return
diff --git a/src/main.py b/src/main.py
index 6d339f9..77845b8 100644
--- a/src/main.py
+++ b/src/main.py
@@ -7,7 +7,6 @@
 from fastapi.openapi.docs import get_swagger_ui_html
 from fastapi.responses import JSONResponse
 from fastapi.staticfiles import StaticFiles
-from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
 from sqlalchemy.exc import IntegrityError
 from starlette.middleware.cors import CORSMiddleware
 
@@ -16,6 +15,13 @@
 from src.endpoints.deps import close_http_client
 from src.endpoints.v2.api import router as api_router_v2
 
+if settings.SENTRY_DSN and settings.ENVIRONMENT:
+    sentry_sdk.init(
+        dsn=settings.SENTRY_DSN,
+        environment=settings.ENVIRONMENT,
+        traces_sample_rate=1.0 if settings.ENVIRONMENT == "prod" else 0.1,
+    )
+
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
@@ -68,19 +74,6 @@ async def swagger_ui_html():
     allow_headers=["*"],
 )
 
-if settings.SENTRY_DSN:
-    sentry_sdk.init(
-        dsn=settings.SENTRY_DSN,
-        environment=os.getenv("NAMESPACE", "dev"),
-        traces_sample_rate=0.2,
-    )
-
-try:
-    app.add_middleware(SentryAsgiMiddleware)
-except Exception:
-    # pass silently if the Sentry integration failed
-    pass
-
 
 @app.get("/api/healthz", description="Health Check", tags=["Health Check"])
 def ping():
@@ -88,12 +81,6 @@ def ping():
     return {"ping": "pong!"}
 
 
-# Calling this endpoint to see if the setup works. If yes, an error message will show in Sentry dashboard
-@app.get("/api/sentry", include_in_schema=False)
-async def sentry():
-    raise Exception("Test sentry integration")
-
-
 app.include_router(api_router_v2, prefix=settings.API_V2_STR)
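
With sentry-sdk 2.x and the "fastapi" extra, initializing the SDK at import time is sufficient: the Starlette/FastAPI integrations are enabled automatically when those packages are installed, which is why the explicit SentryAsgiMiddleware and the /api/sentry smoke-test route can go. A minimal sketch of the new pattern, with a placeholder DSN and environment (real values come from settings):

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        environment="dev",  # placeholder; the patch reads settings.ENVIRONMENT
        # Mirrors the patch: full tracing in prod, sampled elsewhere.
        traces_sample_rate=0.1,
    )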