From ad870e08951c88080b5b6ff2d678068eb6214ca9 Mon Sep 17 00:00:00 2001 From: jsj Date: Sun, 22 Dec 2024 03:02:21 +0100 Subject: [PATCH 1/7] add init cli commands --- pyproject.toml | 3 + src/msfabricutils/cli/__init__.py | 0 src/msfabricutils/cli/cli.py | 196 +++++++++++++++++++++++++ src/msfabricutils/cli/workspace.py | 125 ++++++++++++++++ src/msfabricutils/core/generic.py | 37 ++++- src/msfabricutils/core/lakehouse.py | 10 +- src/msfabricutils/core/sql_endpoint.py | 6 +- src/msfabricutils/core/workspace.py | 79 +++++++++- 8 files changed, 441 insertions(+), 15 deletions(-) create mode 100644 src/msfabricutils/cli/__init__.py create mode 100644 src/msfabricutils/cli/cli.py create mode 100644 src/msfabricutils/cli/workspace.py diff --git a/pyproject.toml b/pyproject.toml index f68b2db..7db5b2e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,6 +47,9 @@ dev = [ "python-dotenv>=1.0.0", ] +[project.scripts] +msfu = "msfabricutils.cli.cli:main" + [tool.setuptools_scm] [tool.ruff] diff --git a/src/msfabricutils/cli/__init__.py b/src/msfabricutils/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/msfabricutils/cli/cli.py b/src/msfabricutils/cli/cli.py new file mode 100644 index 0000000..7f08e4d --- /dev/null +++ b/src/msfabricutils/cli/cli.py @@ -0,0 +1,196 @@ +import argparse +import json +import logging +import sys + +from msfabricutils import __version__ +from msfabricutils.cli.workspace import create_workspace_command, delete_workspace_command + + +def main(): + examples = """ +Examples: + Create a workspace: + msfu workspace create --name "My Workspace" --description "My Workspace Description" --capacity-id "beefbeef-beef-beef-beef-beefbeefbeef" --on-conflict "update" + + Create a lakehouse: + msfu lakehouse create --name "My Lakehouse" --description "My Lakehouse Description" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" --on-conflict "update" + + Create a single notebook: + msfu notebook create --path 
"path/to/myNotebook.Notebook" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" + + Create multiple notebooks: + msfu notebook create --path "path/to/notebooks" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" + """ + + parser = argparse.ArgumentParser( + prog="msfabricutils", + description="Utility CLI for Microsoft Fabric REST API operations", + epilog=examples, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("--version", "-v", action="version", version=__version__) + parser.add_argument( + "--log-level", + "-l", + type=str, + default="INFO", + choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], + help="The log level to use. Defaults to INFO.", + ) + + subparsers = parser.add_subparsers(dest="command", help="Subcommands") + + # Subcommand: workspace + workspace_parser = subparsers.add_parser("workspace", help="Workspace commands") + workspace_subparsers = workspace_parser.add_subparsers( + dest="workspace_command", help="Workspace commands" + ) + + # Subcommand: workspace create + workspace_create_parser = workspace_subparsers.add_parser("create", help="Create a workspace") + workspace_create_parser.add_argument( + "--name", "-n", type=str, required=True, help="The name of the workspace." + ) + workspace_create_parser.add_argument( + "--description", "-d", type=str, help="The description of the workspace." + ) + workspace_create_parser.add_argument( + "--capacity-id", "-c", type=str, help="The Fabric capacity id to assign the workspace to." + ) + workspace_create_parser.add_argument( + "--on-conflict", + type=str, + choices=["ignore", "update", "error"], + default="error", + help="The action to take if the workspace already exists. 
Defaults to `error`.", + ) + + # Subcommand: workspace delete + workspace_delete_parser = workspace_subparsers.add_parser("delete", help="Delete a workspace") + workspace_delete_group = workspace_delete_parser.add_mutually_exclusive_group(required=True) + workspace_delete_group.add_argument( + "--id", "-i", type=str, help="The ID of the workspace to delete." + ) + workspace_delete_group.add_argument( + "--name", "-n", type=str, help="The name of the workspace to delete." + ) + workspace_delete_parser.add_argument( + "--on-conflict", + type=str, + choices=["ignore", "error"], + default="error", + help="The action to take if the workspace does not exist. Defaults to `error`.", + ) + + # Subcommand: lakehouse + lakehouse_parser = subparsers.add_parser("lakehouse", help="Lakehouse commands") + lakehouse_subparsers = lakehouse_parser.add_subparsers( + dest="lakehouse_command", help="Lakehouse commands" + ) + + # Subcommand: lakehouse create + lakehouse_create_parser = lakehouse_subparsers.add_parser("create", help="Create a lakehouse") + lakehouse_create_parser.add_argument( + "--name", "-n", type=str, required=True, help="The name of the lakehouse." + ) + lakehouse_create_parser.add_argument( + "--description", "-d", type=str, help="The description of the lakehouse." + ) + lakehouse_create_parser.add_argument( + "--workspace-id", + "-w", + type=str, + required=True, + help="The workspace id to create the lakehouse in.", + ) + lakehouse_create_parser.add_argument( + "--on-conflict", + type=str, + choices=["ignore", "update", "error"], + default="error", + help="The action to take if the lakehouse already exists. 
Defaults to `error`.", + ) + + # Subcommand: notebook + notebook_parser = subparsers.add_parser("notebook", help="Notebook commands") + notebook_subparsers = notebook_parser.add_subparsers( + dest="notebook_command", help="Notebook commands" + ) + + notebook_create_parser = notebook_subparsers.add_parser("create", help="Create a notebook") + notebook_create_parser.add_argument( + "--path", + "-p", + type=str, + required=True, + help="Path to folder of notebooks or a single notebook to publish. Single notebook should end with `.Notebook`.", + ) + notebook_create_parser.add_argument( + "--workspace-id", + "-w", + type=str, + required=True, + help="The workspace id to publish the notebook to.", + ) + notebook_create_parser.add_argument( + "--name", + "-n", + type=str, + help="The name of the notebook. If not provided, the name of the notebook file will be used.", + ) + notebook_create_parser.add_argument( + "--description", + "-d", + type=str, + help="The description of the notebook. Only applicable if publishing a single notebook.", + ) + notebook_create_parser.add_argument( + "--on-conflict", + type=str, + choices=["ignore", "update", "error"], + default="error", + help="The action to take if the notebook already exists. 
Defaults to `error`.", + ) + + args = parser.parse_args() + + # Format as json + logging.basicConfig( + level=args.log_level, + format='{"timestamp": "%(asctime)s", "module": "%(module)s", "level": "%(levelname)s", "message": "%(message)s"}', + ) + + debug_msg = ", ".join([f"'{arg} = {value}'" for arg, value in args.__dict__.items()]) + logging.debug(f"CLI started with args: {debug_msg}") + + result = {} + try: + match args.command: + case "workspace": + match args.workspace_command: + case "create": + result = create_workspace_command(args) + case "delete": + result = delete_workspace_command(args) + case _: + parser.print_help() + # workspace_command(args) + pass + case "lakehouse": + # lakehouse_command(args) + pass + case "notebook": + # notebook_command(args) + pass + case _: + parser.print_help() + except Exception as e: + logging.error(e) + sys.stderr.write(str(e)) + sys.exit(1) + + sys.stdout.write(json.dumps(result)) + +if __name__ == "__main__": + main() diff --git a/src/msfabricutils/cli/workspace.py b/src/msfabricutils/cli/workspace.py new file mode 100644 index 0000000..fb62140 --- /dev/null +++ b/src/msfabricutils/cli/workspace.py @@ -0,0 +1,125 @@ +import logging +from dataclasses import dataclass + +from msfabricutils.core.workspace import ( + assign_workspace_to_capacity, + create_workspace, + delete_workspace, + get_workspace, + update_workspace, +) + + +@dataclass +class WorkspaceCreateArgs: + """Arguments for creating a workspace + + Args: + name (str): The name of the workspace + on_conflict (str): The action to take if the workspace already exists. 
+ description (str | None): The description of the workspace + capacity_id (str | None): The capacity ID of the workspace + """ + + name: str + on_conflict: str + description: str | None = None + capacity_id: str | None = None + + +@dataclass +class WorkspaceDeleteArgs: + """Arguments for deleting a workspace + + Args: + id (str): The ID of the workspace to delete + """ + + id: str | None = None + name: str | None = None + on_conflict: str + +def create_workspace_command(args: WorkspaceCreateArgs) -> dict[str, str]: + """Create a new workspace with the specified configuration. + + Args: + args (WorkspaceCreateArgs): The arguments to create a workspace + + Returns: + Workspace information as a dictionary + """ + + logging.info(f"Creating workspace {args.__dict__}") + + name = args.name + description = args.description + capacity_id = args.capacity_id + on_conflict = args.on_conflict + + workspace_id = None + try: + workspace = get_workspace(workspace_name=name) + workspace_id = workspace["id"] + logging.info(f"Workspace {name} already exists") + except ValueError: + logging.info(f"Workspace {name} does not exist") + + if workspace_id is not None and on_conflict == "error": + raise ValueError(f"Workspace {name} already exists") + + if workspace_id is not None and on_conflict == "update": + logging.info(f"Updating workspace with `{name}` with description `{description}`") + update_workspace(workspace_id, name, description) + logging.info(f"Workspace `{name}` successfully updated") + + if workspace_id is None: + logging.info(f"Creating workspace with `{name}` with description `{description}`") + workspace = create_workspace(name, description) + logging.info(f"Workspace `{name}` successfully created") + workspace_id = workspace["id"] + + if capacity_id is not None: + logging.info(f"Assigning workspace `{workspace_id}` to capacity `{capacity_id}`") + assign_workspace_to_capacity(workspace_id, capacity_id) + logging.info(f"Workspace `{workspace_id}` successfully assigned 
to capacity `{capacity_id}`") + + return workspace + + +def delete_workspace_command(args: WorkspaceDeleteArgs) -> dict[str, str]: + """Delete a workspace with the specified configuration. + + Args: + args (WorkspaceDeleteArgs): The arguments to delete a workspace + + Returns: + Workspace information as a dictionary + """ + + logging.info(f"Deleting workspace {args.__dict__}") + + workspace_id = args.id + name = args.name + on_conflict = args.on_conflict + + if workspace_id is None and name is None: + raise ValueError("Either `id` or `name` must be provided") + + if workspace_id is None: + try: + workspace = get_workspace(workspace_name=name) + workspace_id = workspace["id"] + logging.info(f"Workspace {name} exists") + except ValueError: + logging.info(f"Workspace {name} does not exist") + + if workspace_id is None and on_conflict == "error": + raise ValueError(f"Workspace {name} does not exist") + + if workspace_id is None and on_conflict == "ignore": + logging.info(f"Workspace {name} does not exist, skipping deletion") + return + + logging.info(f"Deleting workspace {workspace_id}") + delete_workspace(workspace_id) + logging.info(f"Workspace {workspace_id} successfully deleted") diff --git a/src/msfabricutils/core/generic.py b/src/msfabricutils/core/generic.py index cac23b3..48431f1 100644 --- a/src/msfabricutils/core/generic.py +++ b/src/msfabricutils/core/generic.py @@ -3,7 +3,7 @@ from msfabricutils.core.auth import get_fabric_bearer_token -def get_paginated(endpoint: str, data_key: str) -> list[dict]: +def paginated_get_request(endpoint: str, data_key: str) -> list[dict]: """ Retrieves paginated data from the specified API endpoint. 
@@ -44,7 +44,7 @@ def get_paginated(endpoint: str, data_key: str) -> list[dict]: return responses -def get_item_from_paginated(endpoint: str, data_key: str, item_key: str, item_value: str) -> list[dict]: +def get_item_from_paginated_get_request(endpoint: str, data_key: str, item_key: str, item_value: str) -> list[dict]: """ Recursively paginates the API endpoint until specified item is found and returns it. @@ -90,7 +90,7 @@ def get_item_from_paginated(endpoint: str, data_key: str, item_key: str, item_va raise ValueError(f"Item with {item_key} {item_value} not found") -def get_page(endpoint: str) -> list[dict]: +def get_request(endpoint: str) -> list[dict]: """ Retrieves data from a specified API endpoint. @@ -116,3 +116,34 @@ def get_page(endpoint: str) -> list[dict]: response.raise_for_status() return response.json() + + +def post_request(endpoint: str, data: dict[str, str]) -> dict[str, str]: + base_url = "https://api.fabric.microsoft.com/v1" + token = get_fabric_bearer_token() + headers = {"Authorization": f"Bearer {token}"} + + response = requests.post(f"{base_url}/{endpoint}", headers=headers, json=data) + response.raise_for_status() + + return response.json() + + +def patch_request(endpoint: str, data: dict[str, str]) -> dict[str, str]: + base_url = "https://api.fabric.microsoft.com/v1" + token = get_fabric_bearer_token() + headers = {"Authorization": f"Bearer {token}"} + + response = requests.patch(f"{base_url}/{endpoint}", headers=headers, json=data) + response.raise_for_status() + + return response.json() + + +def delete_request(endpoint: str) -> dict[str, str]: + base_url = "https://api.fabric.microsoft.com/v1" + token = get_fabric_bearer_token() + headers = {"Authorization": f"Bearer {token}"} + + response = requests.delete(f"{base_url}/{endpoint}", headers=headers) + response.raise_for_status() diff --git a/src/msfabricutils/core/lakehouse.py b/src/msfabricutils/core/lakehouse.py index 284c475..cf4af50 100644 --- a/src/msfabricutils/core/lakehouse.py 
+++ b/src/msfabricutils/core/lakehouse.py @@ -1,4 +1,4 @@ -from msfabricutils.core.generic import get_item_from_paginated, get_paginated +from msfabricutils.core.generic import get_item_from_paginated_get_request, paginated_get_request from msfabricutils.core.workspace import get_workspace @@ -33,12 +33,12 @@ def get_workspace_lakehouses( if workspace_id is not None: endpoint = f"workspaces/{workspace_id}/lakehouses" - return get_paginated(endpoint, data_key) + return paginated_get_request(endpoint, data_key) if workspace_name is not None: workspace_id = get_workspace(workspace_name=workspace_name)["id"] endpoint = f"workspaces/{workspace_id}/lakehouses" - return get_paginated(endpoint, data_key) + return paginated_get_request(endpoint, data_key) raise ValueError("Either `workspace_id` or `workspace_name` must be provided") @@ -117,7 +117,7 @@ def get_workspace_lakehouse_tables( item_key = "displayName" item_value = lakehouse_name - lakehouse_id = get_item_from_paginated( + lakehouse_id = get_item_from_paginated_get_request( endpoint=endpoint, data_key=data_key, item_key=item_key, @@ -127,4 +127,4 @@ def get_workspace_lakehouse_tables( endpoint = f"workspaces/{workspace_id}/lakehouses/{lakehouse_id}/tables" data_key = "data" - return get_paginated(endpoint, data_key) + return paginated_get_request(endpoint, data_key) diff --git a/src/msfabricutils/core/sql_endpoint.py b/src/msfabricutils/core/sql_endpoint.py index 64ddf01..849b2bd 100644 --- a/src/msfabricutils/core/sql_endpoint.py +++ b/src/msfabricutils/core/sql_endpoint.py @@ -1,4 +1,4 @@ -from msfabricutils.core.generic import get_paginated +from msfabricutils.core.generic import paginated_get_request from msfabricutils.core.workspace import get_workspace @@ -35,11 +35,11 @@ def get_workspace_sql_endpoints( if workspace_id is not None: endpoint = f"workspaces/{workspace_id}/sqlEndpoints" - return get_paginated(endpoint, data_key) + return paginated_get_request(endpoint, data_key) if workspace_name is not None: 
workspace_id = get_workspace(workspace_name=workspace_name)["id"] endpoint = f"workspaces/{workspace_id}/sqlEndpoints" - return get_paginated(endpoint, data_key) + return paginated_get_request(endpoint, data_key) raise ValueError("Either `workspace_id` or `workspace_name` must be provided") diff --git a/src/msfabricutils/core/workspace.py b/src/msfabricutils/core/workspace.py index 061b473..eb66779 100644 --- a/src/msfabricutils/core/workspace.py +++ b/src/msfabricutils/core/workspace.py @@ -1,6 +1,13 @@ from typing import Any -from msfabricutils.core.generic import get_item_from_paginated, get_page, get_paginated +from msfabricutils.core.generic import ( + delete_request, + get_item_from_paginated_get_request, + get_request, + paginated_get_request, + patch_request, + post_request, +) def get_workspaces() -> list[dict[str, Any]]: @@ -20,7 +27,7 @@ def get_workspaces() -> list[dict[str, Any]]: endpoint = "workspaces" data_key = "value" - return get_paginated(endpoint, data_key) + return paginated_get_request(endpoint, data_key) def get_workspace(workspace_id: str | None = None, workspace_name: str | None = None) -> dict[str, Any]: @@ -51,7 +58,7 @@ def get_workspace(workspace_id: str | None = None, workspace_name: str | None = if workspace_id is not None: endpoint = f"workspaces/{workspace_id}" - return get_page(endpoint) + return get_request(endpoint) if workspace_name is not None: endpoint = "workspaces" @@ -59,6 +66,70 @@ def get_workspace(workspace_id: str | None = None, workspace_name: str | None = item_key = "displayName" item_value = workspace_name - return get_item_from_paginated(endpoint, data_key, item_key, item_value) + return get_item_from_paginated_get_request(endpoint, data_key, item_key, item_value) raise ValueError("Either `workspace_id` or `workspace_name` must be provided") + + +def create_workspace(workspace_name: str, description: str | None = None) -> dict[str, Any]: + endpoint = "workspaces" + data = { + "displayName": workspace_name, + 
"description": description or "" + } + + return post_request(endpoint, data) + + +def assign_workspace_to_capacity(workspace_id: str, capacity_id: str) -> dict[str, Any]: + """ + Assigns a workspace to a capacity. + + Args: + workspace_id (str): The ID of the workspace to assign to a capacity. + capacity_id (str): The ID of the capacity to assign the workspace to. + + Returns: + A dictionary containing the details of the assigned workspace. + """ + endpoint = f"workspaces/{workspace_id}/assignToCapacity" + data = { + "capacityId": capacity_id + } + return post_request(endpoint, data) + +def update_workspace(workspace_id: str, workspace_name: str | None = None, description: str | None = None) -> dict[str, Any]: + """ + Updates a workspace. + + Args: + workspace_id (str): The ID of the workspace to update. + workspace_name (str | None): The name of the workspace to update. + description (str | None): The description of the workspace to update. + + Returns: + A dictionary containing the details of the updated workspace. + """ + endpoint = f"workspaces/{workspace_id}" + + data = {} + if workspace_name is not None: + data["displayName"] = workspace_name + if description is not None: + data["description"] = description + + return patch_request(endpoint, data) + + +def delete_workspace(workspace_id: str) -> dict[str, Any]: + """ + Deletes a workspace. + + Args: + workspace_id (str): The ID of the workspace to delete. + + Returns: + A dictionary containing the details of the deleted workspace. 
+ """ + endpoint = f"workspaces/{workspace_id}" + return delete_request(endpoint) From fab6aa4b90cfc7e43dc479b3aa57db27a6cb1985 Mon Sep 17 00:00:00 2001 From: jsj Date: Sun, 22 Dec 2024 03:09:34 +0100 Subject: [PATCH 2/7] add exceptions --- src/msfabricutils/cli/cli.py | 22 ++++++++++++++++++---- src/msfabricutils/cli/workspace.py | 2 +- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/src/msfabricutils/cli/cli.py b/src/msfabricutils/cli/cli.py index 7f08e4d..a5eab04 100644 --- a/src/msfabricutils/cli/cli.py +++ b/src/msfabricutils/cli/cli.py @@ -178,11 +178,25 @@ def main(): # workspace_command(args) pass case "lakehouse": - # lakehouse_command(args) - pass + match args.lakehouse_command: + case "create": + raise NotImplementedError("lakehouse create command not implemented") + # result = create_lakehouse_command(args) + case "delete": + raise NotImplementedError("lakehouse delete command not implemented") + # result = delete_lakehouse_command(args) + case _: + parser.print_help() case "notebook": - # notebook_command(args) - pass + match args.notebook_command: + case "create": + raise NotImplementedError("notebook create command not implemented") + # result = create_notebook_command(args) + case "delete": + raise NotImplementedError("notebook delete command not implemented") + # result = delete_notebook_command(args) + case _: + parser.print_help() case _: parser.print_help() except Exception as e: diff --git a/src/msfabricutils/cli/workspace.py b/src/msfabricutils/cli/workspace.py index fb62140..4abbf36 100644 --- a/src/msfabricutils/cli/workspace.py +++ b/src/msfabricutils/cli/workspace.py @@ -35,9 +35,9 @@ class WorkspaceDeleteArgs: id (str): The ID of the workspace to delete """ + on_conflict: str id: str | None = None name: str | None = None - on_conflict: str def create_workspace_command(args: WorkspaceCreateArgs) -> dict[str, str]: """Create a new workspace with the specified configuration. 
From d3db0012d2a160cae9a5936d2842c7e667f9479a Mon Sep 17 00:00:00 2001 From: jsj Date: Sun, 22 Dec 2024 17:47:02 +0100 Subject: [PATCH 3/7] update docs for cli and rest api --- .../{fabric-api.md => fabric-api/index.md} | 6 +---- docs/core/fabric-api/lakehouse.md | 3 +++ docs/core/fabric-api/notebook.md | 3 +++ docs/core/fabric-api/sql_endpoint.md | 3 +++ docs/core/fabric-api/workspace.md | 3 +++ docs/usage/cli.md | 26 +++++++++++++++++++ docs/usage/fabric-api.md | 2 +- mkdocs.yml | 11 ++++++-- 8 files changed, 49 insertions(+), 8 deletions(-) rename docs/core/{fabric-api.md => fabric-api/index.md} (62%) create mode 100644 docs/core/fabric-api/lakehouse.md create mode 100644 docs/core/fabric-api/notebook.md create mode 100644 docs/core/fabric-api/sql_endpoint.md create mode 100644 docs/core/fabric-api/workspace.md create mode 100644 docs/usage/cli.md diff --git a/docs/core/fabric-api.md b/docs/core/fabric-api/index.md similarity index 62% rename from docs/core/fabric-api.md rename to docs/core/fabric-api/index.md index ec8c7ab..2a38ed6 100644 --- a/docs/core/fabric-api.md +++ b/docs/core/fabric-api/index.md @@ -1,8 +1,4 @@ # Fabric API A collection of functions to interact with the Fabric API. Automatically handles pagination and authentication. -The functions can either be called with an `id` or `name` parameter, however it is recommended to use the `id` as using the name requires more API requests, and is thus slower. - -::: msfabricutils.core.workspace -::: msfabricutils.core.lakehouse -::: msfabricutils.core.sql_endpoint \ No newline at end of file +The functions can either be called with an `id` or `name` parameter, however it is recommended to use the `id` as using the name requires more API requests, and is thus slower. 
\ No newline at end of file diff --git a/docs/core/fabric-api/lakehouse.md b/docs/core/fabric-api/lakehouse.md new file mode 100644 index 0000000..e47e046 --- /dev/null +++ b/docs/core/fabric-api/lakehouse.md @@ -0,0 +1,3 @@ +# Lakehouse + +::: msfabricutils.core.lakehouse \ No newline at end of file diff --git a/docs/core/fabric-api/notebook.md b/docs/core/fabric-api/notebook.md new file mode 100644 index 0000000..c744c9e --- /dev/null +++ b/docs/core/fabric-api/notebook.md @@ -0,0 +1,3 @@ +# Notebook + +::: msfabricutils.core.notebook \ No newline at end of file diff --git a/docs/core/fabric-api/sql_endpoint.md b/docs/core/fabric-api/sql_endpoint.md new file mode 100644 index 0000000..781e870 --- /dev/null +++ b/docs/core/fabric-api/sql_endpoint.md @@ -0,0 +1,3 @@ +# SQL Endpoint + +::: msfabricutils.core.sql_endpoint \ No newline at end of file diff --git a/docs/core/fabric-api/workspace.md b/docs/core/fabric-api/workspace.md new file mode 100644 index 0000000..27e90c2 --- /dev/null +++ b/docs/core/fabric-api/workspace.md @@ -0,0 +1,3 @@ +# Workspace + +::: msfabricutils.core.workspace \ No newline at end of file diff --git a/docs/usage/cli.md b/docs/usage/cli.md new file mode 100644 index 0000000..130ca1c --- /dev/null +++ b/docs/usage/cli.md @@ -0,0 +1,26 @@ +# Command Line Interface + +The CLI is a way to interact with the Microsoft Fabric REST API. It includes commands for workspaces, lakehouses, and notebooks creation, deletion and updating. + +For complete documentation, run `msfu --help`. 
+ +## Examples + +### Workspace + +```bash +msfu workspace create --name "My Workspace" --description "My workspace description" +``` + +### Lakehouse + +```bash +msfu lakehouse create --name "My Lakehouse" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" --enable-schemas +``` + +### Notebook + +```bash +msfu notebook create --path "path/to/notebook.Notebook" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" +``` + diff --git a/docs/usage/fabric-api.md b/docs/usage/fabric-api.md index 35746b5..0a91ae2 100644 --- a/docs/usage/fabric-api.md +++ b/docs/usage/fabric-api.md @@ -1,6 +1,6 @@ # Fabric API -A collection of helper functions for working with the Fabric API. +A collection of helper functions for working with the Fabric API. See the [API Reference](../core/fabric-api.md) for more details. ## List workspaces diff --git a/mkdocs.yml b/mkdocs.yml index 9f096e0..b56a422 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -49,23 +49,30 @@ plugins: show_root_heading: false filters: ["!^_", "^__init__$"] show_object_full_path: false - heading_level: 1 + heading_level: 2 members_order: source separate_signature: true show_signature_annotations: true docstring_section_style: table + show_if_no_docstring: false nav: - Home: - index.md - Usage: - usage/installation.md + - usage/cli.md - usage/etl.md - usage/fabric-api.md - API Reference: - Core: - core/authentication.md - - core/fabric-api.md + - Fabric API: + - core/fabric-api/index.md + - core/fabric-api/workspace.md + - core/fabric-api/lakehouse.md + - core/fabric-api/notebook.md + - core/fabric-api/sql_endpoint.md - ETL: - etl/index.md - etl/read.md From 279155fbe934ccc1f09b21bf60a8d663fe3eb6ac Mon Sep 17 00:00:00 2001 From: jsj Date: Sun, 22 Dec 2024 18:00:37 +0100 Subject: [PATCH 4/7] add cli commands --- src/msfabricutils/cli/cli.py | 320 +++++++++++++++++------------ src/msfabricutils/cli/lakehouse.py | 100 +++++++++ src/msfabricutils/cli/notebook.py | 123 +++++++++++ src/msfabricutils/cli/workspace.py | 28 ++- 4 
files changed, 425 insertions(+), 146 deletions(-) create mode 100644 src/msfabricutils/cli/lakehouse.py create mode 100644 src/msfabricutils/cli/notebook.py diff --git a/src/msfabricutils/cli/cli.py b/src/msfabricutils/cli/cli.py index a5eab04..fb237f0 100644 --- a/src/msfabricutils/cli/cli.py +++ b/src/msfabricutils/cli/cli.py @@ -1,13 +1,20 @@ import argparse -import json import logging import sys +from typing import Callable from msfabricutils import __version__ +from msfabricutils.cli.lakehouse import create_lakehouse_command, delete_lakehouse_command +from msfabricutils.cli.notebook import ( + bulk_create_notebook_command, + create_notebook_command, + delete_notebook_command, +) from msfabricutils.cli.workspace import create_workspace_command, delete_workspace_command -def main(): +def create_parser(): + """Creates the main parser and subparsers.""" examples = """ Examples: Create a workspace: @@ -17,10 +24,10 @@ def main(): msfu lakehouse create --name "My Lakehouse" --description "My Lakehouse Description" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" --on-conflict "update" Create a single notebook: - msfu notebook create --path "path/to/myNotebook.Notebook" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" + msfu notebook create --path "path/to/notebook.Notebook" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" Create multiple notebooks: - msfu notebook create --path "path/to/notebooks" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" + msfu notebook create --path "directory/of/notebooks" "path/to/notebook.Notebook" --workspace-id "beefbeef-beef-beef-beef-beefbeefbeef" """ parser = argparse.ArgumentParser( @@ -38,173 +45,212 @@ def main(): choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], help="The log level to use. Defaults to INFO.", ) + parser.add_argument( + "--show-azure-identity-logs", + action="store_true", + default=False, + help="Show Azure Identity logs. 
Defaults to False.", + ) subparsers = parser.add_subparsers(dest="command", help="Subcommands") - # Subcommand: workspace + register_workspace_commands(subparsers) + register_lakehouse_commands(subparsers) + register_notebook_commands(subparsers) + + return parser + + +def register_workspace_commands(subparsers: argparse._SubParsersAction): + """Registers the workspace commands.""" workspace_parser = subparsers.add_parser("workspace", help="Workspace commands") workspace_subparsers = workspace_parser.add_subparsers( - dest="workspace_command", help="Workspace commands" + dest="workspace", help="Workspace commands" ) - - # Subcommand: workspace create - workspace_create_parser = workspace_subparsers.add_parser("create", help="Create a workspace") - workspace_create_parser.add_argument( - "--name", "-n", type=str, required=True, help="The name of the workspace." + add_subcommand( + subparsers=workspace_subparsers, + name="create", + handler=create_workspace_command, + required_args=["--name"], + choices_args={"--on-conflict": ["error", "ignore", "update"]}, + optional_args=["--description", "--capacity-id"], ) - workspace_create_parser.add_argument( - "--description", "-d", type=str, help="The description of the workspace." - ) - workspace_create_parser.add_argument( - "--capacity-id", "-c", type=str, help="The Fabric capacity id to assign the workspace to." - ) - workspace_create_parser.add_argument( - "--on-conflict", - type=str, - choices=["ignore", "update", "error"], - default="error", - help="The action to take if the workspace already exists. 
Defaults to `error`.", + add_subcommand( + subparsers=workspace_subparsers, + name="delete", + handler=delete_workspace_command, + mutually_exclusive_args=["--id", "--name"], + choices_args={"--on-conflict": ["error", "ignore"]}, ) - # Subcommand: workspace delete - workspace_delete_parser = workspace_subparsers.add_parser("delete", help="Delete a workspace") - workspace_delete_group = workspace_delete_parser.add_mutually_exclusive_group(required=True) - workspace_delete_group.add_argument( - "--id", "-i", type=str, help="The ID of the workspace to delete." - ) - workspace_delete_group.add_argument( - "--name", "-n", type=str, help="The name of the workspace to delete." - ) - workspace_delete_parser.add_argument( - "--on-conflict", - type=str, - choices=["ignore", "error"], - default="error", - help="The action to take if the workspace does not exist. Defaults to `error`.", - ) - # Subcommand: lakehouse +def register_lakehouse_commands(subparsers: argparse._SubParsersAction): + """Registers the lakehouse commands.""" lakehouse_parser = subparsers.add_parser("lakehouse", help="Lakehouse commands") lakehouse_subparsers = lakehouse_parser.add_subparsers( - dest="lakehouse_command", help="Lakehouse commands" + dest="lakehouse", help="Lakehouse commands" ) - # Subcommand: lakehouse create - lakehouse_create_parser = lakehouse_subparsers.add_parser("create", help="Create a lakehouse") - lakehouse_create_parser.add_argument( - "--name", "-n", type=str, required=True, help="The name of the lakehouse." - ) - lakehouse_create_parser.add_argument( - "--description", "-d", type=str, help="The description of the lakehouse." 
+ add_subcommand( + subparsers=lakehouse_subparsers, + name="create", + handler=create_lakehouse_command, + required_args=["--name", "--workspace-id"], + has_long_running_operation=True, + choices_args={ + "--on-conflict": ["error", "ignore", "update"], + }, + optional_args=["--description"], + flags=["--enable-schemas"], ) - lakehouse_create_parser.add_argument( - "--workspace-id", - "-w", - type=str, - required=True, - help="The workspace id to create the lakehouse in.", - ) - lakehouse_create_parser.add_argument( - "--on-conflict", - type=str, - choices=["ignore", "update", "error"], - default="error", - help="The action to take if the lakehouse already exists. Defaults to `error`.", + add_subcommand( + subparsers=lakehouse_subparsers, + name="delete", + handler=delete_lakehouse_command, + required_args=["--workspace-id"], + mutually_exclusive_args=["--id", "--name"], + choices_args={"--on-conflict": ["error", "ignore"]}, ) - # Subcommand: notebook - notebook_parser = subparsers.add_parser("notebook", help="Notebook commands") - notebook_subparsers = notebook_parser.add_subparsers( - dest="notebook_command", help="Notebook commands" - ) - notebook_create_parser = notebook_subparsers.add_parser("create", help="Create a notebook") - notebook_create_parser.add_argument( - "--path", - "-p", - type=str, - required=True, - help="Path to folder of notebooks or a single notebook to publish. Single notebook should end with `.Notebook`.", - ) - notebook_create_parser.add_argument( - "--workspace-id", - "-w", - type=str, - required=True, - help="The workspace id to publish the notebook to.", - ) - notebook_create_parser.add_argument( - "--name", - "-n", - type=str, - help="The name of the notebook. 
If not provided, the name of the notebook file will be used.", +def register_notebook_commands(subparsers: argparse._SubParsersAction): + """Registers the notebook commands.""" + notebook_parser = subparsers.add_parser("notebook", help="Notebook commands") + notebook_subparsers = notebook_parser.add_subparsers(dest="notebook", help="Notebook commands") + + add_subcommand( + subparsers=notebook_subparsers, + name="create", + handler=create_notebook_command, + required_args=["--workspace-id", "--path"], + optional_args=["--name", "--description"], + has_long_running_operation=True, + choices_args={"--on-conflict": ["error", "ignore", "update"]}, ) - notebook_create_parser.add_argument( - "--description", - "-d", - type=str, - help="The description of the notebook. Only applicable if publishing a single notebook.", + add_subcommand( + subparsers=notebook_subparsers, + name="bulk-create", + handler=bulk_create_notebook_command, + required_args=["--workspace-id"], + nargs=["--path"], + has_long_running_operation=True, + choices_args={"--on-conflict": ["error", "ignore", "update"]}, ) - notebook_create_parser.add_argument( - "--on-conflict", - type=str, - choices=["ignore", "update", "error"], - default="error", - help="The action to take if the notebook already exists. Defaults to `error`.", + add_subcommand( + subparsers=notebook_subparsers, + name="delete", + handler=delete_notebook_command, + required_args=["--workspace-id"], + mutually_exclusive_args=["--id", "--name"], + choices_args={"--on-conflict": ["error", "ignore"]}, ) + +def add_subcommand( + subparsers: argparse._SubParsersAction, + name: str, + handler: Callable, + required_args: list[str] | None = None, + nargs: list[str] | None = None, + choices_args: dict[str, list[str]] | None = None, + mutually_exclusive_args: list[str] | None = None, + optional_args: list[str] | None = None, + has_long_running_operation: bool = False, + flags: list[str] | None = None, +): + """Adds a subcommand to the parser. 
+ + Args: + subparsers (argparse._SubParsersAction): The subparsers to add the subcommand to. + name (str): The name of the subcommand. + handler (Callable): The handler function to call when the subcommand is invoked. + required_args (list[str] | None): The required arguments for the subcommand. + nargs (list[str] | None): The nargs arguments for the subcommand. + choices_args (dict[str, list[str]] | None): The choices arguments for the subcommand. The default choice is the first in the list. + optional_args (list[str] | None): The optional arguments for the subcommand. + """ + + if not required_args: + required_args = [] + + if not choices_args: + choices_args = {} + + if not optional_args: + optional_args = [] + + if not nargs: + nargs = [] + + if not flags: + flags = [] + + create_parser = subparsers.add_parser(name, help=f"{name.capitalize()} commands") + + for arg in required_args: + create_parser.add_argument( + arg, required=True, help=f"The {arg.lstrip('-')} of the {subparsers.dest} to {name}." + ) + + for arg in nargs: + create_parser.add_argument( + arg, nargs="+", help=f"The {arg.lstrip('-')} of the {subparsers.dest}s to {name}." + ) + + for arg in optional_args: + create_parser.add_argument( + arg, required=False, help=f"The {arg.lstrip('-')} of the {subparsers.dest} to {name}." + ) + + for flag in flags: + create_parser.add_argument( + flag, action="store_true", default=False, help=f"{flag.lstrip('-')} flag for the {subparsers.dest} to {name}." + ) + + if has_long_running_operation: + create_parser.add_argument( + "--no-wait", action="store_true", default=False, help="Do not wait for the long running operation to complete." + ) + + if mutually_exclusive_args: + argument_group = create_parser.add_mutually_exclusive_group(required=True) + for arg in mutually_exclusive_args: + argument_group.add_argument( + arg, help=f"The {arg.lstrip('-')} of the {subparsers.dest} to {name}." 
+ ) + + for arg, choices in choices_args.items(): + create_parser.add_argument( + arg, + type=str, + choices=choices, + default=choices[0], + help=f"The {arg.lstrip('-')} of the {subparsers.dest} to {name}. Defaults to `{choices[0]}`.", + ) + + create_parser.set_defaults(func=handler) + + +def main(): + parser = create_parser() args = parser.parse_args() - # Format as json logging.basicConfig( level=args.log_level, - format='{"timestamp": "%(asctime)s", "module": "%(module)s", "level": "%(levelname)s", "message": "%(message)s"}', + format='{"timestamp": "%(asctime)s", "level": "%(levelname)s", "message": "%(message)s"}', ) - debug_msg = ", ".join([f"'{arg} = {value}'" for arg, value in args.__dict__.items()]) - logging.debug(f"CLI started with args: {debug_msg}") - - result = {} try: - match args.command: - case "workspace": - match args.workspace_command: - case "create": - result = create_workspace_command(args) - case "delete": - result = delete_workspace_command(args) - case _: - parser.print_help() - # workspace_command(args) - pass - case "lakehouse": - match args.lakehouse_command: - case "create": - raise NotImplementedError("lakehouse create command not implemented") - # result = create_lakehouse_command(args) - case "delete": - raise NotImplementedError("lakehouse delete command not implemented") - # result = delete_lakehouse_command(args) - case _: - parser.print_help() - case "notebook": - match args.notebook_command: - case "create": - raise NotImplementedError("notebook create command not implemented") - # result = create_notebook_command(args) - case "delete": - raise NotImplementedError("notebook delete command not implemented") - # result = delete_notebook_command(args) - case _: - parser.print_help() - case _: - parser.print_help() + azure_log_level = args.log_level if args.show_azure_identity_logs else logging.CRITICAL + logging.getLogger("azure").setLevel(azure_log_level) + args.func(args) except Exception as e: logging.error(e) 
@dataclass
class CreateLakehouseArgs:
    """Parsed CLI arguments for `lakehouse create`."""

    name: str
    workspace_id: str
    enable_schemas: bool
    on_conflict: str
    description: str | None = None


@dataclass
class DeleteLakehouseArgs:
    """Parsed CLI arguments for `lakehouse delete`."""

    workspace_id: str
    on_conflict: str
    id: str | None = None
    name: str | None = None


def create_lakehouse_command(args: CreateLakehouseArgs):
    """Creates a lakehouse, honoring `--on-conflict` (error/ignore/update).

    Returns:
        The lakehouse details as returned by the Fabric API.

    Raises:
        ValueError: If the lakehouse already exists and `on_conflict` is "error".
    """
    name = args.name
    workspace_id = args.workspace_id
    enable_schemas = bool(args.enable_schemas)
    description = args.description
    on_conflict = args.on_conflict

    lakehouse = None
    lakehouse_id = None
    try:
        lakehouse = get_workspace_lakehouse(workspace_id, lakehouse_name=name)
        lakehouse_id = lakehouse["id"]
        # BUG FIX: the original logged "created successfully" here even though the
        # lakehouse was merely found to already exist.
        logging.info(f"Lakehouse {name} already exists with id {lakehouse_id}")
    except ValueError:
        logging.info(f"Lakehouse {name} does not exist")

    if lakehouse_id is not None:
        if on_conflict == "ignore":
            logging.info(f"Lakehouse `{name}` already exists, skipping update")
            return lakehouse

        if on_conflict == "error":
            raise ValueError(f"Lakehouse {name} already exists")

        if on_conflict == "update":
            logging.info(f"Updating lakehouse with `{name}` with description `{description}`")
            # BUG FIX: the original passed `enable_schemas` into parameter slots
            # that the core helpers interpret as `description`/`lakehouse_name`
            # (signature mismatch). `enable_schemas` cannot be changed on update.
            update_workspace_lakehouse(workspace_id, lakehouse_id, name, description)
            lakehouse = get_workspace_lakehouse(workspace_id, lakehouse_name=name)
            lakehouse_id = lakehouse["id"]
            logging.info(f"Lakehouse `{name}` successfully updated")
    else:
        logging.info(f"Creating lakehouse with `{name}` with description `{description}`")
        lakehouse = create_workspace_lakehouse(workspace_id, name, enable_schemas, description)
        logging.info(f"Lakehouse `{name}` successfully created")
        lakehouse_id = lakehouse["id"]

    return lakehouse


def delete_lakehouse_command(args: DeleteLakehouseArgs):
    """Deletes a lakehouse identified by `--id` or `--name`.

    Raises:
        ValueError: If neither id nor name is given, or if the lakehouse does not
            exist and `on_conflict` is "error".
    """
    workspace_id = args.workspace_id
    lakehouse_id = args.id
    lakehouse_name = args.name
    on_conflict = args.on_conflict

    if lakehouse_id is None and lakehouse_name is None:
        raise ValueError("Either `lakehouse_id` or `lakehouse_name` must be provided")

    if lakehouse_id is None:
        try:
            lakehouse = get_workspace_lakehouse(workspace_id, lakehouse_name=lakehouse_name)
            lakehouse_id = lakehouse["id"]
            logging.info(f"Lakehouse {lakehouse_name} exists")
        except ValueError:
            logging.info(f"Lakehouse {lakehouse_name} does not exist")

    # BUG FIX: the original tested `workspace_id is None` below; a failed name
    # lookup leaves `lakehouse_id` (not `workspace_id`) as None.
    if lakehouse_id is None and on_conflict == "error":
        raise ValueError(f"Lakehouse {lakehouse_name} does not exist")

    if lakehouse_id is None and on_conflict == "ignore":
        logging.info(f"Lakehouse {lakehouse_name} does not exist, skipping deletion")
        return

    logging.info(f"Deleting lakehouse {lakehouse_id}")
    response = delete_workspace_lakehouse(workspace_id, lakehouse_id)
    response.raise_for_status()
    logging.info(f"Lakehouse {lakehouse_id} successfully deleted")
@dataclass
class CreateNotebookArgs:
    """Parsed CLI arguments for `notebook create`."""

    workspace_id: str
    path: str
    name: str
    description: str
    # BUG FIX: both fields below are read by `create_notebook_command` but were
    # missing from the original dataclass. Defaults keep construction compatible.
    on_conflict: str = "error"
    no_wait: bool = False


@dataclass
class BulkCreateNotebookArgs:
    """Parsed CLI arguments for `notebook bulk-create`."""

    workspace_id: str
    path: list[str]
    on_conflict: str
    no_wait: bool


@dataclass
class DeleteNotebookArgs:
    """Parsed CLI arguments for `notebook delete`."""

    workspace_id: str
    on_conflict: str
    id: str | None = None
    name: str | None = None


def create_notebook_command(args: CreateNotebookArgs):
    """Creates a notebook from an on-disk definition, honoring `--on-conflict`.

    Returns:
        The notebook details as returned by the Fabric API.

    Raises:
        ValueError: If the notebook already exists and `on_conflict` is "error".
    """
    workspace_id = args.workspace_id
    path = args.path
    name = args.name
    description = args.description
    on_conflict = args.on_conflict
    no_wait = args.no_wait

    notebook = None
    notebook_id = None
    try:
        notebook = get_workspace_notebook(workspace_id, notebook_name=name)
        notebook_id = notebook["id"]
        logging.info(f"Notebook {name} already exists")
    except ValueError:
        logging.info(f"Notebook {name} does not exist")

    if notebook_id is not None:
        if on_conflict == "ignore":
            logging.info(f"Notebook `{name}` already exists, skipping update")
            return notebook

        if on_conflict == "error":
            raise ValueError(f"Notebook {name} already exists")

        if on_conflict == "update":
            logging.info(f"Updating notebook with `{name}` with description `{description}`")
            # BUG FIX: the original passed `description` into the helper's
            # `wait_for_completion` parameter; also propagate `--no-wait` here.
            notebook = update_workspace_notebook_definition(
                workspace_id, notebook_id, path, wait_for_completion=not no_wait
            )
            logging.info(f"Notebook `{name}` successfully updated")
    else:
        logging.info(f"Creating notebook with `{name}` with description `{description}`")
        notebook = create_workspace_notebook(
            workspace_id, path, name, description, wait_for_completion=not no_wait
        )
        logging.info(f"Notebook `{name}` successfully created")
        notebook_id = notebook["id"]

    return notebook


def bulk_create_notebook_command(args: BulkCreateNotebookArgs):
    """Creates one or more notebooks from glob patterns. Not implemented yet.

    Raises:
        NotImplementedError: Always, until the bulk publish flow is written.
    """
    # TODO: expand `args.path` globs (appending the `.Notebook` suffix when
    # missing), de-duplicate the matches, and publish each notebook. The
    # original kept this unreachable sketch as dead code after the raise.
    raise NotImplementedError("Bulk create notebooks is not implemented yet")


def delete_notebook_command(args: DeleteNotebookArgs):
    """Deletes a notebook identified by `--id` or `--name`.

    Raises:
        ValueError: If neither id nor name is given, or if the notebook does not
            exist and `on_conflict` is "error".
    """
    workspace_id = args.workspace_id
    notebook_id = args.id
    notebook_name = args.name
    on_conflict = args.on_conflict

    if notebook_id is None and notebook_name is None:
        raise ValueError("Either `notebook_id` or `notebook_name` must be provided")

    if notebook_id is None:
        try:
            notebook = get_workspace_notebook(workspace_id, notebook_name=notebook_name)
            notebook_id = notebook["id"]
            logging.info(f"Notebook {notebook_name} exists")
        except ValueError:
            logging.info(f"Notebook {notebook_name} does not exist")

    # BUG FIX: the original tested `workspace_id is None` below; a failed name
    # lookup leaves `notebook_id` (not `workspace_id`) as None.
    if notebook_id is None and on_conflict == "error":
        raise ValueError(f"Notebook {notebook_name} does not exist")

    if notebook_id is None and on_conflict == "ignore":
        logging.info(f"Notebook {notebook_name} does not exist, skipping deletion")
        return

    logging.info(f"Deleting notebook {notebook_id}")
    response = delete_workspace_notebook(workspace_id, notebook_id)
    response.raise_for_status()
    logging.info(f"Notebook {notebook_id} successfully deleted")
`{name}` successfully updated") + if workspace_id is not None: - if workspace_id is None: + if on_conflict == "ignore": + logging.info(f"Workspace `{name}` already exists, skipping update") + return workspace + + if on_conflict == "error": + raise ValueError(f"Workspace {name} already exists") + + if on_conflict == "update": + logging.info(f"Updating workspace with `{name}` with description `{description}`") + update_workspace(workspace_id, name, description) + workspace = get_workspace(workspace_name=name) + workspace_id = workspace["id"] + logging.info(f"Workspace `{name}` successfully updated") + + else: logging.info(f"Creating workspace with `{name}` with description `{description}`") workspace = create_workspace(name, description) logging.info(f"Workspace `{name}` successfully created") @@ -121,5 +130,6 @@ def delete_workspace_command(args: WorkspaceDeleteArgs) -> dict[str, str]: return logging.info(f"Deleting workspace {workspace_id}") - delete_workspace(workspace_id) - logging.info(f"Workspace {workspace_id} successfully deleted") + response = delete_workspace(workspace_id) + response.raise_for_status() + logging.info(f"Workspace {workspace_id} successfully deleted") \ No newline at end of file From a106be63c9dd88699792700ae86d4c8124f50c2e Mon Sep 17 00:00:00 2001 From: jsj Date: Sun, 22 Dec 2024 18:04:11 +0100 Subject: [PATCH 5/7] better requests --- .../core/{generic.py => fabric_request.py} | 100 +++++++++++++++--- 1 file changed, 86 insertions(+), 14 deletions(-) rename src/msfabricutils/core/{generic.py => fabric_request.py} (58%) diff --git a/src/msfabricutils/core/generic.py b/src/msfabricutils/core/fabric_request.py similarity index 58% rename from src/msfabricutils/core/generic.py rename to src/msfabricutils/core/fabric_request.py index 48431f1..e5154f0 100644 --- a/src/msfabricutils/core/generic.py +++ b/src/msfabricutils/core/fabric_request.py @@ -1,9 +1,11 @@ +import logging + import requests from msfabricutils.core.auth import 
get_fabric_bearer_token -def paginated_get_request(endpoint: str, data_key: str) -> list[dict]: +def paginated_get_request(endpoint: str, data_key: str) -> list[dict[str, str]]: """ Retrieves paginated data from the specified API endpoint. @@ -44,7 +46,7 @@ def paginated_get_request(endpoint: str, data_key: str) -> list[dict]: return responses -def get_item_from_paginated_get_request(endpoint: str, data_key: str, item_key: str, item_value: str) -> list[dict]: +def get_item_from_paginated_get_request(endpoint: str, data_key: str, item_key: str, item_value: str) -> dict[str, str]: """ Recursively paginates the API endpoint until specified item is found and returns it. @@ -90,7 +92,7 @@ def get_item_from_paginated_get_request(endpoint: str, data_key: str, item_key: raise ValueError(f"Item with {item_key} {item_value} not found") -def get_request(endpoint: str) -> list[dict]: +def get_request(endpoint: str, content_only: bool = True) -> requests.Response | dict[str, str]: """ Retrieves data from a specified API endpoint. @@ -100,9 +102,10 @@ def get_request(endpoint: str) -> list[dict]: Args: endpoint (str): The API endpoint to send the GET request to. - + content_only (bool): Whether to return the content of the response only. + Returns: - A list of dictionaries containing the data returned from the API. + A list of dictionaries containing the data returned from the API or the response object. Raises: requests.exceptions.RequestException: If the HTTP request fails or returns an error. 
def _handle_response(response, content_only: bool):
    """Shared error handling for the request helpers.

    Logs and raises on HTTP status >= 400; returns the parsed JSON body when
    `content_only` is True, otherwise the raw response object.

    The original in-lined this block in every request function (consistency),
    and logged `response.json()`, which itself raises on non-JSON error bodies
    and masks the real failure — `response.text` is used instead.
    """
    if content_only:
        if response.status_code >= 400:
            logging.error(
                f"Request failed with status code {response.status_code}: {response.text}"
            )
        response.raise_for_status()
        return response.json()

    return response


def get_request(endpoint: str, content_only: bool = True) -> "requests.Response | dict[str, str]":
    """
    Retrieves data from a specified API endpoint.

    Args:
        endpoint (str): The API endpoint to send the GET request to.
        content_only (bool): Whether to return only the parsed JSON content.

    Returns:
        The parsed JSON content, or the raw response object when `content_only` is False.

    Raises:
        requests.exceptions.RequestException: If the HTTP request fails or returns an error.
    """
    base_url = "https://api.fabric.microsoft.com/v1"
    token = get_fabric_bearer_token()
    headers = {"Authorization": f"Bearer {token}"}
    params = {}

    response = requests.get(f"{base_url}/{endpoint}", headers=headers, params=params)
    return _handle_response(response, content_only)


def post_request(
    endpoint: str, data: dict[str, str], content_only: bool = True
) -> "requests.Response | dict[str, str]":
    """
    Sends a POST request to a specified API endpoint.

    Args:
        endpoint (str): The API endpoint to send the POST request to.
        data (dict[str, str]): The data to be sent as the JSON request body.
        content_only (bool): Whether to return only the parsed JSON content.

    Returns:
        The parsed JSON content, or the raw response object when `content_only` is False.

    Raises:
        requests.exceptions.RequestException: If the HTTP request fails or returns an error.
    """
    base_url = "https://api.fabric.microsoft.com/v1"
    token = get_fabric_bearer_token()
    headers = {"Authorization": f"Bearer {token}"}

    response = requests.post(f"{base_url}/{endpoint}", headers=headers, json=data)
    return _handle_response(response, content_only)


def patch_request(
    endpoint: str, data: dict[str, str], content_only: bool = True
) -> "requests.Response | dict[str, str]":
    """
    Sends a PATCH request to a specified API endpoint.

    Args:
        endpoint (str): The API endpoint to send the PATCH request to.
        data (dict[str, str]): The data to be sent as the JSON request body.
        content_only (bool): Whether to return only the parsed JSON content.

    Returns:
        The parsed JSON content, or the raw response object when `content_only` is False.

    Raises:
        requests.exceptions.RequestException: If the HTTP request fails or returns an error.
    """
    base_url = "https://api.fabric.microsoft.com/v1"
    token = get_fabric_bearer_token()
    headers = {"Authorization": f"Bearer {token}"}

    response = requests.patch(f"{base_url}/{endpoint}", headers=headers, json=data)
    return _handle_response(response, content_only)


def delete_request(endpoint: str) -> "requests.Response":
    """
    Sends a DELETE request to a specified API endpoint.

    Args:
        endpoint (str): The API endpoint to send the DELETE request to.

    Returns:
        The response object from the DELETE request.

    Raises:
        requests.exceptions.RequestException: If the HTTP request fails or returns an error.
    """
    base_url = "https://api.fabric.microsoft.com/v1"
    token = get_fabric_bearer_token()
    headers = {"Authorization": f"Bearer {token}"}

    response = requests.delete(f"{base_url}/{endpoint}", headers=headers)
    # DELETE always raises on error and returns the raw response (no JSON body expected).
    if response.status_code >= 400:
        logging.error(
            f"Request failed with status code {response.status_code}: {response.text}"
        )
    response.raise_for_status()

    return response
def get_workspace_lakehouse(
    workspace_id: str | None = None,
    lakehouse_id: str | None = None,
    lakehouse_name: str | None = None,
) -> dict[str, str]:
    """
    Retrieves details of a specified lakehouse by either `lakehouse_id` or `lakehouse_name`.

    Args:
        workspace_id (str | None): The ID of the workspace containing the lakehouse.
        lakehouse_id (str | None): The ID of the lakehouse to retrieve details for.
        lakehouse_name (str | None): The name of the lakehouse to retrieve details for.

    Returns:
        A dictionary containing the details of the specified lakehouse.

    Raises:
        ValueError: If neither `lakehouse_id` nor `lakehouse_name` is provided,
            or (via the paginated lookup) if no lakehouse with that name exists.

    Example:
        By `workspace_id` and `lakehouse_id`:
        ```python
        from msfabricutils.core import get_workspace_lakehouse

        lakehouse = get_workspace_lakehouse(
            workspace_id="12345678-1234-1234-1234-123456789012",
            lakehouse_id="beefbeef-beef-beef-beef-beefbeefbeef",
        )
        ```

        By `workspace_id` and `lakehouse_name`:
        ```python
        from msfabricutils.core import get_workspace_lakehouse

        lakehouse = get_workspace_lakehouse(
            workspace_id="12345678-1234-1234-1234-123456789012",
            lakehouse_name="My Lakehouse",
        )
        ```
    """
    if lakehouse_id is not None:
        endpoint = f"workspaces/{workspace_id}/lakehouses/{lakehouse_id}"
        return get_request(endpoint)

    if lakehouse_name is not None:
        endpoint = f"workspaces/{workspace_id}/lakehouses"
        # Lakehouse names are matched against the `displayName` field of the
        # paginated list response.
        return get_item_from_paginated_get_request(
            endpoint, "value", "displayName", lakehouse_name
        )

    raise ValueError("Either `lakehouse_id` or `lakehouse_name` must be provided")


def create_workspace_lakehouse(
    workspace_id: str,
    lakehouse_name: str,
    enable_schemas: bool = False,
    description: str | None = None,
) -> dict[str, str]:
    """
    Creates a new lakehouse in the specified workspace.

    Args:
        workspace_id (str): The ID of the workspace where the lakehouse will be created.
        lakehouse_name (str): The display name for the new lakehouse.
        enable_schemas (bool): Whether to enable schemas for the lakehouse. Defaults to False.
        description (str | None): Optional description for the lakehouse.

    Returns:
        A dictionary containing the details of the created lakehouse.
    """
    endpoint = f"workspaces/{workspace_id}/lakehouses"
    data = {
        "displayName": lakehouse_name,
        # BUG FIX: the original defaulted an omitted description to the leftover
        # debug value "test"; send an empty description instead.
        "description": description or "",
    }

    if enable_schemas:
        data["creationPayload"] = {"enableSchemas": enable_schemas}

    import logging  # local import kept from the original; consider hoisting to module level

    # Payload logging demoted from info to debug so routine creates stay quiet.
    logging.debug(f"Creating lakehouse {lakehouse_name} with data: {data}")

    return post_request(endpoint, data)


def update_workspace_lakehouse(
    workspace_id: str,
    lakehouse_id: str,
    lakehouse_name: str | None = None,
    description: str | None = None,
) -> dict[str, str]:
    """
    Updates an existing lakehouse in the specified workspace.

    Only the provided fields are sent to the API; omitted fields are left unchanged.

    Args:
        workspace_id (str): The ID of the workspace containing the lakehouse.
        lakehouse_id (str): The ID of the lakehouse to update.
        lakehouse_name (str | None): Optional new name for the lakehouse.
        description (str | None): Optional new description for the lakehouse.

    Returns:
        A dictionary containing the details of the updated lakehouse.
    """
    endpoint = f"workspaces/{workspace_id}/lakehouses/{lakehouse_id}"

    data = {}
    if lakehouse_name is not None:
        data["displayName"] = lakehouse_name
    if description is not None:
        data["description"] = description

    return patch_request(endpoint, data)


def delete_workspace_lakehouse(workspace_id: str, lakehouse_id: str) -> "requests.Response":
    """
    Deletes a lakehouse from the specified workspace.

    Args:
        workspace_id (str): The ID of the workspace containing the lakehouse.
        lakehouse_id (str): The ID of the lakehouse to delete.

    Returns:
        The response from the delete request.
    """
    endpoint = f"workspaces/{workspace_id}/lakehouses/{lakehouse_id}"
    return delete_request(endpoint)
def get_workspace_notebooks(workspace_id: str) -> list[dict]:
    """Retrieves all notebooks in the specified workspace."""
    endpoint = f"workspaces/{workspace_id}/notebooks"
    return get_request(endpoint)


def get_workspace_notebook(
    workspace_id: str, notebook_id: str | None = None, notebook_name: str | None = None
) -> dict[str, str]:
    """
    Retrieves details of a specified notebook by either `notebook_id` or `notebook_name`.

    Args:
        workspace_id (str): The ID of the workspace containing the notebook.
        notebook_id (str | None): The ID of the notebook to retrieve details for.
        notebook_name (str | None): The name of the notebook to retrieve details for.

    Returns:
        A dictionary containing the details of the specified notebook.

    Raises:
        ValueError: If neither `notebook_id` nor `notebook_name` is provided,
            or (via the paginated lookup) if no notebook with that name exists.
    """
    if notebook_id is not None:
        endpoint = f"workspaces/{workspace_id}/notebooks/{notebook_id}"
        return get_request(endpoint)

    if notebook_name is not None:
        endpoint = f"workspaces/{workspace_id}/notebooks"
        # Notebook names are matched against the `displayName` field of the
        # paginated list response.
        return get_item_from_paginated_get_request(
            endpoint, "value", "displayName", notebook_name
        )

    raise ValueError("Either `notebook_id` or `notebook_name` must be provided")


def _notebook_definition_parts(notebook_path: str) -> list[dict[str, str]]:
    """Reads `notebook-content.py` and `.platform` from `notebook_path` and
    base64-encodes them as inline definition parts for the Fabric API.

    Factored out of create/update, which previously duplicated this block.
    """
    parts = []
    for filename in ("notebook-content.py", ".platform"):
        with open(os.path.join(notebook_path, filename), "r") as f:
            payload = base64.b64encode(f.read().encode()).decode()
        parts.append({"path": filename, "payload": payload, "payloadType": "InlineBase64"})
    return parts


def create_workspace_notebook(
    workspace_id: str,
    notebook_path: str,
    name: str | None = None,
    description: str | None = None,
    wait_for_completion: bool = True,
) -> "requests.Response | dict[str, str]":
    """
    Creates a new notebook in the specified workspace.

    Args:
        workspace_id (str): The ID of the workspace where the notebook will be created.
        notebook_path (str): Path to the notebook folder (containing
            `notebook-content.py` and `.platform`).
        name (str | None): Optional display name; when omitted, the basename of
            `notebook_path` (minus the `.Notebook` suffix) is used.
        description (str | None): Optional description for the notebook.
        wait_for_completion (bool): Whether to wait for the long running creation
            operation to complete. Defaults to True.

    Returns:
        The notebook details (dict) when creation completed, otherwise the raw
        202 response object.
    """
    endpoint = f"workspaces/{workspace_id}/notebooks"

    notebook_name = name or os.path.basename(notebook_path).replace(".Notebook", "")

    body = {
        "displayName": notebook_name,
        "description": description or "",
        "parts": _notebook_definition_parts(notebook_path),
    }

    response = post_request(endpoint, body, content_only=False)

    # 201: created synchronously; 202: accepted as a long running operation.
    if response.status_code == 201:
        return response.json()

    if response.status_code == 202 and wait_for_completion:
        operation_id = response.headers["x-ms-operation-id"]
        # NOTE(review): header values are strings — confirm the wait helper
        # accepts a string Retry-After.
        retry_after = response.headers["Retry-After"]
        wait_for_long_running_operation(operation_id, retry_after)
        # BUG FIX: the original looked the notebook up by the raw `name`
        # argument, which is None whenever the name was derived from the path.
        return get_workspace_notebook(workspace_id, notebook_name=notebook_name)

    return response


def update_workspace_notebook_definition(
    workspace_id: str,
    notebook_id: str,
    notebook_path: str,
    wait_for_completion: bool = True,
    update_metadata: bool = True,
) -> "requests.Response | dict[str, str]":
    """
    Updates the definition of an existing notebook in the workspace.

    Args:
        workspace_id (str): The ID of the workspace containing the notebook.
        notebook_id (str): The ID of the notebook to update.
        notebook_path (str): Path to the updated notebook folder (containing
            `notebook-content.py` and `.platform`).
        wait_for_completion (bool): Whether to wait for the update operation to
            complete. Defaults to True.
        update_metadata (bool): Whether to update the notebook's metadata.
            Defaults to True.

    Returns:
        The updated notebook details (dict) when the update completed, otherwise
        the raw 202 response object.
    """
    # NOTE(review): this interpolates Python's "True"/"False" capitalization
    # into the query string — confirm the API accepts that casing.
    endpoint = (
        f"workspaces/{workspace_id}/notebooks/{notebook_id}/updateDefinition"
        f"?updateMetadata={update_metadata}"
    )

    body = {"definition": {"parts": _notebook_definition_parts(notebook_path)}}

    response = post_request(endpoint, body, content_only=False)

    if response.status_code == 200:
        return get_workspace_notebook(workspace_id, notebook_id=notebook_id)

    if response.status_code == 202 and wait_for_completion:
        operation_id = response.headers["x-ms-operation-id"]
        retry_after = response.headers["Retry-After"]
        wait_for_long_running_operation(operation_id, retry_after)
        return get_workspace_notebook(workspace_id, notebook_id=notebook_id)

    return response
The operation cannot be undone, + so use with caution. + + Args: + workspace_id (str): The ID of the workspace containing the notebook. + notebook_id (str): The ID of the notebook to delete. + + Returns: + requests.Response: The response from the delete request. + + Example: + ```python + from msfabricutils.core import delete_workspace_notebook + + response = delete_workspace_notebook( + workspace_id="12345678-1234-1234-1234-123456789012", + notebook_id="beefbeef-beef-beef-beef-beefbeefbeef" + ) + ``` + + Raises: + requests.exceptions.RequestException: If the HTTP request fails or returns an error. + """ + endpoint = f"workspaces/{workspace_id}/notebooks/{notebook_id}" + return delete_request(endpoint) \ No newline at end of file diff --git a/src/msfabricutils/core/operations.py b/src/msfabricutils/core/operations.py new file mode 100644 index 0000000..870c09b --- /dev/null +++ b/src/msfabricutils/core/operations.py @@ -0,0 +1,91 @@ +import logging +import time + +import requests + +from msfabricutils.core.fabric_request import get_request + + +def get_long_running_operation(operation_id: str) -> requests.Response: + + endpoint = f"operations/{operation_id}" + return get_request(endpoint, content_only=False) + + +def wait_for_long_running_operation( + operation_id: str, + retry_after: str, + initial_delay: float = 1.0, + max_delay: float = 32.0, + max_attempts: int = 10, + timeout: float = 60.0 * 5 +) -> requests.Response: + """Wait for a long running operation to complete with exponential backoff. 
+ + Args: + operation_id: The operation ID to check + initial_delay: Starting delay in seconds (default: 1s) + max_delay: Maximum delay between retries in seconds (default: 32s) + max_attempts: Maximum number of retry attempts (default: 10) + timeout: Optional total timeout in seconds (default: None) + + Returns: + Response from the operation + + Raises: + TimeoutError: If the operation times out + Exception: If the operation fails or max retries exceeded + """ + logging.info(f"Waiting {retry_after} seconds for operation {operation_id} to complete...") + time.sleep(float(retry_after)) + + start_time = time.time() + current_delay = initial_delay + attempts = 0 + + while True: + attempts += 1 + response = get_long_running_operation(operation_id) + + if response.status_code != 200: + if attempts < max_attempts: + logging.warning( + f"Request failed (attempt {attempts}/{max_attempts}), retrying...", + extra={ + "operation_id": operation_id, + "status_code": response.status_code, + "delay": current_delay + } + ) + time.sleep(current_delay) + current_delay = min(current_delay * 2, max_delay) + continue + else: + raise Exception( + f"Operation {operation_id} failed after {max_attempts} attempts: {response.json()['error']}" + ) + + match response.json()["status"]: + case "Succeeded": + logging.info(f"Operation {operation_id} completed successfully") + return response + case "Failed": + raise Exception(f"Operation {operation_id} failed: {response.json()['error']}") + case _: + if timeout and (time.time() - start_time) > timeout: + raise TimeoutError( + f"Operation {operation_id} timed out after {timeout} seconds" + ) + + logging.info( + "Operation in progress, waiting...", + extra={ + "operation_id": operation_id, + "status": response.json()["status"], + "delay": current_delay, + "elapsed": time.time() - start_time + } + ) + time.sleep(current_delay) + current_delay = min(current_delay * 2, max_delay) + diff --git a/src/msfabricutils/core/sql_endpoint.py 
b/src/msfabricutils/core/sql_endpoint.py index 849b2bd..7e031f8 100644 --- a/src/msfabricutils/core/sql_endpoint.py +++ b/src/msfabricutils/core/sql_endpoint.py @@ -1,4 +1,4 @@ -from msfabricutils.core.generic import paginated_get_request +from msfabricutils.core.fabric_request import paginated_get_request from msfabricutils.core.workspace import get_workspace diff --git a/src/msfabricutils/core/workspace.py b/src/msfabricutils/core/workspace.py index eb66779..e2c993c 100644 --- a/src/msfabricutils/core/workspace.py +++ b/src/msfabricutils/core/workspace.py @@ -1,6 +1,8 @@ from typing import Any -from msfabricutils.core.generic import ( +import requests + +from msfabricutils.core.fabric_request import ( delete_request, get_item_from_paginated_get_request, get_request, @@ -81,7 +83,7 @@ def create_workspace(workspace_name: str, description: str | None = None) -> dic return post_request(endpoint, data) -def assign_workspace_to_capacity(workspace_id: str, capacity_id: str) -> dict[str, Any]: +def assign_workspace_to_capacity(workspace_id: str, capacity_id: str) -> requests.Response: """ Assigns a workspace to a capacity. @@ -90,7 +92,7 @@ def assign_workspace_to_capacity(workspace_id: str, capacity_id: str) -> dict[st capacity_id (str): The ID of the capacity to assign the workspace to. Returns: - A dictionary containing the details of the assigned workspace. + The response from the assign request. """ endpoint = f"workspaces/{workspace_id}/assignToCapacity" data = { @@ -121,7 +123,7 @@ def update_workspace(workspace_id: str, workspace_name: str | None = None, descr return patch_request(endpoint, data) -def delete_workspace(workspace_id: str) -> dict[str, Any]: +def delete_workspace(workspace_id: str) -> requests.Response: """ Deletes a workspace. @@ -129,7 +131,8 @@ def delete_workspace(workspace_id: str) -> dict[str, Any]: workspace_id (str): The ID of the workspace to delete. Returns: - A dictionary containing the details of the deleted workspace. 
+ The response from the delete request. """ endpoint = f"workspaces/{workspace_id}" return delete_request(endpoint) + From c8c0a831350bdce8344a368f55f520784d3ac6ea Mon Sep 17 00:00:00 2001 From: jsj Date: Sun, 22 Dec 2024 18:06:35 +0100 Subject: [PATCH 7/7] improve unit tests --- tests/internal/fabric_api/test_lakehouse.py | 48 +++++++++++++++------ tests/internal/fabric_api/test_workspace.py | 13 ++++++ 2 files changed, 49 insertions(+), 12 deletions(-) diff --git a/tests/internal/fabric_api/test_lakehouse.py b/tests/internal/fabric_api/test_lakehouse.py index 89715eb..9423970 100644 --- a/tests/internal/fabric_api/test_lakehouse.py +++ b/tests/internal/fabric_api/test_lakehouse.py @@ -16,32 +16,56 @@ def test_get_workspace_lakehouses(): assert isinstance(lakehouses, list) assert len(lakehouses) > 0 + lakehouse = [lakehouse for lakehouse in lakehouses if lakehouse["id"] == LAKEHOUSE_ID] + + assert len(lakehouse) == 1 + assert isinstance(lakehouse[0], dict) + assert lakehouse[0]["displayName"] == LAKEHOUSE_NAME + assert lakehouse[0]["id"] == LAKEHOUSE_ID + def test_get_workspace_lakehouses_by_id(): lakehouses = get_workspace_lakehouses(workspace_id=WORKSPACE_ID) assert isinstance(lakehouses, list) assert len(lakehouses) > 0 + lakehouse = [lakehouse for lakehouse in lakehouses if lakehouse["id"] == LAKEHOUSE_ID] + + assert len(lakehouse) == 1 + assert isinstance(lakehouse[0], dict) + assert lakehouse[0]["displayName"] == LAKEHOUSE_NAME + assert lakehouse[0]["id"] == LAKEHOUSE_ID + def test_get_workspace_lakehouses_by_name(): lakehouses = get_workspace_lakehouses(workspace_name=WORKSPACE_NAME) assert isinstance(lakehouses, list) assert len(lakehouses) > 0 + lakehouse = [lakehouse for lakehouse in lakehouses if lakehouse["id"] == LAKEHOUSE_ID] + + assert len(lakehouse) == 1 + assert isinstance(lakehouse[0], dict) + assert lakehouse[0]["displayName"] == LAKEHOUSE_NAME + assert lakehouse[0]["id"] == LAKEHOUSE_ID + def 
test_get_workspace_lakehouse_tables_by_workspace_id_and_lakehouse_id(): - lakehouse = get_workspace_lakehouse_tables(workspace_id=WORKSPACE_ID, lakehouse_id=LAKEHOUSE_ID) - assert isinstance(lakehouse, list) - assert len(lakehouse) > 0 + tables = get_workspace_lakehouse_tables(workspace_id=WORKSPACE_ID, lakehouse_id=LAKEHOUSE_ID) + assert isinstance(tables, list) + assert len(tables) > 0 + def test_get_workspace_lakehouse_tables_by_workspace_id_and_lakehouse_name(): - lakehouse = get_workspace_lakehouse_tables(workspace_id=WORKSPACE_ID, lakehouse_name=LAKEHOUSE_NAME) - assert isinstance(lakehouse, list) - assert len(lakehouse) > 0 + tables = get_workspace_lakehouse_tables(workspace_id=WORKSPACE_ID, lakehouse_name=LAKEHOUSE_NAME) + assert isinstance(tables, list) + assert len(tables) > 0 + def test_get_workspace_lakehouse_tables_by_workspace_name_and_lakehouse_id(): - lakehouse = get_workspace_lakehouse_tables(workspace_name=WORKSPACE_NAME, lakehouse_id=LAKEHOUSE_ID) - assert isinstance(lakehouse, list) - assert len(lakehouse) > 0 + tables = get_workspace_lakehouse_tables(workspace_name=WORKSPACE_NAME, lakehouse_id=LAKEHOUSE_ID) + assert isinstance(tables, list) + assert len(tables) > 0 + def test_get_workspace_lakehouse_tables_by_workspace_name_and_lakehouse_name(): - lakehouse = get_workspace_lakehouse_tables(workspace_name=WORKSPACE_NAME, lakehouse_name=LAKEHOUSE_NAME) - assert isinstance(lakehouse, list) - assert len(lakehouse) > 0 + tables = get_workspace_lakehouse_tables(workspace_name=WORKSPACE_NAME, lakehouse_name=LAKEHOUSE_NAME) + assert isinstance(tables, list) + assert len(tables) > 0 diff --git a/tests/internal/fabric_api/test_workspace.py b/tests/internal/fabric_api/test_workspace.py index 898a9bd..10cc7d0 100644 --- a/tests/internal/fabric_api/test_workspace.py +++ b/tests/internal/fabric_api/test_workspace.py @@ -15,14 +15,27 @@ def test_get_workspaces(): assert len(workspaces) > 0 assert isinstance(workspaces, list) + workspace = [workspace for 
workspace in workspaces if workspace["id"] == WORKSPACE_ID] + + assert len(workspace) == 1 + assert isinstance(workspace[0], dict) + assert workspace[0]["displayName"] == WORKSPACE_NAME + assert workspace[0]["id"] == WORKSPACE_ID + def test_get_workspace_by_id(): workspace = get_workspace(workspace_id=WORKSPACE_ID) assert isinstance(workspace, dict) assert workspace is not None + assert workspace["displayName"] == WORKSPACE_NAME + assert workspace["id"] == WORKSPACE_ID + def test_get_workspace_by_name(): workspace = get_workspace(workspace_name=WORKSPACE_NAME) assert isinstance(workspace, dict) assert workspace is not None + + assert workspace["displayName"] == WORKSPACE_NAME + assert workspace["id"] == WORKSPACE_ID \ No newline at end of file