From 079cf5ff9f99fea31d7c3edb5467adb206ef1522 Mon Sep 17 00:00:00 2001
From: Michael
Date: Thu, 19 Dec 2024 16:49:56 +0200
Subject: [PATCH 1/6] job instance - root act id

---
 src/sempy_labs/_job_scheduler.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/sempy_labs/_job_scheduler.py b/src/sempy_labs/_job_scheduler.py
index 86436388..5e9fe8bf 100644
--- a/src/sempy_labs/_job_scheduler.py
+++ b/src/sempy_labs/_job_scheduler.py
@@ -57,7 +57,8 @@ def list_item_job_instances(
             "Job Type",
             "Invoke Type",
             "Status",
-            "Root Activity Id" "Start Time UTC",
+            "Root Activity Id",
+            "Start Time UTC",
             "End Time UTC",
             "Failure Reason",
         ]

From 962f1997b1e104f10320f3ba19bebd925e087405 Mon Sep 17 00:00:00 2001
From: Michael
Date: Thu, 19 Dec 2024 22:00:47 +0200
Subject: [PATCH 2/6] added list_item_schedules

---
 src/sempy_labs/__init__.py       |  6 ++-
 src/sempy_labs/_job_scheduler.py | 85 ++++++++++++++++++++++++++++++++
 2 files changed, 90 insertions(+), 1 deletion(-)

diff --git a/src/sempy_labs/__init__.py b/src/sempy_labs/__init__.py
index da57e489..0b849a43 100644
--- a/src/sempy_labs/__init__.py
+++ b/src/sempy_labs/__init__.py
@@ -1,4 +1,7 @@
-from sempy_labs._job_scheduler import list_item_job_instances
+from sempy_labs._job_scheduler import (
+    list_item_job_instances,
+    list_item_schedules,
+)
 from sempy_labs._gateways import (
     list_gateway_members,
     list_gateway_role_assigments,
@@ -470,4 +473,5 @@
     "bind_semantic_model_to_gateway",
     "list_semantic_model_errors",
     "list_item_job_instances",
+    "list_item_schedules",
 ]
diff --git a/src/sempy_labs/_job_scheduler.py b/src/sempy_labs/_job_scheduler.py
index 5e9fe8bf..dd82e644 100644
--- a/src/sempy_labs/_job_scheduler.py
+++ b/src/sempy_labs/_job_scheduler.py
@@ -91,3 +91,88 @@ def list_item_job_instances(
     df = pd.concat(dfs, ignore_index=True)
 
     return df
+
+
+def list_item_schedules(
+    item: str | UUID,
+    type: Optional[str] = None,
+    job_type: str = "DefaultJob",
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
+    """
+    Get scheduling settings for one specific item.
+
+    This is a wrapper function for the following API: `Job Scheduler - List Item Schedules `_.
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The item name or ID
+    type : str, default=None
+        The item type. If specifying the item name as the item, the item type is required.
+    job_type : str, default="DefaultJob"
+        The job type.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        Shows a list of scheduling settings for one specific item.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=type, workspace=workspace
+    )
+
+    df = pd.DataFrame(
+        columns=[
+            "Job Schedule Id",
+            "Enabled",
+            "Created Date Time",
+            "Start Date Time",
+            "End Date Time",
+            "Local Time Zone Id",
+            "Type",
+            "Interval",
+            "Weekdays",
+            "Times",
+            "Owner Id",
+            "Owner Type",
+        ]
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.get(
+        f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    for v in response.json().get("value", []):
+        config = v.get("configuration", {})
+        own = v.get("owner", {})
+        new_data = {
+            "Job Schedule Id": v.get("id"),
+            "Enabled": v.get("enabled"),
+            "Created Date Time": v.get("createdDateTime"),
+            "Start Date Time": config.get("startDateTime"),
+            "End Date Time": config.get("endDateTime"),
+            "Local Time Zone Id": config.get("localTimeZoneId"),
+            "Type": config.get("type"),
+            "Interval": config.get("interval"),
+            "Weekdays": config.get("weekdays"),
+            "Times": config.get("times"),
+            "Owner Id": own.get("id"),
+            "Owner Type": own.get("type"),
+        }
+
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    df["Enabled"] = df["Enabled"].astype(bool)
+
+    return df

From 40f0b95d4d81e9a1ef126579d3f4c6da1716aafe Mon Sep 17 00:00:00 2001
From: Michael
Date: Thu, 19 Dec 2024 22:13:51 +0200
Subject: [PATCH 3/6] added run_on_demand_item_job

---
 src/sempy_labs/_job_scheduler.py | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)

diff --git a/src/sempy_labs/_job_scheduler.py b/src/sempy_labs/_job_scheduler.py
index dd82e644..97421191 100644
--- a/src/sempy_labs/_job_scheduler.py
+++ b/src/sempy_labs/_job_scheduler.py
@@ -5,9 +5,11 @@
     resolve_workspace_name_and_id,
     resolve_item_name_and_id,
     pagination,
+    lro,
 )
 from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
+import sempy_labs._icons as icons
 
 
 def list_item_job_instances(
@@ -176,3 +178,25 @@ def list_item_schedules(
     df["Enabled"] = df["Enabled"].astype(bool)
 
     return df
+
+
+def run_on_demand_item_job(
+    item: str | UUID,
+    type: Optional[str] = None,
+    job_type: str = "DefaultJob",
+    workspace: Optional[str | UUID] = None,
+):
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=type, workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.get(
+        f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances?jobType={job_type}"
+    )
+
+    lro(client, response, return_status_code=True)
+
+    print(f"{icons.green_dot} The '{item_name}' {type.lower()} has been executed.")

From e24e75790b6227f5323be3e8de73af72f767475c Mon Sep 17 00:00:00 2001
From: Michael
Date: Fri, 20 Dec 2024 13:34:20 +0200
Subject: [PATCH 4/6] fix

---
 src/sempy_labs/_job_scheduler.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/sempy_labs/_job_scheduler.py b/src/sempy_labs/_job_scheduler.py
index 97421191..34cf391c 100644
--- a/src/sempy_labs/_job_scheduler.py
+++ b/src/sempy_labs/_job_scheduler.py
@@ -74,6 +74,7 @@ def list_item_job_instances(
     dfs = []
     for r in responses:
         for v in r.get("value", []):
+            fail = v.get("failureReason", {})
             new_data = {
                 "Job Instance Id": v.get("id"),
                 "Item Name": item_name,
@@ -85,7 +86,7 @@
                 "Root Activity Id": v.get("rootActivityId"),
                 "Start Time UTC": v.get("startTimeUtc"),
                 "End Time UTC": v.get("endTimeUtc"),
-                "Failure Reason": v.get("failureReason"),
+                "Error Message": fail.get('message') if fail is not None else "",
             }
             dfs.append(pd.DataFrame(new_data, index=[0]))
 

From 6d4a7603ba716da2f4cc7b9cc436ad427404d0a8 Mon Sep 17 00:00:00 2001
From: Michael
Date: Sun, 22 Dec 2024 08:53:55 +0200
Subject: [PATCH 5/6] added run_on_demand_item_job

---
 src/sempy_labs/_job_scheduler.py | 26 ++++++++++++++++++++++----
 1 file changed, 22 insertions(+), 4 deletions(-)

diff --git a/src/sempy_labs/_job_scheduler.py b/src/sempy_labs/_job_scheduler.py
index 34cf391c..7b70c297 100644
--- a/src/sempy_labs/_job_scheduler.py
+++ b/src/sempy_labs/_job_scheduler.py
@@ -25,7 +25,7 @@ def list_item_job_instances(
     item : str | uuid.UUID
         The item name or ID
     type : str, default=None
-        The item type. If specifying the item name as the item, the item type is required.
+        The item `type `_. If specifying the item name as the item, the item type is required.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -86,7 +86,7 @@
                 "Root Activity Id": v.get("rootActivityId"),
                 "Start Time UTC": v.get("startTimeUtc"),
                 "End Time UTC": v.get("endTimeUtc"),
-                "Error Message": fail.get('message') if fail is not None else "",
+                "Error Message": fail.get("message") if fail is not None else "",
             }
             dfs.append(pd.DataFrame(new_data, index=[0]))
 
@@ -112,7 +112,7 @@ def list_item_schedules(
     item : str | uuid.UUID
         The item name or ID
     type : str, default=None
-        The item type. If specifying the item name as the item, the item type is required.
+        The item `type `_. If specifying the item name as the item, the item type is required.
     job_type : str, default="DefaultJob"
         The job type.
     workspace : str | uuid.UUID, default=None
@@ -187,6 +187,24 @@ def run_on_demand_item_job(
     job_type: str = "DefaultJob",
     workspace: Optional[str | UUID] = None,
 ):
+    """
+    Run on-demand item job instance.
+
+    This is a wrapper function for the following API: `Job Scheduler - Run On Demand Item Job `_.
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The item name or ID
+    type : str, default=None
+        The item `type `_. If specifying the item name as the item, the item type is required.
+    job_type : str, default="DefaultJob"
+        The job type.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (item_name, item_id) = resolve_item_name_and_id(
@@ -194,7 +212,7 @@ def run_on_demand_item_job(
     )
 
     client = fabric.FabricRestClient()
-    response = client.get(
+    response = client.post(
         f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances?jobType={job_type}"
    )
 

From 2714fdfb84d9c53f1d2a9556f8632eb88a4daff3 Mon Sep 17 00:00:00 2001
From: Michael
Date: Wed, 8 Jan 2025 10:24:36 +0200
Subject: [PATCH 6/6] convert to datetime

---
 src/sempy_labs/_job_scheduler.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/sempy_labs/_job_scheduler.py b/src/sempy_labs/_job_scheduler.py
index 7b70c297..2bd204ae 100644
--- a/src/sempy_labs/_job_scheduler.py
+++ b/src/sempy_labs/_job_scheduler.py
@@ -177,6 +177,8 @@ def list_item_schedules(
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
     df["Enabled"] = df["Enabled"].astype(bool)
+    df["Created Date Time"] = pd.to_datetime(df["Created Date Time"])
+    df["Start Date Time"] = pd.to_datetime(df["Start Date Time"])
 
     return df
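
Taken together, the series leaves `src/sempy_labs/_job_scheduler.py` with three job-scheduler wrappers: `list_item_job_instances`, `list_item_schedules`, and `run_on_demand_item_job`. A minimal usage sketch, assuming the patched package is installed and running inside a Fabric notebook, and that a Notebook item named "Sales ETL" exists in a workspace named "Analytics" (both names are hypothetical placeholders):

```python
from sempy_labs._job_scheduler import (
    list_item_job_instances,
    list_item_schedules,
    run_on_demand_item_job,
)

# Scheduling settings defined for the item; "Enabled" is cast to bool and, after
# patch 6, "Created Date Time"/"Start Date Time" are converted to datetimes.
schedules = list_item_schedules(item="Sales ETL", type="Notebook", workspace="Analytics")

# Trigger the item's default job on demand (patch 5 switches this call to a POST request).
run_on_demand_item_job(item="Sales ETL", type="Notebook", workspace="Analytics")

# Job instance history, including the "Root Activity Id" column introduced in patch 1.
instances = list_item_job_instances(item="Sales ETL", type="Notebook", workspace="Analytics")
print(instances[["Job Instance Id", "Status", "Root Activity Id"]])
```

Note that only `list_item_job_instances` and `list_item_schedules` are re-exported from `sempy_labs/__init__.py` in this series, which is why `run_on_demand_item_job` is imported from the private module directly in the sketch above.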