Commit

Merge branch 'm-kovalsky/tom_service_principal'
m-kovalsky committed Jan 9, 2025
2 parents e738a7a + c04bdd0 commit ebc659f
Showing 1 changed file with 97 additions and 16 deletions.
113 changes: 97 additions & 16 deletions src/sempy_labs/tom/_model.py
@@ -20,6 +20,7 @@
from sempy.fabric.exceptions import FabricHTTPException
import ast
from uuid import UUID
from sempy.fabric._token_provider import TokenProvider

if TYPE_CHECKING:
    import Microsoft.AnalysisServices.Tabular
@@ -42,21 +43,91 @@ class TOMWrapper:
    _table_map = dict
    _column_map = dict

    def __init__(self, dataset, workspace, readonly):

        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
        (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
        self._dataset_id = dataset_id
        self._dataset_name = dataset_name
        self._workspace_name = workspace_name
        self._workspace_id = workspace_id
    def __init__(self, dataset, workspace, readonly, token_provider):

        self._is_azure_as = False
        prefix = "asazure"
        prefix_full = f"{prefix}://"
        read_write = ":rw"

        # Azure AS workspace logic
        if workspace.startswith(prefix_full):
            # Set read or read/write accordingly
            if readonly is False and not workspace.endswith(read_write):
                workspace += read_write
            elif readonly is True and workspace.endswith(read_write):
                workspace = workspace[: -len(read_write)]
            self._workspace_name = workspace
            self._workspace_id = workspace
            self._dataset_id = dataset
            self._dataset_name = dataset
            self._is_azure_as = True
            if token_provider is None:
                raise ValueError(
                    f"{icons.red_dot} A token provider must be provided when connecting to an Azure AS workspace."
                )
        else:
            (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
            (dataset_name, dataset_id) = resolve_dataset_name_and_id(
                dataset, workspace_id
            )
            self._dataset_id = dataset_id
            self._dataset_name = dataset_name
            self._workspace_name = workspace_name
            self._workspace_id = workspace_id
        self._readonly = readonly
        self._tables_added = []
        self._token_provider = token_provider

        self._tom_server = fabric.create_tom_server(
            readonly=readonly, workspace=workspace_id
        )
        self.model = self._tom_server.Databases[dataset_id].Model
        # No token provider (standard authentication)
        if self._token_provider is None:
            self._tom_server = fabric.create_tom_server(
                readonly=readonly, workspace=workspace_id
            )
        # Service Principal Authentication for Azure AS via token provider
        elif self._is_azure_as:
            import Microsoft.AnalysisServices.Tabular as TOM

            # Extract the region from the workspace connection string
            match = re.search(rf"{prefix_full}(.*?)\.{prefix}", self._workspace_name)
            if match is None:
                raise ValueError(
                    f"{icons.red_dot} Unable to extract the region from the workspace '{self._workspace_name}'."
                )
            region = match.group(1)
            token = self._token_provider(audience="asazure", region=region)
            connection_str = f'Provider=MSOLAP;Data Source={self._workspace_name};Password="{token}";Persist Security Info=True;Impersonation Level=Impersonate'
            self._tom_server = TOM.Server()
            self._tom_server.Connect(connection_str)
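            # Illustration (assuming the standard Azure AS URI shape documented
            # in the docstring below): for a workspace of
            # "asazure://westus.asazure.windows.net/myserver", the regex above
            # extracts the region "westus".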
        # Service Principal Authentication for Power BI via token provider
        else:
            from sempy.fabric._client._utils import _build_adomd_connection_string
            import Microsoft.AnalysisServices.Tabular as TOM
            from Microsoft.AnalysisServices import AccessToken
            from sempy.fabric._token_provider import (
                create_on_access_token_expired_callback,
                ConstantTokenProvider,
            )
            from System import Func

            token = token_provider(audience="pbi")
            self._tom_server = TOM.Server()
            # Seed the server with the current token and register a .NET callback
            # (System.Func[AccessToken, AccessToken]) so it can refresh on expiry.
            get_access_token = create_on_access_token_expired_callback(
                ConstantTokenProvider(token)
            )
            self._tom_server.AccessToken = get_access_token(None)
            self._tom_server.OnAccessTokenExpired = Func[AccessToken, AccessToken](
                get_access_token
            )
            workspace_url = f"powerbi://api.powerbi.com/v1.0/myorg/{workspace}"
            connection_str = _build_adomd_connection_string(
                workspace_url, readonly=readonly
            )
            self._tom_server.Connect(connection_str)

        if self._is_azure_as:
            self.model = self._tom_server.Databases.GetByName(self._dataset_name).Model
        else:
            self.model = self._tom_server.Databases[dataset_id].Model

        self._table_map = {}
        self._column_map = {}
@@ -4666,7 +4737,10 @@ def close(self):
@log
@contextmanager
def connect_semantic_model(
    dataset: str | UUID, readonly: bool = True, workspace: Optional[str] = None
    dataset: str | UUID,
    readonly: bool = True,
    workspace: Optional[str | UUID] = None,
    token_provider: Optional[TokenProvider] = None,
) -> Iterator[TOMWrapper]:
    """
    Connects to the Tabular Object Model (TOM) within a semantic model.
@@ -4678,10 +4752,12 @@ def connect_semantic_model(
    readonly: bool, default=True
        Whether the connection is read-only or read/write. Setting this to False enables read/write, which saves the changes made back to the server.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        The Fabric workspace name or ID. Also supports Azure Analysis Services (token_provider required).
        If connecting to Azure Analysis Services, enter the workspace parameter in the following format: 'asazure://<region>.asazure.windows.net/<server_name>'.
        Defaults to None, which resolves to the workspace of the attached lakehouse
        or, if no lakehouse is attached, to the workspace of the notebook.
    token_provider : TokenProvider, default=None
        The token provider for authentication, created by using the ServicePrincipalTokenProvider class. Required when connecting to Azure Analysis Services.
    Returns
    -------
    typing.Iterator[TOMWrapper]
@@ -4691,7 +4767,12 @@ def connect_semantic_model(
    # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined
    sempy.fabric._client._utils._init_analysis_services()

    tw = TOMWrapper(dataset=dataset, workspace=workspace, readonly=readonly)
    tw = TOMWrapper(
        dataset=dataset,
        workspace=workspace,
        readonly=readonly,
        token_provider=token_provider,
    )
    try:
        yield tw
    finally:
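
For reference, a minimal usage sketch of the new token_provider parameter. It assumes sempy-labs exposes the ServicePrincipalTokenProvider class referenced in the docstring above, with a from_aad_application_key_authentication factory; the exact import path and factory name are assumptions, not confirmed by this diff.

# Hedged usage sketch; the ServicePrincipalTokenProvider import path and
# factory method are assumptions based on the docstring above.
from sempy_labs import ServicePrincipalTokenProvider
from sempy_labs.tom import connect_semantic_model

token_provider = ServicePrincipalTokenProvider.from_aad_application_key_authentication(
    tenant_id="...", client_id="...", client_secret="..."  # placeholder credentials
)

# Power BI / Fabric workspace (name or ID): the wrapper builds a powerbi://
# XMLA connection string and registers a token-refresh callback.
with connect_semantic_model(
    dataset="MyModel",
    workspace="MyWorkspace",
    readonly=True,
    token_provider=token_provider,
) as tom:
    for t in tom.model.Tables:
        print(t.Name)

# Azure Analysis Services: pass the server URI as the workspace; with
# readonly=False the ':rw' suffix is appended to the URI automatically.
with connect_semantic_model(
    dataset="MyModel",
    workspace="asazure://<region>.asazure.windows.net/<server_name>",
    readonly=False,
    token_provider=token_provider,
) as tom:
    for t in tom.model.Tables:
        print(t.Name)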
