From db97bc858a82ba97179cbc39374177439b320823 Mon Sep 17 00:00:00 2001
From: Michael
Date: Sun, 8 Dec 2024 14:54:45 +0200
Subject: [PATCH 01/11] fixed issue 331

---
 src/sempy_labs/_refresh_semantic_model.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/sempy_labs/_refresh_semantic_model.py b/src/sempy_labs/_refresh_semantic_model.py
index a61f8eb7..028090fe 100644
--- a/src/sempy_labs/_refresh_semantic_model.py
+++ b/src/sempy_labs/_refresh_semantic_model.py
@@ -169,7 +169,8 @@ def display_trace_logs(trace, partition_map, widget, title, stop=False):
             right_on="PartitionID",
             how="left",
         )
-        _process_and_display_chart(df, title=title, widget=widget)
+        if not df.empty:
+            _process_and_display_chart(df, title=title, widget=widget)
         if stop:
             df.drop(["Object Name", "PartitionID"], axis=1, inplace=True)
             df.rename(columns={"TableName": "Table Name"}, inplace=True)
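The guard above is the whole fix for issue 331: the left-merge of trace events onto the partition map can legitimately produce zero rows (for example, when no refresh events have arrived yet), and handing an empty frame to the charting helper raises. A minimal, self-contained sketch of the failure mode and the guard; `render` is a hypothetical stand-in for the library's `_process_and_display_chart`:

```python
import pandas as pd

def render(frame: pd.DataFrame) -> None:
    # hypothetical stand-in for _process_and_display_chart
    print(frame.to_string(index=False))

# No trace events have arrived yet, so the merge result is empty.
events = pd.DataFrame(columns=["PartitionID", "Duration"])
partition_map = pd.DataFrame({"PartitionID": [1], "TableName": ["Sales"]})

df = events.merge(partition_map, left_on="PartitionID", right_on="PartitionID", how="left")

if not df.empty:  # same guard the patch adds
    render(df)
```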
From dbadbd53aa7f08a9d0711a704476e8a37ac119b5 Mon Sep 17 00:00:00 2001
From: Michael
Date: Tue, 10 Dec 2024 10:03:02 +0200
Subject: [PATCH 02/11] make admin.scan_workspaces visible

---
 README.md                        | 1 +
 src/sempy_labs/admin/__init__.py | 4 ++++
 src/sempy_labs/tom/_model.py     | 8 ++++++--
 3 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index ae4317e7..8701e906 100644
--- a/README.md
+++ b/README.md
@@ -38,6 +38,7 @@ Check out the video below for an introduction to Semantic Link, Semantic Link La
   * [Dynamically generate a Direct Lake semantic model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.generate_direct_lake_semantic_model)
   * [Check why a Direct Lake semantic model would fallback to DirectQuery](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.check_fallback_reason)
   * [View a measure dependency tree](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.measure_dependency_tree)
+  * [View unique columns touched in a single (or multiple) DAX query(ies)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_dax_query_dependencies)
 * Reports
   * [Report Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.run_report_bpa)
   * [View report metadata](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Report%20Analysis.ipynb)
diff --git a/src/sempy_labs/admin/__init__.py b/src/sempy_labs/admin/__init__.py
index 1219c47d..83ec2e40 100644
--- a/src/sempy_labs/admin/__init__.py
+++ b/src/sempy_labs/admin/__init__.py
@@ -1,3 +1,6 @@
+from sempy_labs.admin._scanner import (
+    scan_workspaces,
+)
 from sempy_labs.admin._basic_functions import (
     assign_workspaces_to_capacity,
     unassign_workspaces_from_capacity,
@@ -66,4 +69,5 @@
     "list_git_connections",
     "list_reports",
     "get_capacity_assignment_status",
+    "scan_workspaces",
 ]
diff --git a/src/sempy_labs/tom/_model.py b/src/sempy_labs/tom/_model.py
index da36060b..d5ff4d00 100644
--- a/src/sempy_labs/tom/_model.py
+++ b/src/sempy_labs/tom/_model.py
@@ -4541,9 +4541,13 @@ def add_role_member(self, role_name: str, member: str | List[str]):
                 rm.IdentityProvider = "AzureAD"
                 rm.MemberName = m
                 role.Members.Add(rm)
-                print(f"{icons.green_dot} '{m}' has been added as a member of the '{role_name}' role.")
+                print(
+                    f"{icons.green_dot} '{m}' has been added as a member of the '{role_name}' role."
+                )
             else:
-                print(f"{icons.yellow_dot} '{m}' is already a member in the '{role_name}' role.")
+                print(
+                    f"{icons.yellow_dot} '{m}' is already a member in the '{role_name}' role."
+                )

    def close(self):
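Beyond the docstring reflow in `tom/_model.py`, the substance of this patch is the standard re-export idiom: importing a private module's function in `admin/__init__.py` and listing it in `__all__` is what makes `scan_workspaces` reachable as `sempy_labs.admin.scan_workspaces` and visible to the documentation build. A stripped-down layout sketch (file contents illustrative, not the library's actual code):

```python
# admin/_scanner.py -- implementation lives in a private module
def scan_workspaces(workspace=None):
    """Scan one or more workspaces via the admin scanner API (body omitted)."""
    ...

# admin/__init__.py -- the package's public surface
from sempy_labs.admin._scanner import (
    scan_workspaces,
)

__all__ = [
    # ... existing exports ...
    "scan_workspaces",
]
```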
""" - if workspace is None: - workspace_id = fabric.get_workspace_id() - workspace = fabric.resolve_workspace_name(workspace_id) - - obj = fabric.resolve_item_id( - item_name=dataset, type="SemanticModel", workspace=workspace - ) + if _is_valid_uuid(dataset): + dataset_id = dataset + else: + dataset_id = fabric.resolve_item_id( + item_name=dataset, type="SemanticModel", workspace=workspace + ) - return obj + return dataset_id def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None) -> str: diff --git a/src/sempy_labs/_model_bpa.py b/src/sempy_labs/_model_bpa.py index 9f05b532..69623060 100644 --- a/src/sempy_labs/_model_bpa.py +++ b/src/sempy_labs/_model_bpa.py @@ -10,9 +10,10 @@ create_relationship_name, save_as_delta_table, resolve_workspace_capacity, - resolve_dataset_id, + resolve_dataset_name_and_id, get_language_codes, _get_max_run_id, + resolve_workspace_name_and_id, ) from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached from sempy_labs.tom import connect_semantic_model @@ -23,11 +24,12 @@ from pyspark.sql.functions import col, flatten from pyspark.sql.types import StructType, StructField, StringType import os +from uuid import UUID @log def run_model_bpa( - dataset: str, + dataset: str | UUID, rules: Optional[pd.DataFrame] = None, workspace: Optional[str] = None, export: bool = False, @@ -41,8 +43,8 @@ def run_model_bpa( Parameters ---------- - dataset : str - Name of the semantic model. + dataset : str | UUID + Name or ID of the semantic model. rules : pandas.DataFrame, default=None A pandas dataframe containing rules to be evaluated. workspace : str, default=None @@ -105,7 +107,10 @@ def map_language(language, language_list): if language is not None: language = map_language(language, language_list) - workspace = fabric.resolve_workspace_name(workspace) + (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace) + (dataset_name, dataset_id) = resolve_dataset_name_and_id( + dataset, workspace=workspace_id + ) if language is not None and language not in language_list: print( @@ -113,7 +118,7 @@ def map_language(language, language_list): ) with connect_semantic_model( - dataset=dataset, workspace=workspace, readonly=True + dataset=dataset_id, workspace=workspace_id, readonly=True ) as tom: if extended: @@ -122,7 +127,7 @@ def map_language(language, language_list): # Do not run BPA for models with no tables if tom.model.Tables.Count == 0: print( - f"{icons.warning} The '{dataset}' semantic model within the '{workspace}' workspace has no tables and therefore there are no valid BPA results." + f"{icons.warning} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has no tables and therefore there are no valid BPA results." 
) finalDF = pd.DataFrame( columns=[ @@ -136,7 +141,9 @@ def map_language(language, language_list): ] ) else: - dep = get_model_calc_dependencies(dataset=dataset, workspace=workspace) + dep = get_model_calc_dependencies( + dataset=dataset_id, workspace=workspace_id + ) def translate_using_po(rule_file): current_dir = os.path.dirname(os.path.abspath(__file__)) @@ -382,20 +389,19 @@ def translate_using_spark(rule_file): runId = max_run_id + 1 now = datetime.datetime.now() - dfD = fabric.list_datasets(workspace=workspace, mode="rest") - dfD_filt = dfD[dfD["Dataset Name"] == dataset] + dfD = fabric.list_datasets(workspace=workspace_id, mode="rest") + dfD_filt = dfD[dfD["Dataset Id"] == dataset_id] configured_by = dfD_filt["Configured By"].iloc[0] - capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace) + capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace_id) dfExport["Capacity Name"] = capacity_name dfExport["Capacity Id"] = capacity_id - dfExport["Workspace Name"] = workspace - dfExport["Workspace Id"] = fabric.resolve_workspace_id(workspace) - dfExport["Dataset Name"] = dataset - dfExport["Dataset Id"] = resolve_dataset_id(dataset, workspace) + dfExport["Workspace Name"] = workspace_name + dfExport["Workspace Id"] = workspace_id + dfExport["Dataset Name"] = dataset_name + dfExport["Dataset Id"] = dataset_id dfExport["Configured By"] = configured_by dfExport["Timestamp"] = now dfExport["RunId"] = runId - dfExport["Configured By"] = configured_by dfExport["RunId"] = dfExport["RunId"].astype("int") dfExport = dfExport[list(icons.bpa_schema.keys())] diff --git a/src/sempy_labs/_model_bpa_bulk.py b/src/sempy_labs/_model_bpa_bulk.py index 41ff4b5f..7e0262d5 100644 --- a/src/sempy_labs/_model_bpa_bulk.py +++ b/src/sempy_labs/_model_bpa_bulk.py @@ -119,16 +119,16 @@ def run_model_bpa_bulk( dfD_filt = dfD[~dfD["Dataset Name"].isin(skip_models)] if len(dfD_filt) > 0: - for i2, r2 in dfD_filt.iterrows(): + for _, r2 in dfD_filt.iterrows(): + dataset_id = r2["Dataset Id"] dataset_name = r2["Dataset Name"] config_by = r2["Configured By"] - dataset_id = r2["Dataset Id"] print( f"{icons.in_progress} Collecting Model BPA stats for the '{dataset_name}' semantic model within the '{wksp}' workspace." ) try: bpa_df = run_model_bpa( - dataset=dataset_name, + dataset=dataset_id, workspace=wksp, language=language, return_dataframe=True, diff --git a/src/sempy_labs/_model_dependencies.py b/src/sempy_labs/_model_dependencies.py index 6b632826..4745826b 100644 --- a/src/sempy_labs/_model_dependencies.py +++ b/src/sempy_labs/_model_dependencies.py @@ -1,10 +1,15 @@ import sempy.fabric as fabric import pandas as pd -from sempy_labs._helper_functions import format_dax_object_name +from sempy_labs._helper_functions import ( + format_dax_object_name, + resolve_dataset_name_and_id, + resolve_workspace_name_and_id, +) import sempy_labs._icons as icons from typing import Any, Dict, Optional from anytree import Node, RenderTree from sempy._utils._log import log +from uuid import UUID @log @@ -139,15 +144,15 @@ def get_measure_dependencies( @log def get_model_calc_dependencies( - dataset: str, workspace: Optional[str] = None + dataset: str | UUID, workspace: Optional[str] = None ) -> pd.DataFrame: """ Shows all dependencies for all objects in a semantic model. Parameters ---------- - dataset : str - Name of the semantic model. + dataset : str | UUID + Name or ID of the semantic model. workspace : str, default=None The Fabric workspace name. 
Defaults to None which resolves to the workspace of the attached lakehouse @@ -159,10 +164,11 @@ def get_model_calc_dependencies( Shows all dependencies for all objects in the semantic model. """ - workspace = fabric.resolve_workspace_name(workspace) + (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace) + (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id) dep = fabric.evaluate_dax( - dataset=dataset, - workspace=workspace, + dataset=dataset_id, + workspace=workspace_id, dax_string=""" SELECT [TABLE] AS [Table Name], diff --git a/src/sempy_labs/tom/_model.py b/src/sempy_labs/tom/_model.py index da36060b..ea96b02c 100644 --- a/src/sempy_labs/tom/_model.py +++ b/src/sempy_labs/tom/_model.py @@ -7,6 +7,8 @@ format_dax_object_name, generate_guid, _make_list_unique, + resolve_dataset_name_and_id, + resolve_workspace_name_and_id, ) from sempy_labs._list_functions import list_relationships from sempy_labs._refresh_semantic_model import refresh_semantic_model @@ -17,6 +19,7 @@ import sempy_labs._icons as icons from sempy.fabric.exceptions import FabricHTTPException import ast +from uuid import UUID if TYPE_CHECKING: import Microsoft.AnalysisServices.Tabular @@ -31,15 +34,15 @@ class TOMWrapper: be enabled if setting the readonly parameter to False. """ - _dataset: str + _dataset_id: UUID _workspace: str _readonly: bool _tables_added: List[str] _table_map = dict _column_map = dict - def __init__(self, dataset, workspace, readonly): - self._dataset = dataset + def __init__(self, dataset_id, workspace, readonly): + self._dataset_id = dataset_id self._workspace = workspace self._readonly = readonly self._tables_added = [] @@ -47,7 +50,7 @@ def __init__(self, dataset, workspace, readonly): self._tom_server = fabric.create_tom_server( readonly=readonly, workspace=workspace ) - self.model = self._tom_server.Databases.GetByName(dataset).Model + self.model = self._tom_server.Databases[dataset_id].Model self._table_map = {} self._column_map = {} @@ -4541,9 +4544,13 @@ def add_role_member(self, role_name: str, member: str | List[str]): rm.IdentityProvider = "AzureAD" rm.MemberName = m role.Members.Add(rm) - print(f"{icons.green_dot} '{m}' has been added as a member of the '{role_name}' role.") + print( + f"{icons.green_dot} '{m}' has been added as a member of the '{role_name}' role." + ) else: - print(f"{icons.yellow_dot} '{m}' is already a member in the '{role_name}' role.") + print( + f"{icons.yellow_dot} '{m}' is already a member in the '{role_name}' role." + ) def close(self): @@ -4614,15 +4621,15 @@ def close(self): @log @contextmanager def connect_semantic_model( - dataset: str, readonly: bool = True, workspace: Optional[str] = None + dataset: str | UUID, readonly: bool = True, workspace: Optional[str] = None ) -> Iterator[TOMWrapper]: """ Connects to the Tabular Object Model (TOM) within a semantic model. Parameters ---------- - dataset : str - Name of the semantic model. + dataset : str | UUID + Name or ID of the semantic model. readonly: bool, default=True Whether the connection is read-only or read/write. Setting this to False enables read/write which saves the changes made back to the server. 
workspace : str, default=None @@ -4639,11 +4646,10 @@ def connect_semantic_model( # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined sempy.fabric._client._utils._init_analysis_services() - if workspace is None: - workspace_id = fabric.get_workspace_id() - workspace = fabric.resolve_workspace_name(workspace_id) + (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace) + (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id) - tw = TOMWrapper(dataset=dataset, workspace=workspace, readonly=readonly) + tw = TOMWrapper(dataset_id=dataset_id, workspace=workspace_id, readonly=readonly) try: yield tw finally: From b2fdf9f2bc25ba869075b972c021ebdabf61a7a4 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 12 Dec 2024 10:50:05 +0200 Subject: [PATCH 04/11] fixed for other functions --- src/sempy_labs/_helper_functions.py | 12 ++--- src/sempy_labs/_list_functions.py | 55 ++++++++++--------- src/sempy_labs/_refresh_semantic_model.py | 37 +++++++------ src/sempy_labs/directlake/_dl_helper.py | 21 +++++--- src/sempy_labs/tom/_model.py | 64 +++++++++++++---------- 5 files changed, 105 insertions(+), 84 deletions(-) diff --git a/src/sempy_labs/_helper_functions.py b/src/sempy_labs/_helper_functions.py index 2a443d51..13edf392 100644 --- a/src/sempy_labs/_helper_functions.py +++ b/src/sempy_labs/_helper_functions.py @@ -1186,20 +1186,20 @@ def _make_list_unique(my_list): def _get_partition_map(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame: - if workspace is None: - workspace = fabric.resolve_workspace_name() + (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace) + (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id) partitions = fabric.evaluate_dax( - dataset=dataset, - workspace=workspace, + dataset=dataset_id, + workspace=workspace_id, dax_string=""" select [ID] AS [PartitionID], [TableID], [Name] AS [PartitionName] from $system.tmschema_partitions """, ) tables = fabric.evaluate_dax( - dataset=dataset, - workspace=workspace, + dataset=dataset_id, + workspace=workspace_id, dax_string=""" select [ID] AS [TableID], [Name] AS [TableName] from $system.tmschema_tables """, diff --git a/src/sempy_labs/_list_functions.py b/src/sempy_labs/_list_functions.py index abef0e46..f4157ccd 100644 --- a/src/sempy_labs/_list_functions.py +++ b/src/sempy_labs/_list_functions.py @@ -7,23 +7,25 @@ pagination, resolve_item_type, format_dax_object_name, + resolve_dataset_name_and_id, ) import pandas as pd from typing import Optional import sempy_labs._icons as icons from sempy.fabric.exceptions import FabricHTTPException +from uuid import UUID def get_object_level_security( - dataset: str, workspace: Optional[str] = None + dataset: str | UUID, workspace: Optional[str] = None ) -> pd.DataFrame: """ Shows the object level security for the semantic model. Parameters ---------- - dataset : str - Name of the semantic model. + dataset : str | UUID + Name or ID of the semantic model. workspace : str, default=None The Fabric workspace name. 
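From this patch on, every public entry point accepts either a display name or a UUID and funnels through the new `resolve_dataset_name_and_id`. The dispatch hinges on `_is_valid_uuid`, whose body is not part of this series; a helper with the assumed behavior takes only a few lines of standard library:

```python
from uuid import UUID

def _is_valid_uuid(value) -> bool:
    """Return True if value parses as a UUID, i.e. the caller passed an ID."""
    try:
        UUID(str(value))
        return True
    except ValueError:
        return False

# IDs resolve to names; anything else is treated as a name and resolved to an ID.
assert _is_valid_uuid("a61f8eb7-0000-4000-8000-000000000000")
assert not _is_valid_uuid("My Semantic Model")
```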
From b2fdf9f2bc25ba869075b972c021ebdabf61a7a4 Mon Sep 17 00:00:00 2001
From: Michael
Date: Thu, 12 Dec 2024 10:50:05 +0200
Subject: [PATCH 04/11] fixed for other functions

---
 src/sempy_labs/_helper_functions.py       | 12 ++---
 src/sempy_labs/_list_functions.py         | 55 ++++++++++---------
 src/sempy_labs/_refresh_semantic_model.py | 37 +++++++------
 src/sempy_labs/directlake/_dl_helper.py   | 21 +++++---
 src/sempy_labs/tom/_model.py              | 64 +++++++++++++----------
 5 files changed, 105 insertions(+), 84 deletions(-)

diff --git a/src/sempy_labs/_helper_functions.py b/src/sempy_labs/_helper_functions.py
index 2a443d51..13edf392 100644
--- a/src/sempy_labs/_helper_functions.py
+++ b/src/sempy_labs/_helper_functions.py
@@ -1186,20 +1186,20 @@ def _make_list_unique(my_list):

 def _get_partition_map(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:

-    if workspace is None:
-        workspace = fabric.resolve_workspace_name()
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     partitions = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         select [ID] AS [PartitionID], [TableID], [Name] AS [PartitionName] from $system.tmschema_partitions
         """,
     )

     tables = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         select [ID] AS [TableID], [Name] AS [TableName] from $system.tmschema_tables
         """,
     )
diff --git a/src/sempy_labs/_list_functions.py b/src/sempy_labs/_list_functions.py
index abef0e46..f4157ccd 100644
--- a/src/sempy_labs/_list_functions.py
+++ b/src/sempy_labs/_list_functions.py
@@ -7,23 +7,25 @@
     pagination,
     resolve_item_type,
     format_dax_object_name,
+    resolve_dataset_name_and_id,
 )
 import pandas as pd
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID


 def get_object_level_security(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str] = None
 ) -> pd.DataFrame:
     """
     Shows the object level security for the semantic model.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -37,12 +39,13 @@ def get_object_level_security(

     from sempy_labs.tom import connect_semantic_model

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     df = pd.DataFrame(columns=["Role Name", "Object Type", "Table Name", "Object Name"])

     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:

         for r in tom.model.Roles:
@@ -82,15 +85,15 @@ def get_object_level_security(


 def list_tables(
-    dataset: str, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's tables and their properties.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -106,7 +109,8 @@ def list_tables(

     from sempy_labs.tom import connect_semantic_model

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     df = pd.DataFrame(
         columns=[
@@ -121,20 +125,20 @@ def list_tables(
         ]
     )

     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
         if extended:
             dict_df = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""
                 EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DIMENSION_NAME],[DICTIONARY_SIZE])
                 """,
             )
             dict_sum = dict_df.groupby("[DIMENSION_NAME]")["[DICTIONARY_SIZE]"].sum()
             data = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[TABLE_ID],[DIMENSION_NAME],[USED_SIZE])""",
             )
             data_sum = (
@@ -162,8 +166,8 @@ def list_tables(
                 .sum()
             )
             rc = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""
                 SELECT [DIMENSION_NAME],[ROWS_COUNT] FROM $SYSTEM.DISCOVER_STORAGE_TABLES
                 WHERE RIGHT ( LEFT ( TABLE_ID, 2 ), 1 ) <> '$'
@@ -850,15 +854,15 @@ def update_item(


 def list_relationships(
-    dataset: str, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's relationships and their properties.

     Parameters
     ----------
-    dataset: str
-        Name of the semantic model.
+    dataset: str | UUID
+        Name or UUID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -872,17 +876,18 @@ def list_relationships(
         A pandas dataframe showing the object level security for the semantic model.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
+    dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)
     dfR["From Object"] = format_dax_object_name(dfR["From Table"], dfR["From Column"])
     dfR["To Object"] = format_dax_object_name(dfR["To Table"], dfR["To Column"])

     if extended:
         # Used to map the Relationship IDs
         rel = fabric.evaluate_dax(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             dax_string="""
             SELECT
             [ID] AS [RelationshipID]
             ,[Name]
             FROM $SYSTEM.TMSCHEMA_RELATIONSHIPS
             """,
         )

         # USED_SIZE shows the Relationship Size where TABLE_ID starts with R$
         cs = fabric.evaluate_dax(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             dax_string="""
             SELECT
             [TABLE_ID]
             ,[USED_SIZE]
             FROM $SYSTEM.DISCOVER_STORAGE_TABLE_COLUMN_SEGMENTS
             """,
         )
diff --git a/src/sempy_labs/_refresh_semantic_model.py b/src/sempy_labs/_refresh_semantic_model.py
index a61f8eb7..7cca8f09 100644
--- a/src/sempy_labs/_refresh_semantic_model.py
+++ b/src/sempy_labs/_refresh_semantic_model.py
@@ -5,6 +5,7 @@
     resolve_workspace_name_and_id,
     _get_partition_map,
     _process_and_display_chart,
+    resolve_dataset_name_and_id,
 )
 from typing import Any, List, Optional, Union
 from sempy._utils._log import log
@@ -14,11 +15,12 @@
 import warnings
 import ipywidgets as widgets
 import json
+from uuid import UUID


 @log
 def refresh_semantic_model(
-    dataset: str,
+    dataset: str | UUID,
     tables: Optional[Union[str, List[str]]] = None,
     partitions: Optional[Union[str, List[str]]] = None,
     refresh_type: str = "full",
@@ -34,8 +36,8 @@ def refresh_semantic_model(

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     tables : str, List[str], default=None
         A string or a list of tables to refresh.
     partitions: str, List[str], default=None
@@ -65,7 +67,8 @@ def refresh_semantic_model(
         If 'visualize' is set to True, returns a pandas dataframe showing the SSAS trace output used to generate the visualization.
     """

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

     if isinstance(tables, str):
         tables = [tables]
@@ -118,11 +121,11 @@ def refresh_and_trace_dataset(
     def extract_failure_error():
         error_messages = []
        combined_messages = ""
-        final_message = f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
+        final_message = f"{icons.red_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has failed."
         for _, r in fabric.get_refresh_execution_details(
             refresh_request_id=request_id,
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
         ).messages.iterrows():
             error_messages.append(f"{r['Type']}: {r['Message']}")
@@ -136,9 +139,9 @@ def refresh_and_trace_dataset(

     # Function to perform dataset refresh
     def refresh_dataset():
         return fabric.refresh_dataset(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             refresh_type=refresh_type,
             retry_count=retry_count,
             apply_refresh_policy=apply_refresh_policy,
@@ -147,7 +150,9 @@ def refresh_and_trace_dataset(

     def check_refresh_status(request_id):
         request_details = fabric.get_refresh_execution_details(
-            dataset=dataset, refresh_request_id=request_id, workspace=workspace
+            dataset=dataset_id,
+            refresh_request_id=request_id,
+            workspace=workspace_id,
         )
         return request_details.status
@@ -180,7 +185,7 @@ def display_trace_logs(trace, partition_map, widget, title, stop=False):
     if not visualize:
         request_id = refresh_dataset()
         print(
-            f"{icons.in_progress} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is in progress..."
+            f"{icons.in_progress} Refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is in progress..."
         )

     # Monitor refresh progress and handle tracing if visualize is enabled
@@ -189,7 +194,7 @@ def display_trace_logs(trace, partition_map, widget, title, stop=False):
         widget = widgets.Output()

         with fabric.create_trace_connection(
-            dataset=dataset, workspace=workspace
+            dataset=dataset_id, workspace=workspace_id
         ) as trace_connection:
             with trace_connection.create_trace(icons.refresh_event_schema) as trace:
                 trace.start()
@@ -204,7 +209,7 @@ def display_trace_logs(trace, partition_map, widget, title, stop=False):
                     raise ValueError(extract_failure_error())
                 elif status == "Cancelled":
                     print(
-                        f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+                        f"{icons.yellow_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
                     )
                     return
@@ -231,7 +236,7 @@ def display_trace_logs(trace, partition_map, widget, title, stop=False):
                 )

                 print(
-                    f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+                    f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is complete."
                 )

                 return final_df
@@ -245,14 +250,14 @@ def display_trace_logs(trace, partition_map, widget, title, stop=False):
             raise ValueError(extract_failure_error())
         elif status == "Cancelled":
             print(
-                f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+                f"{icons.yellow_dot} The refresh of the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been cancelled."
             )
             return

         time.sleep(3)

     print(
-        f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+        f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset_name}' semantic model within the '{workspace_name}' workspace is complete."
     )

     final_output = refresh_and_trace_dataset(
diff --git a/src/sempy_labs/directlake/_dl_helper.py b/src/sempy_labs/directlake/_dl_helper.py
index faf534da..a5395529 100644
--- a/src/sempy_labs/directlake/_dl_helper.py
+++ b/src/sempy_labs/directlake/_dl_helper.py
@@ -10,19 +10,21 @@
     resolve_dataset_id,
     resolve_lakehouse_name,
     _convert_data_type,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
 )


 def check_fallback_reason(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str] = None
 ) -> pd.DataFrame:
     """
     Shows the reason a table in a Direct Lake semantic model would fallback to DirectQuery.

     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -35,19 +37,22 @@ def check_fallback_reason(
     """
     from sempy_labs.tom import connect_semantic_model

-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset, workspace=workspace_id
+    )

     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
         if not tom.is_direct_lake():
             raise ValueError(
-                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+                f"{icons.red_dot} The '{dataset_name}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
             )

     df = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         SELECT [TableName] AS [Table Name],[FallbackReason] AS [FallbackReasonID]
         FROM $SYSTEM.TMSCHEMA_DELTA_TABLE_METADATA_STORAGES
diff --git a/src/sempy_labs/tom/_model.py b/src/sempy_labs/tom/_model.py
index ea96b02c..97582a5c 100644
--- a/src/sempy_labs/tom/_model.py
+++ b/src/sempy_labs/tom/_model.py
@@ -30,25 +30,31 @@ class TOMWrapper:
     """
     Convenience wrapper around the TOM object model for a semantic model. Always use the connect_semantic_model function to make sure the TOM object is initialized correctly.

-    `XMLA read/write endpoints `_ must
-    be enabled if setting the readonly parameter to False.
+    `XMLA read/write endpoints `_ must be enabled if setting the readonly parameter to False.
     """

     _dataset_id: UUID
-    _workspace: str
+    _dataset_name: str
+    _workspace_id: UUID
+    _workspace_name: str
     _readonly: bool
     _tables_added: List[str]
     _table_map = dict
     _column_map = dict

-    def __init__(self, dataset_id, workspace, readonly):
+    def __init__(self, dataset, workspace, readonly):
+
+        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+        (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
         self._dataset_id = dataset_id
-        self._workspace = workspace
+        self._dataset_name = dataset_name
+        self._workspace_name = workspace_name
+        self._workspace_id = workspace_id
         self._readonly = readonly
         self._tables_added = []

         self._tom_server = fabric.create_tom_server(
-            readonly=readonly, workspace=workspace
+            readonly=readonly, workspace=workspace_id
         )
         self.model = self._tom_server.Databases[dataset_id].Model
@@ -2163,7 +2169,9 @@ def mark_as_date_table(
         )
         """
         df = fabric.evaluate_dax(
-            dataset=self._dataset, workspace=self._workspace, dax_string=dax_query
+            dataset=self._dataset_id,
+            workspace=self._workspace_id,
+            dax_string=dax_query,
         )
         value = df["[1]"].iloc[0]
         if value != "1":
@@ -2427,7 +2435,7 @@ def set_kpi(
             )
         except Exception:
             raise ValueError(
-                f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'."
+                f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}'."
             )

         graphics = [
@@ -2470,7 +2478,7 @@ def set_kpi(
             )
         except Exception:
             raise ValueError(
-                f"{icons.red_dot} The '{target}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}'."
+                f"{icons.red_dot} The '{target}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}'."
             )

         if measure_target:
@@ -2796,7 +2804,7 @@ def add_field_parameter(
                     success = True
             if not success:
                 raise ValueError(
-                    f"{icons.red_dot} The '{obj}' object was not found in the '{self._dataset}' semantic model."
+                    f"{icons.red_dot} The '{obj}' object was not found in the '{self._dataset_name}' semantic model."
                 )
             else:
                 i += 1
@@ -2884,19 +2892,19 @@ def set_vertipaq_annotations(self):
         from sempy_labs._list_functions import list_tables

         dfT = list_tables(
-            dataset=self._dataset, workspace=self._workspace, extended=True
+            dataset=self._dataset_id, workspace=self._workspace_id, extended=True
         )
         dfC = fabric.list_columns(
-            dataset=self._dataset, workspace=self._workspace, extended=True
+            dataset=self._dataset_id, workspace=self._workspace_id, extended=True
         )
         dfP = fabric.list_partitions(
-            dataset=self._dataset, workspace=self._workspace, extended=True
+            dataset=self._dataset_id, workspace=self._workspace_id, extended=True
         )
         dfH = fabric.list_hierarchies(
-            dataset=self._dataset, workspace=self._workspace, extended=True
+            dataset=self._dataset_id, workspace=self._workspace_id, extended=True
         )
         dfR = list_relationships(
-            dataset=self._dataset, workspace=self._workspace, extended=True
+            dataset=self._dataset_id, workspace=self._workspace_id, extended=True
         )

         for t in self.model.Tables:
@@ -3341,7 +3349,9 @@ def is_direct_lake_using_view(self):
         usingView = False

         if self.is_direct_lake():
-            df = check_fallback_reason(dataset=self._dataset, workspace=self._workspace)
+            df = check_fallback_reason(
+                dataset=self._dataset_id, workspace=self._workspace_id
+            )
             df_filt = df[df["FallbackReasonID"] == 2]

             if len(df_filt) > 0:
@@ -3388,7 +3398,7 @@ def show_incremental_refresh_policy(self, table_name: str):

         if rp is None:
             print(
-                f"{icons.yellow_dot} The '{table_name}' table in the '{self._dataset}' semantic model within the '{self._workspace}' workspace does not have an incremental refresh policy."
+                f"{icons.yellow_dot} The '{table_name}' table in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace does not have an incremental refresh policy."
             )
         else:
             print(f"Table Name: {table_name}")
@@ -3887,14 +3897,14 @@ def add_time_intelligence(

         if table_name is None:
             raise ValueError(
-                f"{icons.red_dot} The '{measure_name}' is not a valid measure in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
+                f"{icons.red_dot} The '{measure_name}' is not a valid measure in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
             )
         table_name = matching_measures[0]

         # Validate date table
         if not self.is_date_table(date_table):
             raise ValueError(
-                f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self._dataset}' wemantic model within the '{self._workspace}' workspace."
+                f"{icons.red_dot} The '{date_table}' table is not a valid date table in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
             )

         # Extract date key from date table
@@ -3906,7 +3916,7 @@ def add_time_intelligence(

         if not matching_columns:
             raise ValueError(
-                f"{icons.red_dot} The '{date_table}' table does not have a date key column in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
+                f"{icons.red_dot} The '{date_table}' table does not have a date key column in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
             )

         date_key = matching_columns[0]
@@ -4386,7 +4396,6 @@ def generate_measure_descriptions(
         if isinstance(measure_name, str):
             measure_name = [measure_name]

-        workspace_id = fabric.resolve_workspace_id(self._workspace)
         client = fabric.FabricRestClient()

         if len(measure_name) > max_batch_size:
@@ -4405,7 +4414,7 @@ def generate_measure_descriptions(
                         "modelItems": [],
                     },
                 },
-                "workspaceId": workspace_id,
+                "workspaceId": self._workspace_id,
                 "artifactInfo": {"artifactType": "SemanticModel"},
             }
             for m_name in measure_list:
@@ -4416,7 +4425,7 @@ def generate_measure_descriptions(
                 )
                 if t_name is None:
                     raise ValueError(
-                        f"{icons.red_dot} The '{m_name}' measure does not exist in the '{self._dataset}' semantic model within the '{self._workspace}' workspace."
+                        f"{icons.red_dot} The '{m_name}' measure does not exist in the '{self._dataset_name}' semantic model within the '{self._workspace_name}' workspace."
                     )

                 new_item = {
@@ -4609,9 +4618,9 @@ def close(self):

         if len(self._tables_added) > 0:
             refresh_semantic_model(
-                dataset=self._dataset,
+                dataset=self._dataset_id,
                 tables=self._tables_added,
-                workspace=self._workspace,
+                workspace=self._workspace_id,
             )
         self.model = None
@@ -4646,10 +4655,7 @@ def connect_semantic_model(
     # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined
     sempy.fabric._client._utils._init_analysis_services()

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
-
-    tw = TOMWrapper(dataset_id=dataset_id, workspace=workspace_id, readonly=readonly)
+    tw = TOMWrapper(dataset=dataset, workspace=workspace, readonly=readonly)
     try:
         yield tw
     finally:

From b14e82fa58193c76e8c6a171e9a39b815e27abcb Mon Sep 17 00:00:00 2001
From: Michael
Date: Thu, 12 Dec 2024 11:00:37 +0200
Subject: [PATCH 05/11] check

---
 src/sempy_labs/tom/_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/sempy_labs/tom/_model.py b/src/sempy_labs/tom/_model.py
index 97582a5c..862fb06f 100644
--- a/src/sempy_labs/tom/_model.py
+++ b/src/sempy_labs/tom/_model.py
@@ -4649,7 +4649,7 @@ def connect_semantic_model(
     Returns
     -------
     typing.Iterator[TOMWrapper]
-        A connection to the semantic model's Tabular Object Model.
+        A connection to the semantic model's Tabular Object Model. 
     """

     # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined

From 7bc5f1de20d1dcde77886889e4e5d237a4570f46 Mon Sep 17 00:00:00 2001
From: Michael
Date: Thu, 12 Dec 2024 11:14:33 +0200
Subject: [PATCH 06/11] removed space

---
 src/sempy_labs/tom/_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/sempy_labs/tom/_model.py b/src/sempy_labs/tom/_model.py
index 862fb06f..97582a5c 100644
--- a/src/sempy_labs/tom/_model.py
+++ b/src/sempy_labs/tom/_model.py
@@ -4649,7 +4649,7 @@ def connect_semantic_model(
     Returns
     -------
     typing.Iterator[TOMWrapper]
-        A connection to the semantic model's Tabular Object Model. 
+        A connection to the semantic model's Tabular Object Model.
     """

     # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined
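Patches 04 through 06 finish the migration begun in patch 03 (05 and 06 are whitespace-only follow-ups): each function resolves the user-supplied name-or-ID exactly once at its boundary, passes only IDs downstream (stable under renames), and keeps the names purely for user-facing messages. The shape, reduced to a runnable toy with a dictionary standing in for the Fabric catalog; all names here are illustrative:

```python
from typing import Optional, Tuple

# Toy stand-in for the Fabric catalog; keys are IDs, values are display names.
_CATALOG = {"11111111-1111-1111-1111-111111111111": "Sales Model"}

def resolve_dataset_name_and_id(dataset: str, workspace: Optional[str] = None) -> Tuple[str, str]:
    if dataset in _CATALOG:                           # caller passed an ID
        return _CATALOG[dataset], dataset
    by_name = {name: ds_id for ds_id, name in _CATALOG.items()}
    return dataset, by_name[dataset]                  # caller passed a name

def refresh(dataset: str, workspace: Optional[str] = None) -> None:
    dataset_name, dataset_id = resolve_dataset_name_and_id(dataset, workspace)
    # downstream calls receive the stable ID; the name only decorates messages
    print(f"Refresh of the '{dataset_name}' semantic model ({dataset_id}) is in progress...")

refresh("Sales Model")                                # by name
refresh("11111111-1111-1111-1111-111111111111")       # by ID, same model
```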
""" # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined From eae1ef83cf5e84a83e1b40b093b9123247cbbf9d Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 12 Dec 2024 11:45:07 +0200 Subject: [PATCH 07/11] test --- tests/test_tom.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_tom.py b/tests/test_tom.py index c96fff8a..ab81d3e3 100644 --- a/tests/test_tom.py +++ b/tests/test_tom.py @@ -3,9 +3,9 @@ from sempy_labs.tom import connect_semantic_model -@patch("sempy.fabric.resolve_workspace_name") +@patch("sempy.fabric.resolve_workspace_name_and_id") @patch("sempy.fabric.create_tom_server") -def test_tom_wrapper(create_tom_server, resolve_workspace_name): +def test_tom_wrapper(create_tom_server, resolve_workspace_name_and_id): sempy.fabric._client._utils._init_analysis_services() import Microsoft.AnalysisServices.Tabular as TOM @@ -21,7 +21,7 @@ def test_tom_wrapper(create_tom_server, resolve_workspace_name): create_tom_server.return_value = tom_server - resolve_workspace_name.return_value = "my_workspace" + resolve_workspace_name_and_id.return_value = ("my_workspace", "my_workspace_id") # invoke the wrapper with connect_semantic_model("my_dataset") as tom: From c13d868eababe6e3d374407cdb8da2fb2153ef29 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 12 Dec 2024 11:48:37 +0200 Subject: [PATCH 08/11] test2 --- tests/test_tom.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_tom.py b/tests/test_tom.py index ab81d3e3..b311d8d7 100644 --- a/tests/test_tom.py +++ b/tests/test_tom.py @@ -3,7 +3,7 @@ from sempy_labs.tom import connect_semantic_model -@patch("sempy.fabric.resolve_workspace_name_and_id") +@patch("sempy_labs._helper_functions.resolve_workspace_name_and_id") @patch("sempy.fabric.create_tom_server") def test_tom_wrapper(create_tom_server, resolve_workspace_name_and_id): From 0af153548190f9dd670b2a93a4a7af5c0055b727 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 12 Dec 2024 12:16:29 +0200 Subject: [PATCH 09/11] try again --- tests/test_tom.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_tom.py b/tests/test_tom.py index b311d8d7..89dd748d 100644 --- a/tests/test_tom.py +++ b/tests/test_tom.py @@ -15,7 +15,7 @@ def test_tom_wrapper(create_tom_server, resolve_workspace_name_and_id): db = TOM.Database() db.Name = "my_dataset" - db.ID = "my_dataset" + db.ID = "my_dataset" db.Model = TOM.Model() tom_server.Databases.Add(db) From 774d16112f4417ebd38987d758831017f95aa50c Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 12 Dec 2024 12:19:15 +0200 Subject: [PATCH 10/11] try --- tests/test_tom.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_tom.py b/tests/test_tom.py index 89dd748d..8320a202 100644 --- a/tests/test_tom.py +++ b/tests/test_tom.py @@ -1,11 +1,11 @@ import sempy.fabric from unittest.mock import patch from sempy_labs.tom import connect_semantic_model +from sempy_labs._helper_functions import resolve_workspace_name_and_id -@patch("sempy_labs._helper_functions.resolve_workspace_name_and_id") @patch("sempy.fabric.create_tom_server") -def test_tom_wrapper(create_tom_server, resolve_workspace_name_and_id): +def test_tom_wrapper(create_tom_server): sempy.fabric._client._utils._init_analysis_services() import Microsoft.AnalysisServices.Tabular as TOM From 6c38897ffe46e645faa83dff8a4fbae5bdaddf83 Mon Sep 17 00:00:00 2001 From: Michael Date: Thu, 12 Dec 2024 13:12:47 +0200 Subject: [PATCH 11/11] fix testing --- 
From 6c38897ffe46e645faa83dff8a4fbae5bdaddf83 Mon Sep 17 00:00:00 2001
From: Michael
Date: Thu, 12 Dec 2024 13:12:47 +0200
Subject: [PATCH 11/11] fix testing

---
 tests/test_tom.py | 20 +++++++++++++-------
 1 file changed, 13 insertions(+), 7 deletions(-)

diff --git a/tests/test_tom.py b/tests/test_tom.py
index 8320a202..aba5ce3f 100644
--- a/tests/test_tom.py
+++ b/tests/test_tom.py
@@ -1,31 +1,37 @@
 import sempy.fabric
 from unittest.mock import patch
 from sempy_labs.tom import connect_semantic_model
-from sempy_labs._helper_functions import resolve_workspace_name_and_id


+@patch("sempy.fabric.resolve_item_id")
+@patch("sempy.fabric.resolve_workspace_id")
+@patch("sempy_labs._helper_functions.resolve_dataset_name_and_id")
+@patch("sempy_labs._helper_functions.resolve_workspace_name_and_id")
 @patch("sempy.fabric.create_tom_server")
-def test_tom_wrapper(create_tom_server):
+def test_tom_wrapper(create_tom_server, resolve_workspace_name_and_id, resolve_dataset_name_and_id, resolve_workspace_id, resolve_item_id):

     sempy.fabric._client._utils._init_analysis_services()

     import Microsoft.AnalysisServices.Tabular as TOM

+    resolve_workspace_name_and_id.return_value = ("my_workspace", "my_workspace_id")
+    resolve_dataset_name_and_id.return_value = ("my_dataset", "my_dataset_id")
+    resolve_workspace_id.return_value = "my_workspace_id"
+    resolve_item_id.return_value = "my_dataset_id"
+
     # create dummy server, database and model
     tom_server = TOM.Server()

     db = TOM.Database()
     db.Name = "my_dataset"
-    db.ID = "my_dataset"
+    db.ID = "my_dataset_id"
     db.Model = TOM.Model()

     tom_server.Databases.Add(db)

     create_tom_server.return_value = tom_server

     # invoke the wrapper
-    with connect_semantic_model("my_dataset") as tom:
+    with connect_semantic_model(dataset="my_dataset_id", workspace="my_workspace") as tom:
         tom.add_table("my_table")

     # validate the result
-    assert tom_server.Databases["my_dataset"].Model.Tables["my_table"] is not None
+    assert tom_server.Databases["my_dataset_id"].Model.Tables["my_table"] is not None
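One detail worth noting in the final test: stacked `@patch` decorators inject their mocks bottom-up, which is why `create_tom_server` (the decorator nearest the function) arrives as the first argument and `resolve_item_id` (the outermost) arrives last. A compact demonstration of the rule:

```python
import os
from unittest.mock import patch

@patch("os.getcwd")   # outermost decorator -> last mock argument
@patch("os.getpid")   # innermost decorator -> first mock argument
def demo(mock_getpid, mock_getcwd):
    mock_getpid.return_value = 42
    mock_getcwd.return_value = "/tmp"
    return os.getpid(), os.getcwd()

assert demo() == (42, "/tmp")
```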