diff --git a/pyproject.toml b/pyproject.toml
index b02eb604..ac61a7c7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -37,4 +37,7 @@ Repository = "https://github.com/microsoft/semantic-link-labs.git"

 [[tool.mypy.overrides]]
 module = "sempy.*,Microsoft.*,System.*,anytree.*,powerbiclient.*,synapse.ml.services.*"
-ignore_missing_imports = true
\ No newline at end of file
+ignore_missing_imports = true
+
+[tool.flake8]
+max-line-length = 200
\ No newline at end of file
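A caveat on the new [tool.flake8] table: stock flake8 does not read pyproject.toml, so this setting only takes effect if a bridge plugin such as Flake8-pyproject is installed in the environment. Without the plugin, the equivalent configuration (a minimal sketch, assuming no other flake8 settings are needed) would live in a .flake8 or setup.cfg file instead:

    # .flake8 -- read natively by flake8, unlike pyproject.toml
    [flake8]
    max-line-length = 200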
diff --git a/src/sempy_labs/__init__.py b/src/sempy_labs/__init__.py
index 3661c1de..97a0bd23 100644
--- a/src/sempy_labs/__init__.py
+++ b/src/sempy_labs/__init__.py
@@ -52,9 +52,9 @@
     resolve_dataset_name,
     resolve_report_id,
     resolve_report_name,
-    # language_validate
+    # language_validate
 )
-#from sempy_labs._model_auto_build import model_auto_build
+# from sempy_labs._model_auto_build import model_auto_build
 from sempy_labs._model_bpa import model_bpa_rules, run_model_bpa
 from sempy_labs._model_dependencies import (
     measure_dependency_tree,
diff --git a/src/sempy_labs/_ai.py b/src/sempy_labs/_ai.py
index 908b75ab..a1592a45 100644
--- a/src/sempy_labs/_ai.py
+++ b/src/sempy_labs/_ai.py
@@ -8,6 +8,7 @@
 from IPython.display import display
 import sempy_labs._icons as icons

+
 def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):

     from ._model_bpa import run_model_bpa
@@ -92,7 +93,7 @@ def generate_measure_descriptions(

     df = dfM_filt[["Table Name", "Measure Name", "Measure Expression"]]
     df["prompt"] = (
-        f"The following is DAX code used by Microsoft Power BI. Please explain this code in simple terms:"
+        "The following is DAX code used by Microsoft Power BI. Please explain this code in simple terms:"
         + df["Measure Expression"]
     )

@@ -152,11 +153,11 @@ def generate_aggs(
     #'OrderDateKey': 'GroupBy'
     # }

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = workspace
         lakehouse_workspace_id = workspace_id
     else:
diff --git a/src/sempy_labs/_clear_cache.py b/src/sempy_labs/_clear_cache.py
index 470baa40..dff9fcbf 100644
--- a/src/sempy_labs/_clear_cache.py
+++ b/src/sempy_labs/_clear_cache.py
@@ -20,7 +20,7 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

diff --git a/src/sempy_labs/_dax.py b/src/sempy_labs/_dax.py
index 39dfefe2..d6267e25 100644
--- a/src/sempy_labs/_dax.py
+++ b/src/sempy_labs/_dax.py
@@ -27,7 +27,6 @@ def evaluate_dax_impersonation(
         The DAX query.
     user_name : str
         The user name (i.e. hello@goodbye.com).
-        Defaults to None which resolves to no user impersonation.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -49,13 +48,10 @@ def evaluate_dax_impersonation(

     dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)

-    if user_name is None:
-        request_body = {"queries": [{"query": dax_query}]}
-    else:
-        request_body = {
-            "queries": [{"query": dax_query}],
-            "impersonatedUserName": user_name,
-        }
+    request_body = {
+        "queries": [{"query": dax_query}],
+        "impersonatedUserName": user_name,
+    }

     client = fabric.PowerBIRestClient()
     response = client.post(
""" - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) @@ -283,7 +283,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None) The ID of SQL Endpoint. """ - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) @@ -291,10 +291,9 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None) dfP_filt = dfP[dfP["Mode"] == "DirectLake"] if len(dfP_filt) == 0: - print( + raise ValueError( f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode." ) - return dfE = fabric.list_expressions(dataset=dataset, workspace=workspace) dfE_filt = dfE[dfE["Name"] == "DatabaseQuery"] @@ -502,10 +501,10 @@ def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str, The name and ID of the Fabric workspace. """ - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) else: workspace_id = fabric.resolve_workspace_id(workspace) - return workspace, workspace_id + return str(workspace), str(workspace_id) diff --git a/src/sempy_labs/_list_functions.py b/src/sempy_labs/_list_functions.py index 588afb54..53163e0c 100644 --- a/src/sempy_labs/_list_functions.py +++ b/src/sempy_labs/_list_functions.py @@ -29,7 +29,7 @@ def get_object_level_security(dataset: str, workspace: Optional[str] = None): A pandas dataframe showing the object level security for the semantic model. """ - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) @@ -88,7 +88,7 @@ def list_tables(dataset: str, workspace: Optional[str] = None): A pandas dataframe showing the semantic model's tables and their properties. """ - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) @@ -154,7 +154,7 @@ def list_annotations(dataset: str, workspace: Optional[str] = None): A pandas dataframe showing the semantic model's annotations and their properties. 
""" - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) @@ -384,7 +384,7 @@ def list_columns( get_direct_lake_lakehouse, ) - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) @@ -1032,10 +1032,10 @@ def create_warehouse( (workspace, workspace_id) = resolve_workspace_name_and_id(workspace) - if description == None: - request_body = {"displayName": warehouse} - else: - request_body = {"displayName": warehouse, "description": description} + request_body = {"displayName": warehouse} + + if description: + request_body["description"] = description client = fabric.FabricRestClient() response = client.post( @@ -1123,10 +1123,9 @@ def update_item( itemId = dfI_filt["Id"].iloc[0] - if description == None: - request_body = {"displayName": new_name} - else: - request_body = {"displayName": new_name, "description": description} + request_body = {"displayName": new_name} + if description: + request_body["description"] = description client = fabric.FabricRestClient() response = client.patch( @@ -1134,7 +1133,7 @@ def update_item( ) if response.status_code == 200: - if description == None: + if description is None: print( f"The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}'" ) @@ -1171,7 +1170,7 @@ def list_relationships( A pandas dataframe showing the object level security for the semantic model. """ - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) @@ -1560,7 +1559,7 @@ def list_shortcuts( (workspace, workspace_id) = resolve_workspace_name_and_id(workspace) - if lakehouse == None: + if lakehouse is None: lakehouse_id = fabric.get_lakehouse_id() lakehouse = resolve_lakehouse_name(lakehouse_id, workspace) else: diff --git a/src/sempy_labs/_model_bpa.py b/src/sempy_labs/_model_bpa.py index bfbcf774..87950d09 100644 --- a/src/sempy_labs/_model_bpa.py +++ b/src/sempy_labs/_model_bpa.py @@ -70,8 +70,8 @@ def model_bpa_rules(): "Table", "Warning", "Avoid using many-to-many relationships on tables used for dynamic row level security", - lambda df: (df["Used in M2M Relationship"] == True) - & (df["Used in Dynamic RLS"] == True), + lambda df: (df["Used in M2M Relationship"] is True) + & (df["Used in Dynamic RLS"] is True), "Using many-to-many relationships on tables which use dynamic row level security can cause serious query performance degradation. This pattern's performance problems compound when snowflaking multiple many-to-many relationships against a table which contains row level security. 
diff --git a/src/sempy_labs/_model_bpa.py b/src/sempy_labs/_model_bpa.py
index bfbcf774..87950d09 100644
--- a/src/sempy_labs/_model_bpa.py
+++ b/src/sempy_labs/_model_bpa.py
@@ -70,8 +70,8 @@ def model_bpa_rules():
             "Table",
             "Warning",
             "Avoid using many-to-many relationships on tables used for dynamic row level security",
-            lambda df: (df["Used in M2M Relationship"] == True)
-            & (df["Used in Dynamic RLS"] == True),
+            lambda df: (df["Used in M2M Relationship"].eq(True))
+            & (df["Used in Dynamic RLS"].eq(True)),
             "Using many-to-many relationships on tables which use dynamic row level security can cause serious query performance degradation. This pattern's performance problems compound when snowflaking multiple many-to-many relationships against a table which contains row level security. Instead, use one of the patterns shown in the article below where a single dimension table relates many-to-one to a security table.",
             "https://www.elegantbi.com/post/dynamicrlspatterns",
         ),
@@ -88,12 +88,12 @@ def model_bpa_rules():
             "Column",
             "Warning",
             "Set IsAvailableInMdx to false on non-attribute columns",
-            lambda df: (df["Is Direct Lake"] == False)
-            & (df["Is Available in MDX"] == True)
-            & ((df["Hidden"] == True) | (df["Parent Is Hidden"] == True))
-            & (df["Used in Sort By"] == False)
-            & (df["Used in Hierarchy"] == False)
-            & (df["Sort By Column"] == None),
+            lambda df: (df["Is Direct Lake"].eq(False))
+            & (df["Is Available in MDX"].eq(True))
+            & ((df["Hidden"].eq(True)) | (df["Parent Is Hidden"].eq(True)))
+            & (df["Used in Sort By"].eq(False))
+            & (df["Used in Hierarchy"].eq(False))
+            & (df["Sort By Column"].isnull()),
             "To speed up processing time and conserve memory after processing, attribute hierarchies should not be built for columns that are never used for slicing by MDX clients. In other words, all hidden columns that are not used as a Sort By Column or referenced in user hierarchies should have their IsAvailableInMdx property set to false. The IsAvailableInMdx property is not relevant for Direct Lake models.",
             "https://blog.crossjoin.co.uk/2018/07/02/isavailableinmdx-ssas-tabular",
         ),
@@ -219,7 +219,7 @@ def model_bpa_rules():
             "Table",
             "Warning",
             "Large tables should be partitioned",
-            lambda df: (df["Is Direct Lake"] == False)
+            lambda df: (df["Is Direct Lake"].eq(False))
             & (df["Partition Count"] == 1)
             & (df["Row Count"] > 25000000),
             "Large tables should be partitioned in order to optimize processing. This is not relevant for semantic models in Direct Lake mode as they can only have one partition per table.",
@@ -306,11 +306,11 @@ def model_bpa_rules():
             "Column",
             "Warning",
             "Set IsAvailableInMdx to true on necessary columns",
-            lambda df: (df["Is Direct Lake"] == False)
-            & (df["Is Available in MDX"] == False)
+            lambda df: (df["Is Direct Lake"].eq(False))
+            & (df["Is Available in MDX"].eq(False))
             & (
-                (df["Used in Sort By"] == True)
-                | (df["Used in Hierarchy"] == True)
+                (df["Used in Sort By"].eq(True))
+                | (df["Used in Hierarchy"].eq(True))
                 | (df["Sort By Column"] != None)
             ),
             "In order to avoid errors, ensure that attribute hierarchies are enabled if a column is used for sorting another column, used in a hierarchy, used in variations, or is sorted by another column. The IsAvailableInMdx property is not relevant for Direct Lake models.",
             "https://blog.crossjoin.co.uk/2018/07/02/isavailableinmdx-ssas-tabular",
         ),
         (
@@ -320,8 +320,8 @@ def model_bpa_rules():
             "Table",
             "Error",
             "Avoid the USERELATIONSHIP function and RLS against the same table",
-            lambda df: (df["USERELATIONSHIP Used"] == True)
-            & (df["Used in RLS"] == True),
+            lambda df: (df["USERELATIONSHIP Used"].eq(True))
+            & (df["Used in RLS"].eq(True)),
             "The USERELATIONSHIP function may not be used against a table which also leverages row-level security (RLS). This will generate an error when using the particular measure in a visual. This rule will highlight the table which is used in a measure's USERELATIONSHIP function as well as RLS.",
             "https://blog.crossjoin.co.uk/2013/05/10/userelationship-and-tabular-row-security",
         ),
@@ -494,7 +494,7 @@ def model_bpa_rules():
             "Table",
             "Warning",
             "Ensure tables have relationships",
-            lambda df: (df["Used in Relationship"] == False)
+            lambda df: (df["Used in Relationship"].eq(False))
             & (df["Type"] != "Calculation Group"),
             "This rule highlights tables which are not connected to any other table in the model with a relationship.",
         ),
@@ -511,7 +511,7 @@ def model_bpa_rules():
             "Column",
             "Info",
             "Visible objects with no description",
-            lambda df: (df["Hidden"] == False) & (df["Description"].str.len() == 0),
+            lambda df: (df["Hidden"].eq(False)) & (df["Description"].str.len() == 0),
             "Calculation groups have no function unless they have calculation items.",
         ),
         (
@@ -595,7 +595,7 @@ def model_bpa_rules():
             "Column",
             "Info",
             "Hide foreign keys",
-            lambda df: (df["Foreign Key"]) & (df["Hidden"] == False),
+            lambda df: (df["Foreign Key"]) & (df["Hidden"].eq(False)),
             "Foreign keys should always be hidden.",
         ),
         (
@@ -603,7 +603,7 @@ def model_bpa_rules():
             "Column",
             "Info",
             "Mark primary keys",
-            lambda df: (df["Primary Key"]) & (df["Key"] == False),
+            lambda df: (df["Primary Key"]) & (df["Key"].eq(False)),
             "Set the 'Key' property to 'True' for primary key columns within the column properties.",
         ),
         (
@@ -744,7 +744,7 @@ def run_model_bpa(
             message="This pattern is interpreted as a regular expression, and has match groups.",
         )

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

@@ -798,13 +798,13 @@ def run_model_bpa(
     cols = ["From Cardinality", "To Cardinality"]

     for col in cols:
-        if not col in dfR:
+        if col not in dfR:
             dfR[col] = None

     cols = ["Parent Is Hidden"]

     for col in cols:
-        if not col in dfM:
+        if col not in dfM:
             dfM[col] = None

     # Data Coverage Definition rule
@@ -842,9 +842,9 @@ def run_model_bpa(
         dataset=dataset,
         workspace=workspace,
         dax_string="""
-        SELECT [FUNCTION_NAME] 
+        SELECT [FUNCTION_NAME]
         FROM $SYSTEM.MDSCHEMA_FUNCTIONS
-        WHERE [INTERFACE_NAME] = 'DATETIME' 
+        WHERE [INTERFACE_NAME] = 'DATETIME'
         """,
     )

@@ -951,7 +951,7 @@ def run_model_bpa(
     dfD["Has Date Table"] = any(
         (r["Parent Data Category"] == "Time")
         & (r["Data Type"] == "DateTime")
-        & (r["Key"] == True)
+        & (r["Key"])
         for i, r in dfC.iterrows()
     )
     # dfC['In Date Table'] = dfC['Table Name'].isin(dfT.loc[dfT['Data Category'] == "Time", 'Name'])
@@ -1033,7 +1033,7 @@ def run_model_bpa(
                 dfM.at[i, "Has Fully Qualified Measure Reference"] = True

     dfR["Inactive without USERELATIONSHIP"] = False
-    for i, r in dfR[dfR["Active"] == False].iterrows():
+    for i, r in dfR[dfR["Active"].eq(False)].iterrows():
         fromTable = r["From Table"]
         fromColumn = r["From Column"]
         toTable = r["To Table"]
@@ -1183,7 +1183,7 @@ def execute_rule(row):
     if export:
         lakeAttach = lakehouse_attached()
-        if lakeAttach == False:
+        if not lakeAttach:
             print(
                 f"In order to save the Best Practice Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
             )
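The rule expressions above receive a whole pandas DataFrame, which is why identity comparisons cannot replace `==` there: `series is True` compares the Series object itself against the True singleton (always False), and an elementwise `series == None` never matches missing values. The `.eq()`/`.isnull()` forms keep the elementwise semantics while satisfying flake8's E712/E711. A runnable illustration (toy frame; column names borrowed from the rules):

    import pandas as pd

    df = pd.DataFrame({"Hidden": [True, False], "Sort By Column": ["Name", None]})

    # `df["Hidden"] is True` would compare the Series *object* to True (always
    # False), and `df["Hidden"] == None` is elementwise and never matches NaN.
    mask = df["Hidden"].eq(False) & df["Sort By Column"].isnull()
    print(df[mask])  # prints the visible row that has no Sort By Column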
diff --git a/src/sempy_labs/_model_dependencies.py b/src/sempy_labs/_model_dependencies.py
index fe82c295..d44b453d 100644
--- a/src/sempy_labs/_model_dependencies.py
+++ b/src/sempy_labs/_model_dependencies.py
@@ -1,8 +1,7 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import format_dax_object_name
-from typing import List, Optional, Union
+from typing import Any, Dict, Optional
 from anytree import Node, RenderTree
 from sempy._utils._log import log

@@ -26,7 +25,7 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
         Shows all dependencies for all measures in the semantic model.
     """

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

@@ -64,11 +63,11 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
         axis=1,
     )

-    while any(df["Done"] == False):
+    while not df["Done"].all():
         for i, r in df.iterrows():
             rObjFull = r["Referenced Full Object Name"]
             rObj = r["Referenced Object"]
-            if r["Done"] == False:
+            if not r["Done"]:
                 dep_filt = dep[dep["Full Object Name"] == rObjFull]

                 for index, dependency in dep_filt.iterrows():
@@ -151,7 +150,7 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
         Shows all dependencies for all objects in the semantic model.
     """

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

@@ -192,11 +191,11 @@ def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
         lambda row: False if row["Referenced Object Type"] in objs else True, axis=1
     )

-    while any(df["Done"] == False):
+    while not df["Done"].all():
         for i, r in df.iterrows():
             rObjFull = r["Referenced Full Object Name"]
             rObj = r["Referenced Object"]
-            if r["Done"] == False:
+            if not r["Done"]:
                 dep_filt = dep[dep["Full Object Name"] == rObjFull]

                 for index, dependency in dep_filt.iterrows():
@@ -283,7 +282,7 @@ def measure_dependency_tree(

     """

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

@@ -300,7 +299,7 @@ def measure_dependency_tree(
     df_filt = md[md["Object Name"] == measure_name]

     # Create a dictionary to hold references to nodes
-    node_dict = {}
+    node_dict: Dict[str, Any] = {}
     measureIcon = "\u2211"
     tableIcon = "\u229E"
     columnIcon = "\u229F"
diff --git a/src/sempy_labs/_one_lake_integration.py b/src/sempy_labs/_one_lake_integration.py
index f4cc8480..03d2bf1f 100644
--- a/src/sempy_labs/_one_lake_integration.py
+++ b/src/sempy_labs/_one_lake_integration.py
@@ -32,7 +32,7 @@ def export_model_to_onelake(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if destination_workspace == None:
+    if destination_workspace is None:
         destination_workspace = workspace
         destination_workspace_id = workspace_id
     else:
@@ -104,7 +104,7 @@ def export_model_to_onelake(
     dfP_filt = dfP[
         (dfP["Mode"] == "Import")
         & (dfP["Source Type"] != "CalculationGroup")
-        & (dfP["Parent System Managed"] == False)
+        & (dfP["Parent System Managed"].eq(False))
     ]
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     tmc = pd.DataFrame(dfP.groupby("Table Name")["Mode"].nunique()).reset_index()
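The dependency functions resolve transitive references with a worklist: rows are marked "Done" once their references bottom out, and the loop runs until the whole column is true. The Series/scalar split matters here: `df["Done"]` is a Series and needs `.all()`, while `r["Done"]` from iterrows() is a NumPy bool scalar that should be truth-tested rather than compared with `is`. A toy sketch of the same loop shape (hypothetical two-row frame, not the library's data):

    import pandas as pd

    # Toy dependency table: each object references one other object ("" = leaf).
    dep = pd.DataFrame({"Object": ["m1", "m2"], "References": ["m2", ""]})

    df = dep.copy()
    df["Done"] = df["References"].eq("")  # leaves start out resolved

    while not df["Done"].all():      # Series of bools: reduce with .all()
        for i, r in df.iterrows():
            if not r["Done"]:        # NumPy bool scalar: truth-test it
                # ...a real implementation would expand r["References"]'s own
                # references here before marking the row finished...
                df.at[i, "Done"] = True
    print(df)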
diff --git a/src/sempy_labs/_query_scale_out.py b/src/sempy_labs/_query_scale_out.py
index d38a7b2c..49082db0 100644
--- a/src/sempy_labs/_query_scale_out.py
+++ b/src/sempy_labs/_query_scale_out.py
@@ -1,8 +1,7 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from typing import Optional
 import sempy_labs._icons as icons

diff --git a/src/sempy_labs/_refresh_semantic_model.py b/src/sempy_labs/_refresh_semantic_model.py
index a6b0fd95..333ca6eb 100644
--- a/src/sempy_labs/_refresh_semantic_model.py
+++ b/src/sempy_labs/_refresh_semantic_model.py
@@ -2,7 +2,7 @@
 import sempy.fabric as fabric
 import time
 from sempy_labs._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from typing import Any, List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
@@ -41,7 +41,7 @@ def refresh_semantic_model(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

@@ -53,7 +53,7 @@ def refresh_semantic_model(
     if isinstance(partitions, str):
         partitions = [partitions]

-    objects = []
+    objects: List[Any] = []

     if tables is not None:
         objects = objects + [{"table": table} for table in tables]
@@ -161,7 +161,7 @@ def cancel_dataset_refresh(
     rr = fabric.list_refresh_requests(dataset=dataset, workspace=workspace)
     rr_filt = rr[rr["Status"] == "Unknown"]

-    if request_id == None:
+    if request_id is None:
         if len(rr_filt) == 0:
             print(
                 f"{icons.red_dot} There are no active Enhanced API refreshes of the '{dataset}' semantic model within the '{workspace}' workspace."
diff --git a/src/sempy_labs/_translations.py b/src/sempy_labs/_translations.py
index c9e9cef2..6b593356 100644
--- a/src/sempy_labs/_translations.py
+++ b/src/sempy_labs/_translations.py
@@ -212,7 +212,7 @@ def translate_semantic_model(
         print(f"{icons.in_progress} Translating into the '{lang}' language...")

         for t in tom.model.Tables:
-            if t.IsHidden == False:
+            if not t.IsHidden:
                 if clm == "Name":
                     df_filt = df_panda[
                         (df_panda["Object Type"] == "Table")
@@ -240,7 +240,7 @@ def translate_semantic_model(
                         value=tr,
                     )
                 for c in t.Columns:
-                    if c.IsHidden == False:
+                    if not c.IsHidden:
                         if clm == "Name":
                             df_filt = df_panda[
                                 (df_panda["Object Type"] == "Column")
@@ -287,7 +287,7 @@ def translate_semantic_model(
                                 value=tr,
                             )
                 for h in t.Hierarchies:
-                    if h.IsHidden == False:
+                    if not h.IsHidden:
                         if clm == "Name":
                             df_filt = df_panda[
                                 (df_panda["Object Type"] == "Hierarchy")
@@ -331,7 +331,7 @@ def translate_semantic_model(
                                 value=tr,
                             )
                 for ms in t.Measures:
-                    if ms.IsHidden == False:
+                    if not ms.IsHidden:
                         if clm == "Name":
                             df_filt = df_panda[
                                 (df_panda["Object Type"] == "Measure")
f"evaluate\nsummarizecolumns(\n\"1\",calculate(countrows('{fromTable}'),isblank({toObject}))\n)" - if isActive == False: # add userelationship + if isActive is False: # add userelationship query = f"evaluate\nsummarizecolumns(\n\"1\",calculate(countrows('{fromTable}'),userelationship({fromObject},{toObject}),isblank({toObject}))\n)" result = fabric.evaluate_dax( @@ -359,7 +359,7 @@ def vertipaq_analyzer( "Max To Cardinality", "Missing Rows", ] - if read_stats_from_data == False: + if read_stats_from_data is False: intList.remove("Missing Rows") dfR[intList] = dfR[intList].applymap("{:,}".format) @@ -436,7 +436,7 @@ def vertipaq_analyzer( ### Export vertipaq to delta tables in lakehouse if export in ["table", "zip"]: lakeAttach = lakehouse_attached() - if lakeAttach == False: + if lakeAttach is False: print( f"{icons.red_dot} In order to save the Vertipaq Analyzer results, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook." ) diff --git a/src/sempy_labs/directlake/_directlake_schema_compare.py b/src/sempy_labs/directlake/_directlake_schema_compare.py index 15153c54..9be3a9d8 100644 --- a/src/sempy_labs/directlake/_directlake_schema_compare.py +++ b/src/sempy_labs/directlake/_directlake_schema_compare.py @@ -1,4 +1,3 @@ -import sempy import sempy.fabric as fabric import pandas as pd from sempy_labs._helper_functions import ( @@ -40,14 +39,14 @@ def direct_lake_schema_compare( or if no lakehouse attached, resolves to the workspace of the notebook. """ - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) if lakehouse_workspace is None: lakehouse_workspace = workspace - if lakehouse == None: + if lakehouse is None: lakehouse_id = fabric.get_lakehouse_id() lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace) diff --git a/src/sempy_labs/directlake/_directlake_schema_sync.py b/src/sempy_labs/directlake/_directlake_schema_sync.py index 356cba5e..b5ef01e5 100644 --- a/src/sempy_labs/directlake/_directlake_schema_sync.py +++ b/src/sempy_labs/directlake/_directlake_schema_sync.py @@ -46,13 +46,13 @@ def direct_lake_schema_sync( import Microsoft.AnalysisServices.Tabular as TOM import System - if workspace == None: + if workspace is None: workspace = fabric.resolve_workspace_name() if lakehouse_workspace is None: lakehouse_workspace = workspace - if lakehouse == None: + if lakehouse is None: lakehouse_id = fabric.get_lakehouse_id() lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace) diff --git a/src/sempy_labs/directlake/_fallback.py b/src/sempy_labs/directlake/_fallback.py index 022a416a..8cabb740 100644 --- a/src/sempy_labs/directlake/_fallback.py +++ b/src/sempy_labs/directlake/_fallback.py @@ -23,7 +23,7 @@ def check_fallback_reason(dataset: str, workspace: Optional[str] = None): The tables in the semantic model and their fallback reason. 
""" - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) diff --git a/src/sempy_labs/directlake/_get_directlake_lakehouse.py b/src/sempy_labs/directlake/_get_directlake_lakehouse.py index 63d03191..0c696ae9 100644 --- a/src/sempy_labs/directlake/_get_directlake_lakehouse.py +++ b/src/sempy_labs/directlake/_get_directlake_lakehouse.py @@ -45,7 +45,7 @@ def get_direct_lake_lakehouse( if lakehouse_workspace is None: lakehouse_workspace = workspace - if lakehouse == None: + if lakehouse is None: lakehouse_id = fabric.get_lakehouse_id() lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace) diff --git a/src/sempy_labs/directlake/_get_shared_expression.py b/src/sempy_labs/directlake/_get_shared_expression.py index 0f836ffb..b2aa6f1f 100644 --- a/src/sempy_labs/directlake/_get_shared_expression.py +++ b/src/sempy_labs/directlake/_get_shared_expression.py @@ -31,7 +31,7 @@ def get_shared_expression( """ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace) - if lakehouse == None: + if lakehouse is None: lakehouse_id = fabric.get_lakehouse_id() lakehouse = resolve_lakehouse_name(lakehouse_id) diff --git a/src/sempy_labs/directlake/_guardrails.py b/src/sempy_labs/directlake/_guardrails.py index 32937b57..8bdf7f15 100644 --- a/src/sempy_labs/directlake/_guardrails.py +++ b/src/sempy_labs/directlake/_guardrails.py @@ -7,7 +7,7 @@ def get_direct_lake_guardrails(): """ Shows the guardrails for when Direct Lake semantic models will fallback to Direct Query based on Microsoft's `online documentation `_. - + Parameters ---------- @@ -44,7 +44,7 @@ def get_sku_size(workspace: Optional[str] = None): The SKU size for a workspace. """ - if workspace == None: + if workspace is None: workspace_id = fabric.get_workspace_id() workspace = fabric.resolve_workspace_name(workspace_id) diff --git a/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py b/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py index 0342df48..c7d7252f 100644 --- a/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +++ b/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py @@ -27,7 +27,7 @@ def list_direct_lake_model_calc_tables(dataset: str, workspace: Optional[str] = A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations. 
""" - if workspace == None: + if workspace is None: workspace = fabric.resolve_workspace_name() df = pd.DataFrame(columns=["Table Name", "Source Expression"]) diff --git a/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py b/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py index 86c21e07..160c1dc9 100644 --- a/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +++ b/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py @@ -30,7 +30,7 @@ def show_unsupported_direct_lake_objects( pd.options.mode.chained_assignment = None - if workspace == None: + if workspace is None: workspace = fabric.resolve_workspace_name() dfT = list_tables(dataset, workspace) diff --git a/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py b/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py index 6763e500..e422585a 100644 --- a/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +++ b/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py @@ -1,4 +1,3 @@ -import sempy import sempy.fabric as fabric from sempy_labs.directlake._get_shared_expression import get_shared_expression from sempy_labs._helper_functions import ( @@ -6,9 +5,10 @@ resolve_workspace_name_and_id, ) from sempy_labs.tom import connect_semantic_model -from typing import List, Optional, Union +from typing import Optional import sempy_labs._icons as icons + def update_direct_lake_model_lakehouse_connection( dataset: str, workspace: Optional[str] = None, @@ -41,10 +41,10 @@ def update_direct_lake_model_lakehouse_connection( (workspace, workspace_id) = resolve_workspace_name_and_id(workspace) - if lakehouse_workspace == None: + if lakehouse_workspace is None: lakehouse_workspace = workspace - if lakehouse == None: + if lakehouse is None: lakehouse_id = fabric.get_lakehouse_id() lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace) diff --git a/src/sempy_labs/directlake/_update_directlake_partition_entity.py b/src/sempy_labs/directlake/_update_directlake_partition_entity.py index f8216820..654a9c9e 100644 --- a/src/sempy_labs/directlake/_update_directlake_partition_entity.py +++ b/src/sempy_labs/directlake/_update_directlake_partition_entity.py @@ -39,10 +39,10 @@ def update_direct_lake_partition_entity( workspace = fabric.resolve_workspace_name(workspace) - if lakehouse_workspace == None: + if lakehouse_workspace is None: lakehouse_workspace = workspace - if lakehouse == None: + if lakehouse is None: lakehouse_id = fabric.get_lakehouse_id() lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace) diff --git a/src/sempy_labs/directlake/_warm_cache.py b/src/sempy_labs/directlake/_warm_cache.py index 2f8b92f7..74dfa33b 100644 --- a/src/sempy_labs/directlake/_warm_cache.py +++ b/src/sempy_labs/directlake/_warm_cache.py @@ -1,4 +1,3 @@ -import sempy import sempy.fabric as fabric import pandas as pd from tqdm.auto import tqdm @@ -131,7 +130,7 @@ def warm_direct_lake_cache_perspective( bar.set_description(f"Warming the '{tableName}' table...") css = ",".join(map(str, filtered_list)) dax = """EVALUATE TOPN(1,SUMMARIZECOLUMNS(""" + css + "))" "" - x = fabric.evaluate_dax(dataset=dataset, dax_string=dax, workspace=workspace) + fabric.evaluate_dax(dataset=dataset, dax_string=dax, workspace=workspace) print(f"{icons.green_dot} The following columns have been put into memory:") @@ -168,10 +167,9 @@ def warm_direct_lake_cache_isresident( dfP = fabric.list_partitions(dataset=dataset, 
diff --git a/src/sempy_labs/lakehouse/__init__.py b/src/sempy_labs/lakehouse/__init__.py
index b643ec3d..f7cbcdc7 100644
--- a/src/sempy_labs/lakehouse/__init__.py
+++ b/src/sempy_labs/lakehouse/__init__.py
@@ -5,7 +5,7 @@
     optimize_lakehouse_tables,
 )

-from sempy_labs.lakehouse._shortcuts import (
+from sempy_labs.lakehouse._shortcuts import (  # create_shortcut,
     create_shortcut_onelake,
     delete_shortcut,
 )
@@ -15,7 +15,7 @@
     "get_lakehouse_columns",
     "get_lakehouse_tables",
     "lakehouse_attached",
-    "optimize_lakehouse_tables",
+    "optimize_lakehouse_tables",  # create_shortcut,
     "create_shortcut_onelake",
     "delete_shortcut",
diff --git a/src/sempy_labs/lakehouse/_get_lakehouse_columns.py b/src/sempy_labs/lakehouse/_get_lakehouse_columns.py
index 84de2de6..1b9098ef 100644
--- a/src/sempy_labs/lakehouse/_get_lakehouse_columns.py
+++ b/src/sempy_labs/lakehouse/_get_lakehouse_columns.py
@@ -48,7 +48,7 @@ def get_lakehouse_columns(

     workspace = fabric.resolve_workspace_name(workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
diff --git a/src/sempy_labs/lakehouse/_get_lakehouse_tables.py b/src/sempy_labs/lakehouse/_get_lakehouse_tables.py
index 328a8997..1a8a6e86 100644
--- a/src/sempy_labs/lakehouse/_get_lakehouse_tables.py
+++ b/src/sempy_labs/lakehouse/_get_lakehouse_tables.py
@@ -64,7 +64,7 @@ def get_lakehouse_tables(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
@@ -83,7 +83,7 @@ def get_lakehouse_tables(
         tType = i["type"]
         tFormat = i["format"]
         tLocation = i["location"]
-        if extended == False:
+        if not extended:
             new_data = {
                 "Workspace Name": workspace,
                 "Lakehouse Name": lakehouse,
@@ -173,7 +173,7 @@ def get_lakehouse_tables(
     if export:
         lakeAttach = lakehouse_attached()

-        if lakeAttach == False:
+        if not lakeAttach:
             print(
                 f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
             )
diff --git a/src/sempy_labs/lakehouse/_lakehouse.py b/src/sempy_labs/lakehouse/_lakehouse.py
index 6469a36a..2c64af8f 100644
--- a/src/sempy_labs/lakehouse/_lakehouse.py
+++ b/src/sempy_labs/lakehouse/_lakehouse.py
@@ -50,11 +50,11 @@ def optimize_lakehouse_tables(
     from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
     from delta import DeltaTable

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)

diff --git a/src/sempy_labs/lakehouse/_shortcuts.py b/src/sempy_labs/lakehouse/_shortcuts.py
index c3f6e8f6..cbd72d99 100644
--- a/src/sempy_labs/lakehouse/_shortcuts.py
+++ b/src/sempy_labs/lakehouse/_shortcuts.py
@@ -1,12 +1,10 @@
-import sempy
 import sempy.fabric as fabric
-import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_lakehouse_id,
     resolve_workspace_name_and_id,
 )
-from typing import List, Optional, Union
+from typing import Optional
 import sempy_labs._icons as icons

@@ -42,7 +40,7 @@ def create_shortcut_onelake(
     sourceWorkspaceId = fabric.resolve_workspace_id(source_workspace)
     sourceLakehouseId = resolve_lakehouse_id(source_lakehouse, source_workspace)

-    if destination_workspace == None:
+    if destination_workspace is None:
         destination_workspace = source_workspace

     destinationWorkspaceId = fabric.resolve_workspace_id(destination_workspace)
@@ -50,7 +48,7 @@ def create_shortcut_onelake(
         destination_lakehouse, destination_workspace
     )

-    if shortcut_name == None:
+    if shortcut_name is None:
         shortcut_name = table_name

     client = fabric.FabricRestClient()
@@ -124,7 +122,7 @@ def create_shortcut(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
     else:
         lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
@@ -180,7 +178,7 @@ def delete_shortcut(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
     else:
diff --git a/src/sempy_labs/migration/_create_pqt_file.py b/src/sempy_labs/migration/_create_pqt_file.py
index fee5ec97..ce98f1a1 100644
--- a/src/sempy_labs/migration/_create_pqt_file.py
+++ b/src/sempy_labs/migration/_create_pqt_file.py
@@ -34,13 +34,13 @@ def create_pqt_file(

     lakeAttach = lakehouse_attached()

-    if lakeAttach == False:
+    if not lakeAttach:
         print(
             f"{icons.red_dot} In order to run the 'create_pqt_file' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
         )
         return

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

diff --git a/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py b/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py
index 75baff62..20306be4 100644
--- a/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py
+++ b/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py
@@ -52,16 +52,16 @@ def migrate_calc_tables_to_lakehouse(

     workspace = fabric.resolve_workspace_name(workspace)

-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace

-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = new_dataset_workspace
         lakehouse_workspace_id = fabric.resolve_workspace_id(lakehouse_workspace)
     else:
         lakehouse_workspace_id = fabric.resolve_workspace_id(lakehouse_workspace)

-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
     else:
@@ -288,16 +288,16 @@ def migrate_field_parameters(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    from .HelperFunctions import format_dax_object_name
+    from sempy_labs import format_dax_object_name

     sempy.fabric._client._utils._init_analysis_services()
     import Microsoft.AnalysisServices.Tabular as TOM

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace

     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
column name..." ) dfC_filt = dfC[ - (dfC["Table Name"] == cgName) & (dfC["Hidden"] == False) + (dfC["Table Name"] == cgName) & (dfC["Hidden"] is False) ] colName = dfC_filt["Column Name"].iloc[0] tom.model.Tables[cgName].Columns["Name"].Name = colName diff --git a/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py b/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py index 309a5c0b..7c4893ee 100644 --- a/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +++ b/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py @@ -50,13 +50,13 @@ def migrate_tables_columns_to_semantic_model( workspace = fabric.resolve_workspace_name(workspace) - if new_dataset_workspace == None: + if new_dataset_workspace is None: new_dataset_workspace = workspace - if lakehouse_workspace == None: + if lakehouse_workspace is None: lakehouse_workspace = new_dataset_workspace - if lakehouse == None: + if lakehouse is None: lakehouse_id = fabric.get_lakehouse_id() lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace) @@ -158,8 +158,8 @@ def migrate_tables_columns_to_semantic_model( f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook or specify the lakehouse/lakehouse_workspace parameters." ) print( - f"To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook" + "To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook" ) print( - f"\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse" + "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse" ) diff --git a/src/sempy_labs/report/_generate_report.py b/src/sempy_labs/report/_generate_report.py index a362e6f4..0e509a12 100644 --- a/src/sempy_labs/report/_generate_report.py +++ b/src/sempy_labs/report/_generate_report.py @@ -1,4 +1,3 @@ -import sempy import sempy.fabric as fabric import pandas as pd import json, base64, time @@ -6,6 +5,7 @@ from sempy_labs._helper_functions import resolve_workspace_name_and_id import sempy_labs._icons as icons + def create_report_from_reportjson( report: str, dataset: str, @@ -82,7 +82,7 @@ def conv_b64(file): definitionPBIR = conv_b64(defPBIR) payloadReportJson = conv_b64(report_json) - if theme_json == None: + if theme_json is None: request_body = { "displayName": report, "type": objectType, diff --git a/src/sempy_labs/report/_report_functions.py b/src/sempy_labs/report/_report_functions.py index afff7a4c..1e6f516c 100644 --- a/src/sempy_labs/report/_report_functions.py +++ b/src/sempy_labs/report/_report_functions.py @@ -1,7 +1,6 @@ -import sempy import sempy.fabric as fabric import pandas as pd -import json, os, time, base64, copy, re +import json, os, time, base64, copy from anytree import Node, RenderTree from powerbiclient import Report from synapse.ml.services import Translate @@ -73,7 +72,7 @@ def get_report_json( if save_to_file_name is not None: lakeAttach = lakehouse_attached() - if lakeAttach == False: + if lakeAttach is False: print( f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook." 
             )
@@ -107,7 +106,7 @@ def report_dependency_tree(workspace: Optional[str] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    if workspace == None:
+    if workspace is None:
         workspaceId = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspaceId)

@@ -190,7 +189,7 @@ def export_report(

     lakeAttach = lakehouse_attached()

-    if lakeAttach == False:
+    if not lakeAttach:
         print(
             f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
         )
@@ -240,7 +239,7 @@ def export_report(
         )
         return

-    if file_name == None:
+    if file_name is None:
         file_name = report + fileExt
     else:
         file_name = file_name + fileExt
@@ -466,7 +465,7 @@ def clone_report(
         return

     target_workspace_id = dfW_filt["Id"].iloc[0]

-    if target_dataset == None:
+    if target_dataset is None:
         dfR = fabric.list_reports(workspace=target_workspace)
         dfR_filt = dfR[dfR["Name"] == report]
         target_dataset_id = dfR_filt["Dataset Id"].iloc[0]
@@ -532,7 +531,7 @@ def launch_report(report: str, workspace: Optional[str] = None):
         An embedded Power BI report within the notebook.
     """

-    from .HelperFunctions import resolve_report_id
+    from sempy_labs import resolve_report_id

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

@@ -562,7 +561,7 @@ def list_report_pages(report: str, workspace: Optional[str] = None):
         A pandas dataframe showing the pages within a Power BI report and their properties.
     """

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

@@ -626,7 +625,7 @@ def list_report_visuals(report: str, workspace: Optional[str] = None):
         A pandas dataframe showing the visuals within a Power BI report and their properties.
     """

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

@@ -681,7 +680,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
         A pandas dataframe showing the bookmarks within a Power BI report and their properties.
     """

-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)

@@ -749,6 +748,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
             f"The '{report}' report within the '{workspace}' workspace has no bookmarks."
         )

+
 @log
 def translate_report_titles(
     report: str, languages: Union[str, List[str]], workspace: Optional[str] = None
diff --git a/src/sempy_labs/report/_report_rebind.py b/src/sempy_labs/report/_report_rebind.py
index 05966929..97f43d1e 100644
--- a/src/sempy_labs/report/_report_rebind.py
+++ b/src/sempy_labs/report/_report_rebind.py
@@ -1,7 +1,6 @@
-import sempy
 import sempy.fabric as fabric
 from sempy_labs._helper_functions import resolve_dataset_id, resolve_report_id
-from typing import List, Optional, Union
+from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons

@@ -35,12 +34,12 @@ def report_rebind(

     """

-    if report_workspace == None:
+    if report_workspace is None:
         report_workspace_id = fabric.get_workspace_id()
         report_workspace = fabric.resolve_workspace_name(report_workspace_id)
     else:
         report_workspace_id = fabric.resolve_workspace_id(report_workspace)

-    if dataset_workspace == None:
+    if dataset_workspace is None:
         dataset_workspace = report_workspace

     client = fabric.PowerBIRestClient()
@@ -65,6 +64,7 @@ def report_rebind(
         f"{icons.red_dot} The '{report}' report within the '{report_workspace}' workspace failed to rebind to the '{dataset}' semantic model within the '{dataset_workspace}' workspace."
     )

+
 @log
 def report_rebind_all(
     dataset: str,
@@ -102,16 +102,16 @@ def report_rebind_all(

     """

-    if dataset_workspace == None:
+    if dataset_workspace is None:
         dataset_workspace_id = fabric.get_workspace_id()
         dataset_workspace = fabric.resolve_workspace_name(dataset_workspace_id)
     else:
         dataset_workspace_id = fabric.resolve_workspace_id(dataset_workspace)

-    if new_dataset_workpace == None:
+    if new_dataset_workpace is None:
         new_dataset_workpace = dataset_workspace

-    if report_workspace == None:
+    if report_workspace is None:
         report_workspace = dataset_workspace

     datasetId = resolve_dataset_id(dataset, dataset_workspace)
""" - for t in self.model.Tables: - for h in t.Hierarchies: - for l in h.Levels: - yield l + for table in self.model.Tables: + for hierarchy in table.Hierarchies: + for level in hierarchy.Levels: + yield level def all_calculation_items(self): """ @@ -929,7 +929,7 @@ def add_m_partition( p.Name = partition_name p.Source = mp if description is not None: - p.Description = description + p.Description = description p.Mode = System.Enum.Parse(TOM.ModeType, mode) self.model.Tables[table_name].Partitions.Add(p) @@ -1627,12 +1627,12 @@ def used_in_levels(self, column: "TOM.Column"): objType = column.ObjectType if objType == TOM.ObjectType.Column: - for l in self.all_levels(): + for level in self.all_levels(): if ( - l.Parent.Table.Name == column.Parent.Name - and l.Column.Name == column.Name + level.Parent.Table.Name == column.Parent.Name + and level.Column.Name == column.Name ): - yield l + yield level def used_in_hierarchies(self, column: "TOM.Column"): """ @@ -2037,7 +2037,7 @@ def has_hybrid_table(self): Indicates if the semantic model has a hybrid table. """ - return any(self.is_hybrid_table(table_name = t.Name) for t in self.model.Tables) + return any(self.is_hybrid_table(table_name=t.Name) for t in self.model.Tables) def has_date_table(self): """ @@ -2052,7 +2052,7 @@ def has_date_table(self): Indicates if the semantic model has a table marked as a date table. """ - return any(self.is_date_table(table_name = t.Name) for t in self.model.Tables) + return any(self.is_date_table(table_name=t.Name) for t in self.model.Tables) def is_direct_lake(self): """ @@ -2184,15 +2184,16 @@ def set_kpi( if status_graphic is None: status_graphic = "Three Circles Colored" - statusType = ["Linear", "LinearReversed", "Centered", "CenteredReversed"] - status_type = status_type.title().replace(" ", "") - + valid_status_types = ["Linear", "LinearReversed", "Centered", "CenteredReversed"] + status_type = status_type if status_type is None: status_type = "Linear" + else: + status_type = status_type.title().replace(" ", "") - if status_type not in statusType: + if status_type not in valid_status_types: print( - f"{icons.red_dot} '{status_type}' is an invalid status_type. Please choose from these options: {statusType}." + f"{icons.red_dot} '{status_type}' is an invalid status_type. Please choose from these options: {valid_status_types}." ) return @@ -3075,7 +3076,7 @@ def unqualified_columns(self, object: "TOM.Column", dependencies: pd.DataFrame): import Microsoft.AnalysisServices.Tabular as TOM def create_pattern(a, b): - return r"(? Iterator[TOMWrapper]: """ Connects to the Tabular Object Model (TOM) within a semantic model. @@ -3726,7 +3727,7 @@ def connect_semantic_model( Returns ------- - str + typing.Iterator[TOMWrapper] A connection to the semantic model's Tabular Object Model. """