Merge branch 'm-kovalsky/capacitymigration'
m-kovalsky committed Sep 24, 2024
2 parents b0aeac5 + 2f577e7 commit f21d9db
Showing 12 changed files with 2,031 additions and 50 deletions.
2 changes: 1 addition & 1 deletion docs/requirements.txt
@@ -11,4 +11,4 @@ anytree
IPython
polib
azure.mgmt.resource
-jsonpath_ng
+jsonpath_ng
2 changes: 1 addition & 1 deletion environment.yml
@@ -6,7 +6,7 @@ dependencies:
- pytest-cov
- pytest-mock
- pip:
-  - semantic-link-sempy>=0.7.5
+  - semantic-link-sempy>=0.8.0
- azure-identity==1.7.1
- azure-storage-blob>=12.9.0
- pandas-stubs
1 change: 1 addition & 0 deletions notebooks/Capacity Migration.ipynb
@@ -0,0 +1 @@
{"cells":[{"cell_type":"markdown","id":"5c27dfd1-4fe0-4a97-92e6-ddf78889aa93","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Install the latest .whl package\n","\n","Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."]},{"cell_type":"code","execution_count":null,"id":"d5cae9db-cef9-48a8-a351-9c5fcc99645c","metadata":{"jupyter":{"outputs_hidden":true,"source_hidden":false},"nteract":{"transient":{"deleting":false}}},"outputs":[],"source":["%pip install semantic-link-labs"]},{"cell_type":"markdown","id":"2856d26d","metadata":{},"source":["### Requirements\n","* Must have an Azure Subscription\n","* Must [register an App](https://ms.portal.azure.com/#blade/Microsoft_AAD_RegisteredApps/ApplicationsListBlade)\n"," * Permissions: Will need the Contributor role at the scope where the resources will be created, which is often the subscription level\n","* Azure Key Vault\n"," * [Set up](https://learn.microsoft.com/azure/key-vault/secrets/quick-create-portal) within the Azure Subscription\n"," * Save secrets for the Tenant ID, Client ID (Application ID), Client Secret\n"," * Permissions: Ensure the user who will be executing the notebooks has “Key Vault Secrets User”\n","* Fabric Permissions\n"," * User should be a tenant admin. This ensures they have the necessary authority to execute and manage the notebooks without encountering permission issues.\n","\n","### Result\n","* F skus are created for each (specified) capacity\n"," * Within the same region as the P SKU\n"," * Equivalent SKU size as the P SKU\n"," * Same admins as listed on the P SKU\n"," * All workspaces are migrated to the corresponding new capacity\n"," * Capacity settings from the P SKU are transferred to the F SKU\n"," * Capacity settings\n"," * Notification settings\n"," * Access settings\n"," * Disaster recovery settings\n"," * Spark settings\n"," * Delegated tenant settings\n","* The names of the newly created F SKU capacities will be an alphanumeric lowercase version of the P SKU capacity name, suffixed with 'fsku'. 
As an example: \"My capacity_3!\" -> \"mycapacity3fsku\"."]},{"cell_type":"markdown","id":"b195eae8","metadata":{},"source":["### Import the library and set the initial parameters"]},{"cell_type":"code","execution_count":null,"id":"1344e286","metadata":{},"outputs":[],"source":["import sempy_labs as labs\n","\n","azure_subscription_id = '' # Enter your Azure subscription ID\n","key_vault_uri = '' # Enter your Azure Key Vault URI\n","key_vault_tenant_id = '' # Enter the name of the Azure Key Vault secret which stores your Tenant ID\n","key_vault_client_id = '' # Enter the name of the Azure Key Vault secret which stores your Client ID (Application ID)\n","key_vault_client_secret = '' # Enter the name of the Azure Key Vault secret which stores your Client Secret\n","resource_group = '' # Enter the name of the resource group (to be used to create the new F skus)"]},{"cell_type":"markdown","id":"5a3fe6e8-b8aa-4447-812b-7931831e07fe","metadata":{"nteract":{"transient":{"deleting":false}}},"source":["### Migrate a single P SKU -> F SKU\n","Set the 'capacities' parameter to the single P SKU."]},{"cell_type":"code","execution_count":null,"id":"3655dd88","metadata":{},"outputs":[],"source":["labs.migrate_capacities(\n"," azure_subscription_id = azure_subscription_id,\n"," key_vault_uri = key_vault_uri,\n"," key_vault_tenant_id = key_vault_tenant_id,\n"," key_vault_client_id = key_vault_client_id,\n"," key_vault_client_secret = key_vault_client_secret,\n"," resource_group = resource_group,\n"," capacities = 'CapacityA',\n"," p_sku_only = True,\n",")"]},{"cell_type":"markdown","id":"175a59b8","metadata":{},"source":["### Migrate a list of P SKUs to F SKUs\n","Set the 'capacities' parameter to a list of P SKUs."]},{"cell_type":"code","execution_count":null,"id":"3a7a80ec","metadata":{},"outputs":[],"source":["labs.migrate_capacities(\n"," azure_subscription_id = azure_subscription_id,\n"," key_vault_uri = key_vault_uri,\n"," key_vault_tenant_id = key_vault_tenant_id,\n"," key_vault_client_id = key_vault_client_id,\n"," key_vault_client_secret = key_vault_client_secret,\n"," resource_group = resource_group,\n"," capacities = ['CapacityA', 'CapacityB', 'CapacityC'],\n"," p_sku_only = True,\n",")"]},{"cell_type":"markdown","id":"30438799","metadata":{},"source":["### Migrate all P SKUs to F SKUs\n","Set the 'capacities' parameter to None."]},{"cell_type":"code","execution_count":null,"id":"315c2dc7","metadata":{},"outputs":[],"source":["labs.migrate_capacities(\n"," azure_subscription_id = azure_subscription_id,\n"," key_vault_uri = key_vault_uri,\n"," key_vault_tenant_id = key_vault_tenant_id,\n"," key_vault_client_id = key_vault_client_id,\n"," key_vault_client_secret = key_vault_client_secret,\n"," resource_group = resource_group,\n"," capacities = None,\n"," p_sku_only = True,\n",")"]},{"cell_type":"markdown","id":"1d8e73b2","metadata":{},"source":["### Migrate a list of P SKUs to F SKUs; associate each capacity with a specific resource group\n","This process ensures that each F SKU is created within the resource group specified in the resource_group_mapping dictionary."]},{"cell_type":"code","execution_count":null,"id":"2854bf8a","metadata":{},"outputs":[],"source":["resource_group_mapping = {\n"," \"CapacityA\": \"ResourceGroupA\",\n"," \"CapacityB\": \"ResourceGroupA\",\n"," \"CapacityC\": \"ResourceGroupB\",\n","}\n","\n","labs.migrate_capacities(\n"," azure_subscription_id = azure_subscription_id,\n"," key_vault_uri = key_vault_uri,\n"," key_vault_tenant_id = key_vault_tenant_id,\n"," 
key_vault_client_id = key_vault_client_id,\n"," key_vault_client_secret = key_vault_client_secret,\n"," resource_group = resource_group_mapping,\n"," capacities = ['CapacityA', 'CapacityB', 'CapacityC'],\n"," p_sku_only = True,\n",")"]},{"cell_type":"markdown","id":"c3f497c8","metadata":{},"source":["### Migrate a single P SKU (already created F SKU)"]},{"cell_type":"code","execution_count":null,"id":"a4f0b5a2","metadata":{},"outputs":[],"source":["source_capacity = '' # Enter the P SKU capacity name\n","target_capacity = '' # Enter the F SKU capacity name (already exists) \n","\n","labs.migrate_workspaces(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n",")\n","\n","# Optionally migrate settings\n","\"\"\"\n","labs.migrate_capacity_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n",")\n","labs.migrate_access_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n",")\n","labs.migrate_delegated_tenant_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n",")\n","labs.migrate_disaster_recovery_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n",")\n","labs.migrate_notification_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n",")\n","labs.migrate_spark_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n",")\n","\"\"\""]},{"cell_type":"markdown","id":"e0db744b","metadata":{},"source":["### Migrate a list of P SKUs (already created F SKUs)"]},{"cell_type":"code","execution_count":null,"id":"0e04d519","metadata":{},"outputs":[],"source":["capacity_mapping = {\n"," \"capacitya\": \"capacityafsku\", # Format is \"P SKU\": \"F SKU\"\n"," \"capacityb\": \"capacitybfsku\",\n"," \"capacityc\": \"capacitycfsku\",\n","}\n","\n","p_skus = list(capacity_mapping.keys())\n","\n","for p_sku in p_skus:\n"," labs.migrate_workspaces(\n"," source_capacity=p_sku,\n"," target_capacity=capacity_mapping.get(p_sku)\n"," )\n"," # Optionally migrate settings\n"," \"\"\"\n"," labs.migrate_capacity_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n"," )\n"," labs.migrate_access_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n"," )\n"," labs.migrate_delegated_tenant_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n"," )\n"," labs.migrate_disaster_recovery_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n"," )\n"," labs.migrate_notification_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n"," )\n"," labs.migrate_spark_settings(\n"," source_capacity=source_capacity, \n"," target_capacity=target_capacity\n"," )\n"," \"\"\"\n"]}],"metadata":{"kernel_info":{"name":"synapse_pyspark"},"kernelspec":{"display_name":"Synapse PySpark","language":"Python","name":"synapse_pyspark"},"language_info":{"name":"python"},"microsoft":{"language":"python"},"nteract":{"version":"[email protected]"},"spark_compute":{"compute_id":"/trident/default"},"synapse_widget":{"state":{},"version":"0.1"},"widgets":{}},"nbformat":4,"nbformat_minor":5}
67 changes: 49 additions & 18 deletions src/sempy_labs/__init__.py
@@ -3,6 +3,36 @@
delete_environment,
publish_environment,
)
from sempy_labs._clear_cache import (
clear_cache,
backup_semantic_model,
restore_semantic_model,
copy_semantic_model_backup_file,
list_backups,
list_storage_account_files,
)
from sempy_labs._capacity_migration import (
migrate_spark_settings,
migrate_workspaces,
migrate_capacities,
migrate_notification_settings,
migrate_access_settings,
migrate_delegated_tenant_settings,
migrate_capacity_settings,
migrate_disaster_recovery_settings,
)
from sempy_labs._capacities import (
create_fabric_capacity,
# get_capacity_resource_governance,
# list_vcores,
resume_fabric_capacity,
suspend_fabric_capacity,
update_fabric_capacity,
delete_fabric_capacity,
check_fabric_capacity_name_availablility,
delete_embedded_capacity,
delete_premium_capacity,
)

from sempy_labs._spark import (
get_spark_settings,
@@ -30,16 +60,6 @@
ConnectWarehouse,
ConnectLakehouse,
)
-from sempy_labs._capacities import (
-    check_fabric_capacity_name_availablility,
-    delete_fabric_capacity,
-    resume_fabric_capacity,
-    update_fabric_capacity,
-    create_fabric_capacity,
-    delete_premium_capacity,
-    suspend_fabric_capacity,
-    delete_embedded_capacity,
-)
from sempy_labs._workspace_identity import (
provision_workspace_identity,
deprovision_workspace_identity,
@@ -63,14 +83,6 @@
assign_workspace_to_dataflow_storage,
list_dataflows,
)
-from sempy_labs._clear_cache import (
-    clear_cache,
-    backup_semantic_model,
-    restore_semantic_model,
-    copy_semantic_model_backup_file,
-    list_backups,
-    list_storage_account_files,
-)
from sempy_labs._connections import (
list_connections,
list_item_connections,
@@ -193,6 +205,8 @@
"restore_semantic_model",
"list_semantic_model_object_report_usage",
"list_report_semantic_model_objects",
"migrate_spark_settings",
"create_azure_storage_account",
"delete_custom_pool",
"clear_cache",
# create_connection_cloud,
@@ -309,4 +323,21 @@
"suspend_fabric_capacity",
"delete_embedded_capacity",
"resolve_dataset_from_report",
"migrate_workspaces",
"migrate_capacities",
"create_fabric_capacity",
"migrate_capacity_settings",
# "get_capacity_resource_governance",
# "list_vcores",
"migrate_disaster_recovery_settings",
"migrate_notification_settings",
"migrate_access_settings",
"migrate_delegated_tenant_settings",
"resume_fabric_capacity",
"suspend_fabric_capacity",
"update_fabric_capacity",
"delete_fabric_capacity",
"check_fabric_capacity_name_availablility",
"delete_embedded_capacity",
"delete_premium_capacity",
]
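With the reorganized imports and the expanded __all__ above, the capacity helpers resolve from the package top level; a quick sketch:

import sempy_labs as labs

# Names listed in __all__ are importable directly from the package namespace.
labs.migrate_capacities      # re-exported from sempy_labs._capacity_migration
labs.create_fabric_capacity  # re-exported from sempy_labs._capacities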
40 changes: 38 additions & 2 deletions src/sempy_labs/_capacities.py
@@ -5,6 +5,7 @@
from sempy.fabric.exceptions import FabricHTTPException
import requests
from sempy_labs._helper_functions import get_azure_token_credentials
import pandas as pd


def _add_sll_tag(payload, tags):
@@ -155,10 +156,10 @@ def create_fabric_capacity(
for i in resource_client.resources.list(
"resourceType eq 'Microsoft.PowerBIDedicated/capacities'"
):
-        if i.name == capacity_name.removesuffix(capacity_suffix):
+        if i.name == capacity_name.removesuffix(icons.migrate_capacity_suffix):
resource_group = i.id.split("/")[4]
print(
f"{icons.yellow_dot} Override resource group flag detected for A SKUs - using the existing resource group '{resource_group}' for capacity '{capacity_name}'"
f"{icons.yellow_dot} Override resource group flag detected for A SKUs - using the existing resource group '{resource_group}' for the '{capacity_name}' capacity."
)
else:
# Attempt to get the resource group
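For context, str.removesuffix (Python 3.9+) strips the suffix only when it is actually present. A small illustration, assuming the suffix value is 'fsku' (the real value of icons.migrate_capacity_suffix is defined in the library):

suffix = "fsku"  # assumed value, for illustration only
print("mycapacity3fsku".removesuffix(suffix))  # -> 'mycapacity3'
print("mycapacity3".removesuffix(suffix))      # no suffix present -> 'mycapacity3'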
@@ -207,6 +208,41 @@
)


def list_vcores() -> pd.DataFrame:

    df = pd.DataFrame(columns=["Total Purchased Cores", "Available Cores"])

    client = fabric.PowerBIRestClient()
    response = client.get("capacities/vcores")
    if response.status_code != 200:
        # Raise (not merely construct) the exception on a failed request.
        raise FabricHTTPException(response)
    response_json = response.json()
    new_data = {
        "Total Purchased Cores": response_json.get("totalPurchasedCores"),
        "Available Cores": response_json.get("availableCores"),
    }
    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    int_cols = ["Total Purchased Cores", "Available Cores"]
    df[int_cols] = df[int_cols].astype(int)

    return df


def get_capacity_resource_governance(capacity_name: str):

    dfC = fabric.list_capacities()
    dfC_filt = dfC[dfC["Display Name"] == capacity_name]
    capacity_id = dfC_filt["Id"].iloc[0].upper()
    client = fabric.PowerBIRestClient()
    response = client.get(f"capacities/{capacity_id}/resourceGovernance")

    if response.status_code != 200:
        # Raise (not merely construct) the exception on a failed request.
        raise FabricHTTPException(response)

    return response.json()["workloadSettings"]


def suspend_fabric_capacity(
capacity_name: str,
azure_subscription_id: str,
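A usage sketch for the two helpers added above. In this commit they are commented out of the package's top-level exports, so import them from the module directly; 'CapacityA' is a placeholder capacity name, and both calls assume a Fabric notebook session:

from sempy_labs._capacities import list_vcores, get_capacity_resource_governance

vcores = list_vcores()
print(vcores)  # one-row DataFrame: 'Total Purchased Cores', 'Available Cores'

settings = get_capacity_resource_governance("CapacityA")  # placeholder name
print(settings)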