version 0.1.1
### Refactoring
Massive refactoring to simplify the code and make it more maintainable. The main changes are:
- All APIs are called from the client classes.
- All API calls are handled by a single function.

This improves the application's performance by reducing the number of API calls: each function now typically makes a single API call.
DaSenf1860 committed Jul 15, 2024
1 parent 4181a40 commit c00dc78
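The "single function" above is the heart of the change: the per-method retry loops (visible in the admin_item.py diff below) collapse into one central dispatcher. A minimal sketch of what such a function can look like, reusing the 429 handling from the removed code; the name `_call_api` and its signature are hypothetical, not the SDK's actual internals:

```python
import json
from time import sleep

import requests


def _call_api(url, headers, method="GET", body=None, retries=10):
    """Hypothetical central dispatcher: every client method funnels its
    request through here, so retry and error handling live in one place."""
    for _ in range(retries):
        response = requests.request(method, url, headers=headers, json=body)
        if response.status_code == 429:
            # Same rate-limit handling as the removed per-method loops
            print("Too many requests, waiting 10 seconds")
            sleep(10)
            continue
        if response.status_code >= 400:
            raise Exception(f"Error calling {url}: {response.text}")
        return json.loads(response.text)
    raise Exception("API rate limit retries exhausted")
```

Every client method then reduces to building a URL and delegating, which is why most files in this commit shrink.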
Showing 21 changed files with 3,766 additions and 3,792 deletions.
15 changes: 8 additions & 7 deletions README.md
@@ -9,7 +9,7 @@ They are designed to automate your Fabric processes.

This SDK helps to interact with the Fabric APIs in a more Pythonic way.
Additionally it brings some extra features like:
-- Authentication is handled for you (currently Azure CLI Authentication and Service Principal Authentication are supported, as well as Synapse Spark Authentication in Fabric Notebooks)
+- Authentication is handled for you (currently Azure CLI Authentication, Authentication from a Microsoft Fabric notebook and Service Principal Authentication are supported)
- Waiting for completion of long running operations
- Retry logic when hitting the API rate limits
- Referencing objects by name instead of ID
@@ -70,6 +70,7 @@ from msfabricpysdkcore import FabricClientCore
# Create a client

# Either login with the Azure CLI first and initiate the client directly
+# This also works directly in a Microsoft Fabric notebook
fc = FabricClientCore()

# Or use a service principal (note that not all APIs are supported with service principal)
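The service-principal setup itself is outside this hunk. A minimal sketch of what the initialization plausibly looks like; the keyword argument names are an assumption, not confirmed by this diff:

```python
# Assumption: FabricClientCore accepts service principal credentials as
# keyword arguments; the parameter names are illustrative placeholders.
fc = FabricClientCore(tenant_id="your-tenant-id",
                      client_id="your-client-id",
                      client_secret="your-client-secret")
```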
@@ -215,15 +216,15 @@ pipe = fc.get_deployment_pipeline(pipe_id)


# Get deployment pipeline stages
-stages = fc.get_deployment_pipeline_stages(pipe_id)
+stages = fc.list_deployment_pipeline_stages(pipe_id)

names = [stage.display_name for stage in stages]

dev_stage = [stage for stage in stages if stage.display_name == "Development"][0]
prod_stage = [stage for stage in stages if stage.display_name == "Production"][0]

# Get deployment pipeline stages items
-items = fc.get_deployment_pipeline_stages_items(pipeline_id=pipe_id, stage_id=dev_stage.id)
+items = fc.list_deployment_pipeline_stages_items(pipeline_id=pipe_id, stage_id=dev_stage.id)


items = [item for item in dev_stage.get_items() if item["itemDisplayName"] == 'cicdlakehouse']
@@ -297,11 +298,11 @@ item_list = ws.list_items()


# Update an item
-fc.update_item(workspace_id="workspace_id", item_id="item_id", display_name="new_item_name", description = None)
+fc.update_item(workspace_id="workspace_id", item_id="item_id", display_name="new_item_name", description = None, return_item=True)
# or
-ws.update_item(item_id="item_id", display_name="new_item_name", description = None)
+ws.update_item(item_id="item_id", display_name="new_item_name", description = None, return_item=True)
# or
-item.update(display_name="new_item_name", description = None)
+item.update(display_name="new_item_name", description = None, return_item=True)


# Delete an item
@@ -639,7 +640,7 @@ domains = fca.list_domains()

# Update domain
domain_new_name = "sdktestdomains2"
-domain_clone = fca.update_domain(domain.id, display_name=domain_new_name)
+domain_clone = fca.update_domain(domain.id, display_name=domain_new_name, return_item=True)

# Assign domain workspaces by Ids
fca.assign_domain_workspaces_by_ids(domain.id, ["workspace_id_1", "workspace_id_2"])
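One pattern running through the README changes above is the new `return_item=True` flag on every update call. Judging from the assignments in the diff, it makes the update return the updated object; a small sketch under that assumption (what the call returns without the flag is not shown in this commit):

```python
# Assumption: return_item=True makes update_item return the updated Item
# object rather than only a response, so the result is usable immediately.
item = fc.update_item(workspace_id="workspace_id", item_id="item_id",
                      display_name="new_item_name", return_item=True)
```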
26 changes: 13 additions & 13 deletions item_specific_apis.md
@@ -72,7 +72,7 @@ dp_new = fc.create_data_pipeline(workspace_id, display_name="pipeline_new", desc
dp = fc.get_data_pipeline(workspace_id, data_pipeline_name="pipeline1")

# Update Data Pipeline
-dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2")
+dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2", return_item=True)

# Delete Data Pipeline
fc.delete_data_pipeline(workspace_id, dp.id)
@@ -97,7 +97,7 @@ environment_names = [env.display_name for env in environments]
env = fc.get_environment(workspace_id, environment_name="environment1")

# Update environment
-env2 = fc.update_environment(workspace_id, env.id, display_name="environment2")
+env2 = fc.update_environment(workspace_id, env.id, display_name="environment2", return_item=True)

# Delete environment
status_code = fc.delete_environment(workspace_id, env.id)
@@ -163,7 +163,7 @@ eventhouse_names = [eh.display_name for eh in eventhouses]
eh = fc.get_eventhouse(workspace_id, eventhouse_name="eventhouse1")

# Update Eventhouse
-eh2 = fc.update_eventhouse(workspace_id, eh.id, display_name="eventhouse2")
+eh2 = fc.update_eventhouse(workspace_id, eh.id, display_name="eventhouse2", return_item=True)

# Delete Eventhouse
status_code = fc.delete_eventhouse(workspace_id, eh.id)
@@ -190,7 +190,7 @@ es = fc.create_eventstream(workspace_id, display_name="es1")
es = fc.get_eventstream(workspace_id, eventstream_name="es1")

# Update Eventstream
-es2 = fc.update_eventstream(workspace_id, es.id, display_name="es2")
+es2 = fc.update_eventstream(workspace_id, es.id, display_name="es2", return_item=True)

# Delete Eventstream
fc.delete_eventstream(workspace_id, es.id)
@@ -222,7 +222,7 @@ kql_database_names = [kqldb.display_name for kqldb in kql_databases]
kqldb = fc.get_kql_database(workspace_id, kql_database_name="kqldatabase12")

# Update KQL Database
-kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldb23")
+kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldb23", return_item=True)

# Delete KQL Database
status_code = fc.delete_kql_database(workspace_id, kqldb.id)
@@ -246,7 +246,7 @@ kql_querysets = fc.list_kql_querysets(workspace_id)
kqlq = fc.get_kql_queryset(workspace_id, kql_queryset_name="kqlqueryset1")

# Update KQL Queryset
-kqlq2 = fc.update_kql_queryset(workspace_id, kqlq.id, display_name="kqlqueryset2")
+kqlq2 = fc.update_kql_queryset(workspace_id, kqlq.id, display_name="kqlqueryset2", return_item=True)

# Delete KQL Queryset
fc.delete_kql_queryset(workspace_id, kqlq.id)
@@ -304,7 +304,7 @@ lakehouses = fc.list_lakehouses(workspace_id)
lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)

# Update Lakehouse
-lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name="lakehouse3")
+lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name="lakehouse3", return_item=True)

# Delete Lakehouse
fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
@@ -332,7 +332,7 @@ mle = fc.create_ml_experiment(workspace_id, display_name="mlexperiment1")
mle = fc.get_ml_experiment(workspace_id, ml_experiment_name="mlexperiment1")

# Update ML Experiment
-mle2 = fc.update_ml_experiment(workspace_id, mle.id, display_name="mlexperiment2")
+mle2 = fc.update_ml_experiment(workspace_id, mle.id, display_name="mlexperiment2", return_item=True)

# Delete ML Experiment
fc.delete_ml_experiment(workspace_id, mle.id)
@@ -359,7 +359,7 @@ ml_model = fc.create_ml_model(workspace_id, display_name="mlmodel1")
ml_model = fc.get_ml_model(workspace_id, ml_model_name="mlmodel1")

# Update ML Model
-ml_model2 = fc.update_ml_model(workspace_id, ml_model_id=ml_model.id, display_name="mlmodel2")
+ml_model2 = fc.update_ml_model(workspace_id, ml_model_id=ml_model.id, display_name="mlmodel2", return_item=True)

# Delete ML Model
fc.delete_ml_model(workspace_id, ml_model.id)
@@ -389,7 +389,7 @@ notebook = fc.create_notebook(workspace_id, definition = definition, display_nam
notebook = fc.get_notebook(workspace_id, notebook_name="notebook1")

# Update Notebook
-notebook2 = fc.update_notebook(workspace_id, notebook.id, display_name="notebook2")
+notebook2 = fc.update_notebook(workspace_id, notebook.id, display_name="notebook2", return_item=True)

# Get Notebook Definition
fc.get_notebook_definition(workspace_id, notebook.id, format=None)
@@ -494,7 +494,7 @@ pool2 = fc.create_workspace_custom_pool(workspace_id=workspace_id,
# Update a spark custom pool

pool2 = fc.update_workspace_custom_pool(workspace_id=workspace_id, pool_id=pool2.id,
-                    auto_scale = {"enabled": True, "minNodeCount": 1, "maxNodeCount": 7})
+                    auto_scale = {"enabled": True, "minNodeCount": 1, "maxNodeCount": 7}, return_item=True)


# Delete a spark custom pool
@@ -546,7 +546,7 @@ spark_job_definition = fc.get_spark_job_definition(workspace_id, spark_job_defin
job_instance = fc.run_on_demand_spark_job_definition(workspace_id, spark_job_definition.id, job_type="sparkjob")

# Update Spark Job Definition
-spark_job_definition2 = fc.update_spark_job_definition(workspace_id, spark_job_definition.id, display_name="sparkjobdefinition2")
+spark_job_definition2 = fc.update_spark_job_definition(workspace_id, spark_job_definition.id, display_name="sparkjobdefinition2", return_item=True)

# Get Spark Job Definition Definition
fc.get_spark_job_definition_definition(workspace_id, spark_job_definition.id, format=None)
@@ -582,7 +582,7 @@ warehouse = fc.create_warehouse(workspace_id, display_name="wh1")
warehouse = fc.get_warehouse(workspace_id, warehouse_name="wh1")

# Update Warehouse
-warehouse2 = fc.update_warehouse(workspace_id, warehouse.id, display_name="wh2")
+warehouse2 = fc.update_warehouse(workspace_id, warehouse.id, display_name="wh2", return_item=True)

# Delete Warehouse
fc.delete_warehouse(workspace_id, warehouse.id)
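Every item type in item_specific_apis.md follows the same create/get/update/delete lifecycle; condensed here from the warehouse hunk above (the workspace ID is a placeholder):

```python
from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()        # Azure CLI authentication, as in the README
workspace_id = "workspace_id"  # placeholder

warehouse = fc.create_warehouse(workspace_id, display_name="wh1")
warehouse = fc.get_warehouse(workspace_id, warehouse_name="wh1")
warehouse = fc.update_warehouse(workspace_id, warehouse.id, display_name="wh2", return_item=True)
fc.delete_warehouse(workspace_id, warehouse.id)
```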
62 changes: 18 additions & 44 deletions msfabricpysdkcore/admin_item.py
@@ -1,29 +1,26 @@
import json
from time import sleep

-import requests
+from msfabricpysdkcore.adminapi import FabricClientAdmin


class AdminItem:
"""Class to represent a item in Microsoft Fabric"""

-def __init__(self, id, type, name, workspace_id, state, description, last_updated_date, capacity_id, creator_principal, auth) -> None:
+def __init__(self, id, type, name, workspace_id, state, description, last_updated_date, capacity_id, creator_principal, admin_client: FabricClientAdmin) -> None:
"""Constructor for the Item class
Args:
id (str): The ID of the item
type (str): The type of the item
name (str): The name of the item
-workspace_id (str): The ID of the workspace
+workspace_id (str): The ID of the workspace to which the item belongs
state (str): The state of the item
description (str): The description of the item
-last_updated_date (str): The last updated date of the item
-capacity_id (str): The ID of the capacity
-creator_principal (dict): The creator principal of the item
-auth (Auth): The Auth object
-Returns:
-Item: The Item object
+last_updated_date (str): The date when the item was last updated
+capacity_id (str): The ID of the capacity
+creator_principal (dict): The principal who created the item
+admin_client (FabricClientAdmin): The FabricClientAdmin object
"""

self.id = id
self.type = type
self.name = name
@@ -33,7 +30,7 @@ def __init__(self, id, type, name, workspace_id, state, description, last_update
self.last_updated_date = last_updated_date
self.capacity_id = capacity_id
self.creator_principal = creator_principal
-self.auth = auth
+self.admin_client = admin_client


def __str__(self) -> str:
@@ -59,55 +56,32 @@ def __str__(self) -> str:
def __repr__(self) -> str:
return self.__str__()

-def from_dict(item_dict, auth):
+def from_dict(item_dict, admin_client):
"""Create Item object from dictionary
Args:
-item_dict (dict): The dictionary containing the item information
-auth (Auth): The Auth object
+item_dict (dict): The dictionary containing the item details
+admin_client (FabricClientAdmin): The FabricClientAdmin object
Returns:
-Item: The Item object"""
+AdminItem: The AdminItem object
+"""
return AdminItem(
id = item_dict['id'],
type = item_dict['type'],
-name = item_dict['name'],
+name = item_dict.get('name', None),
workspace_id = item_dict['workspaceId'],
state = item_dict['state'],
description = item_dict.get('description', None),
last_updated_date = item_dict['lastUpdatedDate'],
capacity_id = item_dict['capacityId'],
creator_principal = item_dict['creatorPrincipal'],
-auth = auth
+admin_client = admin_client
)

def get_item_access_details(self, type=None):
"""Get the access details of the item
Returns:
dict: The access details of the item"""
return self.list_item_access_details(type)


def list_item_access_details(self, type=None):
"""Get the access details of the item
Returns:
dict: The access details of the item"""

url = f"https://api.fabric.microsoft.com/v1/admin/workspaces/{self.workspace_id}/items/{self.id}/users"

if type:
url += f"?type={self.type}"

for _ in range(10):
response = requests.get(url=url, headers=self.auth.get_headers())
if response.status_code == 429:
print("Too many requests, waiting 10 seconds")
sleep(10)
continue
if response.status_code not in (200, 429):
print(response.status_code)
print(response.text)
raise Exception(f"Error getting item: {response.text}")
break

return json.loads(response.text)
return self.admin_client.list_item_access_details(self.workspace_id, self.id, type)
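After the refactoring, `AdminItem` no longer builds URLs or retries requests itself; the method above and a direct client call share one code path. A short usage sketch (the IDs are placeholders):

```python
from msfabricpysdkcore.adminapi import FabricClientAdmin

fca = FabricClientAdmin()  # Azure CLI authentication

# Equivalent to calling list_item_access_details on an AdminItem object
details = fca.list_item_access_details("workspace_id", "item_id")
```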
