
ran formatter in correct dir :(
azaleacolburn committed Jul 26, 2024
1 parent 4fdacf5 commit 0bfc19a
Showing 27 changed files with 273 additions and 781 deletions.
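
The collapsed call sites in the diff below are the output of a line-length-aware formatter; the commit message suggests an earlier run was made from the wrong directory. As an illustration only — the tool, the 120-character limit, and the add-in folder as the target path are assumptions, not stated in this commit — the re-run might have looked like this from the repository root:

# Hypothetical formatter re-run. Assumes black with a 120-character line
# length; neither the tool nor the limit is confirmed by this commit.
import subprocess

subprocess.run(
    ["black", "--line-length", "120", "exporter/SynthesisFusionAddin"],
    check=True,  # raise CalledProcessError if the formatter fails
)
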
4 changes: 1 addition & 3 deletions exporter/SynthesisFusionAddin/Synthesis.py
@@ -78,9 +78,7 @@ def stop(_):

path = os.path.abspath(os.path.dirname(__file__))

path_proto_files = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "proto", "proto_out")
)
path_proto_files = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "proto", "proto_out"))

if path in sys.path:
sys.path.remove(path)
12 changes: 3 additions & 9 deletions exporter/SynthesisFusionAddin/proto/deps.py
@@ -34,9 +34,7 @@ def getPythonFolder() -> str:
elif system == "Darwin":
pythonFolder = f"{Path(osPath).parents[2]}/bin"
else:
raise ImportError(
"Unsupported platform! This add-in only supports windows and macos"
)
raise ImportError("Unsupported platform! This add-in only supports windows and macos")

logger.debug(f"Python Folder -> {pythonFolder}")
return pythonFolder
@@ -129,9 +127,7 @@ def installCross(pipDeps: list) -> bool:
]
)
if installResult != 0:
logger.warn(
f'Dep installation "{depName}" exited with code "{installResult}"'
)
logger.warn(f'Dep installation "{depName}" exited with code "{installResult}"')

if system == "Darwin":
pipAntiDeps = ["dataclasses", "typing"]
@@ -152,9 +148,7 @@ def installCross(pipDeps: list) -> bool:
]
)
if uninstallResult != 0:
logger.warn(
f'AntiDep uninstallation "{depName}" exited with code "{uninstallResult}"'
)
logger.warn(f'AntiDep uninstallation "{depName}" exited with code "{uninstallResult}"')

progressBar.hide()

80 changes: 20 additions & 60 deletions exporter/SynthesisFusionAddin/src/APS/APS.py
@@ -125,9 +125,7 @@ def refreshAuthToken():
"scope": "data:create data:write data:search data:read",
}
).encode("utf-8")
req = urllib.request.Request(
"https://developer.api.autodesk.com/authentication/v2/token", data=body
)
req = urllib.request.Request("https://developer.api.autodesk.com/authentication/v2/token", data=body)
req.method = "POST"
req.add_header(key="Content-Type", val="application/x-www-form-urlencoded")
try:
@@ -188,9 +186,7 @@ def getUserInfo() -> APSUserInfo | None:
return loadUserInfo()


def create_folder(
auth: str, project_id: str, parent_folder_id: str, folder_display_name: str
) -> str | None:
def create_folder(auth: str, project_id: str, parent_folder_id: str, folder_display_name: str) -> str | None:
"""
creates a folder on an APS project
@@ -215,9 +211,7 @@ def create_folder(
"name": folder_display_name,
"extension": {"type": "folders:autodesk.core:Folder", "version": "1.0"},
},
"relationships": {
"parent": {"data": {"type": "folders", "id": f"{parent_folder_id}"}}
},
"relationships": {"parent": {"data": {"type": "folders", "id": f"{parent_folder_id}"}}},
},
}

@@ -238,9 +232,7 @@ def file_path_to_file_name(file_path: str) -> str:
return file_path.split("/").pop()


def upload_mirabuf(
project_id: str, folder_id: str, file_name: str, file_contents: str
) -> str | None:
def upload_mirabuf(project_id: str, folder_id: str, file_name: str, file_contents: str) -> str | None:
"""
uploads mirabuf file to a specific folder in an APS project
the folder and project must be created and valid
@@ -280,18 +272,14 @@ def upload_mirabuf(
folder_id = create_folder(auth, project_id, folder_id, "MirabufDir")
else:
folder_id = new_folder_id
(lineage_id, file_id, file_version) = get_file_id(
auth, project_id, folder_id, file_name
)
(lineage_id, file_id, file_version) = get_file_id(auth, project_id, folder_id, file_name)

"""
Create APS Storage Location
"""
object_id = create_storage_location(auth, project_id, folder_id, file_name)
if object_id is None:
gm.ui.messageBox(
"UPLOAD ERROR", "Object id is none; check create storage location"
)
gm.ui.messageBox("UPLOAD ERROR", "Object id is none; check create storage location")
return None
(prefix, object_key) = str(object_id).split("/", 1)
bucket_key = prefix.split(":", 3)[3] # gets the last element smth like: wip.dm.prod
@@ -325,9 +313,7 @@ def upload_mirabuf(
object_id,
)
else:
_lineage_info = create_first_file_version(
auth, str(object_id), project_id, str(folder_id), file_name
)
_lineage_info = create_first_file_version(auth, str(object_id), project_id, str(folder_id), file_name)
return ""


@@ -345,13 +331,9 @@ def get_hub_id(auth: str, hub_name: str) -> str | None:
"""

headers = {"Authorization": f"Bearer {auth}"}
hub_list_res = requests.get(
"https://developer.api.autodesk.com/project/v1/hubs", headers=headers
)
hub_list_res = requests.get("https://developer.api.autodesk.com/project/v1/hubs", headers=headers)
if not hub_list_res.ok:
gm.ui.messageBox(
"UPLOAD ERROR", f"Failed to retrieve hubs: {hub_list_res.text}"
)
gm.ui.messageBox("UPLOAD ERROR", f"Failed to retrieve hubs: {hub_list_res.text}")
return None
hub_list: list[dict[str, Any]] = hub_list_res.json()
for hub in hub_list:
@@ -384,9 +366,7 @@ def get_project_id(auth: str, hub_id: str, project_name: str) -> str | None:
headers=headers,
)
if not project_list_res.ok:
gm.ui.messageBox(
"UPLOAD ERROR", f"Failed to retrieve hubs: {project_list_res.text}"
)
gm.ui.messageBox("UPLOAD ERROR", f"Failed to retrieve hubs: {project_list_res.text}")
return None
project_list: list[dict[str, Any]] = project_list_res.json()
for project in project_list:
@@ -396,9 +376,7 @@ def get_project_id(auth: str, hub_id: str, project_name: str) -> str | None:
return ""


def get_item_id(
auth: str, project_id: str, parent_folder_id: str, folder_name: str, item_type: str
) -> str | None:
def get_item_id(auth: str, project_id: str, parent_folder_id: str, folder_name: str, item_type: str) -> str | None:
headers = {"Authorization": f"Bearer {auth}"}
res = requests.get(
f"https://developer.api.autodesk.com/data/v1/projects/{project_id}/folders/{parent_folder_id}/contents",
@@ -501,9 +479,7 @@ def update_file_version(
json=data,
)
if not update_res.ok:
gm.ui.messageBox(
f"UPLOAD ERROR:\n{update_res.text}", "Updating file to new version failed"
)
gm.ui.messageBox(f"UPLOAD ERROR:\n{update_res.text}", "Updating file to new version failed")
return None
gm.ui.messageBox(
f"Successfully updated file {file_name} to version {int(curr_file_version) + 1} on APS",
@@ -513,9 +489,7 @@ def update_file_version(
return new_id


def get_file_id(
auth: str, project_id: str, folder_id: str, file_name: str
) -> tuple[str, str, str] | None:
def get_file_id(auth: str, project_id: str, folder_id: str, file_name: str) -> tuple[str, str, str] | None:
"""
gets the file id given a file name
@@ -559,9 +533,7 @@ def get_file_id(
return (lineage, id, version)


def create_storage_location(
auth: str, project_id: str, folder_id: str, file_name: str
) -> str | None:
def create_storage_location(auth: str, project_id: str, folder_id: str, file_name: str) -> str | None:
"""
creates a storage location (a bucket)
the bucket can be used to upload a file to
@@ -588,9 +560,7 @@ def create_storage_location(
"data": {
"type": "objects",
"attributes": {"name": file_name},
"relationships": {
"target": {"data": {"type": "folders", "id": f"{folder_id}"}}
},
"relationships": {"target": {"data": {"type": "folders", "id": f"{folder_id}"}}},
},
}
headers = {
@@ -613,9 +583,7 @@ def create_storage_location(
return object_id


def generate_signed_url(
auth: str, bucket_key: str, object_key: str
) -> tuple[str, str] | None:
def generate_signed_url(auth: str, bucket_key: str, object_key: str) -> tuple[str, str] | None:
"""
generates a signed_url for a bucket, given a bucket_key and object_key
@@ -641,9 +609,7 @@ def generate_signed_url(
headers=headers,
)
if not signed_url_res.ok:
gm.ui.messageBox(
f"UPLOAD ERROR: {signed_url_res.text}", "Failed to get signed url"
)
gm.ui.messageBox(f"UPLOAD ERROR: {signed_url_res.text}", "Failed to get signed url")
return None
signed_url_json: dict[str, str] = signed_url_res.json()
return (signed_url_json["uploadKey"], signed_url_json["urls"][0])
@@ -666,16 +632,12 @@ def upload_file(signed_url: str, file_contents: str) -> str | None:
"""
upload_response = requests.put(url=signed_url, data=file_contents)
if not upload_response.ok:
gm.ui.messageBox(
"UPLOAD ERROR", f"Failed to upload to signed url: {upload_response.text}"
)
gm.ui.messageBox("UPLOAD ERROR", f"Failed to upload to signed url: {upload_response.text}")
return None
return ""


def complete_upload(
auth: str, upload_key: str, object_key: str, bucket_key: str
) -> str | None:
def complete_upload(auth: str, upload_key: str, object_key: str, bucket_key: str) -> str | None:
"""
completes and verifies the APS file upload given the upload_key
@@ -764,9 +726,7 @@ def create_first_file_version(
"type": "versions",
"id": "1",
"attributes": included_attributes,
"relationships": {
"storage": {"data": {"type": "objects", "id": object_id}}
},
"relationships": {"storage": {"data": {"type": "objects", "id": object_id}}},
},
]

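
The APS.py hunks above all touch helpers in the same upload pipeline. A condensed, illustrative sketch of how those helpers appear to fit together inside upload_mirabuf, pieced together only from the hunks shown here (error handling, folder creation, and the update-existing-version branch are omitted; the call order between the elided hunks is inferred):

# Illustrative sketch only — not the actual upload_mirabuf body.
# Assumes these helpers are imported from src/APS/APS.py as defined above.
def sketch_upload(auth: str, project_id: str, folder_id: str, file_name: str, file_contents: str) -> None:
    # 1. Reserve an OSS storage location for the file inside the target folder.
    object_id = create_storage_location(auth, project_id, folder_id, file_name)

    # object_id is of the form "<prefix>/<object_key>", where the prefix ends
    # with the bucket key (e.g. wip.dm.prod), as in the hunk above.
    (prefix, object_key) = str(object_id).split("/", 1)
    bucket_key = prefix.split(":", 3)[3]

    # 2. Get a signed URL for the bucket/object pair, PUT the file contents
    #    to it, then confirm the upload with the returned upload key.
    (upload_key, signed_url) = generate_signed_url(auth, bucket_key, object_key)
    upload_file(signed_url, file_contents)
    complete_upload(auth, upload_key, object_key, bucket_key)

    # 3. Register the uploaded object as the first version of a new file
    #    lineage in the project (update_file_version handles existing files).
    create_first_file_version(auth, str(object_id), project_id, str(folder_id), file_name)
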
18 changes: 4 additions & 14 deletions exporter/SynthesisFusionAddin/src/Logging.py
@@ -31,11 +31,7 @@ def setupLogger() -> None:
now = datetime.now().strftime("%H-%M-%S")
today = date.today()
logFileFolder = getOSPath(f"{pathlib.Path(__file__).parent.parent}", "logs")
logFiles = [
os.path.join(logFileFolder, file)
for file in os.listdir(logFileFolder)
if file.endswith(".log")
]
logFiles = [os.path.join(logFileFolder, file) for file in os.listdir(logFileFolder) if file.endswith(".log")]
logFiles.sort()
if len(logFiles) >= MAX_LOG_FILES_TO_KEEP:
for file in logFiles[: len(logFiles) - MAX_LOG_FILES_TO_KEEP]:
@@ -70,21 +66,15 @@ def wrapper(*args: any, **kwargs: any) -> any:
except BaseException:
excType, excValue, excTrace = sys.exc_info()
tb = traceback.TracebackException(excType, excValue, excTrace)
formattedTb = "".join(
list(tb.format())[2:]
) # Remove the wrapper func from the traceback.
formattedTb = "".join(list(tb.format())[2:]) # Remove the wrapper func from the traceback.
clsName = ""
if args and hasattr(args[0], "__class__"):
clsName = args[0].__class__.__name__ + "."

getLogger(f"{INTERNAL_ID}.{clsName}{func.__name__}").error(
f"Failed:\n{formattedTb}"
)
getLogger(f"{INTERNAL_ID}.{clsName}{func.__name__}").error(f"Failed:\n{formattedTb}")
if messageBox:
ui = adsk.core.Application.get().userInterface
ui.messageBox(
f"Internal Failure: {formattedTb}", "Synthesis: Error"
)
ui.messageBox(f"Internal Failure: {formattedTb}", "Synthesis: Error")

return wrapper

22 changes: 5 additions & 17 deletions exporter/SynthesisFusionAddin/src/Parser/ExporterOptions.py
@@ -34,11 +34,7 @@ class ExporterOptions:
# user's computer has conflicting configs of some sort. This has happened and should be accounted
# for accordingly.
fileLocation: str | None = field(
default=(
os.getenv("HOME")
if platform.system() == "Windows"
else os.path.expanduser("~")
)
default=(os.getenv("HOME") if platform.system() == "Windows" else os.path.expanduser("~"))
)
name: str = field(default=None)
version: str = field(default=None)
@@ -61,13 +57,9 @@ class ExporterOptions:
exportLocation: ExportLocation = field(default=ExportLocation.UPLOAD)

hierarchy: ModelHierarchy = field(default=ModelHierarchy.FusionAssembly)
visualQuality: TriangleMeshQualityOptions = field(
default=TriangleMeshQualityOptions.LowQualityTriangleMesh
)
visualQuality: TriangleMeshQualityOptions = field(default=TriangleMeshQualityOptions.LowQualityTriangleMesh)
physicalDepth: PhysicalDepth = field(default=PhysicalDepth.AllOccurrence)
physicalCalculationLevel: CalculationAccuracy = field(
default=CalculationAccuracy.LowCalculationAccuracy
)
physicalCalculationLevel: CalculationAccuracy = field(default=CalculationAccuracy.LowCalculationAccuracy)

@logFailure
@timed
Expand All @@ -76,9 +68,7 @@ def readFromDesign(self) -> "ExporterOptions":
for field in fields(self):
attribute = designAttributes.itemByName(INTERNAL_ID, field.name)
if attribute:
attrJsonData = makeObjectFromJson(
field.type, json.loads(attribute.value)
)
attrJsonData = makeObjectFromJson(field.type, json.loads(attribute.value))
setattr(self, field.name, attrJsonData)

return self
@@ -88,7 +78,5 @@ def readFromDesign(self) -> "ExporterOptions":
def writeToDesign(self) -> None:
designAttributes = adsk.core.Application.get().activeProduct.attributes
for field in fields(self):
data = json.dumps(
getattr(self, field.name), default=encodeNestedObjects, indent=4
)
data = json.dumps(getattr(self, field.name), default=encodeNestedObjects, indent=4)
designAttributes.add(INTERNAL_ID, field.name, data)
(diff for the remaining 22 of 27 changed files not shown)

