From b3122c3cc9b4ad1f32024689ebfaf3a970bacfa0 Mon Sep 17 00:00:00 2001
From: "ivan.ruzavin"
Date: Thu, 9 Jan 2025 11:56:33 +0100
Subject: [PATCH 1/6] Updated packaging approach to reduce the number of
 GitHub requests

This PR matters mainly for huge updates, such as the year switch in the
header files.

Currently, each Asset Delete-Upload function call uses 4-20 GitHub API
requests. With 450 assets (at the moment this PR was created), re-uploading
all of them exceeds the 5000-request rate limit (the check indexes script
always runs in the background twice an hour, and one scheduled workflow
currently takes about 900 requests).

With this update, the packaging script makes ~10 requests up front to fetch
all uploaded assets, and then just 2 requests per Asset Delete-Upload call.
---
 scripts/package.py | 65 +++++++++++++++++++++++++---------------------
 1 file changed, 36 insertions(+), 29 deletions(-)

diff --git a/scripts/package.py b/scripts/package.py
index 7ed824bb8..a012d9396 100644
--- a/scripts/package.py
+++ b/scripts/package.py
@@ -58,21 +58,14 @@ def create_custom_archive(source_folder, archive_path):
     os.chdir(source_folder)
     archive.writeall('./')
 
-def upload_asset_to_release(repo, release_id, asset_path, token, delete_existing=True):
-    """Upload an asset to a specific GitHub release. If the asset exists, delete it first."""
-    asset_name = os.path.basename(asset_path)
-    url = f'https://api.github.com/repos/{repo}/releases/{release_id}/assets'
+def get_all_release_assets(repo, release_id, token):
+    all_assets = []
     headers = {
         'Authorization': f'token {token}',
         'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'
     }
-
-    # Handle pagination to get all assets
     page = 1
-    asset_deleted = False
     while True:
-        if asset_deleted:
-            break
         url = f'https://api.github.com/repos/{repo}/releases/{release_id}/assets?page={page}&per_page=30'
         response = requests.get(url, headers=headers)
         response.raise_for_status()
@@ -82,21 +75,33 @@ def upload_asset_to_release(repo, release_id, asset_path, token, delete_existing
         if not assets:
             break
 
-        # Check if the asset already exists
-        for asset in assets:
-            if asset['name'] == asset_name:
-                # If the asset exists, delete it
-                delete_url = asset['url']
-                if delete_existing:
-                    print(f'Deleting existing asset: {asset_name}')
-                    delete_response = requests.delete(delete_url, headers=headers)
-                    delete_response.raise_for_status()
-                    print(f'Asset deleted: {asset_name}')
-                asset_deleted = True
-                break
+        all_assets += (asset for asset in assets)
 
         page += 1
 
+    return all_assets
+
+def upload_asset_to_release(repo, release_id, asset_path, token, assets, delete_existing=True):
+    """Upload an asset to a specific GitHub release. If the asset exists, delete it first."""
+    asset_name = os.path.basename(asset_path)
+    url = f'https://api.github.com/repos/{repo}/releases/{release_id}/assets'
+    headers = {
+        'Authorization': f'token {token}',
+        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'
+    }
+
+    # Check if the asset already exists
+    for asset in assets:
+        if asset['name'] == asset_name:
+            # If the asset exists, delete it
+            delete_url = asset['url']
+            if delete_existing:
+                print(f'Deleting existing asset: {asset_name}')
+                delete_response = requests.delete(delete_url, headers=headers)
+                delete_response.raise_for_status()
+                print(f'Asset deleted: {asset_name}')
+            break
+
     # Upload the new asset
     url = f'https://uploads.github.com/repos/{repo}/releases/{release_id}/assets?name={os.path.basename(asset_path)}'
     headers = {
@@ -511,6 +516,8 @@ def str2bool(v):
     # Get the release ID used to upload assets
     release_id = get_release_id(args.repo, f'mikroSDK-{version}', args.token)
 
+    assets = get_all_release_assets(args.repo, release_id, args.token)
+
     metadata_content = {}
     if not args.package_boards_or_mcus:
         if manifest_folder:
@@ -522,7 +529,7 @@ def str2bool(v):
             create_7z_archive('mikroSDK_v2', repo_dir, archive_path)
             print('Archive created successfully: %s' % archive_path)
             metadata_content['mikrosdk'] = {'version': version}
-            upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token)
+            upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token, assets)
             print('Asset "%s" uploaded successfully to release ID: %s' % ('mikrosdk', release_id))
 
     if os.path.exists(os.path.join(repo_dir, 'resources/images')):
@@ -532,7 +539,7 @@ def str2bool(v):
         os.chdir(repo_dir)
         metadata_content['images'] = {'hash': hash_directory_contents(os.path.join(repo_dir, 'resources/images'))}
         print('Archive created successfully: %s' % archive_path)
-        upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token)
+        upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token, assets)
         print('Asset "%s" uploaded successfully to release ID: %s' % ('images', release_id))
 
     if not args.package_boards_or_mcus:
@@ -543,7 +550,7 @@ def str2bool(v):
             os.chdir(repo_dir)
             metadata_content['templates'] = {'hash': hash_directory_contents(os.path.join(repo_dir, 'templates/necto'))}
             print('Archive created successfully: %s' % archive_path)
-            upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token)
+            upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token, assets)
             print('Asset "%s" uploaded successfully to release ID: %s' % ('templates', release_id))
 
     if os.path.exists(os.path.join(repo_dir, 'resources/queries')):
@@ -552,7 +559,7 @@ def str2bool(v):
         create_custom_archive('resources/queries', archive_path)
         os.chdir(repo_dir)
         print('Archive created successfully: %s' % archive_path)
-        upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token)
+        upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token, assets)
         print('Asset "%s" uploaded successfully to release ID: %s' % ('queries', release_id))
 
     # Package all boards as separate packages
@@ -600,16 +607,16 @@ def str2bool(v):
                     execute = False
                 break
             if execute:
-                upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]['package_rel_path']}'), args.token)
+                upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]['package_rel_path']}'), args.token, assets)
         else:
-            upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]['package_rel_path']}'), args.token)
+            upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]['package_rel_path']}'), args.token, assets)
 
     # BSP asset for internal MIKROE tools
     os.chdir(repo_dir)
     archive_path = os.path.join(repo_dir, 'bsps.7z')
     print('Creating archive: %s' % archive_path)
     zip_bsp_related_files(archive_path, repo_dir)
-    upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token)
+    upload_result = upload_asset_to_release(args.repo, release_id, archive_path, args.token, assets)
     print('Asset "%s" uploaded successfully to release ID: %s' % ('bsps', release_id))
 
     os.makedirs(os.path.join(repo_dir, 'tmp'), exist_ok=True)
@@ -627,4 +634,4 @@ def str2bool(v):
     with open(os.path.join(repo_dir, 'tmp/metadata.json'), 'w') as metadata:
         json.dump(metadata_content, metadata, indent=4)
         metadata.close()
-    upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, 'tmp/metadata.json'), args.token)
+    upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, 'tmp/metadata.json'), args.token, assets)
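Note: the request arithmetic in the commit message follows directly from this diff. A minimal standalone sketch of the pattern, with placeholder repo, release ID, and token values (the real script wires these through argparse):

    import requests

    def get_all_release_assets(repo, release_id, token):
        # One paginated listing up front: ~10 GET requests for ~450 assets
        # at 30 per page, instead of 4-20 requests inside every upload call.
        headers = {'Authorization': f'token {token}'}
        all_assets, page = [], 1
        while True:
            url = f'https://api.github.com/repos/{repo}/releases/{release_id}/assets?page={page}&per_page=30'
            response = requests.get(url, headers=headers)
            response.raise_for_status()
            batch = response.json()
            if not batch:
                break
            all_assets += batch
            page += 1
        return all_assets

    # Fetch the snapshot once; afterwards each Asset Delete-Upload pair costs
    # exactly 2 requests: one DELETE for the stale asset, one POST upload.
    assets = get_all_release_assets('MikroElektronika/mikrosdk_v2', 0, 'token-placeholder')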
From 6ab5a364b75b6ac404737fb2b8b9bc1fe2ab0356 Mon Sep 17 00:00:00 2001
From: "ivan.ruzavin"
Date: Thu, 9 Jan 2025 12:04:43 +0100
Subject: [PATCH 2/6] Added coloring for key steps

---
 scripts/package.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/scripts/package.py b/scripts/package.py
index a012d9396..9ec427a0c 100644
--- a/scripts/package.py
+++ b/scripts/package.py
@@ -99,7 +99,7 @@ def upload_asset_to_release(repo, release_id, asset_path, token, assets, delete_
                 print(f'Deleting existing asset: {asset_name}')
                 delete_response = requests.delete(delete_url, headers=headers)
                 delete_response.raise_for_status()
-                print(f'Asset deleted: {asset_name}')
+                print(f'\033[91mAsset deleted: {asset_name}\033[0m')
             break
 
     # Upload the new asset
@@ -114,7 +114,7 @@ def upload_asset_to_release(repo, release_id, asset_path, token, assets, delete_
             print(f'Uploading new asset: {asset_name}')
             response = requests.post(url, headers=headers, data=file)
             response.raise_for_status()
-            print(f'Uploaded asset: {os.path.basename(asset_path)} to release ID: {release_id}')
+            print(f'\033[92mUploaded asset: {os.path.basename(asset_path)} to release ID: {release_id}\033[0m')
             return response.json()
     else:
         asset_exists = False
@@ -127,7 +127,7 @@ def upload_asset_to_release(repo, release_id, asset_path, token, assets, delete_
         print(f'Uploading new asset: {asset_name}')
         response = requests.post(url, headers=headers, data=file)
         response.raise_for_status()
-        print(f'Uploaded asset: {os.path.basename(asset_path)} to release ID: {release_id}')
+        print(f'\033[92mUploaded asset: {os.path.basename(asset_path)} to release ID: {release_id}\033[0m')
         return response.json()
 
 def get_release_id(repo, tag_name, token):
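Note: the escape sequences added here are standard ANSI SGR color codes (91 bright red, 92 bright green; later patches also use 93 bright yellow and 95 bright magenta, with 0 resetting attributes). An illustrative sketch, with hypothetical constant names that the script itself does not define (it inlines the raw sequences):

    # Named ANSI SGR codes matching the escapes inlined across this series.
    RED = '\033[91m'      # deletions (this patch)
    GREEN = '\033[92m'    # successful uploads (this patch)
    YELLOW = '\033[93m'   # hash mismatches (PATCH 3/6)
    MAGENTA = '\033[95m'  # skipped duplicate assets (PATCH 6/6)
    RESET = '\033[0m'     # restore default terminal attributes

    print(f'{RED}Asset deleted: example.7z{RESET}')
    print(f'{GREEN}Uploaded asset: example.7z to release ID: 123456{RESET}')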
From cb9d03db17a0185382500d881236180cdab9753f Mon Sep 17 00:00:00 2001
From: Ivan Ruzavin
Date: Fri, 10 Jan 2025 08:47:50 +0100
Subject: [PATCH 3/6] Added message if hashes are not the same

---
 scripts/package.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/scripts/package.py b/scripts/package.py
index 9ec427a0c..08aefad1f 100644
--- a/scripts/package.py
+++ b/scripts/package.py
@@ -605,6 +605,8 @@ def str2bool(v):
                 # If package has been changed, update it either way
                 if packages[each_package]['hash'] == live_packages[each_metadata_package_key]['hash']:
                     execute = False
+                else:
+                    print(f'\033[93mHashes for uploaded and currently zipped archives are not the same for {each_package['name']}!\033[0m')
                 break
             if execute:
                 upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]['package_rel_path']}'), args.token, assets)

From d78166fcd1dd36c9df5e592a8c0346a0c61720d2 Mon Sep 17 00:00:00 2001
From: Ivan Ruzavin
Date: Fri, 10 Jan 2025 09:16:01 +0100
Subject: [PATCH 4/6] Updated naming for package printing

---
 scripts/package.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/package.py b/scripts/package.py
index 08aefad1f..55d0a29fe 100644
--- a/scripts/package.py
+++ b/scripts/package.py
@@ -606,7 +606,7 @@ def str2bool(v):
                 if packages[each_package]['hash'] == live_packages[each_metadata_package_key]['hash']:
                     execute = False
                 else:
-                    print(f'\033[93mHashes for uploaded and currently zipped archives are not the same for {each_package['name']}!\033[0m')
+                    print(f'\033[93mHashes for uploaded archive and for currently zipped are not the same for {packages[each_package]["name"]}!\033[0m')
                 break
             if execute:
                 upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]['package_rel_path']}'), args.token, assets)
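For context, the decision rule these two patches refine: a package is re-uploaded only when its freshly computed hash differs from the hash recorded in the latest released metadata.json. A simplified, hypothetical sketch of that check (sample data invented for illustration; the real script matches packages against live metadata keys before comparing):

    # Simplified sketch of the hash comparison driving the skip/re-upload choice.
    packages = {'board_example': {'hash': 'abc123', 'name': 'board_example'}}
    live_packages = {'board_example': {'hash': 'abc123'}}

    for each_package, pkg in packages.items():
        live = live_packages.get(each_package)
        if live is not None and live['hash'] == pkg['hash']:
            print(f'{each_package}: hash unchanged, skipping re-upload')
        else:
            print(f'\033[93mHashes for uploaded and currently zipped archives are not the same for {each_package}!\033[0m')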
From 875e7dbe4b09bea1aa92350f4708462423dfef03 Mon Sep 17 00:00:00 2001
From: Ivan Ruzavin
Date: Fri, 10 Jan 2025 10:50:56 +0100
Subject: [PATCH 5/6] Updated logic for packaging

---
 scripts/package.py | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/scripts/package.py b/scripts/package.py
index 55d0a29fe..0c7c6447c 100644
--- a/scripts/package.py
+++ b/scripts/package.py
@@ -99,6 +99,7 @@ def upload_asset_to_release(repo, release_id, asset_path, token, assets, delete_
                 print(f'Deleting existing asset: {asset_name}')
                 delete_response = requests.delete(delete_url, headers=headers)
                 delete_response.raise_for_status()
+                assets.remove(asset)
                 print(f'\033[91mAsset deleted: {asset_name}\033[0m')
             break
 
@@ -445,13 +446,13 @@ def package_card_files(repo_root, files_root_dir, path_list, sdk_version):
                 "name": json_device['uid'].rsplit('_', 1)[0].lower(),
                 "display_name": json_device['name'],
                 "type": "card",
-                "icon": f'https://raw.githubusercontent.com/MikroElektronika/mikrosdk_v2/master/resources/{json_device['icon']}',
+                "icon": f'https://raw.githubusercontent.com/MikroElektronika/mikrosdk_v2/master/resources/{json_device["icon"]}',
                 "package_name": package_name,
                 "hash": hash_directory_contents(os.path.join(repo_root, f'tmp/assets/{asset_type}/{each_query_path}')),
                 "category": "Card Package",
                 "package_rel_path": f'tmp/assets/{asset_type}/{package_name}.7z',
                 "install_location": f"%APPLICATION_DATA_DIR%/packages/sdk/mikroSDK_v2/src/bsp/board/include/mcu_cards/{card_path}/{json_device['def_file'].split('.')[0]}",
-                "db_query": f'UPDATE Devices SET installer_package = {query_file} WHERE name = \"{json_device['name']}\"'
+                "db_query": f'UPDATE Devices SET installer_package = {query_file} WHERE name = \"{json_device["name"]}\"'
             }
         }
     )
@@ -596,8 +597,14 @@ def str2bool(v):
     )
 
     # Upload all the board packages
+    processed_packages = []
     live_packages, metadata_full = fetch_live_packages('https://github.com/MikroElektronika/mikrosdk_v2/releases/latest/download/metadata.json')
     for each_package in packages:
+        # As we are not fetching actual info before evere deletion/upload, we need to store all the
+        # processed packages because some of them have the same assets.
+        if os.path.basename(packages[each_package]["package_rel_path"]) in processed_packages:
+            continue
+        processed_packages.append(os.path.basename(packages[each_package]["package_rel_path"]))
         if args.package_boards_or_mcus:
             execute = True
             for each_metadata_package_key in live_packages.keys():
@@ -606,12 +613,12 @@ def str2bool(v):
                 if packages[each_package]['hash'] == live_packages[each_metadata_package_key]['hash']:
                     execute = False
                 else:
-                    print(f'\033[93mHashes for uploaded archive and for currently zipped are not the same for {packages[each_package]["name"]}!\033[0m')
+                    print(f'\033[93mHashes for uploaded archive and for currently zipped are not the same for {os.path.basename(packages[each_package]["package_rel_path"])}!\033[0m')
                 break
             if execute:
-                upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]['package_rel_path']}'), args.token, assets)
+                upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]["package_rel_path"]}'), args.token, assets)
         else:
-            upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]['package_rel_path']}'), args.token, assets)
+            upload_result = upload_asset_to_release(args.repo, release_id, os.path.join(repo_dir, f'{packages[each_package]["package_rel_path"]}'), args.token, assets)
 
     # BSP asset for internal MIKROE tools
     os.chdir(repo_dir)

From db83a86d04fab32b8385643a3211893a396643cd Mon Sep 17 00:00:00 2001
From: "ivan.ruzavin"
Date: Mon, 13 Jan 2025 09:20:41 +0100
Subject: [PATCH 6/6] Added debug message for skipping duplicated assets

---
 scripts/package.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/scripts/package.py b/scripts/package.py
index 0c7c6447c..ba6e59a4d 100644
--- a/scripts/package.py
+++ b/scripts/package.py
@@ -600,9 +600,10 @@ def str2bool(v):
     processed_packages = []
     live_packages, metadata_full = fetch_live_packages('https://github.com/MikroElektronika/mikrosdk_v2/releases/latest/download/metadata.json')
     for each_package in packages:
-        # As we are not fetching actual info before evere deletion/upload, we need to store all the
+        # As we are not fetching actual info before every deletion/upload, we need to store all the
         # processed packages because some of them have the same assets.
         if os.path.basename(packages[each_package]["package_rel_path"]) in processed_packages:
+            print(f'\033[95mSkipped {os.path.basename(packages[each_package]["package_rel_path"])} asset because it is used by another item as well and has been already uploaded within this workflow run.\033[0m')
             continue
         processed_packages.append(os.path.basename(packages[each_package]["package_rel_path"]))
         if args.package_boards_or_mcus:
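Taken together, the last two patches guard against double work in two places: upload_asset_to_release() now prunes deleted entries from the shared assets snapshot (so it stays accurate without re-fetching), and the upload loop skips archives already handled in the same run, since several packages can point at the same asset file. A condensed sketch of that skip logic, with invented sample data:

    import os

    # Several packages may share one archive; track processed basenames and
    # skip repeats within a single workflow run (PATCH 5/6 and 6/6 behavior).
    packages = {
        'card_a': {'package_rel_path': 'tmp/assets/cards/shared_card.7z'},
        'card_b': {'package_rel_path': 'tmp/assets/cards/shared_card.7z'},
    }

    processed_packages = set()
    for each_package in packages:
        base = os.path.basename(packages[each_package]['package_rel_path'])
        if base in processed_packages:
            print(f'\033[95mSkipped {base}: already uploaded within this workflow run.\033[0m')
            continue
        processed_packages.add(base)
        print(f'Uploading {base}...')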