From b7ed09b044114adf4c620cd79aac96587e615715 Mon Sep 17 00:00:00 2001 From: "marko.lukic@mikroe.com" Date: Thu, 24 Oct 2024 14:07:11 +0200 Subject: [PATCH 1/5] Update read_microchip_index.py and reupload_databases.py --- scripts/read_microchip_index.py | 194 ++++++++++++++++++++++++++++++++ scripts/reupload_databases.py | 67 +++++++++-- 2 files changed, 251 insertions(+), 10 deletions(-) create mode 100644 scripts/read_microchip_index.py diff --git a/scripts/read_microchip_index.py b/scripts/read_microchip_index.py new file mode 100644 index 000000000..d0233607c --- /dev/null +++ b/scripts/read_microchip_index.py @@ -0,0 +1,194 @@ +import requests +import xml.etree.ElementTree as ET +import xmltodict +import json +from datetime import datetime + +# Step 1: Download the CMSIS index.idx file from the custom link +def download_index_file(url): + response = requests.get(url) + if response.status_code == 200: + return response.content # Return the raw XML content + else: + raise Exception(f"Failed to download index.idx file. Status code: {response.status_code}") + + +def filter_releases_by_version(json_data): + try: + # Extract the pdsc items from the JSON data + pdsc_items = json_data.get('idx', {}).get('pdsc', []) + + # Iterate through each pdsc item + for pdsc_item in pdsc_items: + try: + # Extract the version to keep + version_to_keep = pdsc_item.get('@version', None) + if not version_to_keep: + continue + + # Extract the releases array, which can be a list or a dictionary + releases = pdsc_item.get('atmel:releases', {}).get('atmel:release', []) + + # Check if releases is a dictionary (single release) or a list (multiple releases) + if isinstance(releases, dict): # Single release case + if releases.get('@version') == version_to_keep: + pdsc_item['atmel:releases']['atmel:release'] = [releases] # Keep as list + else: + pdsc_item['atmel:releases']['atmel:release'] = [] + elif isinstance(releases, list): # Multiple releases case + filtered_releases = [release for release in releases if release.get('@version') == version_to_keep] + pdsc_item['atmel:releases']['atmel:release'] = filtered_releases + + except Exception as pdsc_error: + print(f"Error processing PDSC item: {pdsc_item}. 
Error: {pdsc_error}") + + except Exception as e: + print(f"Error during conversion: {e}") + + return json_data + +def generate_list(item_list, tool_to_mcu_list): + mcu_to_dfp = {} + for item in item_list: + if not item: + print("Item is None or invalid, skipping...") + continue + + # Safely extract 'type' and 'name' with defaults + item_type = item.get('type', '') + name = item.get('name', None) + mcus = item.get('mcus', []) + display_name = item.get('display_name') + if not name: + print(f"Item without name found, skipping: {item}") + continue + + if not isinstance(mcus, list): + print(f"MCUs should be a list but found: {mcus} for item: {name}") + mcus = [] + + if item_type == 'microchip_tp': + # Populate tool_to_mcu safely + uid = name.replace('_tool_support', '') + tool_item = { + 'uid' : uid, + 'installer_package' : name, + 'display_name' : display_name, + 'icon' : f"images/programmers/{uid}.png", + 'hidden' : 0, + 'installed' : 0, + 'description' : '', + 'mcus' : mcus + } + tool_to_mcu_list.append(tool_item) + else: + # Ensure correct handling in mcu_to_dfp + for mcu in mcus: + if not mcu: + print(f"Skipping empty MCU in item: {name}") + continue + + # Initialize the list in mcu_to_dfp if not already present + if mcu not in mcu_to_dfp: + mcu_to_dfp[mcu] = [] + + # Safely append the item name + mcu_to_dfp[mcu].append(name) + + print(f"DFP: {name}") + for tool_item in tool_to_mcu_list: + tool_item['dfps'] = json.dumps(mcu_to_dfp) + + + + +def convert_idx_to_json(xml_content): + try: + # Open the idx file + data_dict = xmltodict.parse(xml_content) + + # Convert the parsed data to JSON format + + + data = filter_releases_by_version(data_dict) + item_list = [] + tool_to_mcu = [] + + for item in data.get('idx').get('pdsc'): + item_list.append(convert_item_to_es_json(item)) + + generate_list(item_list, tool_to_mcu) + return tool_to_mcu + + except Exception as e: + print(f"Error during conversion: {e} for item {item}") + + + +def convert_item_to_es_json(input_item): + # Extract relevant fields + atmel_name = input_item.get('@atmel:name') + package_type = 'microchip_dfp' + if '_TP' in atmel_name: + package_type = 'microchip_tp' + + version = input_item.get('@version') + + # Safely extract the release date with a fallback default value + releases = input_item.get('atmel:releases', {}).get('atmel:release', []) + if not releases: + print(f"No releases found for item: {atmel_name} version: {version}") + return None # Return None if there are no releases + + release_date = releases[0].get('@date', None) + + # Provide a default value or handle missing release_date + if not release_date: + print(f"Release date missing for item: {atmel_name} version: {version}") + release_date = datetime.now().strftime('%Y-%m-%d') # Use current date as a fallback + + download_link = f"https://packs.download.microchip.com/Microchip.{atmel_name}.{version}.atpack" + if package_type == 'microchip_tp': + display_name = f"{atmel_name.replace('_TP', '')} Tool Support" + else: + display_name = f"{atmel_name.replace('_DFP', '')} Device Support" + name = display_name.lower().replace(" ", "_") + + # Check if 'atmel:devices' exists and is not None + if package_type == 'microchip_tp': + devices_section = input_item.get('atmel:devices', None) + else: + devices_section = releases[0].get('atmel:devices', None) + if devices_section is None: + + print(f"No devices found for item: {atmel_name} version: {version}") + mcus = [] # Set mcus as an empty list if there are no devices + else: + # If devices_section exists, extract the devices + 
devices = devices_section.get('atmel:device', []) + + # Handle both list and single dict cases + if isinstance(devices, dict): # If it's a single device entry, convert it to a list + devices = [devices] + + mcus = [device.get('@name') for device in devices if '@name' in device] + + # Construct the output JSON structure + output_json = { + "name": name, + "display_name": display_name, + "author": "Microchip", + "hidden": False, + "type": package_type, + "version": version, + "created_at": release_date + "T00:00:00Z", + "updated_at": release_date + "T00:00:00Z", # Convert the release date to ISO format with time + "category": "Microchip Device support", + "download_link": download_link, + "package_changed": False, + "install_location": f"%APPLICATION_DATA_DIR%/packages/packsfolder/Microchip/{atmel_name}/{version}", + "dependencies": [], + "mcus": mcus # This will be an empty list if no devices are found + } + + return output_json diff --git a/scripts/reupload_databases.py b/scripts/reupload_databases.py index 1bec605ec..68dc0949b 100644 --- a/scripts/reupload_databases.py +++ b/scripts/reupload_databases.py @@ -15,6 +15,7 @@ import enums as enums import support as utility import addSdkVersion as sdk +import read_microchip_index as MCHP entranceCheckProg = True entranceCheckDebug = True @@ -891,6 +892,7 @@ async def main( False ) + ## Step 2 - Update database with new SDK if needed ## Add new sdk version if 'latest' == release_version_sdk: @@ -1023,6 +1025,51 @@ async def main( checkDebuggerToDevice(databaseErp, allDevicesGithub, progDbgAsJson, False) checkProgrammerToDevice(databaseNecto, allDevicesGithub, progDbgAsJson, True) checkDebuggerToDevice(databaseNecto, allDevicesGithub, progDbgAsJson, False) + ## Step 10.1 add microchip info to programmers table + custom_link = 'https://packs.download.microchip.com/index.idx' + if not mcus_only: + # Download the index file + xml_content = MCHP.download_index_file(custom_link) + converted_data = MCHP.convert_idx_to_json(xml_content) + + programmersColumns = 'uid,hidden,name,icon,installed,description,installer_package,device_support_package' + progToDeviceColumns = 'programer_uid,device_uid' + for eachDb in [databaseErp, databaseNecto]: + if eachDb: + ## Add missing columns to programmer table + addCollumnsToTable( + eachDb, ['installer_package', 'device_support_package'], 'Programmers', ['Text', 'Text'], ['NoDefault', 'NoDefault'] + ) + ## Add all tools found in microchip index file to programmers table + for prog_item in converted_data: + print(f"Inserting {prog_item['uid']} into Programmers table") + insertIntoTable( + eachDb, + 'Programmers', + [ + prog_item['uid'], + prog_item['hidden'], + prog_item['display_name'], + prog_item['icon'], + prog_item['installed'], + prog_item['description'], + prog_item['installer_package'], + prog_item['dfps'] + ], + programmersColumns + ) + ## Add MCU to Programmer mapping found in microchip index file + for mcu in prog_item['mcus']: + print(f"Inserting {mcu.upper()}:{prog_item['uid']} into ProgrammerToDevice table") + insertIntoTable( + eachDb, + 'ProgrammerToDevice', + [ + prog_item['uid'], + mcu.upper() + ], + progToDeviceColumns + ) ## Step 11 - update families if not mcus_only: @@ -1048,16 +1095,16 @@ async def main( ) ## Step 14 - re-upload over existing assets - if not mcus_only: - archive_path = compress_directory_7z(os.path.join(os.path.dirname(__file__), 'databases'), f'{dbPackageName}.7z') - async with aiohttp.ClientSession() as session: - upload_result = await upload_release_asset(session, token, 
repo, archive_path, release_version)
-        if databaseErp:
-            async with aiohttp.ClientSession() as session:
-                upload_result = await upload_release_asset(session, token, repo, databaseErp, release_version)
-
-    ## Step 15 - overwrite the existing necto_db.db in root with newly generated one
-    shutil.copy2(databaseNecto, os.path.join(os.getcwd(), f'{dbName}.db'))
+    # if not mcus_only:
+    #     archive_path = compress_directory_7z(os.path.join(os.path.dirname(__file__), 'databases'), f'{dbPackageName}.7z')
+    #     async with aiohttp.ClientSession() as session:
+    #         upload_result = await upload_release_asset(session, token, repo, archive_path, release_version)
+    #     if databaseErp:
+    #         async with aiohttp.ClientSession() as session:
+    #             upload_result = await upload_release_asset(session, token, repo, databaseErp, release_version)
+
+    # ## Step 15 - overwrite the existing necto_db.db in root with newly generated one
+    # shutil.copy2(databaseNecto, os.path.join(os.getcwd(), f'{dbName}.db'))
 ## ------------------------------------------------------------------------------------ ##
 ## EOF Main runner

From 4ac4443692feaa47e0af78df50709f55b1016e58 Mon Sep 17 00:00:00 2001
From: "marko.lukic@mikroe.com"
Date: Fri, 1 Nov 2024 10:31:32 +0100
Subject: [PATCH 2/5] Add Microchip tool metadata and pack indexing to scripts

---
 scripts/index.py                | 13 ++++++++++
 scripts/read_microchip_index.py | 44 +++++++++++++++++++++++++++++----
 scripts/reupload_databases.py   | 42 ++++++++++++++++++++-----------
 3 files changed, 79 insertions(+), 20 deletions(-)

diff --git a/scripts/index.py b/scripts/index.py
index 9a2a2fcfc..35ad5d3e5 100644
--- a/scripts/index.py
+++ b/scripts/index.py
@@ -4,6 +4,7 @@
 from datetime import datetime, timezone
 
 import support as support
+import read_microchip_index as MCHP
 
 # Gets latest release headers from repository
 def get_headers(api, token):
@@ -465,6 +466,15 @@ def promote_to_latest(releases, repo, token, release_version):
                 raise Exception(f"Failed to revert status for release {selected_release['name']}: {response_3.status_code} - {response_3.text}")
     return
 
+def index_microchip_packs(es: Elasticsearch, index_name: str):
+    custom_link = 'https://packs.download.microchip.com/index.idx'
+    # Download the index file
+    xml_content = MCHP.download_index_file(custom_link)
+    converted_data, item_list = MCHP.convert_idx_to_json(xml_content)
+    for eachItem in item_list:
+        resp = es.index(index=index_name, doc_type='necto_package', id=eachItem['name'], body=eachItem)
+        print(f"{resp['result']} {resp['_id']}")
+
 
 if __name__ == '__main__':
     # First, check for arguments passed
@@ -508,6 +518,9 @@ def str2bool(v):
     db_version = remove_duplicate_indexed_files(
         es, args.select_index
     )
+    #Index microchip device family packs
+    index_microchip_packs(es, args.select_index)
+
 
     # Now index the new release
     index_release_to_elasticsearch(
diff --git a/scripts/read_microchip_index.py b/scripts/read_microchip_index.py
index d0233607c..4fec89410 100644
--- a/scripts/read_microchip_index.py
+++ b/scripts/read_microchip_index.py
@@ -70,14 +70,48 @@ def generate_list(item_list, tool_to_mcu_list):
         if item_type == 'microchip_tp':
             # Populate tool_to_mcu safely
             uid = name.replace('_tool_support', '')
+            #
+            displayNameMap = {"atmelice" : "ATMEL-ICE",
+                              "edbg" : "Atmel® Embedded Debugger (EDBG)",
+                              "icd4" : "MPLAB® ICD 4",
+                              "icd5" : "MPLAB® ICD 5",
+                              "ice4" : "MPLAB® ICE 4",
+                              "jtagice3" : "JTAGICE3",
+                              "pickit4" : "MPLAB® PICkit™ 4",
+                              "pickit5" : "MPLAB® PICkit™ 5",
+                              "pkob4" : "PICkit On-Board 4 (PKOB4)",
+                              "powerdebugger" : "Power Debugger",
+                              "simulator" : "",
+                              "snap" : "MPLAB 
Snap", + "medbg" : "mEDBG (Mini Embedded Debugger)", + "nedbg" : "PKOB nano", + } + descriptionMap = { + "atmelice": "Atmel-ICE is a debugging and programming tool for ARM Cortex-M and AVR microcontrollers.", + "edbg": "Atmel Embedded Debugger (EDBG) is an onboard debugger for development kits with Atmel MCUs.", + "icd4": "MPLAB ICD 4 is Microchip’s fast, cost-effective debugger for PIC, SAM, and dsPIC devices.", + "icd5": "MPLAB ICD 5 provides advanced connectivity and power options for PIC, AVR, SAM, and dsPIC devices.", + "ice4": "MPLAB ICE 4 offers feature-rich debugging for PIC, AVR, SAM, and dsPIC devices.", + "jtagice3": "Mid-range tool for AVR and SAM D ARM Cortex-M0+ microcontrollers with on-chip debugging.", + "pickit4": "MPLAB PICkit 4 allows fast debugging and programming of PIC, dsPIC, AVR, and SAM MCUs.", + "pickit5": "MPLAB PICkit 5 supports quick prototyping and production-ready programming for Microchip devices.", + "pkob4": "PKOB4 (PICkit On-Board 4) is an onboard debugger with no additional tools required.", + "powerdebugger": "Power Debugger for AVR and ARM Cortex-M SAM microcontrollers using various interfaces.", + "simulator": "", + "snap": "MPLAB Snap is a cost-effective debugger for PIC, dsPIC, AVR, and SAM flash MCUs.", + "medbg": "Mini Embedded Debugger (mEDBG).", + "nedbg": "Curiosity Nano onboard debugger (nEDBG or PKOB nano)." + } + + tool_item = { 'uid' : uid, 'installer_package' : name, - 'display_name' : display_name, + 'display_name' : displayNameMap.get(uid, display_name), 'icon' : f"images/programmers/{uid}.png", 'hidden' : 0, 'installed' : 0, - 'description' : '', + 'description' : descriptionMap.get(uid, ''), 'mcus' : mcus } tool_to_mcu_list.append(tool_item) @@ -118,7 +152,7 @@ def convert_idx_to_json(xml_content): item_list.append(convert_item_to_es_json(item)) generate_list(item_list, tool_to_mcu) - return tool_to_mcu + return tool_to_mcu, item_list except Exception as e: print(f"Error during conversion: {e} for item {item}") @@ -146,13 +180,13 @@ def convert_item_to_es_json(input_item): if not release_date: print(f"Release date missing for item: {atmel_name} version: {version}") release_date = datetime.now().strftime('%Y-%m-%d') # Use current date as a fallback - download_link = f"https://packs.download.microchip.com/Microchip.{atmel_name}.{version}.atpack" if package_type == 'microchip_tp': display_name = f"{atmel_name.replace('_TP', '')} Tool Support" else: display_name = f"{atmel_name.replace('_DFP', '')} Device Support" name = display_name.lower().replace(" ", "_") + name = name.lower().replace("-", "_") # Check if 'atmel:devices' exists and is not None if package_type == 'microchip_tp': @@ -185,7 +219,7 @@ def convert_item_to_es_json(input_item): "updated_at": release_date + "T00:00:00Z", # Convert the release date to ISO format with time "category": "Microchip Device support", "download_link": download_link, - "package_changed": False, + "package_changed": True, "install_location": f"%APPLICATION_DATA_DIR%/packages/packsfolder/Microchip/{atmel_name}/{version}", "dependencies": [], "mcus": mcus # This will be an empty list if no devices are found diff --git a/scripts/reupload_databases.py b/scripts/reupload_databases.py index 68dc0949b..96b877f46 100644 --- a/scripts/reupload_databases.py +++ b/scripts/reupload_databases.py @@ -1030,19 +1030,23 @@ async def main( if not mcus_only: # Download the index file xml_content = MCHP.download_index_file(custom_link) - converted_data = MCHP.convert_idx_to_json(xml_content) + converted_data, item_list = 
MCHP.convert_idx_to_json(xml_content) - programmersColumns = 'uid,hidden,name,icon,installed,description,installer_package,device_support_package' - progToDeviceColumns = 'programer_uid,device_uid' + programmersColumns = 'uid,hidden,name,icon,installed,description,installer_package' + progToDeviceColumns = 'programer_uid,device_uid, device_support_package' for eachDb in [databaseErp, databaseNecto]: if eachDb: ## Add missing columns to programmer table addCollumnsToTable( - eachDb, ['installer_package', 'device_support_package'], 'Programmers', ['Text', 'Text'], ['NoDefault', 'NoDefault'] + eachDb, ['installer_package'], 'Programmers', ['Text'], ['NoDefault'] + ) + addCollumnsToTable( + eachDb, ['device_support_package'], 'ProgrammerToDevice', ['Text'], ['NoDefault'] ) ## Add all tools found in microchip index file to programmers table for prog_item in converted_data: print(f"Inserting {prog_item['uid']} into Programmers table") + dfpsMap = json.loads(prog_item['dfps']) insertIntoTable( eachDb, 'Programmers', @@ -1053,23 +1057,31 @@ async def main( prog_item['icon'], prog_item['installed'], prog_item['description'], - prog_item['installer_package'], - prog_item['dfps'] + prog_item['installer_package'] ], programmersColumns ) ## Add MCU to Programmer mapping found in microchip index file + missingMcuDfp = [] for mcu in prog_item['mcus']: print(f"Inserting {mcu.upper()}:{prog_item['uid']} into ProgrammerToDevice table") - insertIntoTable( - eachDb, - 'ProgrammerToDevice', - [ - prog_item['uid'], - mcu.upper() - ], - progToDeviceColumns - ) + if mcu in dfpsMap: + exists, uid_list = read_data_from_db(eachDb, f"SELECT uid FROM Devices WHERE def_file = \"{mcu.upper()}.json\"") + if exists: + for mcu_uid in uid_list: + insertIntoTable( + eachDb, + 'ProgrammerToDevice', + [ + prog_item['uid'], + mcu_uid[0], + json.dumps(dfpsMap[mcu]) + ], + progToDeviceColumns + ) + else: + missingMcuDfp.append(mcu) + print(f"Following MCUs does not have DFP: {missingMcuDfp}") ## Step 11 - update families if not mcus_only: From 8f9e311918062e8cd6941b1563faaaf0583c3e2e Mon Sep 17 00:00:00 2001 From: "marko.lukic@mikroe.com" Date: Fri, 1 Nov 2024 10:33:57 +0100 Subject: [PATCH 3/5] Uncomment code --- scripts/reupload_databases.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/scripts/reupload_databases.py b/scripts/reupload_databases.py index 96b877f46..3ac58767f 100644 --- a/scripts/reupload_databases.py +++ b/scripts/reupload_databases.py @@ -1107,16 +1107,16 @@ async def main( ) ## Step 14 - re-upload over existing assets - # if not mcus_only: - # archive_path = compress_directory_7z(os.path.join(os.path.dirname(__file__), 'databases'), f'{dbPackageName}.7z') - # async with aiohttp.ClientSession() as session: - # upload_result = await upload_release_asset(session, token, repo, archive_path, release_version) - # if databaseErp: - # async with aiohttp.ClientSession() as session: - # upload_result = await upload_release_asset(session, token, repo, databaseErp, release_version) - - # ## Step 15 - overwrite the existing necto_db.db in root with newly generated one - # shutil.copy2(databaseNecto, os.path.join(os.getcwd(), f'{dbName}.db')) + if not mcus_only: + archive_path = compress_directory_7z(os.path.join(os.path.dirname(__file__), 'databases'), f'{dbPackageName}.7z') + async with aiohttp.ClientSession() as session: + upload_result = await upload_release_asset(session, token, repo, archive_path, release_version) + if databaseErp: + async with aiohttp.ClientSession() as 
session:
+                upload_result = await upload_release_asset(session, token, repo, databaseErp, release_version)
+
+    ## Step 15 - overwrite the existing necto_db.db in root with newly generated one
+    shutil.copy2(databaseNecto, os.path.join(os.getcwd(), f'{dbName}.db'))
 ## ------------------------------------------------------------------------------------ ##
 ## EOF Main runner

From a4905ced23771e29c81fdaf89dc2eaa6bcc7518d Mon Sep 17 00:00:00 2001
From: strahi-linux
Date: Fri, 1 Nov 2024 11:36:15 +0100
Subject: [PATCH 4/5] Update code formatting

---
 scripts/index.py                |  5 +--
 scripts/read_microchip_index.py | 72 ++++++++++++++-------------------
 scripts/reupload_databases.py   | 18 ++++-----
 3 files changed, 42 insertions(+), 53 deletions(-)

diff --git a/scripts/index.py b/scripts/index.py
index 35ad5d3e5..c15d052eb 100644
--- a/scripts/index.py
+++ b/scripts/index.py
@@ -466,6 +466,7 @@ def promote_to_latest(releases, repo, token, release_version):
                 raise Exception(f"Failed to revert status for release {selected_release['name']}: {response_3.status_code} - {response_3.text}")
     return
 
+
 def index_microchip_packs(es: Elasticsearch, index_name: str):
     custom_link = 'https://packs.download.microchip.com/index.idx'
     # Download the index file
     xml_content = MCHP.download_index_file(custom_link)
@@ -475,7 +476,6 @@ def promote_to_latest(releases, repo, token, release_version):
         resp = es.index(index=index_name, doc_type='necto_package', id=eachItem['name'], body=eachItem)
         print(f"{resp['result']} {resp['_id']}")
 
-
 if __name__ == '__main__':
     # First, check for arguments passed
     def str2bool(v):
@@ -518,10 +518,9 @@ def str2bool(v):
     db_version = remove_duplicate_indexed_files(
         es, args.select_index
     )
-    #Index microchip device family packs
+    # Index microchip device family packs
     index_microchip_packs(es, args.select_index)
-
 
     # Now index the new release
     index_release_to_elasticsearch(
         es, args.select_index,
diff --git a/scripts/read_microchip_index.py b/scripts/read_microchip_index.py
index 4fec89410..31ae0e9f7 100644
--- a/scripts/read_microchip_index.py
+++ b/scripts/read_microchip_index.py
@@ -12,7 +12,6 @@ def download_index_file(url):
     else:
         raise Exception(f"Failed to download index.idx file. 
Status code: {response.status_code}") - def filter_releases_by_version(json_data): try: # Extract the pdsc items from the JSON data @@ -70,40 +69,38 @@ def generate_list(item_list, tool_to_mcu_list): if item_type == 'microchip_tp': # Populate tool_to_mcu safely uid = name.replace('_tool_support', '') - # - displayNameMap = {"atmelice" : "ATMEL-ICE", - "edbg" : "Atmel® Embedded Debugger (EDBG)", - "icd4" : "MPLAB® ICD 4", - "icd5" : "MPLAB® ICD 5", - "ice4" : "MPLAB® ICE 4", - "jtagice3" : "JTAGICE3", - "pickit4" : "MPLAB® PICkit™ 4", - "pickit5" : "MPLAB® PICkit™ 5", - "pkob4" : "PICkit On-Board 4 (PKOB4)", - "powerdebugger" : "Power Debugger", - "simulator" : "", - "snap" : "MPLAB Snap", - "medbg" : "mEDBG (Mini Embedded Debugger)", - "nedbg" : "PKOB nano", - } + displayNameMap = { + "atmelice" : "ATMEL-ICE", + "edbg" : "Atmel® Embedded Debugger (EDBG)", + "icd4" : "MPLAB® ICD 4", + "icd5" : "MPLAB® ICD 5", + "ice4" : "MPLAB® ICE 4", + "jtagice3" : "JTAGICE3", + "pickit4" : "MPLAB® PICkit™ 4", + "pickit5" : "MPLAB® PICkit™ 5", + "pkob4" : "PICkit On-Board 4 (PKOB4)", + "powerdebugger" : "Power Debugger", + "simulator" : "", + "snap" : "MPLAB Snap", + "medbg" : "mEDBG (Mini Embedded Debugger)", + "nedbg" : "PKOB nano", + } descriptionMap = { - "atmelice": "Atmel-ICE is a debugging and programming tool for ARM Cortex-M and AVR microcontrollers.", - "edbg": "Atmel Embedded Debugger (EDBG) is an onboard debugger for development kits with Atmel MCUs.", - "icd4": "MPLAB ICD 4 is Microchip’s fast, cost-effective debugger for PIC, SAM, and dsPIC devices.", - "icd5": "MPLAB ICD 5 provides advanced connectivity and power options for PIC, AVR, SAM, and dsPIC devices.", - "ice4": "MPLAB ICE 4 offers feature-rich debugging for PIC, AVR, SAM, and dsPIC devices.", - "jtagice3": "Mid-range tool for AVR and SAM D ARM Cortex-M0+ microcontrollers with on-chip debugging.", - "pickit4": "MPLAB PICkit 4 allows fast debugging and programming of PIC, dsPIC, AVR, and SAM MCUs.", - "pickit5": "MPLAB PICkit 5 supports quick prototyping and production-ready programming for Microchip devices.", - "pkob4": "PKOB4 (PICkit On-Board 4) is an onboard debugger with no additional tools required.", - "powerdebugger": "Power Debugger for AVR and ARM Cortex-M SAM microcontrollers using various interfaces.", - "simulator": "", - "snap": "MPLAB Snap is a cost-effective debugger for PIC, dsPIC, AVR, and SAM flash MCUs.", - "medbg": "Mini Embedded Debugger (mEDBG).", - "nedbg": "Curiosity Nano onboard debugger (nEDBG or PKOB nano)." 
- } - - + "atmelice": "Atmel-ICE is a debugging and programming tool for ARM Cortex-M and AVR microcontrollers.", + "edbg": "Atmel Embedded Debugger (EDBG) is an onboard debugger for development kits with Atmel MCUs.", + "icd4": "MPLAB ICD 4 is Microchip’s fast, cost-effective debugger for PIC, SAM, and dsPIC devices.", + "icd5": "MPLAB ICD 5 provides advanced connectivity and power options for PIC, AVR, SAM, and dsPIC devices.", + "ice4": "MPLAB ICE 4 offers feature-rich debugging for PIC, AVR, SAM, and dsPIC devices.", + "jtagice3": "Mid-range tool for AVR and SAM D ARM Cortex-M0+ microcontrollers with on-chip debugging.", + "pickit4": "MPLAB PICkit 4 allows fast debugging and programming of PIC, dsPIC, AVR, and SAM MCUs.", + "pickit5": "MPLAB PICkit 5 supports quick prototyping and production-ready programming for Microchip devices.", + "pkob4": "PKOB4 (PICkit On-Board 4) is an onboard debugger with no additional tools required.", + "powerdebugger": "Power Debugger for AVR and ARM Cortex-M SAM microcontrollers using various interfaces.", + "simulator": "", + "snap": "MPLAB Snap is a cost-effective debugger for PIC, dsPIC, AVR, and SAM flash MCUs.", + "medbg": "Mini Embedded Debugger (mEDBG).", + "nedbg": "Curiosity Nano onboard debugger (nEDBG or PKOB nano)." + } tool_item = { 'uid' : uid, 'installer_package' : name, @@ -133,17 +130,12 @@ def generate_list(item_list, tool_to_mcu_list): for tool_item in tool_to_mcu_list: tool_item['dfps'] = json.dumps(mcu_to_dfp) - - - def convert_idx_to_json(xml_content): try: # Open the idx file data_dict = xmltodict.parse(xml_content) # Convert the parsed data to JSON format - - data = filter_releases_by_version(data_dict) item_list = [] tool_to_mcu = [] @@ -157,8 +149,6 @@ def convert_idx_to_json(xml_content): except Exception as e: print(f"Error during conversion: {e} for item {item}") - - def convert_item_to_es_json(input_item): # Extract relevant fields atmel_name = input_item.get('@atmel:name') diff --git a/scripts/reupload_databases.py b/scripts/reupload_databases.py index 3ac58767f..25c051d4a 100644 --- a/scripts/reupload_databases.py +++ b/scripts/reupload_databases.py @@ -892,7 +892,6 @@ async def main( False ) - ## Step 2 - Update database with new SDK if needed ## Add new sdk version if 'latest' == release_version_sdk: @@ -1025,12 +1024,13 @@ async def main( checkDebuggerToDevice(databaseErp, allDevicesGithub, progDbgAsJson, False) checkProgrammerToDevice(databaseNecto, allDevicesGithub, progDbgAsJson, True) checkDebuggerToDevice(databaseNecto, allDevicesGithub, progDbgAsJson, False) - ## Step 10.1 add microchip info to programmers table + + ## Step 11 add microchip info to programmers table custom_link = 'https://packs.download.microchip.com/index.idx' if not mcus_only: # Download the index file xml_content = MCHP.download_index_file(custom_link) - converted_data, item_list = MCHP.convert_idx_to_json(xml_content) + converted_data, item_list_unused = MCHP.convert_idx_to_json(xml_content) programmersColumns = 'uid,hidden,name,icon,installed,description,installer_package' progToDeviceColumns = 'programer_uid,device_uid, device_support_package' @@ -1081,20 +1081,20 @@ async def main( ) else: missingMcuDfp.append(mcu) - print(f"Following MCUs does not have DFP: {missingMcuDfp}") + print(f"Following MCUs do not have DFP: {missingMcuDfp}") - ## Step 11 - update families + ## Step 12 - update families if not mcus_only: if databaseErp: update_families(databaseErp, allDevicesGithub) - ## Step 12 - update the icon names + ## Step 13 - update the icon 
names
     if not mcus_only:
         for eachDb in [databaseErp, databaseNecto]:
             fix_icon_names(eachDb, "Boards")
             fix_icon_names(eachDb, "Displays")
 
-    ## Step 13 - if queries are different, add them to new file
+    ## Step 14 - if queries are different, add them to new file
     if not mcus_only:
         if not compare_hashes(
             os.path.join(os.path.dirname(__file__), 'databases/queries'),
@@ -1106,7 +1106,7 @@ async def main(
             os.path.join(os.path.dirname(__file__), 'databases/queries')
         )
 
-    ## Step 14 - re-upload over existing assets
+    ## Step 15 - re-upload over existing assets
     if not mcus_only:
         archive_path = compress_directory_7z(os.path.join(os.path.dirname(__file__), 'databases'), f'{dbPackageName}.7z')
         async with aiohttp.ClientSession() as session:
@@ -1115,7 +1115,7 @@ async def main(
         async with aiohttp.ClientSession() as session:
             upload_result = await upload_release_asset(session, token, repo, databaseErp, release_version)
 
-    ## Step 15 - overwrite the existing necto_db.db in root with newly generated one
+    ## Step 16 - overwrite the existing necto_db.db in root with newly generated one
     shutil.copy2(databaseNecto, os.path.join(os.getcwd(), f'{dbName}.db'))
 ## ------------------------------------------------------------------------------------ ##
 ## EOF Main runner

From 2938bbf51f3da4d9e29a56f9c8ef4c9c8b5b3fb7 Mon Sep 17 00:00:00 2001
From: strahi-linux
Date: Fri, 1 Nov 2024 11:38:50 +0100
Subject: [PATCH 5/5] Add guard to restrict Microchip indexing to test targets initially

---
 scripts/index.py              | 5 ++++-
 scripts/reupload_databases.py | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/scripts/index.py b/scripts/index.py
index c15d052eb..7d98eb081 100644
--- a/scripts/index.py
+++ b/scripts/index.py
@@ -518,8 +518,11 @@ def str2bool(v):
     db_version = remove_duplicate_indexed_files(
         es, args.select_index
     )
+
     # Index microchip device family packs
-    index_microchip_packs(es, args.select_index)
+    if 'live' not in args.select_index:
+        # TODO - remove this guard once indexing is confirmed to work on LIVE
+        index_microchip_packs(es, args.select_index)
 
     # Now index the new release
     index_release_to_elasticsearch(
diff --git a/scripts/reupload_databases.py b/scripts/reupload_databases.py
index 25c051d4a..1ab03d017 100644
--- a/scripts/reupload_databases.py
+++ b/scripts/reupload_databases.py
@@ -1027,7 +1027,7 @@ async def main(
 
     ## Step 11 add microchip info to programmers table
     custom_link = 'https://packs.download.microchip.com/index.idx'
-    if not mcus_only:
+    if not mcus_only and 'Test' == index:
         # Download the index file
         xml_content = MCHP.download_index_file(custom_link)
         converted_data, item_list_unused = MCHP.convert_idx_to_json(xml_content)
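

--
Reviewer note (not part of the patches above): a minimal sketch of how the new
read_microchip_index.py module can be exercised on its own, e.g. to sanity-check the
Microchip index before running the database or Elasticsearch steps. It assumes the
scripts/ directory is on PYTHONPATH, the requests and xmltodict packages are installed,
and that the download and parse succeed (convert_idx_to_json returns None on error).
The URL mirrors the one hard-coded in the patches; everything else is local to this
sketch.

    import json
    import read_microchip_index as MCHP

    INDEX_URL = 'https://packs.download.microchip.com/index.idx'

    # Fetch the raw index.idx XML from the Microchip pack server
    xml_content = MCHP.download_index_file(INDEX_URL)

    # As of PATCH 2/5, convert_idx_to_json returns a 2-tuple: tool entries destined
    # for the Programmers table, and per-pack documents shaped for Elasticsearch
    tool_to_mcu, item_list = MCHP.convert_idx_to_json(xml_content)

    # Each tool entry carries the shared MCU -> DFP map, JSON-encoded under 'dfps'
    for tool in tool_to_mcu:
        dfps = json.loads(tool['dfps'])
        print(f"{tool['uid']}: {len(tool['mcus'])} MCUs, {len(dfps)} DFP-mapped")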