Skip to content

Commit

Permalink
Updated workflow for uploading MCUs
Browse files Browse the repository at this point in the history
  • Loading branch information
IvanRuzavin committed Oct 8, 2024
1 parent 3c841cd commit 7442b8e
Show file tree
Hide file tree
Showing 4 changed files with 129 additions and 51 deletions.
59 changes: 27 additions & 32 deletions .github/workflows/MCUsReleaseLive.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -71,26 +71,22 @@ jobs:
pip install -r scripts/requirements/support.txt
sudo apt-get install p7zip-full
- name: Update database
- name: Build Message with Python
run: |
python -u scripts/update_db_for_release.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }} ${{ secrets.PROG_DEBUG_CODEGRIP }} ${{ secrets.PROG_DEBUG_MIKROPROG }} ${{ github.event.inputs.release_version }} "latest" "Test"
python -u scripts/build_message.py > message.txt
- name: Commit database to current branch
- name: Remove original changelog file from git
run: |
echo "Updating with new database";
git add necto_db.db
git commit -m "Updated database with latest release info."
git rm changelog/new_hw.md
git commit -m "Remove old changelog file after moving it"
git push
- name: Upload MCUs Asset
env:
DB_PATH: ${{ secrets.DB_PATH }}
ES_HOST: ${{ secrets.ES_HOST }}
ES_USER: ${{ secrets.ES_USER }}
ES_PASSWORD: ${{ secrets.ES_PASSWORD }}
ES_INDEX: ${{ secrets.ES_INDEX_LIVE }}
ES_INDEX_LIVE: ${{ secrets.ES_INDEX_LIVE }}
run: python -u scripts/release_mcus.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }} ${{ github.event.inputs.release_version }} "--live" "True"
- name: Commit Changelog to current branch
run: |
echo "Updating with new changelog files";
git add changelog/**
git commit -m "Updated changelog files with latest release info."
git push
# Create a pull request using the GitHub API
- name: Create Pull Request
Expand Down Expand Up @@ -144,23 +140,6 @@ jobs:
echo "Indexing to Live."
python -u scripts/index.py ${{ github.repository }} ${{ secrets.GITHUB_TOKEN }} ${{ secrets.ES_INDEX_LIVE }} "False" ${{ github.event.inputs.release_version }} "False" "False"
- name: Build Message with Python
run: |
python -u scripts/build_message.py > message.txt
- name: Remove original changelog file from git
run: |
git rm changelog/new_hw.md
git commit -m "Remove old changelog file after moving it"
git push
- name: Commit Changelog to current branch
run: |
echo "Updating with new changelog files";
git add changelog/**
git commit -m "Updated changelog files with latest release info."
git push
- name: Send notification to Mattermost
if: ${{ github.event.inputs.notify_channel == 'true' }}
env:
Expand All @@ -170,3 +149,19 @@ jobs:
curl -X POST -H 'Content-Type: application/json' \
--data "{\"text\": \"$MESSAGE\"}" \
$MATTERMOST_WEBHOOK_URL
- name: Trigger database update in Core repo
run: |
# Set the required variables
repo_owner="MikroElektronika"
repo_name="core_packages"
event_type="trigger-workflow"
version="latest"
curl -L \
-X POST \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.MIKROE_ACTIONS_KEY }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/$repo_owner/$repo_name/dispatches \
-d "{\"event_type\": \"$event_type\", \"client_payload\": {\"version\": \"$version\", \"index\": \"Live\", \"unit\": false, \"integration\": true}}"
9 changes: 1 addition & 8 deletions .github/workflows/MCUsReleaseTest.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -79,14 +79,7 @@ jobs:
- name: Update database
run: |
python -u scripts/update_db_for_release.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }} ${{ secrets.PROG_DEBUG_CODEGRIP }} ${{ secrets.PROG_DEBUG_MIKROPROG }} ${{ github.event.inputs.release_version }} "latest" "Test"
- name: Commit database to current branch
run: |
echo "Updating with new database";
git add necto_db_dev.db
git commit -m "Updated database with latest release info."
git push
python -u scripts/reupload_databases.py ${{ secrets.GITHUB_TOKEN }} ${{ github.repository }} ${{ secrets.PROG_DEBUG_CODEGRIP }} ${{ secrets.PROG_DEBUG_MIKROPROG }} ${{ github.event.inputs.release_version }} "latest" "Test" "--upload" "False"
- name: Upload MCUs Asset
env:
Expand Down
2 changes: 0 additions & 2 deletions scripts/release_mcus.py
Original file line number Diff line number Diff line change
Expand Up @@ -901,8 +901,6 @@ async def main(token, repo, tag_name, live=False):
architectures = ["ARM", "RISCV", "PIC32", "PIC", "dsPIC", "AVR"]

db_paths = ['necto_db_dev.db']
if live:
db_paths = ['necto_db.db']

current_metadata = fetch_current_metadata(repo, token)

Expand Down
110 changes: 101 additions & 9 deletions scripts/reupload_databases.py
Original file line number Diff line number Diff line change
Expand Up @@ -471,6 +471,7 @@ async def get_all_assets(session, token, repo, release_id):
return assets

async def upload_release_asset(session, token, repo, asset_path, release_version=None):
return 0
""" Upload a release asset to GitHub """
print(f"Preparing to upload asset: {os.path.basename(asset_path)}...")
headers = {'Authorization': f'token {token}', 'Content-Type': 'application/octet-stream'}
Expand Down Expand Up @@ -722,6 +723,90 @@ def updateDevicesFromSdk(dbs, queries):

return

def updateDevicesFromCore(dbs, queries):
    """Import device definitions exported by the Core repo into the databases.

    Walks every per-device sub-directory of *queries*. For each non-falsy
    database in *dbs* it:
      * inserts the ``Devices.json`` record into the ``Devices`` table
        (JSON keys are assumed to map 1:1 to column names — TODO confirm
        against the DB schema), and
      * expands ``LinkerTables.json`` into the individual linker tables
        (``SDKToDevice``, ``DeviceToPackage``, ...).

    :param dbs: iterable of database handles/paths accepted by ``insertIntoTable``;
                falsy entries are skipped.
    :param queries: directory containing one sub-directory per device.
    :return: None (side effect: rows inserted via ``insertIntoTable``).
    """
    for eachDeviceDir in os.listdir(queries):
        currentDeviceDir = os.path.join(queries, eachDeviceDir)
        currentDeviceFiles = os.listdir(currentDeviceDir)

        for eachDb in dbs:
            if not eachDb:
                continue

            if 'Devices.json' in currentDeviceFiles:
                with open(os.path.join(currentDeviceDir, 'Devices.json'), 'r') as file:
                    device = json.load(file)
                # Column order follows JSON key order (dicts preserve insertion order).
                columns = list(device.keys())
                insertIntoTable(
                    eachDb,
                    'Devices',
                    [device[key] for key in columns],
                    ','.join(columns)
                )

            if 'LinkerTables.json' in currentDeviceFiles:
                with open(os.path.join(currentDeviceDir, 'LinkerTables.json'), 'r') as file:
                    linkerTables = json.load(file)

                # Each entry of 'tables' is a single-key dict; the key names the
                # target linker table. NOTE(review): if two entries share a key
                # name, the inner search below finds only the first — presumed
                # unique per device; verify against the Core export format.
                table_keys = [list(table.keys())[0] for table in linkerTables['tables']]
                for eachTableKey in table_keys:
                    columns = ['device_uid']
                    values = [linkerTables['device_uid']]
                    for eachTable in linkerTables['tables']:
                        if eachTableKey not in eachTable:
                            continue
                        columns.append(list(eachTable[eachTableKey].keys())[0])
                        if eachTableKey == 'SDKToDevice':
                            # Link the device to every mikroSDK release at or above
                            # its threshold version.
                            sdkVersions = read_data_from_db(
                                eachDb,
                                'SELECT DISTINCT version FROM SDKs WHERE name IS "mikroSDK"'
                            )
                            versions = filter_versions(
                                [v[0] for v in sdkVersions[enums.dbSync.ELEMENTS.value]]
                            )
                            # [:-1] drops the last character of the stored version
                            # string — assumes a trailing marker char; TODO confirm.
                            threshold_version = version.parse(eachTable[eachTableKey][columns[1]][:-1])
                            # BUGFIX: the original nested single quotes inside a
                            # single-quoted f-string — a SyntaxError on Python < 3.12.
                            filtered_versions = [
                                f"mikrosdk_v{v.replace('.', '')}"
                                for v in versions
                                if version.parse(v) >= threshold_version
                            ]
                            values.append(filtered_versions)
                        elif eachTableKey == 'DeviceToPackage':
                            # Ensure every referenced package exists in 'Packages'.
                            # package_uid format presumed "<pin_count>/<package_name>".
                            package_uids = linkerTables['tables'][enums.dbSync.BOARDTODEVICEPACKAGES.value]['DeviceToPackage']['package_uid']
                            for package_uid in package_uids:
                                pin_count, package_name = package_uid.split('/')[0], package_uid.split('/')[1]
                                insertIntoTable(
                                    eachDb,
                                    'Packages',
                                    [
                                        pin_count,
                                        package_uid,
                                        package_uid,
                                        "",
                                        '{"_MSDK_PACKAGE_NAME_":"' + package_name + '","_MSDK_DIP_SOCKET_TYPE_":""}'
                                    ],
                                    'pin_count,name,uid,stm_sdk_config,sdk_config'
                                )
                            values.append(eachTable[eachTableKey][columns[1]])
                        else:
                            values.append(eachTable[eachTableKey][columns[1]])
                        break

                    if isinstance(values[1], list):
                        # One-to-many link: one row per linked value.
                        for eachValue in values[1]:
                            insertIntoTable(
                                eachDb,
                                eachTableKey,
                                [values[0], eachValue],
                                ','.join(columns)
                            )
                    else:
                        insertIntoTable(
                            eachDb,
                            eachTableKey,
                            values,
                            ','.join(columns)
                        )

    return

def hash_file(filename):
"""Generate MD5 hash of a file."""
hash_md5 = hashlib.md5()
Expand Down Expand Up @@ -790,7 +875,7 @@ def fix_icon_names(db, tableName):
)

## Main runner
async def main(token, repo, doc_codegrip, doc_mikroprog, release_version="", release_version_sdk="", index="Test"):
async def main(token, repo, doc_codegrip, doc_mikroprog, release_version="", release_version_sdk="", index="Test", upload=True):
## Step 1 - download the database first
## Always use latest release
dbName = 'necto_db_dev'
Expand All @@ -806,6 +891,7 @@ async def main(token, repo, doc_codegrip, doc_mikroprog, release_version="", rel

## Step 2 - Update database with new SDK if needed
## Add new sdk version
release_version_sdk = fetch_release_details('MikroElektronika/mikrosdk_v2', token, release_version)
for eachDb in [databaseNecto, databaseErp]:
if eachDb:
sdkVersionUidNew, sdkVersionUidPrevious = sdk.addSdkVersion(eachDb, release_version_sdk.replace('mikroSDK-', ''))
Expand Down Expand Up @@ -868,6 +954,10 @@ async def main(token, repo, doc_codegrip, doc_mikroprog, release_version="", rel
updateTableCollumn(
eachDb, None, None, None, None, None, jsonFile[eachBoard]['db_query']
)

coreQueriesPath = os.path.join(os.getcwd(), 'resources/queries')
if os.path.exists(os.path.join(coreQueriesPath, 'mcus')):
updateDevicesFromCore([databaseErp, databaseNecto], os.path.join(coreQueriesPath, 'mcus')) ## If any new mcus were added
## EOF Step 3

## Step 4 - add missing collumns to tables
Expand Down Expand Up @@ -942,15 +1032,16 @@ async def main(token, repo, doc_codegrip, doc_mikroprog, release_version="", rel
)

## Step 14 - re-upload over existing assets
archive_path = compress_directory_7z(os.path.join(os.path.dirname(__file__), 'databases'), f'{dbPackageName}.7z')
async with aiohttp.ClientSession() as session:
upload_result = await upload_release_asset(session, token, repo, archive_path, release_version)
if databaseErp:
if upload:
archive_path = compress_directory_7z(os.path.join(os.path.dirname(__file__), 'databases'), f'{dbPackageName}.7z')
async with aiohttp.ClientSession() as session:
upload_result = await upload_release_asset(session, token, repo, databaseErp, release_version)
upload_result = await upload_release_asset(session, token, repo, archive_path, release_version)
if databaseErp:
async with aiohttp.ClientSession() as session:
upload_result = await upload_release_asset(session, token, repo, databaseErp, release_version)

## Step 15 - overwrite the existing necto_db.db in root with newly generated one
shutil.copy2(databaseNecto, os.path.join(os.getcwd(), f'{dbName}.db'))
## Step 15 - overwrite the existing necto_db.db in root with newly generated one
shutil.copy2(databaseNecto, os.path.join(os.getcwd(), f'{dbName}.db'))
## ------------------------------------------------------------------------------------ ##
## EOF Main runner

Expand All @@ -964,9 +1055,10 @@ async def main(token, repo, doc_codegrip, doc_mikroprog, release_version="", rel
parser.add_argument('specific_tag', type=str, help='Specific release tag for database update.', default="")
parser.add_argument('specific_tag_mikrosdk', type=str, help='Specific release tag from mikrosdk for database update.', default="")
parser.add_argument('index', type=str, help='Index selection - Live/Test.', default="Test")
parser.add_argument('--upload', type=bool, help='If True - will upload asset.', default=True)

## Parse the arguments
args = parser.parse_args()

## Run the main code
asyncio.run(main(args.token, args.repo, args.doc_codegrip, args.doc_mikroprog, args.specific_tag, args.specific_tag_mikrosdk, args.index))
asyncio.run(main(args.token, args.repo, args.doc_codegrip, args.doc_mikroprog, args.specific_tag, args.specific_tag_mikrosdk, args.index, args.upload))

0 comments on commit 7442b8e

Please sign in to comment.