
Commit

add verification script that produces a csv file
fthiery committed Jun 9, 2021
1 parent f596491 commit 7b69e54
Showing 3 changed files with 81 additions and 1 deletion.
3 changes: 2 additions & 1 deletion .gitignore
@@ -134,6 +134,7 @@ dmypy.json
tests/content/
config.json.backup
*.txt
*.csv

# VS Code
.vscode
.vscode
75 changes: 75 additions & 0 deletions bin/report.py
@@ -0,0 +1,75 @@
#!/usr/bin/env python3
import csv
from mediasite_migration_scripts.utils import common as utils
from mediasite_migration_scripts.ms_client.client import MediaServerClient


config = utils.read_json('config.json')
mediasite_data = utils.read_json('mediasite_data.json')
redirections = utils.read_json('redirections.json')

mediasite_cname = utils.get_mediasite_host(config['mediasite_api_url'])
mediasite_play_url_pattern = f'https://{mediasite_cname}/Site1/Play/'


ms_config = {
'API_KEY': config['mediaserver_api_key'],
'CLIENT_ID': 'mediasite-migration-client',
'SERVER_URL': config['mediaserver_url'],
'TIMEOUT': 60,
}

ms_client = MediaServerClient(local_conf=ms_config, setup_logging=False)


def get_mediaserver_path(oid):
    # build the folder path of a migrated media on MediaServer, e.g. 'Channel A/Channel B/<oid>'
    r = ms_client.api('medias/get/', params={'oid': oid, 'path': 'yes'})
    path_str = ''
    if r['success']:
        path = r['info']['path']
        for p in path:
            path_str += p['title'] + '/'
    return path_str + oid


rows = list()
folders_to_process = list()
total_presentations = processed_presentations = skipped_presentations = 0
print('Filtering folders')
for folder in mediasite_data:
if utils.is_folder_to_add(folder['path'], config):
folders_to_process.append(folder)
total_presentations += len(folder['presentations'])

print(f'Verifying {len(folders_to_process)} folders and {total_presentations} presentations')
for f in folders_to_process:
for p in f['presentations']:
oid = mediaserver_path = 'SKIPPED'
print(utils.get_progress_string(processed_presentations, total_presentations), end='\r')
processed_presentations += 1
p_id = p['id']
        p_path = f['path'] + '/' + p_id
        mediasite_url = mediasite_play_url_pattern + p_id
        mediaserver_url = redirections.get(mediasite_url)
        if mediaserver_url:
            # the media oid is the fifth '/'-separated segment of the MediaServer URL
            oid = mediaserver_url.split('/')[4]
            mediaserver_path = get_mediaserver_path(oid)
else:
mediaserver_url = 'SKIPPED'
skipped_presentations += 1

rows.append({
'mediasite_path': p_path,
'mediaserver_path': mediaserver_path,
'mediasite_url': mediasite_url,
'mediaserver_url': mediaserver_url
})

print()
print(f'{skipped_presentations}/{total_presentations} presentations have not been migrated')
print('Writing csv')
with open('report.csv', 'w', newline='') as csvfile:
fieldnames = ['mediasite_path', 'mediaserver_path', 'mediasite_url', 'mediaserver_url']
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
writer.writerows(rows)
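
For reference, a minimal sketch of how the generated report.csv could be post-processed; it only relies on the file name, the column names and the 'SKIPPED' sentinel defined by the script above.

#!/usr/bin/env python3
import csv

# list presentations that were not migrated, i.e. rows where bin/report.py
# wrote the 'SKIPPED' sentinel in the mediaserver_url column
with open('report.csv', newline='') as csvfile:
    rows = list(csv.DictReader(csvfile))

skipped = [r for r in rows if r['mediaserver_url'] == 'SKIPPED']
print(f'{len(skipped)}/{len(rows)} presentations have not been migrated')
for r in skipped:
    print(r['mediasite_path'], r['mediasite_url'])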
4 changes: 4 additions & 0 deletions mediasite_migration_scripts/utils/common.py
@@ -132,3 +132,7 @@ def get_timecode_from_sec(seconds):
h, m = divmod(m, 60)
timecode = "%d:%02d:%02d" % (h, m, s)
return timecode


def get_mediasite_host(url):
return url.split('/')[2]
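
A quick illustration of the new helper (the URL below is hypothetical): it returns the host component, i.e. the third '/'-separated segment of the API URL.

>>> get_mediasite_host('https://mediasite.example.com/Site1/api/v1/')
'mediasite.example.com'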
