Merge pull request hed-standard#781 from VisLab/develop
Updated the remodeling backup arguments
VisLab authored Oct 23, 2023
2 parents e03efd1 + 9814e4a commit 20bfa33
Showing 8 changed files with 67 additions and 71 deletions.
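In brief: the remodeling command-line tools replace the old -n/--backup-name and work-directory options (-w/--work-dir and, in run_remodel_backup, -p/--path-work) with -bn/--backup_name and -bd/--backup_dir, and BackupManager now treats an explicit backups_root as the backup directory itself instead of appending 'backups' to it. A minimal sketch of the new backup invocation, mirroring the updated tests below; the dataset and backup paths are hypothetical placeholders, not part of this commit:

    # Sketch only: /data/ds_example and /data/backups are placeholder paths.
    from hed.tools.remodeling.backup_manager import BackupManager
    from hed.tools.remodeling.cli.run_remodel_backup import main as backup_main

    arg_list = ['/data/ds_example',                        # dataset root
                '-bn', BackupManager.DEFAULT_BACKUP_NAME,  # backup name (formerly -n)
                '-bd', '/data/backups',                    # backup directory (formerly -w/--work-dir)
                '-x', 'derivatives',                       # directories to exclude from the file search
                '-f', 'events', '-e', '.tsv']              # filename suffix and extension filters
    backup_main(arg_list)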
7 changes: 4 additions & 3 deletions hed/tools/remodeling/backup_manager.py
@@ -10,7 +10,7 @@

class BackupManager:
    DEFAULT_BACKUP_NAME = 'default_back'
    RELATIVE_BACKUP_LOCATION = 'derivatives/remodel'
    RELATIVE_BACKUP_LOCATION = './derivatives/remodel/backups'
    BACKUP_DICTIONARY = 'backup_lock.json'
    BACKUP_ROOT = 'backup_root'

@@ -24,14 +24,15 @@ def __init__(self, data_root, backups_root=None):
        :raises HedFileError:
            - If the data_root does not correspond to a real directory.
        Notes: The backup_root will have remodeling/backups appended.
        """
        if not os.path.isdir(data_root):
            raise HedFileError('NonExistentData', f"{data_root} is not an existing directory", "")
        self.data_root = data_root
        if backups_root:
            self.backups_path = os.path.join(backups_root, 'backups')
            self.backups_path = backups_root
        else:
            self.backups_path = os.path.join(data_root, self.RELATIVE_BACKUP_LOCATION, 'backups')
            self.backups_path = os.path.join(data_root, self.RELATIVE_BACKUP_LOCATION)
        self.backups_path = os.path.realpath(self.backups_path)
        os.makedirs(self.backups_path, exist_ok=True)
        self.backups_dict = self._get_backups()
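The net effect of this file's change: an explicit backups_root is used as given, and the default location now gets its trailing 'backups' segment from RELATIVE_BACKUP_LOCATION itself. A standalone sketch of the resolution logic the diff shows (the helper name resolve_backups_path is illustrative only, not part of the class):

    import os

    RELATIVE_BACKUP_LOCATION = './derivatives/remodel/backups'

    def resolve_backups_path(data_root, backups_root=None):
        # Mirrors the updated __init__: an explicit backups_root is used as-is;
        # otherwise backups live under data_root/derivatives/remodel/backups.
        if backups_root:
            backups_path = backups_root
        else:
            backups_path = os.path.join(data_root, RELATIVE_BACKUP_LOCATION)
        return os.path.realpath(backups_path)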
8 changes: 6 additions & 2 deletions hed/tools/remodeling/cli/run_remodel.py
@@ -20,6 +20,10 @@ def get_parser():
    parser = argparse.ArgumentParser(description="Converts event files based on a json file specifying operations.")
    parser.add_argument("data_dir", help="Full path of dataset root directory.")
    parser.add_argument("model_path", help="Full path of the file with remodeling instructions.")
    parser.add_argument("-bd", "--backup_dir", default="", dest="backup_dir",
                        help="Directory for the backup that is being created")
    parser.add_argument("-bn", "--backup_name", default=BackupManager.DEFAULT_BACKUP_NAME, dest="backup_name",
                        help="Name of the default backup for remodeling")
    parser.add_argument("-b", "--bids-format", action='store_true', dest="use_bids",
                        help="If present, the dataset is in BIDS format with sidecars. HED analysis is available.")
    parser.add_argument("-e", "--extensions", nargs="*", default=['.tsv'], dest="extensions",

@@ -31,8 +35,8 @@
                        help="Controls individual file summaries ('none', 'separate', 'consolidated')")
    parser.add_argument("-j", "--json-sidecar", dest="json_sidecar", nargs="?",
                        help="Optional path to JSON sidecar with HED information")
    parser.add_argument("-n", "--backup-name", default=BackupManager.DEFAULT_BACKUP_NAME, dest="backup_name",
                        help="Name of the default backup for remodeling")
    # parser.add_argument("-n", "--backup-name", default=BackupManager.DEFAULT_BACKUP_NAME, dest="backup_name",
    #                     help="Name of the default backup for remodeling")
    parser.add_argument("-nb", "--no-backup", action='store_true', dest="no_backup",
                        help="If present, the operations are run directly on the files with no backup.")
    parser.add_argument("-ns", "--no-summaries", action='store_true', dest="no_summaries",
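For run_remodel, the backup name now comes from -bn/--backup_name (the old -n/--backup-name survives only as a commented-out line), while -bd/--backup_dir names the backup directory. A hypothetical invocation in the same style as the tests, assuming a backup named 'back1' was created beforehand with run_remodel_backup; the dataset root and remodel JSON path are placeholders:

    # Sketch only: the paths below are placeholders.
    from hed.tools.remodeling.cli.run_remodel import main as remodel_main

    arg_list = ['/data/ds_example', '/data/remodel_ops.json',
                '-bn', 'back1',           # backup name (formerly -n/--backup-name)
                '-x', 'derivatives']      # directories to exclude from the file search
    remodel_main(arg_list)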
18 changes: 8 additions & 10 deletions hed/tools/remodeling/cli/run_remodel_backup.py
@@ -1,5 +1,6 @@
""" Command-line program for creating a backup. """

import os
import argparse
from hed.errors.exceptions import HedFileError
from hed.tools.util.io_util import get_file_list, get_filtered_by_element
@@ -15,21 +16,18 @@ def get_parser():
    """
    parser = argparse.ArgumentParser(description="Creates a backup for the remodeling process.")
    parser.add_argument("data_dir", help="Full path of dataset root directory.")
    parser.add_argument("-bd", "--backup_dir", default="", dest="backup_dir",
                        help="Directory for the backup that is being created")
    parser.add_argument("-bn", "--backup_name", default=BackupManager.DEFAULT_BACKUP_NAME, dest="backup_name",
                        help="Name of the default backup for remodeling")
    parser.add_argument("-e", "--extensions", nargs="*", default=['.tsv'], dest="extensions",
                        help="File extensions to allow in locating files. A * indicates all files allowed.")
    parser.add_argument("-f", "--file-suffix", dest="file_suffix", nargs="*", default=['events'],
                        help="Filename suffix of files to be backed up. A * indicates all files allowed.")
    parser.add_argument("-n", "--backup_name", default=BackupManager.DEFAULT_BACKUP_NAME, dest="backup_name",
                        help="Name of the default backup for remodeling")
    parser.add_argument("-p", "--path-work", default="", dest="path_work",
                        help="The root path for remodeling work if given, " +
                             "otherwise [data_root]/derivatives/remodel is used.")

    parser.add_argument("-t", "--task-names", dest="task_names", nargs="*", default=[], help="The name of the task.")
    parser.add_argument("-v", "--verbose", action='store_true',
                        help="If present, output informative messages as computation progresses.")
    parser.add_argument("-w", "--work-dir", default="", dest="work_dir",
                        help="If given, is the path to directory for saving, " +
                             "otherwise [data_root]derivatives/remodel is used.")
    parser.add_argument("-x", "--exclude-dirs", nargs="*", default=['derivatives'], dest="exclude_dirs",
                        help="Directories names to exclude from search for files. " +
                             "If omitted, no directories except the backup directory will be excluded." +

@@ -60,8 +58,8 @@ def main(arg_list=None):
                                 exclude_dirs=exclude_dirs)
    if args.task_names:
        file_list = get_filtered_by_element(file_list, args.task_names)
    if args.work_dir:
        backups_root = args.work_dir
    if args.backup_dir:
        backups_root = args.backup_dir
    else:
        backups_root = None
    backup_man = BackupManager(args.data_dir, backups_root=backups_root)
12 changes: 5 additions & 7 deletions hed/tools/remodeling/cli/run_remodel_restore.py
@@ -14,15 +14,13 @@ def get_parser():
    """
    parser = argparse.ArgumentParser(description="Restores the backup files for the original data.")
    parser.add_argument("data_dir", help="Full path of dataset root directory.")
    parser.add_argument("-n", "--backup_name", default=BackupManager.DEFAULT_BACKUP_NAME, dest="backup_name",
    parser.add_argument("-bd", "--backup_dir", default="", dest="backup_dir",
                        help="Directory for the backup that is being created")
    parser.add_argument("-bn", "--backup_name", default=BackupManager.DEFAULT_BACKUP_NAME, dest="backup_name",
                        help="Name of the default backup for remodeling")

    parser.add_argument("-t", "--task-names", dest="task_names", nargs="*", default=[], help="The names of the task.")
    parser.add_argument("-v", "--verbose", action='store_true',
                        help="If present, output informative messages as computation progresses.")
    parser.add_argument("-w", "--work_dir", default="", dest="work_dir",
                        help="The root path for remodeling work if given, " +
                             "otherwise [data_root]/derivatives/remodel is used.")
    return parser


@@ -39,8 +37,8 @@ def main(arg_list=None):
    """
    parser = get_parser()
    args = parser.parse_args(arg_list)
    if args.work_dir:
        backups_root = args.work_dir
    if args.backup_dir:
        backups_root = args.backup_dir
    else:
        backups_root = None
    backup_man = BackupManager(args.data_dir, backups_root=backups_root)
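run_remodel_restore picks up the same pair of options, so restoring from a non-default backup directory would look roughly like this (placeholder paths again, assuming the 'default_back' backup already exists under /data/backups):

    # Sketch only: /data/ds_example and /data/backups are placeholder paths.
    from hed.tools.remodeling.cli.run_remodel_restore import main as restore_main

    arg_list = ['/data/ds_example',
                '-bn', 'default_back',    # backup name (formerly -n)
                '-bd', '/data/backups']   # backup directory (formerly -w/--work_dir)
    restore_main(arg_list)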
14 changes: 4 additions & 10 deletions tests/tools/remodeling/cli/test_run_remodel.py
@@ -51,7 +51,7 @@ def tearDownClass(cls):

    def test_parse_arguments(self):
        # Test no verbose
        arg_list1 = [self.data_root, self.model_path, '-x', 'derivatives', '-n', 'back1']
        arg_list1 = [self.data_root, self.model_path, '-x', 'derivatives', '-bn', 'back1']
        with patch('sys.stdout', new=io.StringIO()) as fp1:
            args1, operations1 = parse_arguments(arg_list1)
        self.assertFalse(fp1.getvalue())

@@ -60,7 +60,7 @@ def test_parse_arguments(self):
        self.assertEqual(args1.file_suffix, 'events')

        # Test * for extensions and suffix as well as verbose
        arg_list2 = [self.data_root, self.model_path, '-x', 'derivatives', '-n', 'back1', '-f', '*', '-e', '*', '-v']
        arg_list2 = [self.data_root, self.model_path, '-x', 'derivatives', '-bn', 'back1', '-f', '*', '-e', '*', '-v']
        with patch('sys.stdout', new=io.StringIO()) as fp2:
            args2, operations2 = parse_arguments(arg_list2)
        self.assertTrue(fp2.getvalue())
@@ -164,13 +164,13 @@ def test_main_bids_no_sidecar_with_hed_task(self):

    def test_main_errors(self):
        # Test bad data directory
        arg_list = ['junk/junk', self.model_path, '-x', 'derivatives', '-n', 'back1']
        arg_list = ['junk/junk', self.model_path, '-x', 'derivatives', '-bn', 'back1']
        with self.assertRaises(HedFileError) as context:
            main(arg_list=arg_list)
        self.assertEqual(context.exception.args[0], "DataDirectoryDoesNotExist")

        # Test no backup
        arg_list = [self.data_root, self.model_path, '-x', 'derivatives', '-n', 'back1']
        arg_list = [self.data_root, self.model_path, '-x', 'derivatives', '-bn', 'back1']
        with self.assertRaises(HedFileError) as context:
            main(arg_list=arg_list)
        self.assertEqual(context.exception.args[0], "BackupDoesNotExist")
@@ -193,12 +193,6 @@ def test_run_bids_ops_verbose(self):
            main(arg_list)
        self.assertFalse(fp.getvalue())

    # def test_temp(self):
    #     data_root = "g:/ds002718OpenNeuro"
    #     model_path = 'G:/wh_excerpt_rmdl.json'
    #     arg_list = [data_root, model_path, '-x', 'derivatives', 'code', 'stimuli', '-b', '-n', '']
    #     main(arg_list)


if __name__ == '__main__':
    unittest.main()
51 changes: 26 additions & 25 deletions tests/tools/remodeling/cli/test_run_remodel_backup.py
@@ -15,7 +15,9 @@ class Test(unittest.TestCase):
    def setUpClass(cls):
        file_list = ['top_level.tsv', 'sub1/sub1_events.tsv', 'sub2/sub2_events.tsv', 'sub2/sub2_next_events.tsv']
        # cls.file_list = file_list
        cls.extract_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../data/remodel_tests')
        extract_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../data/remodel_tests')
        cls.alt_path = os.path.realpath(os.path.join(extract_path, 'temp'))
        cls.extract_path = extract_path
        test_root = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../data/remodel_tests/test_root')
        cls.test_root = test_root
        cls.test_paths = [os.path.join(test_root, file) for file in file_list]
@@ -34,20 +36,24 @@ def setUp(self):
            zip_ref.extractall(self.extract_path)

    def tearDown(self):
        shutil.rmtree(self.test_root)
        shutil.rmtree(self.data_root)
        if os.path.exists(self.test_root):
            shutil.rmtree(self.test_root)
        if os.path.exists(self.data_root):
            shutil.rmtree(self.data_root)
        if os.path.exists(self.alt_path):
            shutil.rmtree(self.alt_path)

    @classmethod
    def tearDownClass(cls):
        pass

    def test_main_events(self):
        self.assertFalse(os.path.exists(self.derv_path), 'backup directory does not exist before creation')
        arg_list = [self.test_root, '-n', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives',
        arg_list = [self.test_root, '-bn', BackupManager.DEFAULT_BACKUP_NAME, '-bd', self.derv_path, '-x', 'derivatives',
                    '-f', 'events', '-e', '.tsv']
        main(arg_list)
        self.assertTrue(os.path.exists(self.derv_path), 'backup directory exists before creation')
        json_path = os.path.realpath(os.path.join(self.derv_path, 'backups', BackupManager.DEFAULT_BACKUP_NAME,
        json_path = os.path.realpath(os.path.join(self.derv_path, BackupManager.DEFAULT_BACKUP_NAME,
                                                  BackupManager.BACKUP_DICTIONARY))
        with open(json_path, 'r') as fp:
            key_dict = json.load(fp)
@@ -56,19 +62,18 @@ def test_main_events(self):
        self.assertEqual(len(file_list), 3, "The backup of events.tsv has the right number of files")

    def test_main_all(self):
        arg_list = [self.test_root, '-n', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives',
                    '-f', '*', '-e', '*']
        arg_list = [self.test_root, '-bn', BackupManager.DEFAULT_BACKUP_NAME, '-bd', self.derv_path,
                    '-x', 'derivatives', '-f', '*', '-e', '*']

        self.assertFalse(os.path.exists(self.derv_path), 'backup directory does not exist before creation')
        main(arg_list)
        self.assertTrue(os.path.exists(self.derv_path), 'backup directory exists before creation')
        json_path = os.path.realpath(os.path.join(self.derv_path, 'backups', BackupManager.DEFAULT_BACKUP_NAME,
        json_path = os.path.realpath(os.path.join(self.derv_path, BackupManager.DEFAULT_BACKUP_NAME,
                                                  BackupManager.BACKUP_DICTIONARY))
        with open(json_path, 'r') as fp:
            key_dict = json.load(fp)
        self.assertEqual(len(key_dict), 4, "The backup of events.tsv does not include top_level.tsv")
        back_path = os.path.realpath(os.path.join(self.derv_path, 'backups',
                                                  BackupManager.DEFAULT_BACKUP_NAME, 'backup_root'))
        back_path = os.path.realpath(os.path.join(self.derv_path, BackupManager.DEFAULT_BACKUP_NAME, 'backup_root'))
        file_list1 = get_file_list(back_path)
        self.assertIsInstance(file_list1, list)
        self.assertEqual(len(file_list1), 4)
@@ -78,11 +83,11 @@ def test_main_task(self):
        self.assertTrue(os.path.exists(der_path))
        shutil.rmtree(der_path)
        self.assertFalse(os.path.exists(der_path))
        arg_list = [self.data_root, '-n', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives',
        arg_list = [self.data_root, '-bn', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives',
                    '-f', 'events', '-e', '.tsv', '-t', 'FacePerception']
        main(arg_list)
        self.assertTrue(os.path.exists(der_path))
        back_path = os.path.realpath(os.path.join(self.data_root, BackupManager.RELATIVE_BACKUP_LOCATION, 'backups',
        back_path = os.path.realpath(os.path.join(self.data_root, BackupManager.RELATIVE_BACKUP_LOCATION,
                                                  BackupManager.DEFAULT_BACKUP_NAME, 'backup_root'))
        self.assertTrue(os.path.exists(back_path))
        backed_files = get_file_list(back_path)
@@ -93,37 +98,33 @@ def test_main_bad_task(self):
        self.assertTrue(os.path.exists(der_path))
        shutil.rmtree(der_path)
        self.assertFalse(os.path.exists(der_path))
        arg_list = [self.data_root, '-n', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives',
        arg_list = [self.data_root, '-bn', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives',
                    '-f', 'events', '-e', '.tsv', '-t', 'Baloney']
        main(arg_list)
        self.assertTrue(os.path.exists(der_path))
        back_path = os.path.realpath(os.path.join(self.data_root, BackupManager.RELATIVE_BACKUP_LOCATION, 'backups',
        back_path = os.path.realpath(os.path.join(self.data_root, BackupManager.RELATIVE_BACKUP_LOCATION,
                                                  BackupManager.DEFAULT_BACKUP_NAME, 'backup_root'))
        self.assertTrue(os.path.exists(back_path))
        backed_files = get_file_list(back_path)
        self.assertEqual(len(backed_files), 0)

    def test_alt_loc(self):
        alt_path = os.path.realpath(os.path.join(self.extract_path, 'temp'))
        if os.path.exists(alt_path):
            shutil.rmtree(alt_path)
        self.assertFalse(os.path.exists(alt_path))
        arg_list = [self.data_root, '-n', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives', '-w', alt_path,
        if os.path.exists(self.alt_path):
            shutil.rmtree(self.alt_path)
        self.assertFalse(os.path.exists(self.alt_path))
        arg_list = [self.data_root, '-bn', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives', '-bd', self.alt_path,
                    '-f', 'events', '-e', '.tsv', ]
        main(arg_list)
        self.assertTrue(os.path.exists(alt_path))
        back_path = os.path.realpath(os.path.join(alt_path, 'backups', 'default_back', 'backup_root'))
        self.assertTrue(os.path.exists(back_path))
        self.assertTrue(os.path.exists(self.alt_path))
        back_path = os.path.realpath(os.path.join(self.alt_path, 'default_back/backup_root'))
        self.assertTrue(os.path.exists(back_path))
        backed_files = get_file_list(back_path)
        self.assertEqual(len(backed_files), 6)
        if os.path.exists(alt_path):
            shutil.rmtree(alt_path)

    def test_main_backup_exists(self):
        der_path = os.path.realpath(os.path.join(self.data_root, 'derivatives'))
        self.assertTrue(os.path.exists(der_path))
        arg_list = [self.data_root, '-n', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives',
        arg_list = [self.data_root, '-bn', BackupManager.DEFAULT_BACKUP_NAME, '-x', 'derivatives',
                    '-f', 'events', '-e', '.tsv', '-t', 'Baloney']
        with self.assertRaises(HedFileError) as context:
            main(arg_list)
(Diffs for the remaining changed files are not rendered here.)
