diff --git a/formats/gds.py b/formats/gds.py
index f3997cc..1ac21a6 100644
--- a/formats/gds.py
+++ b/formats/gds.py
@@ -4,22 +4,32 @@
 import yaml
 import os
 import sys
+import collections
 import parse
 import ast

 from utils import cli_file_pairs, foreach_file_pair
 from version import v

-@click.group(help="Script-like format, also used to store puzzle parameters.",options_metavar='')
+
+@click.group(
+    help="Script-like format, also used to store puzzle parameters.", options_metavar=""
+)
 def cli():
     pass

-dir_path = "/".join(os.path.dirname(os.path.realpath(__file__).replace("\\", "/")).split("/")[:-1])
+
+dir_path = "/".join(
+    os.path.dirname(os.path.realpath(__file__).replace("\\", "/")).split("/")[:-1]
+)
 commands = json.load(open(f"{dir_path}/data/commands.json", encoding="utf-8"))
-commands_i = {val["id"]: key for key, val in commands.items() if "id" in val} # Inverted version of commands
+commands_i = {
+    val["id"]: key for key, val in commands.items() if "id" in val
+}  # Inverted version of commands
+

 class GDS:
-    def __init__(self, cmds=None): #modes: "bin"/"b", "json"/"j", "gda"/"a"
+    def __init__(self, cmds=None):  # modes: "bin"/"b", "json"/"j", "gda"/"a"
         if cmds is None:
             cmds = []
         self.cmds = cmds
@@ -29,7 +39,7 @@ def from_gds(Self, file):
         length = int.from_bytes(file[0:4], "little")
         if file[4:6] == b"\x0c\x00":
             return Self([])
-        cmd_data = file[6:length+4]
+        cmd_data = file[6 : length + 4]

         cmds = []
         cmd = None
@@ -37,36 +47,65 @@
         c = 0
         while True:
             if c >= length:
-                raise Exception("GDS file error: End of file reached with no 0xC command!")
+                raise Exception(
+                    "GDS file error: End of file reached with no 0xC command!"
+                )
             if cmd == None:
-                cmd = int.from_bytes(cmd_data[c:c+2], "little")
+                cmd = int.from_bytes(cmd_data[c : c + 2], "little")
                 if cmd in commands_i:
                     cmd = commands_i[cmd]
                 c += 2
                 continue

-            p_type = int.from_bytes(cmd_data[c:c+2], "little")
-
+            p_type = int.from_bytes(cmd_data[c : c + 2], "little")
+
             if p_type == 0:
-                cmds.append({"command":cmd, "parameters":params})
+                cmds.append({"command": cmd, "parameters": params})
                 cmd = None
                 params = []
                 c += 2
             elif p_type == 1:
-                params.append({"type": "int", "data": int.from_bytes(cmd_data[c+2:c+6], "little")})
+                params.append(
+                    {
+                        "type": "int",
+                        "data": int.from_bytes(cmd_data[c + 2 : c + 6], "little"),
+                    }
+                )
                 c += 6
             elif p_type == 2:
-                params.append({"type": "unknown-2", "data": int.from_bytes(cmd_data[c+2:c+6], "little")})
+                params.append(
+                    {
+                        "type": "unknown-2",
+                        "data": int.from_bytes(cmd_data[c + 2 : c + 6], "little"),
+                    }
+                )
                 c += 6
             elif p_type == 3:
-                str_len = int.from_bytes(cmd_data[c+2:c+4], "little")
-                params.append({"type": "string", "data": cmd_data[c+4:c+4+str_len].decode("ascii").rstrip("\x00")}) #TODO: JP/KO compatibility
-                c += str_len+4
+                str_len = int.from_bytes(cmd_data[c + 2 : c + 4], "little")
+                params.append(
+                    {
+                        "type": "string",
+                        "data": cmd_data[c + 4 : c + 4 + str_len]
+                        .decode("ascii")
+                        .rstrip("\x00"),
+                    }
+                )  # TODO: JP/KO compatibility
+                c += str_len + 4
             elif p_type == 6:
-                params.append({"type": "unknown-6", "data": int.from_bytes(cmd_data[c+2:c+6], "little")})
+                params.append(
+                    {
+                        "type": "unknown-6",
+                        "data": int.from_bytes(cmd_data[c + 2 : c + 6], "little"),
+                    }
+                )
                 c += 6
             elif p_type == 7:
-                params.append({"type": "unknown-7", "data": int.from_bytes(cmd_data[c+2:c+6], "little")})
+                params.append(
+                    {
+                        "type": "unknown-7",
+                        "data": int.from_bytes(cmd_data[c + 2 : c + 6], "little"),
+                    }
+                )
                 c += 6
             elif p_type == 8:
                 params.append({"type": "unknown-8"})
@@ -74,39 +113,41 @@ def from_gds(Self, file):
             elif p_type == 9:
                 params.append({"type": "unknown-9"})
                 c += 2
-            elif p_type == 0xb:
+            elif p_type == 0xB:
                 params.append({"type": "unknown-b"})
                 c += 2
-            elif p_type == 0xc:
-                #cmd = hex(cmd)
-                cmds.append({"command":cmd, "parameters":params})
+            elif p_type == 0xC:
+                # cmd = hex(cmd)
+                cmds.append({"command": cmd, "parameters": params})
                 break
             else:
-                raise Exception(f"GDS file error: Invalid or unsupported parameter type {hex(p_type)}!")
-
+                raise Exception(
+                    f"GDS file error: Invalid or unsupported parameter type {hex(p_type)}!"
+                )
+
         return Self(cmds)
-
+
     @classmethod
-    def from_json (Self, file):
+    def from_json(Self, file):
         cmds = json.loads(file)["data"]
-        #TODO: reject non-compatible json files
+        # TODO: reject non-compatible json files
         return Self(cmds)
-
+
     @classmethod
     def from_yaml(Self, file):
         cmds = yaml.safe_load(file)["data"]
-        #TODO: reject non-compatible yaml files
+        # TODO: reject non-compatible yaml files
         return Self(cmds)
-
+
     @classmethod
-    def from_gda (Self, file): #TODO: make this, so gds_old can be completely removed
+    def from_gda(Self, file):  # TODO: make this, so gds_old can be completely removed
         cmds = []
-
+
         for line in file.split("\n"):
             line = line.strip()
             if line.startswith("#"):
                 continue
-            if line == '':
+            if line == "":
                 continue

             line, strings = parse.remove_strings(line)
@@ -125,51 +166,51 @@ def from_gda (Self, file): #TODO: make this, so gds_old can be completely remove
                 cmd = int(cmd[2:], base=16)
             else:
                 raise Exception(f"Unknown GDA command: {cmd}")
-
+
             params = []
             for param in line[1:]:
                 if param.isdigit():
-                    params.append({"type":"int", "data":int(param)})
+                    params.append({"type": "int", "data": int(param)})
                 elif param.startswith("0x"):
-                    params.append({"type":"unknown-2", "data":int(param[2:], 16)})
+                    params.append({"type": "unknown-2", "data": int(param[2:], 16)})
                 elif param.startswith('"') and param.endswith('"'):
                     param = ast.literal_eval(f'"{strings[int(param[1:-1])]}"')
-                    params.append({"type":"string", "data":param})
+                    params.append({"type": "string", "data": param})
                 elif param.startswith("!6("):
-                    params.append({"type":"unknown-6", "data":int(param[3:-1], 16)})
+                    params.append({"type": "unknown-6", "data": int(param[3:-1], 16)})
                 elif param.startswith("!7("):
-                    params.append({"type":"unknown-7", "data":int(param[3:-1], 16)})
+                    params.append({"type": "unknown-7", "data": int(param[3:-1], 16)})
                 elif param.startswith("!8"):
-                    params.append({"type":"unknown-8"})
+                    params.append({"type": "unknown-8"})
                 elif param.startswith("!9"):
-                    params.append({"type":"unknown-9"})
+                    params.append({"type": "unknown-9"})
                 elif param.startswith("!b"):
-                    params.append({"type":"unknown-b"})
+                    params.append({"type": "unknown-b"})
                 else:
                     raise Exception(f"Invalid GDA parameter: {param}")
-
-            cmds.append({"command":cmd, "parameters":params})
+
+            cmds.append({"command": cmd, "parameters": params})

         return Self(cmds)

-    def __getitem__ (self, index):
+    def __getitem__(self, index):
         index = int(index)
         return self.cmds[index]
-
+
     def to_json(self):
         return json.dumps({"version": v, "data": self.cmds}, indent=4)

     def to_yaml(self):
         return yaml.safe_dump({"version": v, "data": self.cmds})
-
-    def to_gds (self):
+
+    def to_gds(self):
         out = b"\x00" * 2
         for command in self.cmds:
             if type(command["command"]) == int:
                 out += command["command"].to_bytes(2, "little")
             else:
-                out += commands[command["command"]["id"]].to_bytes(2, "little")
+                out += command["command"]["id"].to_bytes(2, "little")
             for param in command["parameters"]:
                 if param["type"] == "int":
                     out += b"\x01\x00"
@@ -179,8 +220,10 @@ def to_gds (self):
                     out += param["data"].to_bytes(4, "little")
                 elif param["type"] == "string":
                     out += b"\x03\x00"
-                    out += (len(param["data"])+1).to_bytes(2, "little")
-                    out += param["data"].encode("ASCII") + b"\x00" #TODO: JP/KO compatibility
+                    out += (len(param["data"]) + 1).to_bytes(2, "little")
+                    out += (
+                        param["data"].encode("ASCII") + b"\x00"
+                    )  # TODO: JP/KO compatibility
                 elif param["type"] == "unknown-6":
                     out += b"\x06\x00"
                     out += param["data"].to_bytes(4, "little")
@@ -194,22 +237,24 @@ def to_gds (self):
                 elif param["type"] == "unknown-b":
                     out += b"\x0b\x00"
                 else:
-                    raise Exception(f"GDS JSON error: Invalid or unsupported parameter type '{param['type']}'!")
+                    raise Exception(
+                        f"GDS JSON error: Invalid or unsupported parameter type '{param['type']}'!"
+                    )
             out += b"\x00\x00"
         out = out[:-2] + b"\x0c\x00"
         return len(out).to_bytes(4, "little") + out
-
-    def to_bin (self): #alias
+
+    def to_bin(self):  # alias
         return self.to_gds()
-
+
     def to_gda(self):
         out = ""

         for command in self.cmds:
             if type(command["command"]) == int:
-                out += "0x"+command["command"].to_bytes(1, "little").hex()
+                out += "0x" + command["command"].to_bytes(1, "little").hex()
             else:
-                out += command['command']
+                out += command["command"]
             for param in command["parameters"]:
                 out += " "
                 if param["type"] == "int":
@@ -231,148 +276,261 @@ def to_gda(self):
                 elif param["type"] == "unknown-b":
                     out += "!b"
                 else:
-                    raise Exception(f"GDA error: invalid or unsupported parameter type '{param['type']}'!")
+                    raise Exception(
+                        f"GDA error: invalid or unsupported parameter type '{param['type']}'!"
+                    )
             out += "\n"
         return out

-@cli.command(
-    name="extract",
-    no_args_is_help = False
-    )
+
+@cli.command(name="compile", no_args_is_help=True)
 @click.argument("input", required=False, type=click.Path(exists=True))
 @click.argument("output", required=False, type=click.Path(exists=False))
-@click.option("--recursive", "-r", is_flag=True, help="Recurse into subdirectories of the input directory to find more applicable files.")
-@click.option("--quiet", "-q", is_flag=True, help="Suppress all output. By default, operations involving multiple files will show a progressbar.")
-def unpack_json(input = None, output = None, recursive = False, quiet = False):
+@click.option(
+    "--recursive",
+    "-r",
+    is_flag=True,
+    help="Recurse into subdirectories of the input directory to find more applicable files.",
+)
+@click.option(
+    "--quiet",
+    "-q",
+    is_flag=True,
+    help="Suppress all output. By default, operations involving multiple files will show a progressbar.",
+)
+@click.option(
+    "--overwrite/--no-overwrite",
+    "-o/-O",
+    default=True,
+    help="Whether existing files should be overwritten. Default: true",
+)
+@click.option(
+    "--format",
+    "-f",
+    required=False,
+    default=None,
+    multiple=False,
+    help="The format of the input file. Will be inferred from the file ending or content if unset. "
+    "If multiple file types would compile to the same output (but may not necessarily have the same content), "
+    "specify this to disambiguate. Possible values: gda, json, yaml",
+)
+def compile(
+    input=None, output=None, recursive=False, quiet=False, format=None, overwrite=None
+):
     """
-    Converts the GDS script(s) at INPUT to JSON files at OUTPUT.
+    Compiles the human-readable script(s) at INPUT into the game's binary script files at OUTPUT.

     INPUT can be a single file or a directory (which obviously has to exist). In the latter case subfiles with the correct file ending will be processed.
     If unset, defaults to the current working directory.

     The meaning of OUTPUT may depend on INPUT:
     - If INPUT is a file, then OUTPUT is expected to be a file, unless it explicitly ends with a slash indicating a directory.
-      In this case, if unset OUTPUT will default to the INPUT filename with `.json` exchanged/appended.
+      In this case, if unset OUTPUT will default to the INPUT filename with `.gds` exchanged/appended.
     - Otherwise OUTPUT has to be a directory as well (or an error will be shown).
       In this case, if unset OUTPUT will default to the INPUT directory (which may itself default to the current working directory).

     In the file-to-file case, the paths are explicitly used as they are. Otherwise, if multiple input files were collected, or OUTPUT is a directory,
-    an output path is inferred for each input file by exchanging the `.gds` file ending for `.json`, or otherwise appending the `.json` file ending.
+    an output path is inferred for each input file by exchanging the input format's file ending for `.gds`, or otherwise appending the `.gds` file ending.
+
+    In the case where INPUT is a directory, if no format is specified, this command will collect files of all compatible types. Note that this can lead
+    to situations where multiple files would compile to the same output (e.g. `test.json` and `test.gda` would both be candidates for `test.gds`);
+    this command will NOT make a choice in this case, and instead ask to explicitly specify the format to be used.
     """
+
+    in_endings = []
+    if format is None:
+        in_endings = [".gda", ".json", ".yaml", ".yml"]
+    elif format == "gda":
+        in_endings = [".gda"]
+    elif format == "json":
+        in_endings = [".json"]
+    elif format in ["yaml", "yml"]:
+        in_endings = [".yaml", ".yml"]
+    else:
+        raise Exception(f"Unsupported input format: '{format}'")
+
     def process(input, output):
-        input = open(input, "rb").read()
-        output = open(output, "w", encoding="utf-8")
-        gds = GDS(input)
-        output.write(gds.to_json())
+        inpath = input
+        input = open(inpath, "r", encoding="utf-8").read()
+
+        format2 = format
+        if format2 is None:
+            if inpath.lower().endswith(".gda"):
+                format2 = "gda"
+            elif inpath.lower().endswith(".json"):
+                format2 = "json"
+            elif inpath.lower().endswith(".yml") or inpath.lower().endswith(".yaml"):
+                format2 = "yaml"
+
+        gds = None
+        with contextlib.suppress(Exception):
+            if format2 == "gda":
+                gds = GDS.from_gda(input)
+            elif format2 == "json":
+                gds = GDS.from_json(input)
+            elif format2 in ["yaml", "yml"]:
+                gds = GDS.from_yaml(input)
+
+        if gds is None:
+            if format2 is not None:
+                # TODO: should this abort instead?
+                print(
+                    f"WARNING: Input file '{inpath}' did not have expected format '{format2}'",
+                    file=sys.stderr,
+                )
+            # format not specified and couldn't be inferred, or file turns out not to have the correct format
+            # => try all the formats & see which one works (only one should be possible)
+            for f in ["gda", "json", "yaml"]:
+                with contextlib.suppress(Exception):
+                    if f == "gda":
+                        gds = GDS.from_gda(input)
+                    elif f == "json":
+                        gds = GDS.from_json(input)
+                    elif f == "yaml":
+                        gds = GDS.from_yaml(input)
+                if gds is not None:
+                    break
+        if gds is None:
+            raise Exception(
+                f"File '{inpath}' couldn't be read: not a known file format"
+                + (f" (expected '{format2}')" if format2 is not None else "")
+            )
+
+        output = open(output, "wb")
+        output.write(gds.to_bin())
         output.close()

-    pairs = cli_file_pairs(input, output, in_ending=".gds", out_ending=".json", recursive=recursive)
-    foreach_file_pair(pairs, process, quiet=quiet)
+    pairs = cli_file_pairs(
+        input, output, in_endings=in_endings, out_ending=".gds", recursive=recursive
+    )
+    duplicates = collections.defaultdict(list)
+    for ip, op in pairs:
+        duplicates[op].append(ip)
+    duplicates = {k: v for k, v in duplicates.items() if len(v) > 1}
+    if len(duplicates) > 0:
+        print(
+            f"ERROR: {len(duplicates)} {'files have' if len(duplicates) > 1 else 'file has'} multiple conflicting source files; please explicitly specify a format to determine which should be used.",
+            file=sys.stderr,
+        )
+        for op, ips in duplicates.items():
+            pathlist = ", ".join("'" + ip + "'" for ip in ips)
+            print(f"'{op}' could be compiled from {pathlist}", file=sys.stderr)
+        sys.exit(-1)
+    if not overwrite:
+        new_pairs = []
+        existing = []
+        for ip, op in pairs:
+            if os.path.exists(op):
+                existing.append(op)
+            else:
+                new_pairs.append((ip, op))

-@cli.command(
-    name="compile",
-    no_args_is_help = True
-    )
-@click.argument("input")
-@click.argument("output", required=False, default = None)
-@click.option("--format", "-f", required=False, default = None, multiple=False, help="The format of the input file. Will be inferred from the file ending or content if unset. Possible values: gda, json, yaml")
-def compile(input, output, format):
-    """
-    Generates a GDS binary from a human-readable script file.
-    """
-    inpath = input
-    if format not in [None, "gda", "json", "yaml", "yml"]:
-        raise Exception(f"Unsupported input format: '{format}'")
+        if not quiet:
+            print(f"Skipping {len(existing)} existing output files.")

-    if format is None:
-        if inpath.lower().endswith(".gda"):
-            format = "gda"
-        elif inpath.lower().endswith(".json"):
-            format = "json"
-        elif inpath.lower().endswith(".yml") or inpath.lower().endswith(".yaml"):
-            format = "yaml"
-
-
-    if output is None:
-        output = inpath
-        if format == 'gda' and output.lower().endswith(".gda"):
-            output = output[:-4]
-        elif format == 'json' and output.lower().endswith(".json"):
-            output = output[:-5]
-        elif format in ['yaml', 'yml'] and output.lower().endswith(".yml"):
-            output = output[:-4]
-        elif format in ['yaml', 'yml'] and output.lower().endswith(".yaml"):
-            output = output[:-5]
-        output += ".gds"
-
-    input = open(inpath, encoding="utf-8").read()
-    gds = None
-    with contextlib.suppress(Exception):
-        if format == 'gda':
-            gds = GDS.from_gda(input)
-        elif format == 'json':
-            gds = GDS.from_json(input)
-        elif format in ['yaml', 'yml']:
-            gds = GDS.from_yaml(input)
-
-    if gds is None:
-        if format is not None:
-            # TODO: should this abort instead?
- print(f"WARNING: Input file '{inpath}' did not have expected format '{format}'", file = sys.stderr) - # format not specified and couldn't be inferred, or file turns out not to have the correct format - # => try all the formats & see which one works (only one should be possible) - for f in ["json", "yaml", "gda"]: - with contextlib.suppress(Exception): - if f == 'gda': - gds = GDS.from_gda(input) - elif f == 'json': - gds = GDS.from_json(input) - elif f == 'yaml': - gds = GDS.from_yaml(input) - if gds is None: - raise Exception(f"File '{inpath}' couldn't be read: not a known file format" - +(f" (expected '{format}')" if format is not None else "")) - - output = open(output, "wb") - output.write(gds.to_bin()) - output.close() - -@cli.command( - name="decompile", - no_args_is_help = True - ) -@click.argument("input") -@click.argument("output", required=False, default = None) -@click.option("--format", "-f", default="gda", required=False, multiple=False, help="The format used for output. Possible values: gda (default), json, yaml") -def decompile(input, output, format): + pairs = new_pairs + + foreach_file_pair(pairs, process, quiet=quiet) + + +@cli.command(name="decompile", no_args_is_help=True) +@click.argument("input", required=False, type=click.Path(exists=True)) +@click.argument("output", required=False, type=click.Path(exists=False)) +@click.option( + "--recursive", + "-r", + is_flag=True, + help="Recurse into subdirectories of the input directory to find more applicable files.", +) +@click.option( + "--quiet", + "-q", + is_flag=True, + help="Suppress all output. By default, operations involving multiple files will show a progressbar.", +) +@click.option( + "--overwrite/--no-overwrite", + "-o/-O", + default=True, + help="Whether existing files should be overwritten. Default: true", +) +@click.option( + "--format", + "-f", + required=False, + multiple=False, + help="The format used for output. Possible values: gda (default), json, yaml", +) +def decompile( + input=None, output=None, recursive=False, quiet=False, format=None, overwrite=None +): """ - Convert a GDS file into a human-readable GDA script format. + Decompiles the GDS script(s) at INPUT into a human-readable text format at OUTPUT. + + INPUT can be a single file or a directory (which obviously has to exist). In the latter case subfiles with the correct file ending will be processed. + If unset, defaults to the current working directory. + + The meaning of OUTPUT may depend on INPUT: + - If INPUT is a file, then OUTPUT is expected to be a file, unless it explicitly ends with a slash indicating a directory. + In this case, if unset OUTPUT will default to the INPUT filename with `.json` exchanged/appended. + - Otherwise OUTPUT has to be a directory as well (or an error will be shown). + In this case, if unset OUTPUT will default to the INPUT directory (which may itself default to the current working directory). + + In the file-to-file case, the paths are explicitly used as they are. Otherwise, if multiple input files were collected, or OUTPUT is a directory, + an output path is inferred for each input file by exchanging the `.gds` file ending for `.json`, or otherwise appending the `.json` file ending. 
""" out_ending = "" - if format == 'gda': + if format == "gda" or format is None: out_ending = ".gda" - elif format == 'json': + elif format == "json": out_ending = ".json" - elif format in ['yaml', 'yml']: + elif format in ["yaml", "yml"]: out_ending = ".yml" else: raise Exception(f"Unsupported output format: '{format}'") - - if output is None: - output = input - if output.lower().endswith(".gds"): - output = output[:-4] - output = output + out_ending - - input = open(input, "rb").read() - gds = GDS.from_gds(input) - - with open(output, "w", encoding="utf-8") as output: - if format == 'gda': - output.write(gds.to_gda()) - elif format == 'json': - output.write(gds.to_json()) - elif format in ['yaml', 'yml']: - output.write(gds.to_yaml()) + + def process(input, output): + input = open(input, "rb").read() + gds = GDS.from_gds(input) + + nonlocal format + if format is None: + if output.lower().endswith(".gda"): + format = "gda" + elif output.lower().endswith(".json"): + format = "json" + elif output.lower().endswith(".yml") or output.lower().endswith(".yaml"): + format = "yaml" + else: + print( + f"WARNING: output format couldn't be inferred from filename '{output}'; using default (gda). To remove this warning, please explicitly specify a format.", + file=sys.stderr, + ) + + with open(output, "w", encoding="utf-8") as output: + if format == "gda": + output.write(gds.to_gda()) + elif format == "json": + output.write(gds.to_json()) + elif format in ["yaml", "yml"]: + output.write(gds.to_yaml()) + + pairs = cli_file_pairs( + input, output, in_endings=[".gds"], out_ending=out_ending, recursive=recursive + ) + if not overwrite: + new_pairs = [] + existing = [] + for ip, op in pairs: + if os.path.exists(op): + existing.append(op) + else: + new_pairs.append((ip, op)) + + if not quiet: + print(f"Skipping {len(existing)} existing output files.") + + pairs = new_pairs + foreach_file_pair(pairs, process, quiet=quiet) diff --git a/utils.py b/utils.py index 82194f0..a35c133 100644 --- a/utils.py +++ b/utils.py @@ -1,6 +1,15 @@ import os -def cli_file_pairs(input = None, output = None, *, in_ending = None, out_ending = None, filter_infer=None, recursive = False): + +def cli_file_pairs( + input=None, + output=None, + *, + in_endings=None, + out_ending=None, + filter_infer=None, + recursive=False, +): """ Given the file path inputs to the various CLI commands, determines which input files should be operated on and mapped to which output files. @@ -9,13 +18,13 @@ def cli_file_pairs(input = None, output = None, *, in_ending = None, out_ending - If the input is a directory, all files in this directory (with the file ending `in_ending`) will be separately treated as input paths. - If there is no input (which also means there's no output), the current working directory is used as both input and output (which should be fine since most commands produce files of a different ending). - + Assuming the input is a file, then if the output is the same this pair is used as-is. Otherwise, if the output isn't specified (or a directory) he output path is *inferred* like this: - If the input ends with the expected file ending, that is stripped and replaced with the output file ending. It's expected that if a user wants more control over this file extension, they should provide an output manually for each input file. - If the input has a different ending (or none at all), the output file ending is simply appended to the full name. 
-
+
     This same inference is used if the input is a directory: here we have multiple input paths, for which the targets can not have been specified,
     and so the inference is applied to each of them separately. The output must be a directory (or the input directory itself if not specified)
     and all the output paths are calculated such that input paths relative to the input directory become output paths relative to the output directory
@@ -24,13 +33,13 @@

     if input is None:
         input = "."
-
+
     if not os.path.exists(input):
         raise FileNotFoundError(input)
-
+
     def listfiles(path):
         if recursive:
-            for (dp, _, fn) in os.walk(path, topdown=True):
+            for dp, _, fn in os.walk(path, topdown=True):
                 for f in fn:
                     yield os.path.join(dp, f)
         else:
@@ -38,24 +47,30 @@
                 if not os.path.isfile(os.path.join(path, f)):
                     continue
                 yield os.path.join(path, f)
-
+
     def default_filter_infer(input, force_accept=False):
-        if in_ending is not None and not input.lower().endswith(in_ending):
+        if in_endings is not None and not any(
+            input.lower().endswith(ie) for ie in in_endings
+        ):
             return None
         if out_ending is not None and input.lower().endswith(out_ending):
             return None
-
+
         output = input
-        if in_ending is not None and input.lower().endswith(in_ending):
-            output = input[:-len(in_ending)]
+        if in_endings is not None:
+            endings = [ie for ie in in_endings if input.lower().endswith(ie)]
+            if endings:
+                output = input[: -len(endings[0])]
         if out_ending is None:
-            raise ValueError("Can't infer output file names without a target file ending specified")
+            raise ValueError(
+                "Can't infer output file names without a target file ending specified"
+            )
         output += out_ending
         return output
-
+
     if filter_infer is None:
         filter_infer = default_filter_infer
-
+
     input_dir = ""
     input_paths = []
     rel_pairs = None
@@ -68,31 +83,40 @@ def default_filter_infer(input, force_accept=False):
         input_paths = [os.path.relpath(f, input_dir) for f in listfiles(input)]
         rel_pairs = [(ip, filter_infer(ip)) for ip in input_paths]
        rel_pairs = [(ip, op) for (ip, op) in rel_pairs if op is not None]
-
+
     if output is None:
         output = input_dir

-    if os.path.isfile(input) and not os.path.isdir(output) and os.path.split(output)[1] != '':
+    if (
+        os.path.isfile(input)
+        and not os.path.isdir(output)
+        and os.path.split(output)[1] != ""
+    ):
         return [(input, output)]
-
+
     if os.path.isfile(output):
         raise OSError(f"Output path exists but is not a directory: '{output}'")
     output_dir = output
-
-    pairs = [(os.path.join(input_dir, ip), os.path.join(output_dir, op)) for (ip, op) in rel_pairs]
+
+    pairs = [
+        (os.path.join(input_dir, ip), os.path.join(output_dir, op))
+        for (ip, op) in rel_pairs
+    ]
     return pairs

-def foreach_file_pair(pairs, fn, quiet = False):
+
+def foreach_file_pair(pairs, fn, quiet=False):
     try:
         from tqdm import tqdm
-        if not quiet:
+
+        if not quiet and len(pairs) > 5:
             progress = tqdm(pairs)
-            for (input, output) in progress:
+            for input, output in progress:
                 progress.set_description(input)
                 fn(input, output)
             return
     except ImportError:
         # TQDM isn't installed; just don't show a progress bar.
         pass
-    for (input, output) in pairs:
-        fn(input, output)
\ No newline at end of file
+    for input, output in pairs:
+        fn(input, output)
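Example usage of the reworked commands, assuming the `cli` group is wired up as a console entry point (shown here as a hypothetical `gds` command; the actual entry-point name and the example directory names are not part of this diff):

    gds decompile data/script/ scripts_gda/ -r -f gda
    gds compile scripts_gda/ data/script/ -r --no-overwrite

Both commands now accept whole directories and recurse with `-r`; `compile` collects `.gda`, `.json`, and `.yaml`/`.yml` sources unless `-f` narrows the input format, and refuses to run when two source files (e.g. `test.json` and `test.gda`) would produce the same `.gds` output until a format is specified explicitly.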