diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md
new file mode 100644
index 00000000..dcf32fdb
--- /dev/null
+++ b/plugins/FileMonitor/README.md
@@ -0,0 +1,35 @@
+# FileMonitor: Ver 0.1.0 (By David Maisonave)
+FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occur in the Stash library paths.
+
+### Using FileMonitor as a plugin
+- To start monitoring file changes, go to **Stash->Settings->Task->[Plugin Tasks]->FileMonitor**, and click on the [Start Library Monitor] button.
+  - ![FileMonitor_Task](https://github.com/user-attachments/assets/f275a70f-8e86-42a4-b2c1-98b3f4935334)
+- To stop this task, go to **Stash->Settings->Task->[Task Queue]**, and click on the **[x]**.
+  - ![Kill_FileMonitor_Task](https://github.com/user-attachments/assets/a3f4abca-f3a2-49fa-9db5-e0c733e0aeb1)
+
+### Using FileMonitor as a script
+**FileMonitor** can be called as a standalone script.
+- To start monitoring, call the script and pass any argument.
+  - python filemonitor.py **start**
+- To stop **FileMonitor**, pass the argument **stop**.
+  - python filemonitor.py **stop**
+  - After running the above command, **FileMonitor** stops after the next file change occurs.
+  - The stop command stops both the standalone job and the Stash plugin task job.
+
+### Requirements
+`pip install stashapp-tools`
+`pip install pyYAML`
+`pip install watchdog`
+
+### Installation
+- Follow the **Requirements** instructions.
+- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **FileMonitor**.
+- Copy all the plugin files to this folder (**C:\Users\MyUserName\\.stash\plugins\FileMonitor**).
+- Restart Stash.
+
+That's it!!!
+
+### Options
+- All options are accessible in the GUI via Settings->Plugins->Plugins->[FileMonitor].
+
+
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
new file mode 100644
index 00000000..ccfe0388
--- /dev/null
+++ b/plugins/FileMonitor/filemonitor.py
@@ -0,0 +1,289 @@
+# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
+# Note: To call this script outside of Stash, pass any argument.
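+#       When Stash runs this script as a plugin, no command-line arguments are passed and the plugin input (including the server connection) is read from stdin.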
+# Example: python filemonitor.py foofoo +import os +import sys +import time +import shutil +import fileinput +import hashlib +import json +from pathlib import Path +import requests +import logging +from logging.handlers import RotatingFileHandler +import stashapi.log as log # Importing stashapi.log as log for critical events ONLY +from stashapi.stashapp import StashInterface +from watchdog.observers import Observer # This is also needed for event attributes +import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/ +from threading import Lock, Condition +from multiprocessing import shared_memory +from filemonitor_config import config # Import settings from filemonitor_config.py + +# ********************************************************************** +# Constant global variables -------------------------------------------- +LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" +FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" +PLUGIN_ARGS = False +PLUGIN_ARGS_MODE = False +PLUGIN_ID = Path(__file__).stem.lower() +# GraphQL query to fetch all scenes +QUERY_ALL_SCENES = """ + query AllScenes { + allScenes { + id + updated_at + } + } +""" +RFH = RotatingFileHandler( + filename=LOG_FILE_PATH, + mode='a', + maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K + backupCount=2, + encoding=None, + delay=0 +) +TIMEOUT = 5 +CONTINUE_RUNNING_SIG = 99 + +# ********************************************************************** +# Global variables -------------------------------------------- +exitMsg = "Change success!!" +mutex = Lock() +signal = Condition(mutex) +shouldUpdate = False +TargetPaths = [] +runningInPluginMode = False + +# Configure local log file for plugin within plugin folder having a limited max log file size +logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH]) +logger = logging.getLogger(Path(__file__).stem) + +# ********************************************************************** +# ---------------------------------------------------------------------- +# Code section to fetch variables from Plugin UI and from filemonitor_settings.py +# Check if being called as Stash plugin +gettingCalledAsStashPlugin = True +stopLibraryMonitoring = False +StdInRead = None +try: + if len(sys.argv) == 1: + print(f"Attempting to read stdin. (len(sys.argv)={len(sys.argv)})", file=sys.stderr) + StdInRead = sys.stdin.read() + # for line in fileinput.input(): + # StdInRead = line + # break + else: + if len(sys.argv) > 1 and sys.argv[1].lower() == "stop": + stopLibraryMonitoring = True + raise Exception("Not called in plugin mode.") +except: + gettingCalledAsStashPlugin = False + print(f"Either len(sys.argv) not expected value OR sys.stdin.read() failed! 
(stopLibraryMonitoring={stopLibraryMonitoring}) (StdInRead={StdInRead}) (len(sys.argv)={len(sys.argv)})", file=sys.stderr) + pass + +if gettingCalledAsStashPlugin and StdInRead: + print(f"StdInRead={StdInRead} (len(sys.argv)={len(sys.argv)})", file=sys.stderr) + runningInPluginMode = True + json_input = json.loads(StdInRead) + FRAGMENT_SERVER = json_input["server_connection"] +else: + runningInPluginMode = False + FRAGMENT_SERVER = {'Scheme': config['endpoint_Scheme'], 'Host': config['endpoint_Host'], 'Port': config['endpoint_Port'], 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent} + print("Running in non-plugin mode!", file=sys.stderr) + +stash = StashInterface(FRAGMENT_SERVER) +PLUGINCONFIGURATION = stash.get_configuration()["plugins"] +STASHCONFIGURATION = stash.get_configuration()["general"] +STASHPATHSCONFIG = STASHCONFIGURATION['stashes'] +stashPaths = [] +settings = { + "recursiveDisabled": False, + "runCleanAfterDelete": False, + "scanModified": False, + "zzdebugTracing": False, + "zzdryRun": False, +} + +if PLUGIN_ID in PLUGINCONFIGURATION: + settings.update(PLUGINCONFIGURATION[PLUGIN_ID]) +# ---------------------------------------------------------------------- +debugTracing = settings["zzdebugTracing"] +RECURSIVE = settings["recursiveDisabled"] == False +SCAN_MODIFIED = settings["scanModified"] +RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"] +RUN_GENERATE_CONTENT = config['runGenerateContent'] + +for item in STASHPATHSCONFIG: + stashPaths.append(item["path"]) + +# Extract dry_run setting from settings +DRY_RUN = settings["zzdryRun"] +dry_run_prefix = '' +try: + PLUGIN_ARGS = json_input['args'] + PLUGIN_ARGS_MODE = json_input['args']["mode"] +except: + pass +logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************") +if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................") +if debugTracing: logger.info("settings: %s " % (settings,)) +if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................") +if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................") + +if DRY_RUN: + logger.info("Dry run mode is enabled.") + dry_run_prefix = "Would've " +if debugTracing: logger.info("Debug Tracing................") +# ---------------------------------------------------------------------- +# ********************************************************************** +if debugTracing: logger.info(f"Debug Tracing (SCAN_MODIFIED={SCAN_MODIFIED}) (RECURSIVE={RECURSIVE})................") + +def start_library_monitor(): + global shouldUpdate + global TargetPaths + try: + # Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script + shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=True, size=4) + except: + pass + logger.info("Could not open shared memory map. Change File Monitor must be running. 
Can not run multiple instance of Change File Monitor.") + return + type(shm_a.buf) + shm_buffer = shm_a.buf + len(shm_buffer) + shm_buffer[0] = CONTINUE_RUNNING_SIG + if debugTracing: logger.info(f"Shared memory map opended, and flag set to {shm_buffer[0]}") + RunCleanMetadata = False + + event_handler = watchdog.events.FileSystemEventHandler() + def on_created(event): + global shouldUpdate + global TargetPaths + TargetPaths.append(event.src_path) + logger.info(f"CREATE *** '{event.src_path}'") + with mutex: + shouldUpdate = True + signal.notify() + + def on_deleted(event): + global shouldUpdate + global TargetPaths + nonlocal RunCleanMetadata + TargetPaths.append(event.src_path) + logger.info(f"DELETE *** '{event.src_path}'") + with mutex: + shouldUpdate = True + RunCleanMetadata = True + signal.notify() + + def on_modified(event): + global shouldUpdate + global TargetPaths + if SCAN_MODIFIED: + TargetPaths.append(event.src_path) + logger.info(f"MODIFIED *** '{event.src_path}'") + with mutex: + shouldUpdate = True + signal.notify() + else: + if debugTracing: logger.info(f"Ignoring modifications due to plugin UI setting. path='{event.src_path}'") + + def on_moved(event): + global shouldUpdate + global TargetPaths + TargetPaths.append(event.src_path) + TargetPaths.append(event.dest_path) + logger.info(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'") + with mutex: + shouldUpdate = True + signal.notify() + + if debugTracing: logger.info("Debug Trace........") + event_handler.on_created = on_created + event_handler.on_deleted = on_deleted + event_handler.on_modified = on_modified + event_handler.on_moved = on_moved + + observer = Observer() + + # Iterate through stashPaths + for path in stashPaths: + observer.schedule(event_handler, path, recursive=RECURSIVE) + if debugTracing: logger.info(f"Observing {path}") + observer.start() + if debugTracing: logger.info("Starting loop................") + try: + while True: + TmpTargetPaths = [] + with mutex: + while not shouldUpdate: + if debugTracing: logger.info("Wait start................") + signal.wait() + if debugTracing: logger.info("Wait end................") + shouldUpdate = False + TmpTargetPaths = [] + for TargetPath in TargetPaths: + TmpTargetPaths.append(os.path.dirname(TargetPath)) + TargetPaths = [] + TmpTargetPaths = list(set(TmpTargetPaths)) + if TmpTargetPaths != []: + logger.info(f"Triggering stash scan for path(s) {TmpTargetPaths}") + if not DRY_RUN: + stash.metadata_scan(paths=TmpTargetPaths) + if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata: + stash.metadata_clean(paths=TmpTargetPaths, dry_run=DRY_RUN) + if RUN_GENERATE_CONTENT: + stash.metadata_generate() + if gettingCalledAsStashPlugin and shm_buffer[0] == CONTINUE_RUNNING_SIG: + stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor") + if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.") + return + else: + if debugTracing: logger.info("Nothing to scan.") + if shm_buffer[0] != CONTINUE_RUNNING_SIG: + logger.info(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})") + shm_a.close() + shm_a.unlink() # Call unlink only once to release the shared memory + raise KeyboardInterrupt + except KeyboardInterrupt: + observer.stop() + if debugTracing: logger.info("Stopping observer................") + observer.join() + if debugTracing: logger.info("Exiting function................") + +# This function is only useful when called outside of Stash. 
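+# Note: The stop command works by writing a value other than CONTINUE_RUNNING_SIG into the shared-memory block created by start_library_monitor;
+#       the running monitor notices the changed flag after the next file-system event and then exits.
+#       This stops both the standalone job and the Stash plugin task job.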
+# Example: python filemonitor.py stop
+# Monitoring stops after the next file change is triggered.
+# ToDo: Add logic so it doesn't have to wait until the next file change
+def stop_library_monitor():
+    if debugTracing: logger.info("Opening shared memory map.")
+    try:
+        shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=False, size=4)
+    except:
+        pass
+        logger.info("Could not open shared memory map. Change File Monitor must not be running.")
+        return
+    type(shm_a.buf)
+    shm_buffer = shm_a.buf
+    len(shm_buffer)
+    shm_buffer[0] = 123
+    if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
+    shm_a.close()
+    shm_a.unlink()  # Call unlink only once to release the shared memory
+    time.sleep(1)
+    return
+
+if stopLibraryMonitoring:
+    stop_library_monitor()
+    if debugTracing: logger.info(f"stop_library_monitor EXIT................")
+elif PLUGIN_ARGS_MODE == "start_library_monitor" or not gettingCalledAsStashPlugin:
+    start_library_monitor()
+    if debugTracing: logger.info(f"start_library_monitor EXIT................")
+else:
+    logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
+
+if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
new file mode 100644
index 00000000..14a41783
--- /dev/null
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -0,0 +1,34 @@
+name: FileMonitor
+description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
+version: 0.2.0
+url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
+settings:
+  recursiveDisabled:
+    displayName: No Recursive
+    description: Enable to stop monitoring paths recursively.
+    type: BOOLEAN
+  runCleanAfterDelete:
+    displayName: Run Clean
+    description: Enable to run the metadata clean task after file deletion.
+    type: BOOLEAN
+  scanModified:
+    displayName: Scan Modifications
+    description: Enable to monitor the file system for modification events. This option is NOT needed for Windows, because on Windows changes are triggered via CREATE, DELETE, and MOVE events. Other OSes may differ.
+    type: BOOLEAN
+  zzdebugTracing:
+    displayName: Debug Tracing
+    description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\FileMonitor\filemonitor.log
+    type: BOOLEAN
+  zzdryRun:
+    displayName: Dry Run
+    description: Enable to run the script in [Dry Run] mode. In this mode, Stash does NOT call metadata_scan, and only logs the action it would have taken.
+    type: BOOLEAN
+exec:
+  - python
+  - "{pluginDir}/filemonitor.py"
+interface: raw
+tasks:
+  - name: Start Library Monitor
+    description: Monitors paths in the Stash library for media file changes, and updates Stash.
+    defaultArgs:
+      mode: start_library_monitor
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
new file mode 100644
index 00000000..de0210b6
--- /dev/null
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -0,0 +1,12 @@
+# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
+# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
+# Get the latest developers version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
+config = {
+    # Enable to run metadata_generate (Generate Content) after metadata scan.
+    "runGenerateContent": False,
+
+    # The following fields are ONLY used when running FileMonitor in script mode.
+    "endpoint_Scheme" : "http", # Define the endpoint scheme to use when contacting the Stash server
+    "endpoint_Host" : "0.0.0.0", # Define the endpoint host to use when contacting the Stash server
+    "endpoint_Port" : 9999, # Define the endpoint port to use when contacting the Stash server
+}
diff --git a/plugins/FileMonitor/requirements.txt b/plugins/FileMonitor/requirements.txt
new file mode 100644
index 00000000..aa553701
--- /dev/null
+++ b/plugins/FileMonitor/requirements.txt
@@ -0,0 +1,4 @@
+stashapp-tools
+pyYAML
+watchdog
+requests
\ No newline at end of file
diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index e373aea8..7ea05101 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -1,5 +1,5 @@
-# RenameFile: Ver 0.2.5
-RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following two main task.
+# RenameFile: Ver 0.4.0 (By David Maisonave)
+RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
 - **Rename Scene File Name** (On-The-Fly)
 - **Append tag names** to file name
 - **Append Performer names** to file name
@@ -21,6 +21,15 @@ Note: This script is **largely** based on the [Renamer](https://github.com/Serec
 - To add these fields see the [Key Fields] option in Settings->Plugins->Plugins->[RenameFile].
 - The [Key Fields] can also be used to change the order for the file name format.
 - There are many options in Plugins->[RenameFile] UI, and all the options have detailed descriptions. Please advise us if any of the options need further clarification, and provide example details.
+  - **[Key Fields]**: (This option may require more detail than could be provided in the GUI)
+    - Define the key fields used to format the file name. This is a comma separated list, and the list should be in the desired format order. (Default=title,performers,studio,tags)
+    - For example, if the user wants the performer names before the title, list performers first.
+      - Example: "performers,title,tags".
+      - This is an example of a user adding height: "title,performers,tags,height"
+      - Here's an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".
+    - The **resolution** field combines width and height (for example, 1280x720).
+    - The date field is **not** populated by default unless the user explicitly adds the date value to a scene.
+    - If **[Key Fields]** is empty, the default value is used. (Default=title,performers,studio,tags)
 - There are additional options in renamefile_settings.py, but these options should only be changed by advanced users, and any changes should be tested first with the [Dry-Run] option enabled.
 
 **Note:** On Windows 10/11, the file can not be renamed while it's playing. It will result in following error:
@@ -30,15 +39,17 @@ Error: [WinError 32] The process cannot access the file because it is being used
 To avoid this error, refresh the URL before changing the Title field.
### Requirements -`pip install stashapp-tools` - -`pip install pyYAML` +pip install -r requirements.txt +- Or manually install each requirement: + - `pip install stashapp-tools` + - `pip install pyYAML` + - `pip install requests` ### Installation - Follow **Requirements** instructions. -- In the stash plugin directory (C:\Users\MyUserName\.stash\plugins), create a folder named **RenameFile**. -- Copy all the plugin files to this folder.(**C:\Users\MyUserName\\.stash\plugins\RenameFile**). -- Restart Stash. +- Create a folder named **RenameFile**, in the stash plugin directory (C:\Users\MyUserName\.stash\plugins). +- Download the latest version from the following link: [RenameFile](https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile), and copy the plugin files to folder.(**C:\Users\MyUserName\\.stash\plugins\RenameFile**). +- Click the **[Reload Plugins]** button in Stash->Settings->Plugins->Plugins. That's it!!! diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index f131a44d..ccf7b27e 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -1,72 +1,112 @@ -import requests +# Description: This is a Stash plugin which allows users to rename the video (scene) file name by editing the [Title] field located in the scene [Edit] tab. +# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) +# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile +# Based on source code from https://github.com/Serechops/Serechops-Stash/tree/main/plugins/Renamer import os -import logging +import sys import shutil -from pathlib import Path import hashlib import json -import sys +from pathlib import Path +import requests +import logging +from logging.handlers import RotatingFileHandler +import stashapi.log as log # Importing stashapi.log as log for critical events ONLY from stashapi.stashapp import StashInterface +from renamefile_settings import config # Import settings from renamefile_settings.py -# This is a Stash plugin which allows users to rename the video (scene) file name by editing the [Title] field located in the scene [Edit] tab. 
- -# Importing stashapi.log as log for critical events -import stashapi.log as log - -# Import settings from renamefile_settings.py -from renamefile_settings import config - -# Get the directory of the script -script_dir = Path(__file__).resolve().parent - -# Configure logging for your script -log_file_path = script_dir / 'renamefile.log' +# ********************************************************************** +# Constant global variables -------------------------------------------- +LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log" FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s" -logging.basicConfig(filename=log_file_path, level=logging.INFO, format=FORMAT) -logger = logging.getLogger('renamefile') -DEFAULT_ENDPOINT = "http://localhost:9999/graphql" # Default GraphQL endpoint -DEFAULT_FIELD_KEY_LIST = "title, performers, tags" # Default Field Key List with the desired order +DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order +PLUGIN_ID = Path(__file__).stem.lower() DEFAULT_SEPERATOR = "-" PLUGIN_ARGS = False - - - -# ------------------------------------------ -# ------------------------------------------ -# Code to fetch variables from Plugin UI +PLUGIN_ARGS_MODE = False +WRAPPER_STYLES = config["wrapper_styles"] +POSTFIX_STYLES = config["postfix_styles"] +# GraphQL query to fetch all scenes +QUERY_ALL_SCENES = """ + query AllScenes { + allScenes { + id + updated_at + } + } +""" +RFH = RotatingFileHandler( + filename=LOG_FILE_PATH, + mode='a', + maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K + backupCount=2, + encoding=None, + delay=0 +) + +# ********************************************************************** +# Global variables -------------------------------------------- +inputToUpdateScenePost = False +exitMsg = "Change success!!" 
+ +# Configure local log file for plugin within plugin folder having a limited max log file size +logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH]) +logger = logging.getLogger(PLUGIN_ID) + +# ********************************************************************** +# ---------------------------------------------------------------------- +# Code section to fetch variables from Plugin UI and from renamefile_settings.py json_input = json.loads(sys.stdin.read()) -FRAGMENT_SERVER = json_input["server_connection"] +FRAGMENT_SERVER = json_input['server_connection'] stash = StashInterface(FRAGMENT_SERVER) pluginConfiguration = stash.get_configuration()["plugins"] + settings = { - "dryRun": False, - "fileRenameViaMove": False, "performerAppend": False, - "performerIncludeInFileName": False, + "studioAppend": False, "tagAppend": False, - "tagIncludeInFileName": False, - "zFieldKeyList": DEFAULT_FIELD_KEY_LIST, - "zgraphqlEndpoint": DEFAULT_ENDPOINT, + "z_keyFIeldsIncludeInFileName": False, + "zafileRenameViaMove": False, + "zfieldKeyList": DEFAULT_FIELD_KEY_LIST, "zmaximumTagKeys": 12, - "zpathToExclude": "", "zseparators": DEFAULT_SEPERATOR, - "ztagWhitelist": "", "zzdebugTracing": False, + "zzdryRun": False, } -if "renamefile" in pluginConfiguration: - settings.update(pluginConfiguration["renamefile"]) -# ------------------------------------------ +if PLUGIN_ID in pluginConfiguration: + settings.update(pluginConfiguration[PLUGIN_ID]) +# ---------------------------------------------------------------------- debugTracing = settings["zzdebugTracing"] # Extract dry_run setting from settings -dry_run = settings["dryRun"] +dry_run = settings["zzdryRun"] dry_run_prefix = '' try: - PLUGIN_ARGS = json_input['args']["mode"] + PLUGIN_ARGS = json_input['args'] + PLUGIN_ARGS_MODE = json_input['args']["mode"] except: pass -logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS={PLUGIN_ARGS})************************************************") +try: + if json_input['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling rename logic twice +except: + pass +logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************") if debugTracing: logger.info("settings: %s " % (settings,)) + +if PLUGIN_ID in pluginConfiguration: + if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................") + # if 'zmaximumTagKeys' not in pluginConfiguration[PLUGIN_ID]: + # if debugTracing: logger.info("Debug Tracing................") + # try: + # stash.configure_plugin(PLUGIN_ID, settings) + # stash.configure_plugin("renamefile", {"zmaximumTagKeys": 12}) + # except Exception as e: + # logger.error(f"configure_plugin failed!!! 
Error: {e}") + # logger.exception('Got exception on main handler') + # pass + # # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True + # if debugTracing: logger.info("Debug Tracing................") + if dry_run: logger.info("Dry run mode is enabled.") dry_run_prefix = "Would've " @@ -74,23 +114,27 @@ max_tag_keys = settings["zmaximumTagKeys"] if settings["zmaximumTagKeys"] != 0 else 12 # Need this incase use explicitly sets value to zero in UI if debugTracing: logger.info("Debug Tracing................") # ToDo: Add split logic here to slpit possible string array into an array -exclude_paths = settings["zpathToExclude"] +exclude_paths = config["pathToExclude"] exclude_paths = exclude_paths.split() if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................") # Extract tag whitelist from settings -tag_whitelist = settings["ztagWhitelist"] +tag_whitelist = config["tagWhitelist"] if debugTracing: logger.info("Debug Tracing................") if not tag_whitelist: tag_whitelist = "" -endpoint = settings["zgraphqlEndpoint"] # GraphQL endpoint -if debugTracing: logger.info("Debug Tracing................") -if not endpoint or endpoint == "": - endpoint = DEFAULT_ENDPOINT +if debugTracing: logger.info(f"Debug Tracing (tag_whitelist={tag_whitelist})................") + +endpointHost = json_input['server_connection']['Host'] +if endpointHost == "0.0.0.0": + endpointHost = "localhost" +endpoint = f"{json_input['server_connection']['Scheme']}://{endpointHost}:{json_input['server_connection']['Port']}/graphql" + +if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................") # Extract rename_files and move_files settings from renamefile_settings.py rename_files = config["rename_files"] -move_files = settings["fileRenameViaMove"] +move_files = settings["zafileRenameViaMove"] if debugTracing: logger.info("Debug Tracing................") -fieldKeyList = settings["zFieldKeyList"] # Default Field Key List with the desired order +fieldKeyList = settings["zfieldKeyList"] # Default Field Key List with the desired order if not fieldKeyList or fieldKeyList == "": fieldKeyList = DEFAULT_FIELD_KEY_LIST fieldKeyList = fieldKeyList.replace(" ", "") @@ -98,21 +142,12 @@ fieldKeyList = fieldKeyList.split(",") if debugTracing: logger.info(f"Debug Tracing (fieldKeyList={fieldKeyList})................") separator = settings["zseparators"] -# ------------------------------------------ -# ------------------------------------------ -double_separator = separator + separator - +# ---------------------------------------------------------------------- +# ********************************************************************** - -# GraphQL query to fetch all scenes -query_all_scenes = """ - query AllScenes { - allScenes { - id - updated_at - } - } -""" +double_separator = separator + separator +if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ARGS={PLUGIN_ARGS}) (WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})................") +if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID=\"{PLUGIN_ID}\")................") if debugTracing: logger.info("Debug Tracing................") # Function to make GraphQL requests @@ -142,14 +177,13 @@ def should_exclude_path(scene_details): return False # Function to form the new filename based on scene details and user settings -def form_filename(original_file_stem, scene_details, wrapper_styles): +def form_filename(original_file_stem, scene_details): if debugTracing: logger.info("Debug 
Tracing................") filename_parts = [] tag_keys_added = 0 default_title = '' if_notitle_use_org_filename = config["if_notitle_use_org_filename"] - include_tag_if_in_name = settings["tagIncludeInFileName"] - include_performer_if_in_name = settings["performerIncludeInFileName"] + include_keyField_if_in_name = settings["z_keyFIeldsIncludeInFileName"] if if_notitle_use_org_filename: default_title = original_file_stem # ................... @@ -166,15 +200,14 @@ def form_filename(original_file_stem, scene_details, wrapper_styles): def add_tag(tag_name): nonlocal tag_keys_added nonlocal filename_parts - nonlocal wrapper_styles if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)): return # Skip adding more tags if the maximum limit is reached # Check if the tag name is in the whitelist if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist): - if wrapper_styles.get('tag'): - filename_parts.append(f"{wrapper_styles['tag'][0]}{tag_name}{wrapper_styles['tag'][1]}") + if WRAPPER_STYLES.get('tag'): + filename_parts.append(f"{WRAPPER_STYLES['tag'][0]}{tag_name}{WRAPPER_STYLES['tag'][1]}") if debugTracing: logger.info("Debug Tracing................") else: filename_parts.append(tag_name) @@ -187,69 +220,120 @@ def add_tag(tag_name): for key in fieldKeyList: if key == 'studio': - studio_name = scene_details.get('studio', {}).get('name', '') - if studio_name: - if wrapper_styles.get('studio'): - filename_parts.append(f"{wrapper_styles['studio'][0]}{studio_name}{wrapper_styles['studio'][1]}") - else: - filename_parts.append(studio_name) + if settings["studioAppend"]: + if debugTracing: logger.info("Debug Tracing................") + studio_name = scene_details.get('studio', {}) + if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................") + if studio_name: + studio_name = scene_details.get('studio', {}).get('name', '') + if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................") + if studio_name: + studio_name += POSTFIX_STYLES.get('studio') + if debugTracing: logger.info("Debug Tracing................") + if include_keyField_if_in_name or studio_name.lower() not in title.lower(): + if WRAPPER_STYLES.get('studio'): + filename_parts.append(f"{WRAPPER_STYLES['studio'][0]}{studio_name}{WRAPPER_STYLES['studio'][1]}") + else: + filename_parts.append(studio_name) elif key == 'title': if title: # This value has already been fetch in start of function because it needs to be defined before tags and performers - if wrapper_styles.get('title'): - filename_parts.append(f"{wrapper_styles['title'][0]}{title}{wrapper_styles['title'][1]}") + title += POSTFIX_STYLES.get('title') + if WRAPPER_STYLES.get('title'): + filename_parts.append(f"{WRAPPER_STYLES['title'][0]}{title}{WRAPPER_STYLES['title'][1]}") else: filename_parts.append(title) elif key == 'performers': if settings["performerAppend"]: performers = '-'.join([performer.get('name', '') for performer in scene_details.get('performers', [])]) if performers: - if debugTracing: logger.info(f"Debug Tracing (include_performer_if_in_name={include_performer_if_in_name})................") - if include_performer_if_in_name or performers.lower() not in title.lower(): + performers += POSTFIX_STYLES.get('performers') + if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name})................") + if 
include_keyField_if_in_name or performers.lower() not in title.lower(): if debugTracing: logger.info(f"Debug Tracing (performers={performers})................") - if wrapper_styles.get('performers'): - filename_parts.append(f"{wrapper_styles['performers'][0]}{performers}{wrapper_styles['performers'][1]}") + if WRAPPER_STYLES.get('performers'): + filename_parts.append(f"{WRAPPER_STYLES['performers'][0]}{performers}{WRAPPER_STYLES['performers'][1]}") else: filename_parts.append(performers) elif key == 'date': scene_date = scene_details.get('date', '') + if debugTracing: logger.info("Debug Tracing................") if scene_date: - if wrapper_styles.get('date'): - filename_parts.append(f"{wrapper_styles['date'][0]}{scene_date}{wrapper_styles['date'][1]}") + scene_date += POSTFIX_STYLES.get('date') + if debugTracing: logger.info("Debug Tracing................") + if WRAPPER_STYLES.get('date'): + filename_parts.append(f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}") else: filename_parts.append(scene_date) + elif key == 'resolution': + width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string + height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string + if width and height: + resolution = width + POSTFIX_STYLES.get('width_height_seperator') + height + POSTFIX_STYLES.get('resolution') + if WRAPPER_STYLES.get('resolution'): + filename_parts.append(f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}") + else: + filename_parts.append(resolution) + elif key == 'width': + width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string + if width: + width += POSTFIX_STYLES.get('width') + if WRAPPER_STYLES.get('width'): + filename_parts.append(f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}") + else: + filename_parts.append(width) elif key == 'height': height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string if height: - height += 'p' - if wrapper_styles.get('height'): - filename_parts.append(f"{wrapper_styles['height'][0]}{height}{wrapper_styles['height'][1]}") + height += POSTFIX_STYLES.get('height') + if WRAPPER_STYLES.get('height'): + filename_parts.append(f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}") else: filename_parts.append(height) elif key == 'video_codec': video_codec = scene_details.get('files', [{}])[0].get('video_codec', '').upper() # Convert to uppercase if video_codec: - if wrapper_styles.get('video_codec'): - filename_parts.append(f"{wrapper_styles['video_codec'][0]}{video_codec}{wrapper_styles['video_codec'][1]}") + video_codec += POSTFIX_STYLES.get('video_codec') + if WRAPPER_STYLES.get('video_codec'): + filename_parts.append(f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}") else: filename_parts.append(video_codec) elif key == 'frame_rate': - frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + ' FPS' # Convert to string and append ' FPS' + frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + 'FPS' # Convert to string and append ' FPS' if frame_rate: - if wrapper_styles.get('frame_rate'): - filename_parts.append(f"{wrapper_styles['frame_rate'][0]}{frame_rate}{wrapper_styles['frame_rate'][1]}") + frame_rate += POSTFIX_STYLES.get('frame_rate') + if WRAPPER_STYLES.get('frame_rate'): + 
filename_parts.append(f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}") else: filename_parts.append(frame_rate) + elif key == 'galleries': + galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])] + if debugTracing: logger.info("Debug Tracing................") + for gallery_name in galleries: + if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})................") + if include_keyField_if_in_name or gallery_name.lower() not in title.lower(): + gallery_name += POSTFIX_STYLES.get('galleries') + if WRAPPER_STYLES.get('galleries'): + filename_parts.append(f"{WRAPPER_STYLES['galleries'][0]}{gallery_name}{WRAPPER_STYLES['galleries'][1]}") + if debugTracing: logger.info("Debug Tracing................") + else: + filename_parts.append(gallery_name) + if debugTracing: logger.info("Debug Tracing................") + if debugTracing: logger.info(f"Debug Tracing (gallery_name={gallery_name})................") + if debugTracing: logger.info("Debug Tracing................") elif key == 'tags': if settings["tagAppend"]: tags = [tag.get('name', '') for tag in scene_details.get('tags', [])] if debugTracing: logger.info("Debug Tracing................") for tag_name in tags: - if debugTracing: logger.info(f"Debug Tracing (include_tag_if_in_name={include_tag_if_in_name})................") - if include_tag_if_in_name or tag_name.lower() not in title.lower(): - add_tag(tag_name) + if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})................") + if include_keyField_if_in_name or tag_name.lower() not in title.lower(): + add_tag(tag_name + POSTFIX_STYLES.get('tag')) if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") + if debugTracing: logger.info("Debug Tracing................") + if debugTracing: logger.info(f"Debug Tracing (filename_parts={filename_parts})................") new_filename = separator.join(filename_parts).replace(double_separator, separator) + if debugTracing: logger.info(f"Debug Tracing (new_filename={new_filename})................") # Check if the scene's path matches any of the excluded paths if exclude_paths and should_exclude_path(scene_details): @@ -267,10 +351,14 @@ def find_scene_by_id(scene_id): date files { path + width height video_codec frame_rate } + galleries { + title + } studio { name } @@ -287,6 +375,7 @@ def find_scene_by_id(scene_id): return scene_result.get('data', {}).get('findScene') def move_or_rename_files(scene_details, new_filename, original_parent_directory): + global exitMsg studio_directory = None for file_info in scene_details['files']: path = file_info['path'] @@ -325,12 +414,13 @@ def move_or_rename_files(scene_details, new_filename, original_parent_directory) except FileNotFoundError: log.error(f"File not found: {path}. Skipping...") logger.error(f"File not found: {path}. Skipping...") + exitMsg = "File not found" continue except OSError as e: log.error(f"Failed to move or rename file: {path}. Error: {e}") logger.error(f"Failed to move or rename file: {path}. 
Error: {e}") + exitMsg = "Failed to move or rename file" continue - return new_path # Return the new_path variable after the loop def perform_metadata_scan(metadata_scan_path): @@ -345,7 +435,8 @@ def perform_metadata_scan(metadata_scan_path): logger.info(f"Mutation string: {mutation_metadata_scan}") graphql_request(mutation_metadata_scan) -def rename_scene(scene_id, wrapper_styles, stash_directory): +def rename_scene(scene_id, stash_directory): + global exitMsg scene_details = find_scene_by_id(scene_id) if debugTracing: logger.info(f"Debug Tracing (scene_details={scene_details})................") if not scene_details: @@ -372,7 +463,7 @@ def rename_scene(scene_id, wrapper_styles, stash_directory): original_file_stem = Path(original_file_path).stem original_file_name = Path(original_file_path).name - new_filename = form_filename(original_file_stem, scene_details, wrapper_styles) + new_filename = form_filename(original_file_stem, scene_details) newFilenameWithExt = new_filename + Path(original_file_path).suffix if debugTracing: logger.info(f"Debug Tracing (original_file_name={original_file_name})(newFilenameWithExt={newFilenameWithExt})................") if original_file_name == newFilenameWithExt: @@ -400,14 +491,13 @@ def rename_scene(scene_id, wrapper_styles, stash_directory): os.rename(original_file_path, new_file_path) logger.info(f"{dry_run_prefix}Renamed file: {original_file_path} -> {new_file_path}") except Exception as e: + exitMsg = "Failed to rename file" log.error(f"Failed to rename file: {original_file_path}. Error: {e}") logger.error(f"Failed to rename file: {original_file_path}. Error: {e}") metadata_scan_path = original_parent_directory perform_metadata_scan(metadata_scan_path) - # ToDo: Add logic to the below code section so it checks base file length and checks folder length, instead of lumping them altogether. 
- # Current DB schema allows file folder max length to be 255, and max base filename to be 255 max_filename_length = int(config["max_filename_length"]) if len(new_filename) > max_filename_length: extension_length = len(Path(original_file_path).suffix) @@ -415,14 +505,15 @@ def rename_scene(scene_id, wrapper_styles, stash_directory): truncated_filename = new_filename[:max_base_filename_length] hash_suffix = hashlib.md5(new_filename.encode()).hexdigest() new_filename = truncated_filename + '_' + hash_suffix + Path(original_file_path).suffix - + + if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................") return new_filename, original_path_info, new_path_info # Main default function for rename scene def rename_files_task(): if debugTracing: logger.info("Debug Tracing................") # Execute the GraphQL query to fetch all scenes - scene_result = graphql_request(query_all_scenes) + scene_result = graphql_request(QUERY_ALL_SCENES) if debugTracing: logger.info("Debug Tracing................") all_scenes = scene_result.get('data', {}).get('allScenes', []) if debugTracing: logger.info("Debug Tracing................") @@ -439,19 +530,13 @@ def rename_files_task(): # Extract the ID of the latest scene latest_scene_id = latest_scene.get('id') - - # Extract wrapper styles - wrapper_styles = config["wrapper_styles"] - # Read stash directory from renamefile_settings.py stash_directory = config.get('stash_directory', '') if debugTracing: logger.info("Debug Tracing................") - if debugTracing: logger.info("Debug Tracing................") - # Rename the latest scene and trigger metadata scan - new_filename = rename_scene(latest_scene_id, wrapper_styles, stash_directory) - if debugTracing: logger.info("Debug Tracing................") + new_filename = rename_scene(latest_scene_id, stash_directory) + if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................") # Log dry run state and indicate if no changes were made if dry_run: @@ -460,24 +545,21 @@ def rename_files_task(): elif not new_filename: logger.info("No changes were made.") else: - logger.info("Change success!") + logger.info(f"{exitMsg}") return def fetch_dup_filename_tags(): # Place holder for new implementation return -if PLUGIN_ARGS == "fetch_dup_filename_tags": +if PLUGIN_ARGS_MODE == "fetch_dup_filename_tags": fetch_dup_filename_tags() -elif PLUGIN_ARGS == "rename_files_task": +elif PLUGIN_ARGS_MODE == "rename_files_task": rename_files_task() -else: +elif inputToUpdateScenePost: rename_files_task() if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************") - -# ToDo List - # Add logic to max_filename_length code so it checks base file length and checks folder length, instead of lumping them altogether. +# ToDo: Wish List # Add logic to update Sqlite DB on file name change, instead of perform_metadata_scan. - # Get variables from the Plugins Settings UI instead of from renamefile_settings.py # Add code to get tags from duplicate filenames \ No newline at end of file diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index b838025c..e5d2a0f0 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -1,60 +1,48 @@ name: RenameFile description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. 
-version: 0.2.6 +version: 0.4.0 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile settings: - dryRun: - displayName: Dry Run - description: Enable to run script in [Dry Run] mode. In dry run mode, files are NOT renamed, and only logging is performed. Use the logging to determine if rename will occur as expected. This should always be enabled on the first run after renamefile_settings.py has been modified. - type: BOOLEAN - fileRenameViaMove: - displayName: Rename Using Move - description: Enable to have file moved when renaming file. - type: BOOLEAN performerAppend: displayName: Append Performers description: Enable to append performers name to file name when renaming a file. Requires performers to be included in [Key Fields] list, which by default it is included. type: BOOLEAN - performerIncludeInFileName: - displayName: Include Existing Performers - description: Enable to append performer even if performers name already exists in the original file name. + studioAppend: + displayName: Append Studio + description: Enable to append studio name to file name when renaming a file. Requires studio to be included in [Key Fields] list, which by default it is included. type: BOOLEAN tagAppend: displayName: Append Tags description: Enable to append tag names to file name when renaming a file. Requires tags to be included in [Key Fields] list, which by default it is included. type: BOOLEAN - tagIncludeInFileName: - displayName: Include Existing Tags - description: Enable to append tag name even if tag already exists in original file name. + z_keyFIeldsIncludeInFileName: # Prefixing z_ to variable names so that the GUI will place these fields after above fields (alphabatically listed) + displayName: Include Existing Key Field + description: Enable to append performer, tags, studios, & galleries even if name already exists in the original file name. + type: BOOLEAN + zafileRenameViaMove: + displayName: Rename Using Move + description: Enable to have file moved when renaming file. type: BOOLEAN - zFieldKeyList: + zfieldKeyList: displayName: Key Fields - description: '(Default=title,performers,tags) Define key fields to use to format the file name. This is a comma seperated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,date,height,video_codec,frame_rate".' - type: STRING - zgraphqlEndpoint: # Prefixing z_ to variable names so that the GUI will place these fields after above fields (alphabatically listed) - displayName: GraphQL Endpoint - description: (Default=http://localhost:9999/graphql). Update with your endpoint, or leave blank to use default. + description: '(Default=title,performers,studio,tags) Define key fields to use to format the file name. This is a comma seperated list, and the list should be in the desired format order. For example, if the user wants the performers name before the title, set the performers name first. Example:"performers,title,tags". This is an example of user adding height:"title,performers,tags,height" Here''s an example using all of the supported fields: "title,performers,tags,studio,galleries,resolution,width,height,video_codec,frame_rate,date".' 
type: STRING zmaximumTagKeys: displayName: Max Tag Keys description: (Default=12) Maximum quantity of tag keys to append to file name. 0=Default(12); -1=No tags appended. type: NUMBER - zpathToExclude: - displayName: Exclude Path - description: 'Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"' - type: STRING zseparators: displayName: Separator description: '(Default=-) Define the separator to use between different parts of the filename. Example Usage: ","' type: STRING - ztagWhitelist: - displayName: Tag Whitelist - description: 'Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"' - type: STRING zzdebugTracing: displayName: Debug Tracing description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\RenameFile\renamefile.log type: BOOLEAN + zzdryRun: + displayName: Dry Run + description: Enable to run script in [Dry Run] mode. In dry run mode, files are NOT renamed, and only logging is performed. Use the logging to determine if rename will occur as expected. This should always be enabled on the first run after renamefile_settings.py has been modified. + type: BOOLEAN exec: - python - "{pluginDir}/renamefile.py" @@ -65,7 +53,7 @@ hooks: triggeredBy: - Scene.Update.Post tasks: - - name: Rename Files Task - description: Renames scene files. + - name: Rename Last Scene + description: Renames file of last updated scene. defaultArgs: mode: rename_files_task diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py index a027bd01..24052f8a 100644 --- a/plugins/RenameFile/renamefile_settings.py +++ b/plugins/RenameFile/renamefile_settings.py @@ -1,4 +1,4 @@ -# Importing config dictionary +# By David Maisonave (aka Axter) 2024 # RenameFile plugin main configuration options are available on the Stash GUI under Settings->Plugins->Plugins->[RenameFile]. # Most users should only use the GUI options. # The configuration options in this file are for advanced users ONLY!!! @@ -9,22 +9,42 @@ config = { # Define wrapper styles for different parts of the filename. # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None. - "wrapper_styles": { - "studio": '[]', # Modify these values to change how each part of the filename is wrapped. - "title": '', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None. - "performers": '()', # Modify these values to change how each part of the filename is wrapped. - "date": '[]', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None. - "height": '()', # Modify these values to change how each part of the filename is wrapped. - "video_codec": '[]', # Use '[]' for square brackets, '{}' for curly brackets, '()' for parentheses, or an empty string for None. - "frame_rate": '[]', # Modify these values to change how each part of the filename is wrapped. - "tag": '[]' # Modify these values to change how each tag part of the filename is wrapped. + "wrapper_styles": { # Modify these values to change how each part of the filename is wrapped. 
+ "title": '', + "performers": '()', + "tag": '[]', + "studio": '{}', + "galleries": '()', + "resolution": '', # Contains both WITH and HEIGHT + "width": '', + "height": '', + "video_codec": '', + "frame_rate": '', + "date": '()', # This field is not populated in the DB by default. It's usually empty. }, + # Define the field postfix + "postfix_styles": { + "title": '', + "performers": '', + "tag": '', + "studio": '', + "galleries": '', + "resolution": 'P', # Contains both WITH and HEIGHT + "width": 'W', + "height": 'P', + "width_height_seperator": 'x', # Used in RESOLUTION field as the string seperating WITH and HEIGHT. Example: 720x480 or 1280X720 + "video_codec": '', + "frame_rate": 'FR', + "date": '', + }, + # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath" + "pathToExclude": "", + # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3" + "tagWhitelist": "", # Define whether files should be renamed when moved "rename_files": True, # Define whether the original file name should be used if title is empty "if_notitle_use_org_filename": True, # Warning: Do not recommend setting this to False. # Current Stash DB schema only allows maximum base file name length to be 255 "max_filename_length": 255, - # "max_filefolder_length": 255, # For future useage - # "max_filebase_length": 255, # For future useage }