diff --git a/.gitignore b/.gitignore
index 37b3c86..12feb4a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,5 @@
 __pycache__/
 .pytest_cache/
-.vscode/
 config/config.conf
 ToDo
 snip*.py
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..38bafbf
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,11 @@
+repos:
+  - repo: local
+    hooks:
+      - id: black
+        name: black
+        entry: venv/bin/black
+        language: system
+        types: [python]
+
+
+
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..fe4b83c
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,4 @@
+{
+    "editor.formatOnSave": true,
+    "editor.defaultFormatter": "ms-python.black-formatter"
+}
\ No newline at end of file
diff --git a/config/definitions.py b/config/definitions.py
index ae31745..c7e6a45 100644
--- a/config/definitions.py
+++ b/config/definitions.py
@@ -1,86 +1,132 @@
 #!/usr/bin/env python
 from config.parser import get_config_value
 from config.env_vars import *
+
 # Define data types and default values for settingsDict variables
-# General
-LOG_LEVEL = get_config_value('LOG_LEVEL', 'general', False, str, 'INFO')
-TEST_RUN = get_config_value('TEST_RUN', 'general', False, bool, False)
-SSL_VERIFICATION = get_config_value('SSL_VERIFICATION', 'general', False, bool, True)
+# General
+LOG_LEVEL = get_config_value("LOG_LEVEL", "general", False, str, "INFO")
+TEST_RUN = get_config_value("TEST_RUN", "general", False, bool, False)
+SSL_VERIFICATION = get_config_value("SSL_VERIFICATION", "general", False, bool, True)
 
-# Features
-REMOVE_TIMER = get_config_value('REMOVE_TIMER', 'features', False, float, 10)
-REMOVE_FAILED = get_config_value('REMOVE_FAILED', 'features', False, bool, False)
-REMOVE_FAILED_IMPORTS = get_config_value('REMOVE_FAILED_IMPORTS' , 'features', False, bool, False)
-REMOVE_METADATA_MISSING = get_config_value('REMOVE_METADATA_MISSING', 'features', False, bool, False)
-REMOVE_MISSING_FILES = get_config_value('REMOVE_MISSING_FILES' , 'features', False, bool, False)
-REMOVE_NO_FORMAT_UPGRADE = get_config_value('REMOVE_NO_FORMAT_UPGRADE' , 'features', False, bool, False) # OUTDATED - WILL RETURN WARNING
-REMOVE_ORPHANS = get_config_value('REMOVE_ORPHANS' , 'features', False, bool, False)
-REMOVE_SLOW = get_config_value('REMOVE_SLOW' , 'features', False, bool, False)
-REMOVE_STALLED = get_config_value('REMOVE_STALLED', 'features', False, bool, False)
-REMOVE_UNMONITORED = get_config_value('REMOVE_UNMONITORED', 'features', False, bool, False)
-CANCEL_UNAVAILABLE_FILES = get_config_value('CANCEL_UNAVAILABLE_FILES', 'features', False, bool, False)
-MIN_DOWNLOAD_SPEED = get_config_value('MIN_DOWNLOAD_SPEED', 'features', False, int, 0)
-PERMITTED_ATTEMPTS = get_config_value('PERMITTED_ATTEMPTS', 'features', False, int, 3)
-NO_STALLED_REMOVAL_QBIT_TAG = get_config_value('NO_STALLED_REMOVAL_QBIT_TAG', 'features', False, str, 'Don\'t Kill')
-IGNORE_PRIVATE_TRACKERS = get_config_value('IGNORE_PRIVATE_TRACKERS', 'features', False, bool, True)
-FAILED_IMPORT_MESSAGE_PATTERNS = get_config_value('FAILED_IMPORT_MESSAGE_PATTERNS','features', False, list, [])
+# Features
+REMOVE_TIMER = get_config_value("REMOVE_TIMER", "features", False, float, 10)
+REMOVE_FAILED = get_config_value("REMOVE_FAILED", "features", False, bool, False)
+REMOVE_FAILED_IMPORTS = get_config_value(
+    "REMOVE_FAILED_IMPORTS", "features", False, bool, False
+)
+REMOVE_METADATA_MISSING = get_config_value(
+    "REMOVE_METADATA_MISSING", "features", False, bool, False
+)
+REMOVE_MISSING_FILES = get_config_value(
+    "REMOVE_MISSING_FILES", "features", False, bool, False
+)
+REMOVE_NO_FORMAT_UPGRADE = get_config_value(
+    "REMOVE_NO_FORMAT_UPGRADE", "features", False, bool, False
+)  # OUTDATED - WILL RETURN WARNING
+REMOVE_ORPHANS = get_config_value("REMOVE_ORPHANS", "features", False, bool, False)
+REMOVE_SLOW = get_config_value("REMOVE_SLOW", "features", False, bool, False)
+REMOVE_STALLED = get_config_value("REMOVE_STALLED", "features", False, bool, False)
+REMOVE_UNMONITORED = get_config_value(
+    "REMOVE_UNMONITORED", "features", False, bool, False
+)
+CANCEL_UNAVAILABLE_FILES = get_config_value(
+    "CANCEL_UNAVAILABLE_FILES", "features", False, bool, False
+)
+MIN_DOWNLOAD_SPEED = get_config_value("MIN_DOWNLOAD_SPEED", "features", False, int, 0)
+PERMITTED_ATTEMPTS = get_config_value("PERMITTED_ATTEMPTS", "features", False, int, 3)
+NO_STALLED_REMOVAL_QBIT_TAG = get_config_value(
+    "NO_STALLED_REMOVAL_QBIT_TAG", "features", False, str, "Don't Kill"
+)
+IGNORE_PRIVATE_TRACKERS = get_config_value(
+    "IGNORE_PRIVATE_TRACKERS", "features", False, bool, True
+)
+FAILED_IMPORT_MESSAGE_PATTERNS = get_config_value(
+    "FAILED_IMPORT_MESSAGE_PATTERNS", "features", False, list, []
+)
 
 # Radarr
-RADARR_URL = get_config_value('RADARR_URL', 'radarr', False, str)
-RADARR_KEY = None if RADARR_URL == None else \
-    get_config_value('RADARR_KEY', 'radarr', True, str)
+RADARR_URL = get_config_value("RADARR_URL", "radarr", False, str)
+RADARR_KEY = (
+    None if RADARR_URL == None else get_config_value("RADARR_KEY", "radarr", True, str)
+)
 
-# Sonarr
-SONARR_URL = get_config_value('SONARR_URL', 'sonarr', False, str)
-SONARR_KEY = None if SONARR_URL == None else \
-    get_config_value('SONARR_KEY', 'sonarr', True, str)
+# Sonarr
+SONARR_URL = get_config_value("SONARR_URL", "sonarr", False, str)
+SONARR_KEY = (
+    None if SONARR_URL == None else get_config_value("SONARR_KEY", "sonarr", True, str)
+)
 
-# Lidarr
-LIDARR_URL = get_config_value('LIDARR_URL', 'lidarr', False, str)
-LIDARR_KEY = None if LIDARR_URL == None else \
-    get_config_value('LIDARR_KEY', 'lidarr', True, str)
+# Lidarr
+LIDARR_URL = get_config_value("LIDARR_URL", "lidarr", False, str)
+LIDARR_KEY = (
+    None if LIDARR_URL == None else get_config_value("LIDARR_KEY", "lidarr", True, str)
+)
 
-# Readarr
-READARR_URL = get_config_value('READARR_URL', 'readarr', False, str)
-READARR_KEY = None if READARR_URL == None else \
-    get_config_value('READARR_KEY', 'readarr', True, str)
+# Readarr
+READARR_URL = get_config_value("READARR_URL", "readarr", False, str)
+READARR_KEY = (
+    None
+    if READARR_URL == None
+    else get_config_value("READARR_KEY", "readarr", True, str)
+)
 
-# Whisparr
-WHISPARR_URL = get_config_value('WHISPARR_URL', 'whisparr', False, str)
-WHISPARR_KEY = None if WHISPARR_URL == None else \
-    get_config_value('WHISPARR_KEY', 'whisparr', True, str)
+# Whisparr
+WHISPARR_URL = get_config_value("WHISPARR_URL", "whisparr", False, str)
+WHISPARR_KEY = (
+    None
+    if WHISPARR_URL == None
+    else get_config_value("WHISPARR_KEY", "whisparr", True, str)
+)
 
-# qBittorrent
-QBITTORRENT_URL = get_config_value('QBITTORRENT_URL', 'qbittorrent', False, str, '')
-QBITTORRENT_USERNAME = get_config_value('QBITTORRENT_USERNAME', 'qbittorrent', False, str, '')
-QBITTORRENT_PASSWORD = get_config_value('QBITTORRENT_PASSWORD', 'qbittorrent', False, str, '')
+# qBittorrent
+QBITTORRENT_URL = get_config_value("QBITTORRENT_URL", "qbittorrent", False, str, "")
+QBITTORRENT_USERNAME = get_config_value(
+    "QBITTORRENT_USERNAME", "qbittorrent", False, str, ""
+)
+QBITTORRENT_PASSWORD = get_config_value(
+    "QBITTORRENT_PASSWORD", "qbittorrent", False, str, ""
+)
 
 ########################################################################################################################
 ########### Validate settings
-if not (IS_IN_PYTEST or RADARR_URL or SONARR_URL or LIDARR_URL or READARR_URL or WHISPARR_URL):
-    print(f'[ ERROR ]: No Radarr/Sonarr/Lidarr/Readarr/Whisparr URLs specified (nothing to monitor)')
+if not (
+    IS_IN_PYTEST
+    or RADARR_URL
+    or SONARR_URL
+    or LIDARR_URL
+    or READARR_URL
+    or WHISPARR_URL
+):
+    print(
+        f"[ ERROR ]: No Radarr/Sonarr/Lidarr/Readarr/Whisparr URLs specified (nothing to monitor)"
+    )
     exit()
 
 ########### Enrich setting variables
-if RADARR_URL: RADARR_URL = RADARR_URL.rstrip('/') + '/api/v3'
-if SONARR_URL: SONARR_URL = SONARR_URL.rstrip('/') + '/api/v3'
-if LIDARR_URL: LIDARR_URL = LIDARR_URL.rstrip('/') + '/api/v1'
-if READARR_URL: READARR_URL = READARR_URL.rstrip('/') + '/api/v1'
-if WHISPARR_URL: WHISPARR_URL = WHISPARR_URL.rstrip('/') + '/api/v3'
-if QBITTORRENT_URL: QBITTORRENT_URL = QBITTORRENT_URL.rstrip('/') + '/api/v2'
+if RADARR_URL:
+    RADARR_URL = RADARR_URL.rstrip("/") + "/api/v3"
+if SONARR_URL:
+    SONARR_URL = SONARR_URL.rstrip("/") + "/api/v3"
+if LIDARR_URL:
+    LIDARR_URL = LIDARR_URL.rstrip("/") + "/api/v1"
+if READARR_URL:
+    READARR_URL = READARR_URL.rstrip("/") + "/api/v1"
+if WHISPARR_URL:
+    WHISPARR_URL = WHISPARR_URL.rstrip("/") + "/api/v3"
+if QBITTORRENT_URL:
+    QBITTORRENT_URL = QBITTORRENT_URL.rstrip("/") + "/api/v2"
 
-RADARR_MIN_VERSION = '5.3.6.8608'
-SONARR_MIN_VERSION = '4.0.1.1131'
-LIDARR_MIN_VERSION = None
-READARR_MIN_VERSION = None
-WHISPARR_MIN_VERSION = '2.0.0.548'
-QBITTORRENT_MIN_VERSION = '4.3.0'
+RADARR_MIN_VERSION = "5.3.6.8608"
+SONARR_MIN_VERSION = "4.0.1.1131"
+LIDARR_MIN_VERSION = None
+READARR_MIN_VERSION = None
+WHISPARR_MIN_VERSION = "2.0.0.548"
+QBITTORRENT_MIN_VERSION = "4.3.0"
 
-SUPPORTED_ARR_APPS = ['RADARR', 'SONARR', 'LIDARR', 'READARR', 'WHISPARR']
+SUPPORTED_ARR_APPS = ["RADARR", "SONARR", "LIDARR", "READARR", "WHISPARR"]
 
 ########### Add Variables to Dictionary
 settingsDict = {}
 for var_name in dir():
     if var_name.isupper():
         settingsDict[var_name] = locals()[var_name]
-
diff --git a/config/env_vars.py b/config/env_vars.py
index 5bf0d93..9ade11e 100644
--- a/config/env_vars.py
+++ b/config/env_vars.py
@@ -1,5 +1,6 @@
 import os
-IS_IN_DOCKER = os.environ.get('IS_IN_DOCKER')
-IMAGE_TAG = os.environ.get('IMAGE_TAG', 'Local')
-SHORT_COMMIT_ID = os.environ.get('SHORT_COMMIT_ID', 'n/a')
-IS_IN_PYTEST = os.environ.get('IS_IN_PYTEST')
\ No newline at end of file
+
+IS_IN_DOCKER = os.environ.get("IS_IN_DOCKER")
+IMAGE_TAG = os.environ.get("IMAGE_TAG", "Local")
+SHORT_COMMIT_ID = os.environ.get("SHORT_COMMIT_ID", "n/a")
+IS_IN_PYTEST = os.environ.get("IS_IN_PYTEST")
diff --git a/config/parser.py b/config/parser.py
index a17626a..940266d 100644
--- a/config/parser.py
+++ b/config/parser.py
@@ -6,16 +6,18 @@ from config.env_vars import *
 
 # Configures how to parse configuration file
-config_file_name = 'config.conf'
-config_file_full_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), config_file_name)
+config_file_name = "config.conf"
+config_file_full_path = os.path.join(
+    os.path.abspath(os.path.dirname(__file__)), config_file_name
+)
 sys.tracebacklimit = 0  # dont show stack traces in prod mode
 config = configparser.ConfigParser()
-config.optionxform = str # maintain capitalization of config keys
+config.optionxform = str  # maintain capitalization of config keys
 config.read(config_file_full_path)
 
 def config_section_map(section):
-    'Load the config file into a dictionary'
+    "Load the config file into a dictionary"
     dict1 = {}
     options = config.options(section)
     for option in options:
@@ -26,19 +28,21 @@ def config_section_map(section):
             dict1[option] = None
     return dict1
 
+
 def cast(value, type_):
     return type_(value)
 
-def get_config_value(key, config_section, is_mandatory, datatype, default_value = None):
-    'Return for each key the corresponding value from the Docker Environment or the Config File'
+
+def get_config_value(key, config_section, is_mandatory, datatype, default_value=None):
+    "Return for each key the corresponding value from the Docker Environment or the Config File"
     if IS_IN_DOCKER:
         config_value = os.environ.get(key)
-        if config_value is not None: 
+        if config_value is not None:
             # print(f'The value retrieved for [{config_section}]: {key} is "{config_value}"')
             config_value = config_value
             # return config_value
         elif is_mandatory:
-            print(f'[ ERROR ]: Variable not specified in Docker environment: {key}' )
+            print(f"[ ERROR ]: Variable not specified in Docker environment: {key}")
             sys.exit(0)
         else:
             # return default_value
@@ -52,13 +56,15 @@ def get_config_value(key, config_section, is_mandatory, datatype, default_value
             config_value = None
         if config_value is not None:
             # print(f'The value retrieved for [{config_section}]: {key} is "{config_value}"')
-            config_value = config_value 
+            config_value = config_value
             # return config_value
         elif is_mandatory:
-            print(f'[ ERROR ]: Mandatory variable not specified in config file, section [{config_section}]: {key} (data type: {datatype.__name__})')
+            print(
+                f"[ ERROR ]: Mandatory variable not specified in config file, section [{config_section}]: {key} (data type: {datatype.__name__})"
+            )
             sys.exit(0)
         else:
-            # return default_value 
+            # return default_value
             # print(f'The default value used for [{config_section}]: {key} is "{default_value}" (data type: {type(default_value).__name__})')
             config_value = default_value
@@ -67,14 +73,16 @@ def get_config_value(key, config_section, is_mandatory, datatype, default_value
         if datatype == bool:
             config_value = eval(str(config_value).capitalize())
         elif datatype == list:
-            if type(config_value) != list: # Default value is already a list, doesn't need to be pushed through json.loads
+            if (
+                type(config_value) != list
+            ):  # Default value is already a list, doesn't need to be pushed through json.loads
                 config_value = json.loads(config_value)
-        elif config_value is not None: 
+        elif config_value is not None:
            config_value = cast(config_value, datatype)
-    except Exception as e: 
-        print(f'[ ERROR ]: The value retrieved for [{config_section}]: {key} is "{config_value}" and cannot be converted to data type {datatype}')
+    except Exception as e:
+        print(
+            f'[ ERROR ]: The value retrieved for [{config_section}]: {key} is "{config_value}" and cannot be converted to data type {datatype}'
+        )
         print(e)
         sys.exit(0)
-    return config_value
-
-
+    return config_value
diff --git a/docker/requirements.txt b/docker/requirements.txt
index a6714bc..b332e89 100644
--- a/docker/requirements.txt
+++ b/docker/requirements.txt
@@ -1,6 +1,9 @@
+# python3 -m pip install -r docker/requirements.txt
 requests==2.32.3
 asyncio==3.4.3
 python-dateutil==2.8.2
 verboselogs==1.7
 pytest==8.0.1
 pytest-asyncio==0.23.5
+pre-commit==3.8.0
+black==24.8.0
diff --git a/main.py b/main.py
index 053fccf..c89255e 100644
--- a/main.py
+++ b/main.py
@@ -1,47 +1,51 @@
 # Import Libraries
-import asyncio
+import asyncio
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 import json
+
 # Import Functions
 from config.definitions import settingsDict
 from src.utils.loadScripts import *
 from src.decluttarr import queueCleaner
-from src.utils.rest import rest_get, rest_post
-from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
+from src.utils.rest import rest_get, rest_post
+from src.utils.trackers import Defective_Tracker, Download_Sizes_Tracker
 
 # Hide SSL Verification Warnings
-if settingsDict['SSL_VERIFICATION']==False:
+if settingsDict["SSL_VERIFICATION"] == False:
     import warnings
+
     warnings.filterwarnings("ignore", message="Unverified HTTPS request")
 
 # Set up logging
 setLoggingFormat(settingsDict)
 
+
 # Main function
 async def main(settingsDict):
-# Adds to settings Dict the instances that are actually configures
-    settingsDict['INSTANCES'] = []
-    for arrApplication in settingsDict['SUPPORTED_ARR_APPS']:
-        if settingsDict[arrApplication + '_URL']:
-            settingsDict['INSTANCES'].append(arrApplication)
+    # Adds to settings Dict the instances that are actually configured
+    settingsDict["INSTANCES"] = []
+    for arrApplication in settingsDict["SUPPORTED_ARR_APPS"]:
+        if settingsDict[arrApplication + "_URL"]:
+            settingsDict["INSTANCES"].append(arrApplication)
 
     # Pre-populates the dictionaries (in classes) that track the items that were already caught as having problems or removed
-    defectiveTrackingInstances = {}
-    for instance in settingsDict['INSTANCES']:
+    defectiveTrackingInstances = {}
+    for instance in settingsDict["INSTANCES"]:
         defectiveTrackingInstances[instance] = {}
     defective_tracker = Defective_Tracker(defectiveTrackingInstances)
     download_sizes_tracker = Download_Sizes_Tracker({})
 
     # Get name of arr-instances
-    for instance in settingsDict['INSTANCES']:
+    for instance in settingsDict["INSTANCES"]:
         settingsDict = await getArrInstanceName(settingsDict, instance)
 
     # Check outdated
     upgradeChecks(settingsDict)
 
     # Welcome Message
-    showWelcome()
+    showWelcome()
 
     # Current Settings
     showSettings(settingsDict)
@@ -57,21 +61,29 @@ async def main(settingsDict):
 
     # Start Cleaning
     while True:
-        logger.verbose('-' * 50)
-        # Cache protected (via Tag) and private torrents
-        protectedDownloadIDs, privateDowloadIDs = await getProtectedAndPrivateFromQbit(settingsDict)
+        logger.verbose("-" * 50)
+        # Cache protected (via Tag) and private torrents
+        protectedDownloadIDs, privateDowloadIDs = await getProtectedAndPrivateFromQbit(
+            settingsDict
+        )
 
         # Run script for each instance
-        for instance in settingsDict['INSTANCES']:
-            await queueCleaner(settingsDict, instance, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs)
-        logger.verbose('')
-        logger.verbose('Queue clean-up complete!')
+        for instance in settingsDict["INSTANCES"]:
+            await queueCleaner(
+                settingsDict,
+                instance,
+                defective_tracker,
+                download_sizes_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+            )
+        logger.verbose("")
+        logger.verbose("Queue clean-up complete!")
 
         # Wait for the next run
-        await asyncio.sleep(settingsDict['REMOVE_TIMER']*60)
+        await asyncio.sleep(settingsDict["REMOVE_TIMER"] * 60)
     return
 
-if __name__ == '__main__':
-    asyncio.run(main(settingsDict))
-
+
+if __name__ == "__main__":
+    asyncio.run(main(settingsDict))
diff --git a/src/decluttarr.py b/src/decluttarr.py
index 2d24753..3490dd6 100644
--- a/src/decluttarr.py
+++ b/src/decluttarr.py
@@ -1,7 +1,8 @@
 # Cleans the download queue
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
-from src.utils.shared import (errorDetails, get_queue)
+from src.utils.shared import errorDetails, get_queue
 from src.jobs.remove_failed import remove_failed
 from src.jobs.remove_failed_imports import remove_failed_imports
 from src.jobs.remove_metadata_missing import remove_metadata_missing
@@ -13,82 +14,173 @@ from src.jobs.cancel_unavailable_files import cancel_unavailable_files
 from src.utils.trackers import Deleted_Downloads
 
-async def queueCleaner(settingsDict, arr_type, defective_tracker, download_sizes_tracker, protectedDownloadIDs, privateDowloadIDs):
+
+async def queueCleaner(
+    settingsDict,
+    arr_type,
+    defective_tracker,
+    download_sizes_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+):
     # Read out correct instance depending on radarr/sonarr flag
     run_dict = {}
-    if arr_type == 'RADARR':
-        BASE_URL = settingsDict['RADARR_URL']
-        API_KEY = settingsDict['RADARR_KEY']
-        NAME = settingsDict['RADARR_NAME']
-        full_queue_param = 'includeUnknownMovieItems'
-    elif arr_type == 'SONARR':
-        BASE_URL = settingsDict['SONARR_URL']
-        API_KEY = settingsDict['SONARR_KEY']
-        NAME = settingsDict['SONARR_NAME']
-        full_queue_param = 'includeUnknownSeriesItems'
-    elif arr_type == 'LIDARR':
-        BASE_URL = settingsDict['LIDARR_URL']
-        API_KEY = settingsDict['LIDARR_KEY']
-        NAME = settingsDict['LIDARR_NAME']
-        full_queue_param = 'includeUnknownArtistItems'
-    elif arr_type == 'READARR':
-        BASE_URL = settingsDict['READARR_URL']
-        API_KEY = settingsDict['READARR_KEY']
-        NAME = settingsDict['READARR_NAME']
-        full_queue_param = 'includeUnknownAuthorItems'
-    elif arr_type == 'WHISPARR':
-        BASE_URL = settingsDict['WHISPARR_URL']
-        API_KEY = settingsDict['WHISPARR_KEY']
-        NAME = settingsDict['WHISPARR_NAME']
-        full_queue_param = 'includeUnknownSeriesItems'
+    if arr_type == "RADARR":
+        BASE_URL = settingsDict["RADARR_URL"]
+        API_KEY = settingsDict["RADARR_KEY"]
+        NAME = settingsDict["RADARR_NAME"]
+        full_queue_param = "includeUnknownMovieItems"
+    elif arr_type == "SONARR":
+        BASE_URL = settingsDict["SONARR_URL"]
+        API_KEY = settingsDict["SONARR_KEY"]
+        NAME = settingsDict["SONARR_NAME"]
+        full_queue_param = "includeUnknownSeriesItems"
+    elif arr_type == "LIDARR":
+        BASE_URL = settingsDict["LIDARR_URL"]
+        API_KEY = settingsDict["LIDARR_KEY"]
+        NAME = settingsDict["LIDARR_NAME"]
+        full_queue_param = "includeUnknownArtistItems"
+    elif arr_type == "READARR":
+        BASE_URL = settingsDict["READARR_URL"]
+        API_KEY = settingsDict["READARR_KEY"]
+        NAME = settingsDict["READARR_NAME"]
+        full_queue_param = "includeUnknownAuthorItems"
+    elif arr_type == "WHISPARR":
+        BASE_URL = settingsDict["WHISPARR_URL"]
+        API_KEY = settingsDict["WHISPARR_KEY"]
+        NAME = settingsDict["WHISPARR_NAME"]
+        full_queue_param = "includeUnknownSeriesItems"
     else:
-        logger.error('Unknown arr_type specified, exiting: %s', str(arr_type))
+        logger.error("Unknown arr_type specified, exiting: %s", str(arr_type))
         sys.exit()
-
+
     # Cleans up the downloads queue
-    logger.verbose('Cleaning queue on %s:', NAME)
+    logger.verbose("Cleaning queue on %s:", NAME)
     # Refresh queue:
-
-    full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})
-    if not full_queue:
-        logger.verbose('>>> Queue is empty.')
+
+    full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
+    if not full_queue:
+        logger.verbose(">>> Queue is empty.")
         return
     else:
-        logger.debug('queueCleaner/full_queue at start:')
-        logger.debug(full_queue)
-
+        logger.debug("queueCleaner/full_queue at start:")
+        logger.debug(full_queue)
+
     deleted_downloads = Deleted_Downloads([])
     items_detected = 0
-    try:
-        if settingsDict['CANCEL_UNAVAILABLE_FILES']:
-            await cancel_unavailable_files( settingsDict, BASE_URL, API_KEY, NAME, protectedDownloadIDs, privateDowloadIDs, arr_type)
+    try:
+        if settingsDict["CANCEL_UNAVAILABLE_FILES"]:
+            await cancel_unavailable_files(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+                arr_type,
+            )
 
-        if settingsDict['REMOVE_FAILED']:
-            items_detected += await remove_failed( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict["REMOVE_FAILED"]:
+            items_detected += await remove_failed(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                deleted_downloads,
+                defective_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+            )
 
-        if settingsDict['REMOVE_FAILED_IMPORTS']:
-            items_detected += await remove_failed_imports( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict["REMOVE_FAILED_IMPORTS"]:
+            items_detected += await remove_failed_imports(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                deleted_downloads,
+                defective_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+            )
 
-        if settingsDict['REMOVE_METADATA_MISSING']:
-            items_detected += await remove_metadata_missing( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict["REMOVE_METADATA_MISSING"]:
+            items_detected += await remove_metadata_missing(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                deleted_downloads,
+                defective_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+            )
 
-        if settingsDict['REMOVE_MISSING_FILES']:
-            items_detected += await remove_missing_files( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict["REMOVE_MISSING_FILES"]:
+            items_detected += await remove_missing_files(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                deleted_downloads,
+                defective_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+            )
 
-        if settingsDict['REMOVE_ORPHANS']:
-            items_detected += await remove_orphans( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param)
+        if settingsDict["REMOVE_ORPHANS"]:
+            items_detected += await remove_orphans(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                deleted_downloads,
+                defective_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+                full_queue_param,
+            )
 
-        if settingsDict['REMOVE_SLOW']:
-            items_detected += await remove_slow( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker)
+        if settingsDict["REMOVE_SLOW"]:
+            items_detected += await remove_slow(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                deleted_downloads,
+                defective_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+                download_sizes_tracker,
+            )
 
-        if settingsDict['REMOVE_STALLED']:
-            items_detected += await remove_stalled( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs)
+        if settingsDict["REMOVE_STALLED"]:
+            items_detected += await remove_stalled(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                deleted_downloads,
+                defective_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+            )
 
-        if settingsDict['REMOVE_UNMONITORED']:
-            items_detected += await remove_unmonitored( settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type)
+        if settingsDict["REMOVE_UNMONITORED"]:
+            items_detected += await remove_unmonitored(
+                settingsDict,
+                BASE_URL,
+                API_KEY,
+                NAME,
+                deleted_downloads,
+                defective_tracker,
+                protectedDownloadIDs,
+                privateDowloadIDs,
+                arr_type,
+            )
 
         if items_detected == 0:
-            logger.verbose('>>> Queue is clean.')
+            logger.verbose(">>> Queue is clean.")
     except Exception as error:
         errorDetails(NAME, error)
     return
diff --git a/src/jobs/cancel_unavailable_files.py b/src/jobs/cancel_unavailable_files.py
index d222ce1..0fe9001 100644
--- a/src/jobs/cancel_unavailable_files.py
+++ b/src/jobs/cancel_unavailable_files.py
@@ -1,50 +1,104 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download, qBitOffline)
+from src.utils.shared import (
+    errorDetails,
+    formattedQueueInfo,
+    get_queue,
+    privateTrackerCheck,
+    protectedDownloadCheck,
+    execute_checks,
+    permittedAttemptsCheck,
+    remove_download,
+    qBitOffline,
+)
 import sys, os, traceback
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 from src.utils.rest import rest_get, rest_post
 
-async def cancel_unavailable_files(settingsDict, BASE_URL, API_KEY, NAME, protectedDownloadIDs, privateDowloadIDs, arr_type):
+
+async def cancel_unavailable_files(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+    arr_type,
+):
     # Checks if downloads have less than 100% availability and marks the underyling files that cause it as 'do not download'
     # Only works in qbit
     try:
-        failType = '>100% availability'
+        failType = ">100% availability"
         queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('CANCEL_UNAVAILABLE_FILES/queue IN: %s', formattedQueueInfo(queue))
-        if not queue: return 0
-        if await qBitOffline(settingsDict, failType, NAME): return
+        logger.debug("CANCEL_UNAVAILABLE_FILES/queue IN: %s", formattedQueueInfo(queue))
+        if not queue:
+            return 0
+        if await qBitOffline(settingsDict, failType, NAME):
+            return
 
         # Find items affected
-        qbitHashes = list(set(queueItem['downloadId'].upper() for queueItem in queue['records']))
+        qbitHashes = list(
+            set(queueItem["downloadId"].upper() for queueItem in queue["records"])
+        )
 
-        # Remove private and protected trackers
-        if settingsDict['IGNORE_PRIVATE_TRACKERS']:
-            for qbitHash in reversed(qbitHashes):
-                if qbitHash in privateDowloadIDs:
-                    qbitHashes.remove(qbitHash)
+        # Remove private and protected trackers
+        if settingsDict["IGNORE_PRIVATE_TRACKERS"]:
+            for qbitHash in reversed(qbitHashes):
+                if qbitHash in privateDowloadIDs:
+                    qbitHashes.remove(qbitHash)
 
-        if settingsDict['IGNORE_PRIVATE_TRACKERS']:
-            for qbitHash in reversed(qbitHashes):
-                if qbitHash in privateDowloadIDs:
-                    qbitHashes.remove(qbitHash)
+        if settingsDict["IGNORE_PRIVATE_TRACKERS"]:
+            for qbitHash in reversed(qbitHashes):
+                if qbitHash in privateDowloadIDs:
+                    qbitHashes.remove(qbitHash)
 
-        qbitItems = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={'hashes': ('|').join(qbitHashes)}, cookies=settingsDict['QBIT_COOKIE'])
+        qbitItems = await rest_get(
+            settingsDict["QBITTORRENT_URL"] + "/torrents/info",
+            params={"hashes": ("|").join(qbitHashes)},
+            cookies=settingsDict["QBIT_COOKIE"],
+        )
         for qbitItem in qbitItems:
-            if 'state' in qbitItem and 'availability' in qbitItem:
-                if qbitItem['state'] == 'downloading' and qbitItem['availability'] < 1:
-                    logger.info('>>> Detected %s: %s', failType, qbitItem['name'])
-                    logger.verbose('>>>>> Marking following files to "not download":')
-                    qbitItemFiles = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/files',params={'hash': qbitItem['hash']}, cookies=settingsDict['QBIT_COOKIE'])
-                    for qbitItemFile in qbitItemFiles:
-                        if all(key in qbitItemFile for key in ['availability', 'progress', 'priority', 'index', 'name']):
-                            if qbitItemFile['availability'] < 1 and qbitItemFile['progress'] < 1 and qbitItemFile['priority'] != 0:
-                                logger.verbose('>>>>> %s', qbitItemFile['name'].split('/')[-1])
-                                if not settingsDict['TEST_RUN']:
-                                    await rest_post(url=settingsDict['QBITTORRENT_URL']+'/torrents/filePrio', data={'hash': qbitItem['hash'].lower(), 'id': qbitItemFile['index'], 'priority': 0}, cookies=settingsDict['QBIT_COOKIE'])
+            if "state" in qbitItem and "availability" in qbitItem:
+                if qbitItem["state"] == "downloading" and qbitItem["availability"] < 1:
+                    logger.info(">>> Detected %s: %s", failType, qbitItem["name"])
+                    logger.verbose('>>>>> Marking following files to "not download":')
+                    qbitItemFiles = await rest_get(
+                        settingsDict["QBITTORRENT_URL"] + "/torrents/files",
+                        params={"hash": qbitItem["hash"]},
+                        cookies=settingsDict["QBIT_COOKIE"],
+                    )
+                    for qbitItemFile in qbitItemFiles:
+                        if all(
+                            key in qbitItemFile
+                            for key in [
+                                "availability",
+                                "progress",
+                                "priority",
+                                "index",
+                                "name",
+                            ]
+                        ):
+                            if (
+                                qbitItemFile["availability"] < 1
+                                and qbitItemFile["progress"] < 1
+                                and qbitItemFile["priority"] != 0
+                            ):
+                                logger.verbose(
+                                    ">>>>> %s", qbitItemFile["name"].split("/")[-1]
+                                )
+                                if not settingsDict["TEST_RUN"]:
+                                    await rest_post(
+                                        url=settingsDict["QBITTORRENT_URL"]
+                                        + "/torrents/filePrio",
+                                        data={
+                                            "hash": qbitItem["hash"].lower(),
+                                            "id": qbitItemFile["index"],
+                                            "priority": 0,
+                                        },
+                                        cookies=settingsDict["QBIT_COOKIE"],
+                                    )
     except Exception as error:
         errorDetails(NAME, error)
-    return
-
+    return
diff --git a/src/jobs/remove_failed.py b/src/jobs/remove_failed.py
index 8a53d21..4595a21 100644
--- a/src/jobs/remove_failed.py
+++ b/src/jobs/remove_failed.py
@@ -1,29 +1,62 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download, qBitOffline)
+from src.utils.shared import (
+    errorDetails,
+    formattedQueueInfo,
+    get_queue,
+    privateTrackerCheck,
+    protectedDownloadCheck,
+    execute_checks,
+    permittedAttemptsCheck,
+    remove_download,
+    qBitOffline,
+)
 import sys, os, traceback
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 
-async def remove_failed(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+
+async def remove_failed(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    deleted_downloads,
+    defective_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+):
     # Detects failed and triggers delete. Does not add to blocklist
     try:
-        failType = 'failed'
+        failType = "failed"
         queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('remove_failed/queue IN: %s', formattedQueueInfo(queue))
-        if not queue: return 0
-        if await qBitOffline(settingsDict, failType, NAME): return 0
+        logger.debug("remove_failed/queue IN: %s", formattedQueueInfo(queue))
+        if not queue:
+            return 0
+        if await qBitOffline(settingsDict, failType, NAME):
+            return 0
         # Find items affected
         affectedItems = []
-        for queueItem in queue['records']:
-            if 'errorMessage' in queueItem and 'status' in queueItem:
-                if queueItem['status'] == 'failed':
+        for queueItem in queue["records"]:
+            if "errorMessage" in queueItem and "status" in queueItem:
+                if queueItem["status"] == "failed":
                     affectedItems.append(queueItem)
-        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
-                                            addToBlocklist = False,
-                                            doPrivateTrackerCheck = True,
-                                            doProtectedDownloadCheck = True,
-                                            doPermittedAttemptsCheck = False)
+        affectedItems = await execute_checks(
+            settingsDict,
+            affectedItems,
+            failType,
+            BASE_URL,
+            API_KEY,
+            NAME,
+            deleted_downloads,
+            defective_tracker,
+            privateDowloadIDs,
+            protectedDownloadIDs,
+            addToBlocklist=False,
+            doPrivateTrackerCheck=True,
+            doProtectedDownloadCheck=True,
+            doPermittedAttemptsCheck=False,
+        )
         return len(affectedItems)
     except Exception as error:
         errorDetails(NAME, error)
         return 0
-
diff --git a/src/jobs/remove_failed_imports.py b/src/jobs/remove_failed_imports.py
index bf84582..c2e89bf 100644
--- a/src/jobs/remove_failed_imports.py
+++ b/src/jobs/remove_failed_imports.py
@@ -1,73 +1,102 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, execute_checks)
+from src.utils.shared import errorDetails, formattedQueueInfo, get_queue, execute_checks
 import sys, os, traceback
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 
-async def remove_failed_imports(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
-    # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
+
+async def remove_failed_imports(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    deleted_downloads,
+    defective_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+):
+    # Detects downloads with failed imports and triggers delete. Adds to blocklist
    try:
-        failType = 'failed import'
-        queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('remove_failed_imports/queue IN: %s', formattedQueueInfo(queue))
-        if not queue: return 0
-
+        failType = "failed import"
+        queue = await get_queue(BASE_URL, API_KEY)
+        logger.debug("remove_failed_imports/queue IN: %s", formattedQueueInfo(queue))
+        if not queue:
+            return 0
+
         # Find items affected
         affectedItems = []
 
         # Check if any patterns have been specified
-        patterns = settingsDict.get('FAILED_IMPORT_MESSAGE_PATTERNS', [])
+        patterns = settingsDict.get("FAILED_IMPORT_MESSAGE_PATTERNS", [])
         if not patterns:  # If patterns is empty or not present
             patterns = None
 
-        for queueItem in queue['records']:
-            if 'status' in queueItem \
-            and 'trackedDownloadStatus' in queueItem \
-            and 'trackedDownloadState' in queueItem \
-            and 'statusMessages' in queueItem:
+        for queueItem in queue["records"]:
+            if (
+                "status" in queueItem
+                and "trackedDownloadStatus" in queueItem
+                and "trackedDownloadState" in queueItem
+                and "statusMessages" in queueItem
+            ):
                 removal_messages = []
-                if queueItem['status'] == 'completed' \
-                and queueItem['trackedDownloadStatus'] == 'warning' \
-                and queueItem['trackedDownloadState'] in {'importPending', 'importFailed', 'importBlocked'}:
+                if (
+                    queueItem["status"] == "completed"
+                    and queueItem["trackedDownloadStatus"] == "warning"
+                    and queueItem["trackedDownloadState"]
+                    in {"importPending", "importFailed", "importBlocked"}
+                ):
                     # Find messages that find specified pattern and put them into a "removal_message" that will be displayed in the logger when removing the affected item
-                    if not patterns:
+                    if not patterns:
                         # No patterns defined - including all status messages in the removal_messages
-                        removal_messages.append ('>>>>> Status Messages (All):')
-                        for statusMessage in queueItem['statusMessages']:
-                            removal_messages.extend(f">>>>> - {message}" for message in statusMessage.get('messages', []))
+                        removal_messages.append(">>>>> Status Messages (All):")
+                        for statusMessage in queueItem["statusMessages"]:
+                            removal_messages.extend(
+                                f">>>>> - {message}"
+                                for message in statusMessage.get("messages", [])
+                            )
                     else:
                         # Specific patterns defined - only removing if any of these are matched
-                        for statusMessage in queueItem['statusMessages']:
-                            messages = statusMessage.get('messages', [])
+                        for statusMessage in queueItem["statusMessages"]:
+                            messages = statusMessage.get("messages", [])
                             for message in messages:
                                 if any(pattern in message for pattern in patterns):
                                     removal_messages.append(f">>>>> - {message}")
                         if removal_messages:
-                            removal_messages.insert (0, '>>>>> Status Messages (matching specified patterns):')
-
+                            removal_messages.insert(
+                                0,
+                                ">>>>> Status Messages (matching specified patterns):",
+                            )
+
                 if removal_messages:
-                    removal_messages = list(dict.fromkeys(removal_messages)) # deduplication
-                    removal_messages.insert(0,'>>>>> Tracked Download State: ' + queueItem['trackedDownloadState'])
-                    queueItem['removal_messages'] = removal_messages
+                    removal_messages = list(
+                        dict.fromkeys(removal_messages)
+                    )  # deduplication
+                    removal_messages.insert(
+                        0,
+                        ">>>>> Tracked Download State: "
+                        + queueItem["trackedDownloadState"],
+                    )
+                    queueItem["removal_messages"] = removal_messages
                     affectedItems.append(queueItem)
-
+
         check_kwargs = {
-            'settingsDict': settingsDict,
-            'affectedItems': affectedItems,
-            'failType': failType,
-            'BASE_URL': BASE_URL,
-            'API_KEY': API_KEY,
-            'NAME': NAME,
-            'deleted_downloads': deleted_downloads,
-            'defective_tracker': defective_tracker,
-            'privateDowloadIDs': privateDowloadIDs,
-            'protectedDownloadIDs': protectedDownloadIDs,
-            'addToBlocklist': True,
-            'doPrivateTrackerCheck': False,
-            'doProtectedDownloadCheck': True,
-            'doPermittedAttemptsCheck': False,
-            'extraParameters': {'keepTorrentForPrivateTrackers': True}
+            "settingsDict": settingsDict,
+            "affectedItems": affectedItems,
+            "failType": failType,
+            "BASE_URL": BASE_URL,
+            "API_KEY": API_KEY,
+            "NAME": NAME,
+            "deleted_downloads": deleted_downloads,
+            "defective_tracker": defective_tracker,
+            "privateDowloadIDs": privateDowloadIDs,
+            "protectedDownloadIDs": protectedDownloadIDs,
+            "addToBlocklist": True,
+            "doPrivateTrackerCheck": False,
+            "doProtectedDownloadCheck": True,
+            "doPermittedAttemptsCheck": False,
+            "extraParameters": {"keepTorrentForPrivateTrackers": True},
         }
 
         affectedItems = await execute_checks(**check_kwargs)
diff --git a/src/jobs/remove_metadata_missing.py b/src/jobs/remove_metadata_missing.py
index 4c97b35..6984bc8 100644
--- a/src/jobs/remove_metadata_missing.py
+++ b/src/jobs/remove_metadata_missing.py
@@ -1,27 +1,65 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download, qBitOffline)
+from src.utils.shared import (
+    errorDetails,
+    formattedQueueInfo,
+    get_queue,
+    privateTrackerCheck,
+    protectedDownloadCheck,
+    execute_checks,
+    permittedAttemptsCheck,
+    remove_download,
+    qBitOffline,
+)
 import sys, os, traceback
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 
-async def remove_metadata_missing(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
-    # Detects downloads stuck downloading meta data and triggers repeat check and subsequent delete. Adds to blocklist
+
+async def remove_metadata_missing(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    deleted_downloads,
+    defective_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+):
+    # Detects downloads stuck downloading metadata and triggers repeat check and subsequent delete. Adds to blocklist
     try:
-        failType = 'missing metadata'
+        failType = "missing metadata"
         queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('remove_metadata_missing/queue IN: %s', formattedQueueInfo(queue))
-        if not queue: return 0
-        if await qBitOffline(settingsDict, failType, NAME): return 0
+        logger.debug("remove_metadata_missing/queue IN: %s", formattedQueueInfo(queue))
+        if not queue:
+            return 0
+        if await qBitOffline(settingsDict, failType, NAME):
+            return 0
         # Find items affected
-        affectedItems = []
-        for queueItem in queue['records']:
-            if 'errorMessage' in queueItem and 'status' in queueItem:
-                if queueItem['status'] == 'queued' and queueItem['errorMessage'] == 'qBittorrent is downloading metadata':
+        affectedItems = []
+        for queueItem in queue["records"]:
+            if "errorMessage" in queueItem and "status" in queueItem:
+                if (
+                    queueItem["status"] == "queued"
+                    and queueItem["errorMessage"]
+                    == "qBittorrent is downloading metadata"
+                ):
                     affectedItems.append(queueItem)
-        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
-                                            addToBlocklist = True,
-                                            doPrivateTrackerCheck = True,
-                                            doProtectedDownloadCheck = True,
-                                            doPermittedAttemptsCheck = True)
+        affectedItems = await execute_checks(
+            settingsDict,
+            affectedItems,
+            failType,
+            BASE_URL,
+            API_KEY,
+            NAME,
+            deleted_downloads,
+            defective_tracker,
+            privateDowloadIDs,
+            protectedDownloadIDs,
+            addToBlocklist=True,
+            doPrivateTrackerCheck=True,
+            doProtectedDownloadCheck=True,
+            doPermittedAttemptsCheck=True,
+        )
         return len(affectedItems)
     except Exception as error:
         errorDetails(NAME, error)
diff --git a/src/jobs/remove_missing_files.py b/src/jobs/remove_missing_files.py
index 6f3a368..bc67d4c 100644
--- a/src/jobs/remove_missing_files.py
+++ b/src/jobs/remove_missing_files.py
@@ -1,38 +1,80 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download, qBitOffline)
+from src.utils.shared import (
+    errorDetails,
+    formattedQueueInfo,
+    get_queue,
+    privateTrackerCheck,
+    protectedDownloadCheck,
+    execute_checks,
+    permittedAttemptsCheck,
+    remove_download,
+    qBitOffline,
+)
 import sys, os, traceback
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 
-async def remove_missing_files(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
+
+async def remove_missing_files(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    deleted_downloads,
+    defective_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+):
     # Detects downloads broken because of missing files. Does not add to blocklist
     try:
-        failType = 'missing files'
+        failType = "missing files"
         queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('remove_missing_files/queue IN: %s', formattedQueueInfo(queue))
-        if not queue: return 0
-        if await qBitOffline(settingsDict, failType, NAME): return 0
+        logger.debug("remove_missing_files/queue IN: %s", formattedQueueInfo(queue))
+        if not queue:
+            return 0
+        if await qBitOffline(settingsDict, failType, NAME):
+            return 0
         # Find items affected
         affectedItems = []
-        for queueItem in queue['records']:
-            if 'status' in queueItem:
+        for queueItem in queue["records"]:
+            if "status" in queueItem:
                 # case to check for failed torrents
-                if (queueItem['status'] == 'warning' and 'errorMessage' in queueItem and
-                    (queueItem['errorMessage'] == 'DownloadClientQbittorrentTorrentStateMissingFiles' or
-                    queueItem['errorMessage'] == 'The download is missing files')):
+                if (
+                    queueItem["status"] == "warning"
+                    and "errorMessage" in queueItem
+                    and (
+                        queueItem["errorMessage"]
+                        == "DownloadClientQbittorrentTorrentStateMissingFiles"
+                        or queueItem["errorMessage"] == "The download is missing files"
+                    )
+                ):
                     affectedItems.append(queueItem)
                 # case to check for failed nzb's/bad files/empty directory
-                if queueItem['status'] == 'completed' and 'statusMessages' in queueItem:
-                    for statusMessage in queueItem['statusMessages']:
-                        if 'messages' in statusMessage:
-                            for message in statusMessage['messages']:
-                                if message.startswith("No files found are eligible for import in"):
+                if queueItem["status"] == "completed" and "statusMessages" in queueItem:
+                    for statusMessage in queueItem["statusMessages"]:
+                        if "messages" in statusMessage:
+                            for message in statusMessage["messages"]:
+                                if message.startswith(
+                                    "No files found are eligible for import in"
+                                ):
                                     affectedItems.append(queueItem)
-        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
-                                            addToBlocklist = False,
-                                            doPrivateTrackerCheck = True,
-                                            doProtectedDownloadCheck = True,
-                                            doPermittedAttemptsCheck = False)
+        affectedItems = await execute_checks(
+            settingsDict,
+            affectedItems,
+            failType,
+            BASE_URL,
+            API_KEY,
+            NAME,
+            deleted_downloads,
+            defective_tracker,
+            privateDowloadIDs,
+            protectedDownloadIDs,
+            addToBlocklist=False,
+            doPrivateTrackerCheck=True,
+            doProtectedDownloadCheck=True,
+            doPermittedAttemptsCheck=False,
+        )
         return len(affectedItems)
     except Exception as error:
         errorDetails(NAME, error)
-        return 0
+        return 0
diff --git a/src/jobs/remove_orphans.py b/src/jobs/remove_orphans.py
index 56d9d7d..60360ed 100644
--- a/src/jobs/remove_orphans.py
+++ b/src/jobs/remove_orphans.py
@@ -1,34 +1,72 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+from src.utils.shared import (
+    errorDetails,
+    formattedQueueInfo,
+    get_queue,
+    privateTrackerCheck,
+    protectedDownloadCheck,
+    execute_checks,
+    permittedAttemptsCheck,
+    remove_download,
+)
 import sys, os, traceback
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 
-async def remove_orphans(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, full_queue_param):
+
+async def remove_orphans(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    deleted_downloads,
+    defective_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+    full_queue_param,
+):
     # Removes downloads belonging to movies/tv shows that have been deleted in the meantime. Does not add to blocklist
     try:
-        failType = 'orphan'
-        full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})
-        queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('remove_orphans/full queue IN: %s', formattedQueueInfo(full_queue))
-        if not full_queue: return 0 # By now the queue may be empty
-        logger.debug('remove_orphans/queue IN: %s', formattedQueueInfo(queue))
+        failType = "orphan"
+        full_queue = await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
+        queue = await get_queue(BASE_URL, API_KEY)
+        logger.debug("remove_orphans/full queue IN: %s", formattedQueueInfo(full_queue))
+        if not full_queue:
+            return 0  # By now the queue may be empty
+        logger.debug("remove_orphans/queue IN: %s", formattedQueueInfo(queue))
 
         # Find items affected
         # 1. create a list of the "known" queue items
-        queueIDs = [queueItem['id'] for queueItem in queue['records']] if queue else []
+        queueIDs = [queueItem["id"] for queueItem in queue["records"]] if queue else []
         affectedItems = []
         # 2. compare all queue items against the known ones, and those that are not found are the "unknown" or "orphan" ones
-        for queueItem in full_queue['records']:
-            if queueItem['id'] not in queueIDs:
+        for queueItem in full_queue["records"]:
+            if queueItem["id"] not in queueIDs:
                 affectedItems.append(queueItem)
 
-        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
-                                            addToBlocklist = False,
-                                            doPrivateTrackerCheck = True,
-                                            doProtectedDownloadCheck = True,
-                                            doPermittedAttemptsCheck = False)
-        logger.debug('remove_orphans/full queue OUT: %s', formattedQueueInfo(await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})))
+        affectedItems = await execute_checks(
+            settingsDict,
+            affectedItems,
+            failType,
+            BASE_URL,
+            API_KEY,
+            NAME,
+            deleted_downloads,
+            defective_tracker,
+            privateDowloadIDs,
+            protectedDownloadIDs,
+            addToBlocklist=False,
+            doPrivateTrackerCheck=True,
+            doProtectedDownloadCheck=True,
+            doPermittedAttemptsCheck=False,
+        )
+        logger.debug(
+            "remove_orphans/full queue OUT: %s",
+            formattedQueueInfo(
+                await get_queue(BASE_URL, API_KEY, params={full_queue_param: True})
+            ),
+        )
         return len(affectedItems)
     except Exception as error:
         errorDetails(NAME, error)
-        return 0
\ No newline at end of file
+        return 0
diff --git a/src/jobs/remove_slow.py b/src/jobs/remove_slow.py
index ee24947..33b9dcd 100644
--- a/src/jobs/remove_slow.py
+++ b/src/jobs/remove_slow.py
@@ -1,70 +1,143 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download, qBitOffline)
+from src.utils.shared import (
+    errorDetails,
+    formattedQueueInfo,
+    get_queue,
+    privateTrackerCheck,
+    protectedDownloadCheck,
+    execute_checks,
+    permittedAttemptsCheck,
+    remove_download,
+    qBitOffline,
+)
 import sys, os, traceback
 import logging, verboselogs
-from src.utils.rest import (rest_get)
+from src.utils.rest import rest_get
+
 logger = verboselogs.VerboseLogger(__name__)
 
-async def remove_slow(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, download_sizes_tracker):
+
+async def remove_slow(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    deleted_downloads,
+    defective_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+    download_sizes_tracker,
+):
     # Detects slow downloads and triggers delete. Adds to blocklist
     try:
-        failType = 'slow'
+        failType = "slow"
         queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('remove_slow/queue IN: %s', formattedQueueInfo(queue))
-        if not queue: return 0
-        if await qBitOffline(settingsDict, failType, NAME): return 0
+        logger.debug("remove_slow/queue IN: %s", formattedQueueInfo(queue))
+        if not queue:
+            return 0
+        if await qBitOffline(settingsDict, failType, NAME):
+            return 0
         # Find items affected
         affectedItems = []
         alreadyCheckedDownloadIDs = []
-        for queueItem in queue['records']:
-            if 'downloadId' in queueItem and 'size' in queueItem and 'sizeleft' in queueItem and 'status' in queueItem:
-                if queueItem['downloadId'] not in alreadyCheckedDownloadIDs:
-                    alreadyCheckedDownloadIDs.append(queueItem['downloadId']) # One downloadId may occur in multiple queueItems - only check once for all of them per iteration
-                    if queueItem['protocol'] == 'usenet': # No need to check for speed for usenet, since there users pay for speed
+        for queueItem in queue["records"]:
+            if (
+                "downloadId" in queueItem
+                and "size" in queueItem
+                and "sizeleft" in queueItem
+                and "status" in queueItem
+            ):
+                if queueItem["downloadId"] not in alreadyCheckedDownloadIDs:
+                    alreadyCheckedDownloadIDs.append(
+                        queueItem["downloadId"]
+                    )  # One downloadId may occur in multiple queueItems - only check once for all of them per iteration
+                    if (
+                        queueItem["protocol"] == "usenet"
+                    ):  # No need to check speed for usenet, since users there pay for speed
                         continue
-                    if queueItem['status'] == 'downloading':
-                        if queueItem['sizeleft'] == 0: # Skip items that are finished downloading but are still marked as downloading. May be the case when files are moving
-                            logger.info('>>> Detected %s download that has completed downloading - skipping check (torrent files likely in process of being moved): %s',failType, queueItem['title'])
+                    if queueItem["status"] == "downloading":
+                        if (
+                            queueItem["sizeleft"] == 0
+                        ):  # Skip items that are finished downloading but are still marked as downloading. May be the case when files are moving
+                            logger.info(
+                                ">>> Detected %s download that has completed downloading - skipping check (torrent files likely in process of being moved): %s",
+                                failType,
+                                queueItem["title"],
+                            )
                             continue
 
                     # determine if the downloaded bit on average between this and the last iteration is greater than the min threshold
-                    downloadedSize, previousSize, increment, speed = await getDownloadedSize(settingsDict, queueItem, download_sizes_tracker, NAME)
-                    if queueItem['downloadId'] in download_sizes_tracker.dict and speed is not None:
-                        if speed < settingsDict['MIN_DOWNLOAD_SPEED']:
+                    downloadedSize, previousSize, increment, speed = (
+                        await getDownloadedSize(
+                            settingsDict, queueItem, download_sizes_tracker, NAME
+                        )
+                    )
+                    if (
+                        queueItem["downloadId"] in download_sizes_tracker.dict
+                        and speed is not None
+                    ):
+                        if speed < settingsDict["MIN_DOWNLOAD_SPEED"]:
                             affectedItems.append(queueItem)
-                            logger.debug('remove_slow/slow speed detected: %s (Speed: %d KB/s, KB now: %s, KB previous: %s, Diff: %s, In Minutes: %s', \
-                                queueItem['title'], speed, downloadedSize, previousSize, increment, settingsDict['REMOVE_TIMER'])
+                            logger.debug(
+                                "remove_slow/slow speed detected: %s (Speed: %d KB/s, KB now: %s, KB previous: %s, Diff: %s, In Minutes: %s",
+                                queueItem["title"],
+                                speed,
+                                downloadedSize,
+                                previousSize,
+                                increment,
+                                settingsDict["REMOVE_TIMER"],
+                            )
 
-
-        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
-                                            addToBlocklist = True,
-                                            doPrivateTrackerCheck = True,
-                                            doProtectedDownloadCheck = True,
-                                            doPermittedAttemptsCheck = True)
+        affectedItems = await execute_checks(
+            settingsDict,
+            affectedItems,
+            failType,
+            BASE_URL,
+            API_KEY,
+            NAME,
+            deleted_downloads,
+            defective_tracker,
+            privateDowloadIDs,
+            protectedDownloadIDs,
+            addToBlocklist=True,
+            doPrivateTrackerCheck=True,
+            doProtectedDownloadCheck=True,
+            doPermittedAttemptsCheck=True,
+        )
         return len(affectedItems)
     except Exception as error:
         errorDetails(NAME, error)
         return 0
 
+
 async def getDownloadedSize(settingsDict, queueItem, download_sizes_tracker, NAME):
     try:
         # Determines the speed of download
         # Since Sonarr/Radarr do not update the downlodedSize on realtime, if possible, fetch it directly from qBit
-        if settingsDict['QBITTORRENT_URL'] and queueItem['downloadClient'] == 'qBittorrent':
-            qbitInfo = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={'hashes': queueItem['downloadId']}, cookies=settingsDict['QBIT_COOKIE'] )
-            downloadedSize = qbitInfo[0]['completed']
+        if (
+            settingsDict["QBITTORRENT_URL"]
+            and queueItem["downloadClient"] == "qBittorrent"
+        ):
+            qbitInfo = await rest_get(
+                settingsDict["QBITTORRENT_URL"] + "/torrents/info",
+                params={"hashes": queueItem["downloadId"]},
+                cookies=settingsDict["QBIT_COOKIE"],
+            )
+            downloadedSize = qbitInfo[0]["completed"]
         else:
-            logger.debug('getDownloadedSize/WARN: Using imprecise method to determine download increments because no direct qBIT query is possible')
-            downloadedSize = queueItem['size'] - queueItem['sizeleft']
-        if queueItem['downloadId'] in download_sizes_tracker.dict:
-            previousSize = download_sizes_tracker.dict.get(queueItem['downloadId'])
+            logger.debug(
+                "getDownloadedSize/WARN: Using imprecise method to determine download increments because no direct qBIT query is possible"
+            )
+            downloadedSize = queueItem["size"] - queueItem["sizeleft"]
+        if queueItem["downloadId"] in download_sizes_tracker.dict:
+            previousSize = download_sizes_tracker.dict.get(queueItem["downloadId"])
             increment = downloadedSize - previousSize
-            speed = round(increment / 1000 / (settingsDict['REMOVE_TIMER'] * 60),1)
+            speed = round(increment / 1000 / (settingsDict["REMOVE_TIMER"] * 60), 1)
         else:
             previousSize = None
             increment = None
             speed = None
-        download_sizes_tracker.dict[queueItem['downloadId']] = downloadedSize
+        download_sizes_tracker.dict[queueItem["downloadId"]] = downloadedSize
 
         return downloadedSize, previousSize, increment, speed
     except Exception as error:
         errorDetails(NAME, error)
-        return
+        return
diff --git a/src/jobs/remove_stalled.py b/src/jobs/remove_stalled.py
index 622f96c..5edd7df 100644
--- a/src/jobs/remove_stalled.py
+++ b/src/jobs/remove_stalled.py
@@ -1,27 +1,65 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download, qBitOffline)
+from src.utils.shared import (
+    errorDetails,
+    formattedQueueInfo,
+    get_queue,
+    privateTrackerCheck,
+    protectedDownloadCheck,
+    execute_checks,
+    permittedAttemptsCheck,
+    remove_download,
+    qBitOffline,
+)
 import sys, os, traceback
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 
-async def remove_stalled(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs):
-    # Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
+
+async def remove_stalled(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    deleted_downloads,
+    defective_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+):
+    # Detects stalled and triggers repeat check and subsequent delete. Adds to blocklist
     try:
-        failType = 'stalled'
+        failType = "stalled"
         queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('remove_stalled/queue IN: %s', formattedQueueInfo(queue))
-        if not queue: return 0
-        if await qBitOffline(settingsDict, failType, NAME): return 0
+        logger.debug("remove_stalled/queue IN: %s", formattedQueueInfo(queue))
+        if not queue:
+            return 0
+        if await qBitOffline(settingsDict, failType, NAME):
+            return 0
         # Find items affected
         affectedItems = []
-        for queueItem in queue['records']:
-            if 'errorMessage' in queueItem and 'status' in queueItem:
-                if queueItem['status'] == 'warning' and queueItem['errorMessage'] == 'The download is stalled with no connections':
+        for queueItem in queue["records"]:
+            if "errorMessage" in queueItem and "status" in queueItem:
+                if (
+                    queueItem["status"] == "warning"
+                    and queueItem["errorMessage"]
+                    == "The download is stalled with no connections"
+                ):
                     affectedItems.append(queueItem)
-        affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs,
-                                            addToBlocklist = True,
-                                            doPrivateTrackerCheck = True,
-                                            doProtectedDownloadCheck = True,
-                                            doPermittedAttemptsCheck = True)
+        affectedItems = await execute_checks(
+            settingsDict,
+            affectedItems,
+            failType,
+            BASE_URL,
+            API_KEY,
+            NAME,
+            deleted_downloads,
+            defective_tracker,
+            privateDowloadIDs,
+            protectedDownloadIDs,
+            addToBlocklist=True,
+            doPrivateTrackerCheck=True,
+            doProtectedDownloadCheck=True,
+            doPermittedAttemptsCheck=True,
+        )
         return len(affectedItems)
     except Exception as error:
         errorDetails(NAME, error)
diff --git a/src/jobs/remove_unmonitored.py b/src/jobs/remove_unmonitored.py
index f6f5023..08ccc43 100644
--- a/src/jobs/remove_unmonitored.py
+++ b/src/jobs/remove_unmonitored.py
@@ -1,43 +1,98 @@
-from src.utils.shared import (errorDetails, formattedQueueInfo, get_queue, privateTrackerCheck, protectedDownloadCheck, execute_checks, permittedAttemptsCheck, remove_download)
+from src.utils.shared import (
+    errorDetails,
+    formattedQueueInfo,
+    get_queue,
+    privateTrackerCheck,
+    protectedDownloadCheck,
+    execute_checks,
+    permittedAttemptsCheck,
+    remove_download,
+)
 import sys, os, traceback
 import logging, verboselogs
+
 logger = verboselogs.VerboseLogger(__name__)
 from src.utils.rest import rest_get
 
-async def remove_unmonitored(settingsDict, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, protectedDownloadIDs, privateDowloadIDs, arr_type):
-    # Removes downloads belonging to movies/tv shows that are not monitored. Does not add to blocklist
+
+async def remove_unmonitored(
+    settingsDict,
+    BASE_URL,
+    API_KEY,
+    NAME,
+    deleted_downloads,
+    defective_tracker,
+    protectedDownloadIDs,
+    privateDowloadIDs,
+    arr_type,
+):
+    # Removes downloads belonging to movies/tv shows that are not monitored. Does not add to blocklist
     try:
-        failType = 'unmonitored'
+        failType = "unmonitored"
         queue = await get_queue(BASE_URL, API_KEY)
-        logger.debug('remove_unmonitored/queue IN: %s', formattedQueueInfo(queue))
-        if not queue: return 0
+        logger.debug("remove_unmonitored/queue IN: %s", formattedQueueInfo(queue))
+        if not queue:
+            return 0
         # Find items affected
         monitoredDownloadIDs = []
-        for queueItem in queue['records']:
-            if arr_type == 'SONARR':
-                isMonitored = (await rest_get(f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY))['monitored']
-            elif arr_type == 'RADARR':
-                isMonitored = (await rest_get(f'{BASE_URL}/movie/{str(queueItem["movieId"])}', API_KEY))['monitored']
-            elif arr_type == 'LIDARR':
-                isMonitored = (await rest_get(f'{BASE_URL}/album/{str(queueItem["albumId"])}', API_KEY))['monitored']
-            elif arr_type == 'READARR':
-                isMonitored = (await rest_get(f'{BASE_URL}/book/{str(queueItem["bookId"])}', API_KEY))['monitored']
-            elif arr_type == 'WHISPARR':
-                isMonitored = (await rest_get(f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY))['monitored']
+        for queueItem in queue["records"]:
+            if arr_type == "SONARR":
+                isMonitored = (
+                    await rest_get(
+                        f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY
+                    )
+                )["monitored"]
+            elif arr_type == "RADARR":
+                isMonitored = (
+                    await rest_get(
+                        f'{BASE_URL}/movie/{str(queueItem["movieId"])}', API_KEY
+                    )
+                )["monitored"]
+            elif arr_type == "LIDARR":
+                isMonitored = (
+                    await rest_get(
+                        f'{BASE_URL}/album/{str(queueItem["albumId"])}', API_KEY
+                    )
+                )["monitored"]
+            elif arr_type == "READARR":
+                isMonitored = (
+                    await rest_get(
+                        f'{BASE_URL}/book/{str(queueItem["bookId"])}', API_KEY
+                    )
+                )["monitored"]
+            elif arr_type == "WHISPARR":
+                isMonitored = (
+                    await rest_get(
+                        f'{BASE_URL}/episode/{str(queueItem["episodeId"])}', API_KEY
+                    )
+                )["monitored"]
             if isMonitored:
-                monitoredDownloadIDs.append(queueItem['downloadId'])
+                monitoredDownloadIDs.append(queueItem["downloadId"])
 
         affectedItems = []
-        for queueItem in queue['records']:
-            if queueItem['downloadId'] not in monitoredDownloadIDs:
-                affectedItems.append(queueItem) # One downloadID may be shared by multiple queueItems. Only removes it if ALL queueitems are unmonitored
+        for queueItem in queue["records"]:
+            if queueItem["downloadId"] not in monitoredDownloadIDs:
+                affectedItems.append(
+                    queueItem
+                )  # One downloadID may be shared by multiple queueItems. Only removes it if ALL queueitems are unmonitored
Only removes it if ALL queueItems are unmonitored - affectedItems = await execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, - addToBlocklist = False, - doPrivateTrackerCheck = True, - doProtectedDownloadCheck = True, - doPermittedAttemptsCheck = False) + affectedItems = await execute_checks( + settingsDict, + affectedItems, + failType, + BASE_URL, + API_KEY, + NAME, + deleted_downloads, + defective_tracker, + privateDowloadIDs, + protectedDownloadIDs, + addToBlocklist=False, + doPrivateTrackerCheck=True, + doProtectedDownloadCheck=True, + doPermittedAttemptsCheck=False, + ) return len(affectedItems) except Exception as error: errorDetails(NAME, error) - return 0 \ No newline at end of file + return 0 diff --git a/src/utils/loadScripts.py b/src/utils/loadScripts.py index f676fbc..47c6bf2 100644 --- a/src/utils/loadScripts.py +++ b/src/utils/loadScripts.py @@ -1,231 +1,416 @@ ########### Import Libraries import logging, verboselogs + logger = verboselogs.VerboseLogger(__name__) from dateutil.relativedelta import relativedelta as rd -import requests -from src.utils.rest import rest_get, rest_post # +import requests +from src.utils.rest import rest_get, rest_post # import asyncio from packaging import version + def setLoggingFormat(settingsDict): # Sets logger output to specific format - log_level_num=logging.getLevelName(settingsDict['LOG_LEVEL']) + log_level_num = logging.getLevelName(settingsDict["LOG_LEVEL"]) logging.basicConfig( - format=('' if settingsDict['IS_IN_DOCKER'] else '%(asctime)s ') + ('[%(levelname)-7s]' if settingsDict['LOG_LEVEL']=='VERBOSE' else '[%(levelname)s]') + ': %(message)s', - level=log_level_num + format=("" if settingsDict["IS_IN_DOCKER"] else "%(asctime)s ") + + ( + "[%(levelname)-7s]" + if settingsDict["LOG_LEVEL"] == "VERBOSE" + else "[%(levelname)s]" + ) + + ": %(message)s", + level=log_level_num, ) - return + return async def getArrInstanceName(settingsDict, arrApp): # Retrieves the names of the arr instances, and if not defined, sets a default (should in theory not be required, since UI already enforces a value) try: - if settingsDict[arrApp + '_URL']: - settingsDict[arrApp + '_NAME'] = (await rest_get(settingsDict[arrApp + '_URL']+'/system/status', settingsDict[arrApp + '_KEY']))['instanceName'] + if settingsDict[arrApp + "_URL"]: + settingsDict[arrApp + "_NAME"] = ( + await rest_get( + settingsDict[arrApp + "_URL"] + "/system/status", + settingsDict[arrApp + "_KEY"], + ) + )["instanceName"] except: - settingsDict[arrApp + '_NAME'] = arrApp.title() + settingsDict[arrApp + "_NAME"] = arrApp.title() return settingsDict + async def getProtectedAndPrivateFromQbit(settingsDict): # Returns two lists containing the hashes of Qbit that are either protected by tag, or are private trackers (if IGNORE_PRIVATE_TRACKERS is true) protectedDownloadIDs = [] privateDowloadIDs = [] - if settingsDict['QBITTORRENT_URL']: + if settingsDict["QBITTORRENT_URL"]: # Fetch all torrents - qbitItems = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/info',params={}, cookies=settingsDict['QBIT_COOKIE']) + qbitItems = await rest_get( + settingsDict["QBITTORRENT_URL"] + "/torrents/info", + params={}, + cookies=settingsDict["QBIT_COOKIE"], + ) # Fetch protected torrents (by tag) for qbitItem in qbitItems: - if 
settingsDict["NO_STALLED_REMOVAL_QBIT_TAG"] in qbitItem.get("tags"): + protectedDownloadIDs.append(str.upper(qbitItem["hash"])) # Fetch private torrents - if settingsDict['IGNORE_PRIVATE_TRACKERS']: - for qbitItem in qbitItems: - qbitItemProperties = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/properties',params={'hash': qbitItem['hash']}, cookies=settingsDict['QBIT_COOKIE']) - qbitItem['is_private'] = qbitItemProperties.get('is_private', None) # Adds the is_private flag to qbitItem info for simplified logging - if qbitItemProperties.get('is_private', False): - privateDowloadIDs.append(str.upper(qbitItem['hash'])) - logger.debug('main/getProtectedAndPrivateFromQbit/qbitItems: %s', str([{"hash": str.upper(item["hash"]), "name": item["name"], "category": item["category"], "tags": item["tags"], "is_private": item.get("is_private", None)} for item in qbitItems])) - - logger.debug('main/getProtectedAndPrivateFromQbit/protectedDownloadIDs: %s', str(protectedDownloadIDs)) - logger.debug('main/getProtectedAndPrivateFromQbit/privateDowloadIDs: %s', str(privateDowloadIDs)) + if settingsDict["IGNORE_PRIVATE_TRACKERS"]: + for qbitItem in qbitItems: + qbitItemProperties = await rest_get( + settingsDict["QBITTORRENT_URL"] + "/torrents/properties", + params={"hash": qbitItem["hash"]}, + cookies=settingsDict["QBIT_COOKIE"], + ) + qbitItem["is_private"] = qbitItemProperties.get( + "is_private", None + ) # Adds the is_private flag to qbitItem info for simplified logging + if qbitItemProperties.get("is_private", False): + privateDowloadIDs.append(str.upper(qbitItem["hash"])) + logger.debug( + "main/getProtectedAndPrivateFromQbit/qbitItems: %s", + str( + [ + { + "hash": str.upper(item["hash"]), + "name": item["name"], + "category": item["category"], + "tags": item["tags"], + "is_private": item.get("is_private", None), + } + for item in qbitItems + ] + ), + ) + + logger.debug( + "main/getProtectedAndPrivateFromQbit/protectedDownloadIDs: %s", + str(protectedDownloadIDs), + ) + logger.debug( + "main/getProtectedAndPrivateFromQbit/privateDowloadIDs: %s", + str(privateDowloadIDs), + ) return protectedDownloadIDs, privateDowloadIDs - + + def showWelcome(): # Welcome Message - logger.info('#' * 50) - logger.info('Decluttarr - Application Started!') - logger.info('') - logger.info('Like this app? Thanks for giving it a ⭐️ on GitHub!') - logger.info('https://github.com/ManiMatter/decluttarr/') - logger.info('') + logger.info("#" * 50) + logger.info("Decluttarr - Application Started!") + logger.info("") + logger.info("Like this app? 
Thanks for giving it a ⭐️ on GitHub!") + logger.info("https://github.com/ManiMatter/decluttarr/") + logger.info("") return + def showSettings(settingsDict): # Settings Message - fmt = '{0.days} days {0.hours} hours {0.minutes} minutes' - logger.info('*** Current Settings ***') - logger.info('Version: %s', settingsDict['IMAGE_TAG']) - logger.info('Commit: %s', settingsDict['SHORT_COMMIT_ID']) - logger.info('') - logger.info('%s | Removing failed downloads (%s)', str(settingsDict['REMOVE_FAILED']), 'REMOVE_FAILED') - logger.info('%s | Removing failed imports (%s)', str(settingsDict['REMOVE_FAILED_IMPORTS']), 'REMOVE_FAILED_IMPORTS') - if settingsDict['REMOVE_FAILED_IMPORTS'] and not settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: - logger.verbose ('> Any imports with a warning flag are considered failed, as no patterns specified (%s).', 'FAILED_IMPORT_MESSAGE_PATTERNS') - elif settingsDict['REMOVE_FAILED_IMPORTS'] and settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: - logger.verbose ('> Imports with a warning flag are considered failed if the status message contains any of the following patterns:') - for pattern in settingsDict['FAILED_IMPORT_MESSAGE_PATTERNS']: + fmt = "{0.days} days {0.hours} hours {0.minutes} minutes" + logger.info("*** Current Settings ***") + logger.info("Version: %s", settingsDict["IMAGE_TAG"]) + logger.info("Commit: %s", settingsDict["SHORT_COMMIT_ID"]) + logger.info("") + logger.info( + "%s | Removing failed downloads (%s)", + str(settingsDict["REMOVE_FAILED"]), + "REMOVE_FAILED", + ) + logger.info( + "%s | Removing failed imports (%s)", + str(settingsDict["REMOVE_FAILED_IMPORTS"]), + "REMOVE_FAILED_IMPORTS", + ) + if ( + settingsDict["REMOVE_FAILED_IMPORTS"] + and not settingsDict["FAILED_IMPORT_MESSAGE_PATTERNS"] + ): + logger.verbose( + "> Any imports with a warning flag are considered failed, as no patterns specified (%s).", + "FAILED_IMPORT_MESSAGE_PATTERNS", + ) + elif ( + settingsDict["REMOVE_FAILED_IMPORTS"] + and settingsDict["FAILED_IMPORT_MESSAGE_PATTERNS"] + ): + logger.verbose( + "> Imports with a warning flag are considered failed if the status message contains any of the following patterns:" + ) + for pattern in settingsDict["FAILED_IMPORT_MESSAGE_PATTERNS"]: logger.verbose(' - "%s"', pattern) - logger.info('%s | Removing downloads missing metadata (%s)', str(settingsDict['REMOVE_METADATA_MISSING']), 'REMOVE_METADATA_MISSING') - logger.info('%s | Removing downloads missing files (%s)', str(settingsDict['REMOVE_MISSING_FILES']), 'REMOVE_MISSING_FILES') - logger.info('%s | Removing orphan downloads (%s)', str(settingsDict['REMOVE_ORPHANS']), 'REMOVE_ORPHANS') - logger.info('%s | Removing slow downloads (%s)', str(settingsDict['REMOVE_SLOW']), 'REMOVE_SLOW') - logger.info('%s | Removing stalled downloads (%s)', str(settingsDict['REMOVE_STALLED']), 'REMOVE_STALLED') - logger.info('%s | Removing downloads belonging to unmonitored items (%s)', str(settingsDict['REMOVE_UNMONITORED']), 'REMOVE_UNMONITORED') - logger.info('%s | Cancelling files with >100%% availability (%s)', str(settingsDict['CANCEL_UNAVAILABLE_FILES']), 'CANCEL_UNAVAILABLE_FILES') - logger.info('') - logger.info('Running every: %s', fmt.format(rd(minutes=settingsDict['REMOVE_TIMER']))) - if settingsDict['REMOVE_SLOW']: - logger.info('Minimum speed enforced: %s KB/s', str(settingsDict['MIN_DOWNLOAD_SPEED'])) - logger.info('Permitted number of times before stalled/missing metadata/slow downloads are removed: %s', str(settingsDict['PERMITTED_ATTEMPTS'])) - if settingsDict['QBITTORRENT_URL']: - 
logger.info('Downloads with this tag will be skipped: \"%s\"', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']) - logger.info('Private Trackers will be skipped: %s', settingsDict['IGNORE_PRIVATE_TRACKERS']) - - logger.info('') - logger.info('*** Configured Instances ***') - - for instance in settingsDict['INSTANCES']: - if settingsDict[instance + '_URL']: + logger.info( + "%s | Removing downloads missing metadata (%s)", + str(settingsDict["REMOVE_METADATA_MISSING"]), + "REMOVE_METADATA_MISSING", + ) + logger.info( + "%s | Removing downloads missing files (%s)", + str(settingsDict["REMOVE_MISSING_FILES"]), + "REMOVE_MISSING_FILES", + ) + logger.info( + "%s | Removing orphan downloads (%s)", + str(settingsDict["REMOVE_ORPHANS"]), + "REMOVE_ORPHANS", + ) + logger.info( + "%s | Removing slow downloads (%s)", + str(settingsDict["REMOVE_SLOW"]), + "REMOVE_SLOW", + ) + logger.info( + "%s | Removing stalled downloads (%s)", + str(settingsDict["REMOVE_STALLED"]), + "REMOVE_STALLED", + ) + logger.info( + "%s | Removing downloads belonging to unmonitored items (%s)", + str(settingsDict["REMOVE_UNMONITORED"]), + "REMOVE_UNMONITORED", + ) + logger.info( + "%s | Cancelling files with >100%% availability (%s)", + str(settingsDict["CANCEL_UNAVAILABLE_FILES"]), + "CANCEL_UNAVAILABLE_FILES", + ) + logger.info("") + logger.info( + "Running every: %s", fmt.format(rd(minutes=settingsDict["REMOVE_TIMER"])) + ) + if settingsDict["REMOVE_SLOW"]: + logger.info( + "Minimum speed enforced: %s KB/s", str(settingsDict["MIN_DOWNLOAD_SPEED"]) + ) + logger.info( + "Permitted number of times before stalled/missing metadata/slow downloads are removed: %s", + str(settingsDict["PERMITTED_ATTEMPTS"]), + ) + if settingsDict["QBITTORRENT_URL"]: + logger.info( + 'Downloads with this tag will be skipped: "%s"', + settingsDict["NO_STALLED_REMOVAL_QBIT_TAG"], + ) + logger.info( + "Private Trackers will be skipped: %s", + settingsDict["IGNORE_PRIVATE_TRACKERS"], + ) + + logger.info("") + logger.info("*** Configured Instances ***") + + for instance in settingsDict["INSTANCES"]: + if settingsDict[instance + "_URL"]: logger.info( - '%s%s: %s', - instance.title(), - f" ({settingsDict.get(instance + '_NAME')})" if settingsDict.get(instance + '_NAME') != instance.title() else "", - (settingsDict[instance + '_URL']).split('/api')[0] - ) + "%s%s: %s", + instance.title(), + ( + f" ({settingsDict.get(instance + '_NAME')})" + if settingsDict.get(instance + "_NAME") != instance.title() + else "" + ), + (settingsDict[instance + "_URL"]).split("/api")[0], + ) - if settingsDict['QBITTORRENT_URL']: + if settingsDict["QBITTORRENT_URL"]: logger.info( - 'qBittorrent: %s', - (settingsDict['QBITTORRENT_URL']).split('/api')[0] - ) + "qBittorrent: %s", (settingsDict["QBITTORRENT_URL"]).split("/api")[0] + ) + + logger.info("") + return - logger.info('') - return def upgradeChecks(settingsDict): - if settingsDict['REMOVE_NO_FORMAT_UPGRADE']: - logger.warn('❗️' * 10 + ' OUTDATED SETTINGS ' + '❗️' * 10 ) - logger.warn('') - logger.warn("❗️ %s was replaced with %s.", 'REMOVE_NO_FORMAT_UPGRADE', 'REMOVE_FAILED_IMPORTS') + if settingsDict["REMOVE_NO_FORMAT_UPGRADE"]: + logger.warn("❗️" * 10 + " OUTDATED SETTINGS " + "❗️" * 10) + logger.warn("") + logger.warn( + "❗️ %s was replaced with %s.", + "REMOVE_NO_FORMAT_UPGRADE", + "REMOVE_FAILED_IMPORTS", + ) logger.warn("❗️ Please check the ReadMe and update your settings.") - logger.warn("❗️ Specifically read the section on %s.", 'FAILED_IMPORT_MESSAGE_PATTERNS') - logger.warn('') - logger.warn('❗️' * 29) - 
logger.warn('') + logger.warn( + "❗️ Specifically read the section on %s.", "FAILED_IMPORT_MESSAGE_PATTERNS" + ) + logger.warn("") + logger.warn("❗️" * 29) + logger.warn("") return + async def instanceChecks(settingsDict): - # Checks if the arr and qbit instances are reachable, and returns the settings dictionary with the qbit cookie - logger.info('*** Check Instances ***') + # Checks if the arr and qbit instances are reachable, and returns the settings dictionary with the qbit cookie + logger.info("*** Check Instances ***") error_occured = False # Check ARR-apps - for instance in settingsDict['INSTANCES']: - if settingsDict[instance + '_URL']: + for instance in settingsDict["INSTANCES"]: + if settingsDict[instance + "_URL"]: # Check instance is reachable - try: - response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(settingsDict[instance + '_URL']+'/system/status', params=None, headers={'X-Api-Key': settingsDict[instance + '_KEY']}, verify=settingsDict['SSL_VERIFICATION'])) + try: + response = await asyncio.get_event_loop().run_in_executor( + None, + lambda: requests.get( + settingsDict[instance + "_URL"] + "/system/status", + params=None, + headers={"X-Api-Key": settingsDict[instance + "_KEY"]}, + verify=settingsDict["SSL_VERIFICATION"], + ), + ) response.raise_for_status() except Exception as error: error_occured = True - logger.error('!! %s Error: !!', instance.title()) - logger.error('> %s', error) - if isinstance(error, requests.exceptions.HTTPError) and error.response.status_code == 401: - logger.error ('> Have you configured %s correctly?', instance + '_KEY') + logger.error("!! %s Error: !!", instance.title()) + logger.error("> %s", error) + if ( + isinstance(error, requests.exceptions.HTTPError) + and error.response.status_code == 401 + ): + logger.error( + "> Have you configured %s correctly?", instance + "_KEY" + ) - if not error_occured: + if not error_occured: # Check if network settings are pointing to the right Arr-apps - current_app = (await rest_get(settingsDict[instance + '_URL']+'/system/status', settingsDict[instance + '_KEY']))['appName'] + current_app = ( + await rest_get( + settingsDict[instance + "_URL"] + "/system/status", + settingsDict[instance + "_KEY"], + ) + )["appName"] if current_app.upper() != instance: error_occured = True - logger.error('!! %s Error: !!', instance.title()) - logger.error('> Your %s points to a %s instance, rather than %s. Did you specify the wrong IP?', instance + '_URL', current_app, instance.title()) - + logger.error("!! %s Error: !!", instance.title()) + logger.error( + "> Your %s points to a %s instance, rather than %s. Did you specify the wrong IP?", + instance + "_URL", + current_app, + instance.title(), + ) + if not error_occured: # Check minimum version requirements are met - current_version = (await rest_get(settingsDict[instance + '_URL']+'/system/status', settingsDict[instance + '_KEY']))['version'] - if settingsDict[instance + '_MIN_VERSION']: - if version.parse(current_version) < version.parse(settingsDict[instance + '_MIN_VERSION']): + current_version = ( + await rest_get( + settingsDict[instance + "_URL"] + "/system/status", + settingsDict[instance + "_KEY"], + ) + )["version"] + if settingsDict[instance + "_MIN_VERSION"]: + if version.parse(current_version) < version.parse( + settingsDict[instance + "_MIN_VERSION"] + ): error_occured = True - logger.error('!! %s Error: !!', instance.title()) - logger.error('> Please update %s to at least version %s. 
Current version: %s', instance.title(), settingsDict[instance + '_MIN_VERSION'], current_version) + logger.error("!! %s Error: !!", instance.title()) + logger.error( + "> Please update %s to at least version %s. Current version: %s", + instance.title(), + settingsDict[instance + "_MIN_VERSION"], + current_version, + ) if not error_occured: - logger.info('OK | %s', instance.title()) - logger.debug('Current version of %s: %s', instance, current_version) + logger.info("OK | %s", instance.title()) + logger.debug("Current version of %s: %s", instance, current_version) # Check Bittorrent - if settingsDict['QBITTORRENT_URL']: + if settingsDict["QBITTORRENT_URL"]: # Checking if qbit can be reached, and checking if version is OK - try: - response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(settingsDict['QBITTORRENT_URL']+'/auth/login', data={'username': settingsDict['QBITTORRENT_USERNAME'], 'password': settingsDict['QBITTORRENT_PASSWORD']}, headers={'content-type': 'application/x-www-form-urlencoded'}, verify=settingsDict['SSL_VERIFICATION'])) - if response.text == 'Fails.': - raise ConnectionError('Login failed.') + try: + response = await asyncio.get_event_loop().run_in_executor( + None, + lambda: requests.post( + settingsDict["QBITTORRENT_URL"] + "/auth/login", + data={ + "username": settingsDict["QBITTORRENT_USERNAME"], + "password": settingsDict["QBITTORRENT_PASSWORD"], + }, + headers={"content-type": "application/x-www-form-urlencoded"}, + verify=settingsDict["SSL_VERIFICATION"], + ), + ) + if response.text == "Fails.": + raise ConnectionError("Login failed.") response.raise_for_status() - settingsDict['QBIT_COOKIE'] = {'SID': response.cookies['SID']} + settingsDict["QBIT_COOKIE"] = {"SID": response.cookies["SID"]} except Exception as error: error_occured = True - logger.error('!! %s Error: !!', 'qBittorrent') - logger.error('> %s', error) - logger.error('> Details:') + logger.error("!! %s Error: !!", "qBittorrent") + logger.error("> %s", error) + logger.error("> Details:") logger.error(response.text) if not error_occured: - qbit_version = await rest_get(settingsDict['QBITTORRENT_URL']+'/app/version',cookies=settingsDict['QBIT_COOKIE']) - qbit_version = qbit_version[1:] # version without _v - if version.parse(qbit_version) < version.parse(settingsDict['QBITTORRENT_MIN_VERSION']): + qbit_version = await rest_get( + settingsDict["QBITTORRENT_URL"] + "/app/version", + cookies=settingsDict["QBIT_COOKIE"], + ) + qbit_version = qbit_version[1:] # version without leading "v" + if version.parse(qbit_version) < version.parse( + settingsDict["QBITTORRENT_MIN_VERSION"] + ): error_occured = True - logger.error('-- | %s *** Error: %s ***', 'qBittorrent', 'Please update qBittorrent to at least version %s Current version: %s',settingsDict['QBITTORRENT_MIN_VERSION'], qbit_version) + logger.error( + "-- | %s *** Error: Please update qBittorrent to at least version %s. Current version: %s ***", + "qBittorrent", + settingsDict["QBITTORRENT_MIN_VERSION"], + qbit_version, + ) if not error_occured: - logger.info('OK | %s', 'qBittorrent') - logger.debug('Current version of %s: %s', 'qBittorrent', qbit_version) - + logger.info("OK | %s", "qBittorrent") + logger.debug("Current version of %s: %s", "qBittorrent", qbit_version) if error_occured: - logger.warning('At least one instance had a problem. Waiting for 60 seconds, then exiting Decluttarr.') + logger.warning( + "At least one instance had a problem. Waiting for 60 seconds, then exiting Decluttarr." 
+ ) await asyncio.sleep(60) exit() - logger.info('') + logger.info("") return settingsDict + async def createQbitProtectionTag(settingsDict): # Creates the qBit Protection tag if not already present - if settingsDict['QBITTORRENT_URL']: - current_tags = await rest_get(settingsDict['QBITTORRENT_URL']+'/torrents/tags',cookies=settingsDict['QBIT_COOKIE']) - if not settingsDict['NO_STALLED_REMOVAL_QBIT_TAG'] in current_tags: - if settingsDict['QBITTORRENT_URL']: - logger.info('Creating tag in qBittorrent: %s', settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']) - if not settingsDict['TEST_RUN']: - await rest_post(url=settingsDict['QBITTORRENT_URL']+'/torrents/createTags', data={'tags': settingsDict['NO_STALLED_REMOVAL_QBIT_TAG']}, headers={'content-type': 'application/x-www-form-urlencoded'}, cookies=settingsDict['QBIT_COOKIE']) + if settingsDict["QBITTORRENT_URL"]: + current_tags = await rest_get( + settingsDict["QBITTORRENT_URL"] + "/torrents/tags", + cookies=settingsDict["QBIT_COOKIE"], + ) + if not settingsDict["NO_STALLED_REMOVAL_QBIT_TAG"] in current_tags: + if settingsDict["QBITTORRENT_URL"]: + logger.info( + "Creating tag in qBittorrent: %s", + settingsDict["NO_STALLED_REMOVAL_QBIT_TAG"], + ) + if not settingsDict["TEST_RUN"]: + await rest_post( + url=settingsDict["QBITTORRENT_URL"] + "/torrents/createTags", + data={"tags": settingsDict["NO_STALLED_REMOVAL_QBIT_TAG"]}, + headers={"content-type": "application/x-www-form-urlencoded"}, + cookies=settingsDict["QBIT_COOKIE"], + ) + def showLoggerLevel(settingsDict): - logger.info('#' * 50) - if settingsDict['LOG_LEVEL'] == 'INFO': - logger.info('LOG_LEVEL = INFO: Only logging changes (switch to VERBOSE for more info)') + logger.info("#" * 50) + if settingsDict["LOG_LEVEL"] == "INFO": + logger.info( + "LOG_LEVEL = INFO: Only logging changes (switch to VERBOSE for more info)" + ) else: - logger.info(f'') - if settingsDict['TEST_RUN']: - logger.info(f'*'* 50) - logger.info(f'*'* 50) - logger.info(f'') - logger.info(f'!! TEST_RUN FLAG IS SET !!') - logger.info(f'NO UPDATES/DELETES WILL BE PERFORMED') - logger.info(f'') - logger.info(f'*'* 50) - logger.info(f'*'* 50) - - - - - + logger.info(f"") + if settingsDict["TEST_RUN"]: + logger.info(f"*" * 50) + logger.info(f"*" * 50) + logger.info(f"") + logger.info(f"!! TEST_RUN FLAG IS SET !!") + logger.info(f"NO UPDATES/DELETES WILL BE PERFORMED") + logger.info(f"") + logger.info(f"*" * 50) + logger.info(f"*" * 50) diff --git a/src/utils/nest_functions.py b/src/utils/nest_functions.py index 1917bf5..399eb68 100644 --- a/src/utils/nest_functions.py +++ b/src/utils/nest_functions.py @@ -1,13 +1,12 @@ - def nested_set(dic, keys, value, matchConditions=None): - # Sets the value of a key in a dictionary to a certain value. + # Sets the value of a key in a dictionary to a certain value. 
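+    # Editor's note - a hedged usage sketch, not part of this commit; the call shape is inferred from tests/utils/nest_functions/test_nest_functions.py further below: nested_set(input_dict, [2, "data", "year"], 1994) walks to input_dict[2]["data"] and sets its "year" to 1994.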
# If multiple items are present, it can filter for a matching item for key in keys[:-1]: dic = dic.setdefault(key, {}) if matchConditions: i = 0 match = False - for item in dic: + for item in dic: for matchCondition in matchConditions: if item[matchCondition] != matchConditions[matchCondition]: match = False @@ -19,9 +18,9 @@ def nested_set(dic, keys, value, matchConditions=None): break i += 1 dic[keys[-1]] = value - -def add_keys_nested_dict(d, keys, defaultValue = None): + +def add_keys_nested_dict(d, keys, defaultValue=None): # Creates a nested value if key does not exist for key in keys[:-1]: if key not in d: @@ -29,12 +28,13 @@ def add_keys_nested_dict(d, keys, defaultValue = None): d = d[key] d.setdefault(keys[-1], defaultValue) + def nested_get(dic, return_attribute, matchConditions): # Retrieves a list contained in return_attribute, found within dic based on matchConditions i = 0 match = False hits = [] - for item in dic: + for item in dic: for matchCondition in matchConditions: if item[matchCondition] != matchConditions[matchCondition]: match = False @@ -44,4 +44,4 @@ def nested_get(dic, return_attribute, matchConditions): if match: hits.append(dic[i][return_attribute]) i += 1 - return hits \ No newline at end of file + return hits diff --git a/src/utils/rest.py b/src/utils/rest.py index 86651b6..66fd580 100644 --- a/src/utils/rest.py +++ b/src/utils/rest.py @@ -6,67 +6,104 @@ import json from config.definitions import settingsDict + # GET async def rest_get(url, api_key=None, params=None, cookies=None): try: - headers = {'X-Api-Key': api_key} if api_key else None - response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.get(url, params=params, headers=headers, cookies=cookies, verify=settingsDict['SSL_VERIFICATION'])) + headers = {"X-Api-Key": api_key} if api_key else None + response = await asyncio.get_event_loop().run_in_executor( + None, + lambda: requests.get( + url, + params=params, + headers=headers, + cookies=cookies, + verify=settingsDict["SSL_VERIFICATION"], + ), + ) response.raise_for_status() return response.json() except requests.exceptions.HTTPError as e: - print("HTTP Error:", e) + print("HTTP Error:", e) except RequestException as e: return response.text except ValueError as e: - logging.error(f'Error parsing JSON response from {url}: {e}') + logging.error(f"Error parsing JSON response from {url}: {e}") return None + # DELETE async def rest_delete(url, api_key, params=None): - if settingsDict['TEST_RUN']: return + if settingsDict["TEST_RUN"]: + return try: - headers = {'X-Api-Key': api_key} - response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.delete(url, params=params, headers=headers, verify=settingsDict['SSL_VERIFICATION'])) + headers = {"X-Api-Key": api_key} + response = await asyncio.get_event_loop().run_in_executor( + None, + lambda: requests.delete( + url, + params=params, + headers=headers, + verify=settingsDict["SSL_VERIFICATION"], + ), + ) response.raise_for_status() if response.status_code in [200, 204]: return None return response.json() except RequestException as e: - logging.error(f'Error making API request to {url}: {e}') + logging.error(f"Error making API request to {url}: {e}") return None except ValueError as e: - logging.error(f'Error parsing JSON response from {url}: {e}') + logging.error(f"Error parsing JSON response from {url}: {e}") return None + # POST async def rest_post(url, data=None, json=None, headers=None, cookies=None): - if settingsDict['TEST_RUN']: return + if 
settingsDict["TEST_RUN"]: + return try: - response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.post(url, data=data, json=json, headers=headers, cookies=cookies, verify=settingsDict['SSL_VERIFICATION'])) + response = await asyncio.get_event_loop().run_in_executor( + None, + lambda: requests.post( + url, + data=data, + json=json, + headers=headers, + cookies=cookies, + verify=settingsDict["SSL_VERIFICATION"], + ), + ) response.raise_for_status() - if response.status_code in (200,201): + if response.status_code in (200, 201): return None return response.json() except RequestException as e: - logging.error(f'Error making API request to {url}: {e}') + logging.error(f"Error making API request to {url}: {e}") return None except ValueError as e: - logging.error(f'Error parsing JSON response from {url}: {e}') + logging.error(f"Error parsing JSON response from {url}: {e}") return None # PUT async def rest_put(url, api_key, data): - if settingsDict['TEST_RUN']: return + if settingsDict["TEST_RUN"]: + return try: - headers = {'X-Api-Key': api_key} | {"content-type": "application/json"} - response = await asyncio.get_event_loop().run_in_executor(None, lambda: requests.put(url, data=data, headers=headers, verify=settingsDict['SSL_VERIFICATION'])) + headers = {"X-Api-Key": api_key} | {"content-type": "application/json"} + response = await asyncio.get_event_loop().run_in_executor( + None, + lambda: requests.put( + url, data=data, headers=headers, verify=settingsDict["SSL_VERIFICATION"] + ), + ) response.raise_for_status() return response.json() except RequestException as e: - logging.error(f'Error making API request to {url}: {e}') + logging.error(f"Error making API request to {url}: {e}") return None except ValueError as e: - logging.error(f'Error parsing JSON response from {url}: {e}') + logging.error(f"Error parsing JSON response from {url}: {e}") return None - diff --git a/src/utils/shared.py b/src/utils/shared.py index 3ad2756..74cc4ca 100644 --- a/src/utils/shared.py +++ b/src/utils/shared.py @@ -1,193 +1,369 @@ # Shared Functions import logging, verboselogs + logger = verboselogs.VerboseLogger(__name__) -from src.utils.rest import (rest_get, rest_delete, rest_post) -from src.utils.nest_functions import (add_keys_nested_dict, nested_get) +from src.utils.rest import rest_get, rest_delete, rest_post +from src.utils.nest_functions import add_keys_nested_dict, nested_get import sys, os, traceback -async def get_queue(BASE_URL, API_KEY, params = {}): + +async def get_queue(BASE_URL, API_KEY, params={}): # Retrieves the current queue - await rest_post(url=BASE_URL+'/command', json={'name': 'RefreshMonitoredDownloads'}, headers={'X-Api-Key': API_KEY}) - totalRecords = (await rest_get(f'{BASE_URL}/queue', API_KEY, params))['totalRecords'] + await rest_post( + url=BASE_URL + "/command", + json={"name": "RefreshMonitoredDownloads"}, + headers={"X-Api-Key": API_KEY}, + ) + totalRecords = (await rest_get(f"{BASE_URL}/queue", API_KEY, params))[ + "totalRecords" + ] if totalRecords == 0: return None - queue = await rest_get(f'{BASE_URL}/queue', API_KEY, {'page': '1', 'pageSize': totalRecords}|params) + queue = await rest_get( + f"{BASE_URL}/queue", API_KEY, {"page": "1", "pageSize": totalRecords} | params + ) queue = filterOutDelayedQueueItems(queue) return queue + def filterOutDelayedQueueItems(queue): # Ignores delayed queue items if queue is None: return None seen_combinations = set() filtered_records = [] - for record in queue['records']: + for record in queue["records"]: # Use 
get() method with default value "No indexer" if 'indexer' key does not exist - indexer = record.get('indexer', 'No indexer') - protocol = record.get('protocol', 'No protocol') - combination = (record['title'], protocol, indexer) - if record['status'] == 'delay': + indexer = record.get("indexer", "No indexer") + protocol = record.get("protocol", "No protocol") + combination = (record["title"], protocol, indexer) + if record["status"] == "delay": if combination not in seen_combinations: seen_combinations.add(combination) - logger.debug('>>> Delayed queue item ignored: %s (Protocol: %s, Indexer: %s)', record['title'], protocol, indexer) + logger.debug( + ">>> Delayed queue item ignored: %s (Protocol: %s, Indexer: %s)", + record["title"], + protocol, + indexer, + ) else: filtered_records.append(record) if not filtered_records: return None - queue['records'] = filtered_records + queue["records"] = filtered_records return queue def privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs): # Ignores private tracker items (if setting is turned on) for affectedItem in reversed(affectedItems): - if settingsDict['IGNORE_PRIVATE_TRACKERS'] and affectedItem['downloadId'] in privateDowloadIDs: - affectedItems.remove(affectedItem) + if ( + settingsDict["IGNORE_PRIVATE_TRACKERS"] + and affectedItem["downloadId"] in privateDowloadIDs + ): + affectedItems.remove(affectedItem) return affectedItems + def protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownloadIDs): # Checks if torrent is protected and skips for affectedItem in reversed(affectedItems): - if affectedItem['downloadId'] in protectedDownloadIDs: - logger.verbose('>>> Detected %s download, tagged not to be killed: %s',failType, affectedItem['title']) - logger.debug('>>> DownloadID of above %s download (%s): %s',failType, affectedItem['title'], affectedItem['downloadId']) + if affectedItem["downloadId"] in protectedDownloadIDs: + logger.verbose( + ">>> Detected %s download, tagged not to be killed: %s", + failType, + affectedItem["title"], + ) + logger.debug( + ">>> DownloadID of above %s download (%s): %s", + failType, + affectedItem["title"], + affectedItem["downloadId"], + ) affectedItems.remove(affectedItem) return affectedItems -async def execute_checks(settingsDict, affectedItems, failType, BASE_URL, API_KEY, NAME, deleted_downloads, defective_tracker, privateDowloadIDs, protectedDownloadIDs, addToBlocklist, doPrivateTrackerCheck, doProtectedDownloadCheck, doPermittedAttemptsCheck, extraParameters = {}): +async def execute_checks( + settingsDict, + affectedItems, + failType, + BASE_URL, + API_KEY, + NAME, + deleted_downloads, + defective_tracker, + privateDowloadIDs, + protectedDownloadIDs, + addToBlocklist, + doPrivateTrackerCheck, + doProtectedDownloadCheck, + doPermittedAttemptsCheck, + extraParameters={}, +): # Goes over the affected items and performs the checks that are parametrized try: # De-duplicates the affected items (one downloadid may be shared by multiple affected items) downloadIDs = [] for affectedItem in reversed(affectedItems): - if affectedItem['downloadId'] not in downloadIDs: - downloadIDs.append(affectedItem['downloadId']) + if affectedItem["downloadId"] not in downloadIDs: + downloadIDs.append(affectedItem["downloadId"]) else: affectedItems.remove(affectedItem) # Skips protected items if doPrivateTrackerCheck: - affectedItems = privateTrackerCheck(settingsDict, affectedItems, failType, privateDowloadIDs) + affectedItems = privateTrackerCheck( + settingsDict, affectedItems, 
failType, privateDowloadIDs + ) if doProtectedDownloadCheck: - affectedItems = protectedDownloadCheck(settingsDict, affectedItems, failType, protectedDownloadIDs) - # Checks if failing more often than permitted + affectedItems = protectedDownloadCheck( + settingsDict, affectedItems, failType, protectedDownloadIDs + ) + # Checks if failing more often than permitted if doPermittedAttemptsCheck: - affectedItems = permittedAttemptsCheck(settingsDict, affectedItems, failType, BASE_URL, defective_tracker) - + affectedItems = permittedAttemptsCheck( + settingsDict, affectedItems, failType, BASE_URL, defective_tracker + ) + # Deletes all downloads that have not survived the checks for affectedItem in affectedItems: # Checks whether the torrent should be kept when removing the queue item from the *arr app removeFromClient = True - if extraParameters.get('keepTorrentForPrivateTrackers', False): - if settingsDict['IGNORE_PRIVATE_TRACKERS'] and affectedItem['downloadId'] in privateDowloadIDs: + if extraParameters.get("keepTorrentForPrivateTrackers", False): + if ( + settingsDict["IGNORE_PRIVATE_TRACKERS"] + and affectedItem["downloadId"] in privateDowloadIDs + ): removeFromClient = False - + # Removes the queue item - await remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads, removeFromClient) + await remove_download( + settingsDict, + BASE_URL, + API_KEY, + affectedItem, + failType, + addToBlocklist, + deleted_downloads, + removeFromClient, + ) # Exit Logs - if settingsDict['LOG_LEVEL'] == 'DEBUG': - queue = await get_queue(BASE_URL, API_KEY) - logger.debug('execute_checks/queue OUT (failType: %s): %s', failType, formattedQueueInfo(queue)) + if settingsDict["LOG_LEVEL"] == "DEBUG": + queue = await get_queue(BASE_URL, API_KEY) + logger.debug( + "execute_checks/queue OUT (failType: %s): %s", + failType, + formattedQueueInfo(queue), + ) # Return removed items return affectedItems except Exception as error: errorDetails(NAME, error) return [] -def permittedAttemptsCheck(settingsDict, affectedItems, failType, BASE_URL, defective_tracker): + +def permittedAttemptsCheck( + settingsDict, affectedItems, failType, BASE_URL, defective_tracker +): # Checks if downloads are repeatedly found as stalled / stuck in metadata. Removes the items that are not exceeding permitted attempts # Shows all affected items (for debugging) - logger.debug('permittedAttemptsCheck/affectedItems: %s', ', '.join(f"{affectedItem['id']}:{affectedItem['title']}:{affectedItem['downloadId']}" for affectedItem in affectedItems)) + logger.debug( + "permittedAttemptsCheck/affectedItems: %s", + ", ".join( + f"{affectedItem['id']}:{affectedItem['title']}:{affectedItem['downloadId']}" + for affectedItem in affectedItems + ), + ) # 2. 
Check if those that were previously defective are no longer defective -> those are recovered - affectedDownloadIDs = [affectedItem['downloadId'] for affectedItem in affectedItems] + affectedDownloadIDs = [affectedItem["downloadId"] for affectedItem in affectedItems] try: - recoveredDownloadIDs = [trackedDownloadIDs for trackedDownloadIDs in defective_tracker.dict[BASE_URL][failType] if trackedDownloadIDs not in affectedDownloadIDs] + recoveredDownloadIDs = [ + trackedDownloadIDs + for trackedDownloadIDs in defective_tracker.dict[BASE_URL][failType] + if trackedDownloadIDs not in affectedDownloadIDs + ] except KeyError: recoveredDownloadIDs = [] - logger.debug('permittedAttemptsCheck/recoveredDownloadIDs: %s', str(recoveredDownloadIDs)) + logger.debug( + "permittedAttemptsCheck/recoveredDownloadIDs: %s", str(recoveredDownloadIDs) + ) for recoveredDownloadID in recoveredDownloadIDs: - logger.info('>>> Download no longer marked as %s: %s', failType, defective_tracker.dict[BASE_URL][failType][recoveredDownloadID]['title']) + logger.info( + ">>> Download no longer marked as %s: %s", + failType, + defective_tracker.dict[BASE_URL][failType][recoveredDownloadID]["title"], + ) del defective_tracker.dict[BASE_URL][failType][recoveredDownloadID] - logger.debug('permittedAttemptsCheck/defective_tracker.dict IN: %s', str(defective_tracker.dict)) + logger.debug( + "permittedAttemptsCheck/defective_tracker.dict IN: %s", + str(defective_tracker.dict), + ) - # 3. For those that are defective, add attempt + 1 if present before, or make attempt = 1. + # 3. For those that are defective, add attempt + 1 if present before, or make attempt = 1. for affectedItem in reversed(affectedItems): - try: - defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] += 1 + try: + defective_tracker.dict[BASE_URL][failType][affectedItem["downloadId"]][ + "Attempts" + ] += 1 except KeyError: - add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, affectedItem['downloadId']], {'title': affectedItem['title'], 'Attempts': 1}) - attempts_left = settingsDict['PERMITTED_ATTEMPTS'] - defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] + add_keys_nested_dict( + defective_tracker.dict, + [BASE_URL, failType, affectedItem["downloadId"]], + {"title": affectedItem["title"], "Attempts": 1}, + ) + attempts_left = ( + settingsDict["PERMITTED_ATTEMPTS"] + - defective_tracker.dict[BASE_URL][failType][affectedItem["downloadId"]][ + "Attempts" + ] + ) # If not exceeding the number of permitted times, remove from being affected - if attempts_left >= 0: # Still got attempts left - logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settingsDict['PERMITTED_ATTEMPTS']), affectedItem['title']) + if attempts_left >= 0: # Still got attempts left + logger.info( + ">>> Detected %s download (%s out of %s permitted times): %s", + failType, + str( + defective_tracker.dict[BASE_URL][failType][ + affectedItem["downloadId"] + ]["Attempts"] + ), + str(settingsDict["PERMITTED_ATTEMPTS"]), + affectedItem["title"], + ) affectedItems.remove(affectedItem) - if attempts_left <= -1: # Too many attempts - logger.info('>>> Detected %s download too many times (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settingsDict['PERMITTED_ATTEMPTS']), affectedItem['title']) - if attempts_left <= -2: 
# Too many attempts and should already have been removed - # If supposedly deleted item keeps coming back, print out guidance for "Reject Blocklisted Torrent Hashes While Grabbing" - logger.verbose('>>> [Tip!] Since this download should already have been removed in a previous iteration but keeps coming back, this indicates the blocking of the torrent does not work correctly. Consider turning on the option "Reject Blocklisted Torrent Hashes While Grabbing" on the indexer in the *arr app: %s', affectedItem['title']) - logger.debug('permittedAttemptsCheck/defective_tracker.dict OUT: %s', str(defective_tracker.dict)) + if attempts_left <= -1: # Too many attempts + logger.info( + ">>> Detected %s download too many times (%s out of %s permitted times): %s", + failType, + str( + defective_tracker.dict[BASE_URL][failType][ + affectedItem["downloadId"] + ]["Attempts"] + ), + str(settingsDict["PERMITTED_ATTEMPTS"]), + affectedItem["title"], + ) + if ( + attempts_left <= -2 + ): # Too many attempts and should already have been removed + # If supposedly deleted item keeps coming back, print out guidance for "Reject Blocklisted Torrent Hashes While Grabbing" + logger.verbose( + '>>> [Tip!] Since this download should already have been removed in a previous iteration but keeps coming back, this indicates the blocking of the torrent does not work correctly. Consider turning on the option "Reject Blocklisted Torrent Hashes While Grabbing" on the indexer in the *arr app: %s', + affectedItem["title"], + ) + logger.debug( + "permittedAttemptsCheck/defective_tracker.dict OUT: %s", + str(defective_tracker.dict), + ) return affectedItems -async def remove_download(settingsDict, BASE_URL, API_KEY, affectedItem, failType, addToBlocklist, deleted_downloads, removeFromClient): + +async def remove_download( + settingsDict, + BASE_URL, + API_KEY, + affectedItem, + failType, + addToBlocklist, + deleted_downloads, + removeFromClient, +): # Removes downloads and creates log entry - logger.debug('remove_download/deleted_downloads.dict IN: %s', str(deleted_downloads.dict)) - if affectedItem['downloadId'] not in deleted_downloads.dict: + logger.debug( + "remove_download/deleted_downloads.dict IN: %s", str(deleted_downloads.dict) + ) + if affectedItem["downloadId"] not in deleted_downloads.dict: # "schizophrenic" removal: # Yes, the failed imports are removed from the *arr apps (so the removal still kicks in) # But in the torrent client they are kept if removeFromClient: - logger.info('>>> Removing %s download: %s', failType, affectedItem['title']) + logger.info(">>> Removing %s download: %s", failType, affectedItem["title"]) else: - logger.info('>>> Removing %s download (without removing from torrent client): %s', failType, affectedItem['title']) - + logger.info( + ">>> Removing %s download (without removing from torrent client): %s", + failType, + affectedItem["title"], + ) + # Print out detailed removal messages (if any were added in the jobs) - if 'removal_messages' in affectedItem: - for removal_message in affectedItem['removal_messages']: + if "removal_messages" in affectedItem: + for removal_message in affectedItem["removal_messages"]: logger.info(removal_message) - - if not settingsDict['TEST_RUN']: - await rest_delete(f'{BASE_URL}/queue/{affectedItem["id"]}', API_KEY, {'removeFromClient': removeFromClient, 'blocklist': addToBlocklist}) - deleted_downloads.dict.append(affectedItem['downloadId']) - - logger.debug('remove_download/deleted_downloads.dict OUT: %s', str(deleted_downloads.dict)) + + if not 
settingsDict["TEST_RUN"]: + await rest_delete( + f'{BASE_URL}/queue/{affectedItem["id"]}', + API_KEY, + {"removeFromClient": removeFromClient, "blocklist": addToBlocklist}, + ) + deleted_downloads.dict.append(affectedItem["downloadId"]) + + logger.debug( + "remove_download/deleted_downloads.dict OUT: %s", str(deleted_downloads.dict) + ) return + def errorDetails(NAME, error): exc_type, exc_obj, exc_tb = sys.exc_info() fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] - logger.warning('>>> Queue cleaning failed on %s. (File: %s / Line: %s / Error Message: %s / Error Type: %s)', NAME, fname, exc_tb.tb_lineno, error, exc_type) + logger.warning( + ">>> Queue cleaning failed on %s. (File: %s / Line: %s / Error Message: %s / Error Type: %s)", + NAME, + fname, + exc_tb.tb_lineno, + error, + exc_type, + ) logger.debug(traceback.format_exc()) - return + return + def formattedQueueInfo(queue): try: # Returns queueID, title, and downloadID - if not queue: return 'empty' + if not queue: + return "empty" formatted_list = [] - for record in queue['records']: - download_id = record['downloadId'] - title = record['title'] - item_id = record['id'] + for record in queue["records"]: + download_id = record["downloadId"] + title = record["title"] + item_id = record["id"] # Check if there is an entry with the same download_id and title - existing_entry = next((item for item in formatted_list if item['downloadId'] == download_id), None) + existing_entry = next( + (item for item in formatted_list if item["downloadId"] == download_id), + None, + ) if existing_entry: - existing_entry['IDs'].append(item_id) + existing_entry["IDs"].append(item_id) else: - new_entry = {'downloadId': download_id, 'downloadTitle': title, 'IDs': [item_id]} + new_entry = { + "downloadId": download_id, + "downloadTitle": title, + "IDs": [item_id], + } formatted_list.append(new_entry) - return(formatted_list) + return formatted_list except Exception as error: - errorDetails('formattedQueueInfo', error) - logger.debug('formattedQueueInfo/queue for debug: %s', str(queue)) - return 'error' + errorDetails("formattedQueueInfo", error) + logger.debug("formattedQueueInfo/queue for debug: %s", str(queue)) + return "error" async def qBitOffline(settingsDict, failType, NAME): - if settingsDict['QBITTORRENT_URL']: - qBitConnectionStatus = (await rest_get(settingsDict['QBITTORRENT_URL']+'/sync/maindata', cookies=settingsDict['QBIT_COOKIE']))['server_state']['connection_status'] - if qBitConnectionStatus == 'disconnected': - logger.warning('>>> qBittorrent is disconnected. Skipping %s queue cleaning failed on %s.',failType, NAME) + if settingsDict["QBITTORRENT_URL"]: + qBitConnectionStatus = ( + await rest_get( + settingsDict["QBITTORRENT_URL"] + "/sync/maindata", + cookies=settingsDict["QBIT_COOKIE"], + ) + )["server_state"]["connection_status"] + if qBitConnectionStatus == "disconnected": + logger.warning( + ">>> qBittorrent is disconnected. 
Skipping %s queue cleaning on %s.", + failType, + NAME, + ) return True - return False \ No newline at end of file + return False diff --git a/src/utils/trackers.py b/src/utils/trackers.py index 24701c8..f95844d 100644 --- a/src/utils/trackers.py +++ b/src/utils/trackers.py @@ -3,13 +3,15 @@ class Defective_Tracker: # Keeps track of which downloads were already caught as stalled previously def __init__(self, dict): self.dict = dict - + + class Download_Sizes_Tracker: # Keeps track of the file sizes of the downloads def __init__(self, dict): self.dict = dict - + + class Deleted_Downloads: # Keeps track of which downloads have already been deleted (to not double-delete) def __init__(self, dict): - self.dict = dict \ No newline at end of file + self.dict = dict diff --git a/tests/jobs/remove_failed_imports/remove_failed_imports_utils.py b/tests/jobs/remove_failed_imports/remove_failed_imports_utils.py index 68eef7e..f41817a 100644 --- a/tests/jobs/remove_failed_imports/remove_failed_imports_utils.py +++ b/tests/jobs/remove_failed_imports/remove_failed_imports_utils.py @@ -1,5 +1,6 @@ import os -os.environ['IS_IN_PYTEST'] = 'true' + +os.environ["IS_IN_PYTEST"] = "true" import logging import json import pytest @@ -10,18 +11,20 @@ # Utility function to load mock data def load_mock_data(file_name): - with open(file_name, 'r') as file: + with open(file_name, "r") as file: return json.load(file) + async def mock_get_queue(mock_data): logging.debug("Mock get_queue called") return mock_data + async def run_test( - settingsDict: Dict[str, Any], + settingsDict: Dict[str, Any], expected_removal_messages: Dict[int, Set[str]], mock_data_file: str, - monkeypatch: pytest.MonkeyPatch + monkeypatch: pytest.MonkeyPatch, ) -> None: # Load mock data mock_data = load_mock_data(mock_data_file) @@ -33,7 +36,7 @@ async def run_test( def side_effect(*args, **kwargs): logging.debug("Mock execute_checks called with kwargs: %s", kwargs) # Return the affectedItems from kwargs - return kwargs.get('affectedItems', []) + return kwargs.get("affectedItems", []) # Attach side effect to the mock execute_checks_mock.side_effect = side_effect @@ -42,24 +45,37 @@ def side_effect(*args, **kwargs): mock_get_queue = AsyncMock(return_value=mock_data) # Patch the methods - monkeypatch.setattr('src.jobs.remove_failed_imports.get_queue', mock_get_queue) - monkeypatch.setattr('src.jobs.remove_failed_imports.execute_checks', execute_checks_mock) + monkeypatch.setattr("src.jobs.remove_failed_imports.get_queue", mock_get_queue) + monkeypatch.setattr( + "src.jobs.remove_failed_imports.execute_checks", execute_checks_mock + ) # Call the function - await remove_failed_imports(settingsDict=settingsDict, BASE_URL='', API_KEY='', NAME='', deleted_downloads=set(), defective_tracker=set(), protectedDownloadIDs=set(), privateDowloadIDs=set()) + await remove_failed_imports( + settingsDict=settingsDict, + BASE_URL="", + API_KEY="", + NAME="", + deleted_downloads=set(), + defective_tracker=set(), + protectedDownloadIDs=set(), + privateDowloadIDs=set(), + ) # Assertions assert execute_checks_mock.called # Ensure the mock was called # Assert expected items are there args, kwargs = execute_checks_mock.call_args - affectedItems = kwargs.get('affectedItems', []) - affectedItems_ids = {item['id'] for item in affectedItems} + affectedItems = kwargs.get("affectedItems", []) + affectedItems_ids = {item["id"] for item in affectedItems} expectedItems_ids = set(expected_removal_messages.keys()) assert len(affectedItems) == len(expected_removal_messages) 
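    # Editor's note - descriptive comment, inferred from the test files below: expected_removal_messages maps each queue-item ID to the exact set of removal log lines the job should attach, e.g. {2: {">>>>> Tracked Download State: importBlocked", ...}}; the assert above and the one below verify both the count and the exact IDs of the removed queue items.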
assert affectedItems_ids == expectedItems_ids # Assert all expected messages are there for affectedItem in affectedItems: - assert 'removal_messages' in affectedItem - assert expected_removal_messages[affectedItem['id']] == set(affectedItem.get('removal_messages', [])) + assert "removal_messages" in affectedItem + assert expected_removal_messages[affectedItem["id"]] == set( + affectedItem.get("removal_messages", []) + ) diff --git a/tests/jobs/remove_failed_imports/test_remove_failed_imports_1.py b/tests/jobs/remove_failed_imports/test_remove_failed_imports_1.py index 1464767..d8745f0 100644 --- a/tests/jobs/remove_failed_imports/test_remove_failed_imports_1.py +++ b/tests/jobs/remove_failed_imports/test_remove_failed_imports_1.py @@ -1,39 +1,45 @@ import pytest from remove_failed_imports_utils import run_test -mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_1.json' + +mock_data_file = "tests/jobs/remove_failed_imports/mock_data/mock_data_1.json" + @pytest.mark.asyncio async def test_with_pattern_one_message(monkeypatch): - settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['not found in the grabbed release']} + settingsDict = { + "FAILED_IMPORT_MESSAGE_PATTERNS": ["not found in the grabbed release"] + } expected_removal_messages = { 2: { - '>>>>> Tracked Download State: importBlocked', - '>>>>> Status Messages (matching specified patterns):', - '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv', + ">>>>> Tracked Download State: importBlocked", + ">>>>> Status Messages (matching specified patterns):", + ">>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv", } } await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) + @pytest.mark.asyncio async def test_with_empty_pattern_one_message(monkeypatch): - settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': []} + settingsDict = {"FAILED_IMPORT_MESSAGE_PATTERNS": []} expected_removal_messages = { 2: { - '>>>>> Tracked Download State: importBlocked', - '>>>>> Status Messages (All):', - '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv', + ">>>>> Tracked Download State: importBlocked", + ">>>>> Status Messages (All):", + ">>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv", } } await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) + @pytest.mark.asyncio async def test_without_pattern_one_message(monkeypatch): settingsDict = {} expected_removal_messages = { 2: { - '>>>>> Tracked Download State: importBlocked', - '>>>>> Status Messages (All):', - '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv', + ">>>>> Tracked Download State: importBlocked", + ">>>>> Status Messages (All):", + ">>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv", } } await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch) diff --git a/tests/jobs/remove_failed_imports/test_remove_failed_imports_2.py b/tests/jobs/remove_failed_imports/test_remove_failed_imports_2.py index 827b096..328e1de 100644 --- a/tests/jobs/remove_failed_imports/test_remove_failed_imports_2.py +++ b/tests/jobs/remove_failed_imports/test_remove_failed_imports_2.py @@ -1,41 +1,45 @@ import pytest from remove_failed_imports_utils import run_test -mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_2.json' + +mock_data_file = "tests/jobs/remove_failed_imports/mock_data/mock_data_2.json" + @pytest.mark.asyncio async def 
-    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world', 'all']}
+    settingsDict = {"FAILED_IMPORT_MESSAGE_PATTERNS": ["world", "all"]}
     expected_removal_messages = {
         1: {
-            '>>>>> Tracked Download State: importBlocked',
-            '>>>>> Status Messages (matching specified patterns):',
-            '>>>>> - Message 1 - hello world',
-            '>>>>> - Message 2 - goodbye all',
+            ">>>>> Tracked Download State: importBlocked",
+            ">>>>> Status Messages (matching specified patterns):",
+            ">>>>> - Message 1 - hello world",
+            ">>>>> - Message 2 - goodbye all",
         }
     }
     await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
 
+
 @pytest.mark.asyncio
 async def test_multiple_status_messages_single_pattern(monkeypatch):
-    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world']}
+    settingsDict = {"FAILED_IMPORT_MESSAGE_PATTERNS": ["world"]}
     expected_removal_messages = {
         1: {
-            '>>>>> Tracked Download State: importBlocked',
-            '>>>>> Status Messages (matching specified patterns):',
-            '>>>>> - Message 1 - hello world'
+            ">>>>> Tracked Download State: importBlocked",
+            ">>>>> Status Messages (matching specified patterns):",
+            ">>>>> - Message 1 - hello world",
         }
     }
     await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
 
+
 @pytest.mark.asyncio
 async def test_multiple_status_messages_no_pattern(monkeypatch):
     settingsDict = {}
     expected_removal_messages = {
         1: {
-            '>>>>> Tracked Download State: importBlocked',
-            '>>>>> Status Messages (All):',
-            '>>>>> - Message 1 - hello world',
-            '>>>>> - Message 2 - goodbye all',
+            ">>>>> Tracked Download State: importBlocked",
+            ">>>>> Status Messages (All):",
+            ">>>>> - Message 1 - hello world",
+            ">>>>> - Message 2 - goodbye all",
         }
     }
     await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
diff --git a/tests/jobs/remove_failed_imports/test_remove_failed_imports_3.py b/tests/jobs/remove_failed_imports/test_remove_failed_imports_3.py
index 9522ea5..2c22f50 100644
--- a/tests/jobs/remove_failed_imports/test_remove_failed_imports_3.py
+++ b/tests/jobs/remove_failed_imports/test_remove_failed_imports_3.py
@@ -1,40 +1,43 @@
 import pytest
 from remove_failed_imports_utils import run_test
-mock_data_file = 'tests/jobs/remove_failed_imports/mock_data/mock_data_3.json'
+
+mock_data_file = "tests/jobs/remove_failed_imports/mock_data/mock_data_3.json"
+
 
 @pytest.mark.asyncio
 async def test_multiple_statuses_multiple_pattern(monkeypatch):
-    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world', 'all']}
+    settingsDict = {"FAILED_IMPORT_MESSAGE_PATTERNS": ["world", "all"]}
     expected_removal_messages = {
         1: {
-            '>>>>> Tracked Download State: importPending',
-            '>>>>> Status Messages (matching specified patterns):',
-            '>>>>> - Message 1 - hello world',
-            '>>>>> - Message 2 - goodbye all',
+            ">>>>> Tracked Download State: importPending",
+            ">>>>> Status Messages (matching specified patterns):",
+            ">>>>> - Message 1 - hello world",
+            ">>>>> - Message 2 - goodbye all",
         },
         2: {
-            '>>>>> Tracked Download State: importFailed',
-            '>>>>> Status Messages (matching specified patterns):',
-            '>>>>> - Message 1 - hello world',
-            '>>>>> - Message 2 - goodbye all',
-        }
+            ">>>>> Tracked Download State: importFailed",
+            ">>>>> Status Messages (matching specified patterns):",
+            ">>>>> - Message 1 - hello world",
+            ">>>>> - Message 2 - goodbye all",
+        },
     }
     await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
 
+
 @pytest.mark.asyncio
 async def test_multiple_statuses_single_pattern(monkeypatch):
-    settingsDict = {'FAILED_IMPORT_MESSAGE_PATTERNS': ['world']}
+    settingsDict = {"FAILED_IMPORT_MESSAGE_PATTERNS": ["world"]}
     expected_removal_messages = {
         1: {
-            '>>>>> Tracked Download State: importPending',
-            '>>>>> Status Messages (matching specified patterns):',
-            '>>>>> - Message 1 - hello world'
+            ">>>>> Tracked Download State: importPending",
+            ">>>>> Status Messages (matching specified patterns):",
+            ">>>>> - Message 1 - hello world",
         },
         2: {
-            '>>>>> Tracked Download State: importFailed',
-            '>>>>> Status Messages (matching specified patterns):',
-            '>>>>> - Message 1 - hello world'
-        }
+            ">>>>> Tracked Download State: importFailed",
+            ">>>>> Status Messages (matching specified patterns):",
+            ">>>>> - Message 1 - hello world",
+        },
     }
     await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
@@ -44,16 +47,16 @@ async def test_multiple_statuses_no_pattern(monkeypatch):
     settingsDict = {}
     expected_removal_messages = {
         1: {
-            '>>>>> Tracked Download State: importPending',
-            '>>>>> Status Messages (All):',
-            '>>>>> - Message 1 - hello world',
-            '>>>>> - Message 2 - goodbye all',
+            ">>>>> Tracked Download State: importPending",
+            ">>>>> Status Messages (All):",
+            ">>>>> - Message 1 - hello world",
+            ">>>>> - Message 2 - goodbye all",
         },
         2: {
-            '>>>>> Tracked Download State: importFailed',
-            '>>>>> Status Messages (All):',
-            '>>>>> - Message 1 - hello world',
-            '>>>>> - Message 2 - goodbye all',
-        }
+            ">>>>> Tracked Download State: importFailed",
+            ">>>>> Status Messages (All):",
+            ">>>>> - Message 1 - hello world",
+            ">>>>> - Message 2 - goodbye all",
+        },
     }
     await run_test(settingsDict, expected_removal_messages, mock_data_file, monkeypatch)
diff --git a/tests/utils/nest_functions/test_nest_functions.py b/tests/utils/nest_functions/test_nest_functions.py
index fe7866f..6eedd17 100644
--- a/tests/utils/nest_functions/test_nest_functions.py
+++ b/tests/utils/nest_functions/test_nest_functions.py
@@ -5,49 +5,148 @@
 # import asyncio
 
 # Dictionary that is modified / queried as part of tests
-input_dict = { 1: {'name': 'Breaking Bad 1', 'data': {'episodes': 3, 'year': 1991, 'actors': ['Peter', 'Paul', 'Ppacey']}},
-               2: {'name': 'Breaking Bad 2', 'data': {'episodes': 6, 'year': 1992, 'actors': ['Weter', 'Waul', 'Wpacey']}},
-               3: {'name': 'Breaking Bad 3', 'data': {'episodes': 9, 'year': 1993, 'actors': ['Zeter', 'Zaul', 'Zpacey']}}}
+input_dict = {
+    1: {
+        "name": "Breaking Bad 1",
+        "data": {"episodes": 3, "year": 1991, "actors": ["Peter", "Paul", "Ppacey"]},
+    },
+    2: {
+        "name": "Breaking Bad 2",
+        "data": {"episodes": 6, "year": 1992, "actors": ["Weter", "Waul", "Wpacey"]},
+    },
+    3: {
+        "name": "Breaking Bad 3",
+        "data": {"episodes": 9, "year": 1993, "actors": ["Zeter", "Zaul", "Zpacey"]},
+    },
+}
+
 
 # @pytest.mark.asyncio
 # async def test_nested_set():
 def test_nested_set():
-    expected_output = { 1: {'name': 'Breaking Bad 1', 'data': {'episodes': 3, 'year': 1991, 'actors': ['Peter', 'Paul', 'Ppacey']}},
-                        2: {'name': 'Breaking Bad 2', 'data': {'episodes': 6, 'year': 1994, 'actors': ['Weter', 'Waul', 'Wpacey']}},
-                        3: {'name': 'Breaking Bad 3', 'data': {'episodes': 9, 'year': 1993, 'actors': ['Zeter', 'Zaul', 'Zpacey']}}}
+    expected_output = {
+        1: {
+            "name": "Breaking Bad 1",
+            "data": {
+                "episodes": 3,
+                "year": 1991,
+                "actors": ["Peter", "Paul", "Ppacey"],
+            },
+        },
+        2: {
+            "name": "Breaking Bad 2",
+            "data": {
+                "episodes": 6,
+                "year": 1994,
+                "actors": ["Weter", "Waul", "Wpacey"],
+            },
+        },
+        3: {
"name": "Breaking Bad 3", + "data": { + "episodes": 9, + "year": 1993, + "actors": ["Zeter", "Zaul", "Zpacey"], + }, + }, + } output = input_dict # await nested_set(output, [2, 'data' ,'year'], 1994) - nested_set(output, [2, 'data' ,'year'], 1994) + nested_set(output, [2, "data", "year"], 1994) assert expected_output == output + def test_nested_set_conditions(): - input = { 1: [{'year': 2001, 'rating': 'high'}, {'year': 2002, 'rating': 'high'}, {'year': 2003, 'rating': 'high'}], - 2: [{'year': 2001, 'rating': 'high'}, {'year': 2002, 'rating': 'high'}, {'year': 2003, 'rating': 'high'}]} - expected_output = { 1: [{'year': 2001, 'rating': 'high'}, {'year': 2002, 'rating': 'high'}, {'year': 2003, 'rating': 'high'}], - 2: [{'year': 2001, 'rating': 'high'}, {'year': 2002, 'rating': 'high'}, {'year': 2003, 'rating': 'LOW'}]} + input = { + 1: [ + {"year": 2001, "rating": "high"}, + {"year": 2002, "rating": "high"}, + {"year": 2003, "rating": "high"}, + ], + 2: [ + {"year": 2001, "rating": "high"}, + {"year": 2002, "rating": "high"}, + {"year": 2003, "rating": "high"}, + ], + } + expected_output = { + 1: [ + {"year": 2001, "rating": "high"}, + {"year": 2002, "rating": "high"}, + {"year": 2003, "rating": "high"}, + ], + 2: [ + {"year": 2001, "rating": "high"}, + {"year": 2002, "rating": "high"}, + {"year": 2003, "rating": "LOW"}, + ], + } output = input - nested_set(output, [2, 'rating'], 'LOW', {'year': 2003}) + nested_set(output, [2, "rating"], "LOW", {"year": 2003}) assert expected_output == output + def test_nested_set_conditions_multiple(): - input = { 1: [{'rating': 'high', 'color': 1, 'stack': 1}, {'rating': 'high', 'color': 2, 'stack': 2}, {'rating': 'high', 'color': 2, 'stack': 1}]} - expected_output = { 1: [{'rating': 'high', 'color': 1, 'stack': 1}, {'rating': 'high', 'color': 2, 'stack': 2}, {'rating': 'LOW', 'color': 2, 'stack': 1}]} + input = { + 1: [ + {"rating": "high", "color": 1, "stack": 1}, + {"rating": "high", "color": 2, "stack": 2}, + {"rating": "high", "color": 2, "stack": 1}, + ] + } + expected_output = { + 1: [ + {"rating": "high", "color": 1, "stack": 1}, + {"rating": "high", "color": 2, "stack": 2}, + {"rating": "LOW", "color": 2, "stack": 1}, + ] + } output = input - nested_set(output, [1, 'rating'], 'LOW', {'color': 2, 'stack': 1}) + nested_set(output, [1, "rating"], "LOW", {"color": 2, "stack": 1}) assert expected_output == output + def test_add_keys_nested_dict(): - expected_output = { 1: {'name': 'Breaking Bad 1', 'data': {'episodes': 3, 'year': 1991, 'actors': ['Peter', 'Paul', 'Ppacey']}}, - 2: {'name': 'Breaking Bad 2', 'data': {'episodes': 6, 'year': 1994, 'actors': ['Weter', 'Waul', 'Wpacey'], 'spaceship': True}}, - 3: {'name': 'Breaking Bad 3', 'data': {'episodes': 9, 'year': 1993, 'actors': ['Zeter', 'Zaul', 'Zpacey']}}} + expected_output = { + 1: { + "name": "Breaking Bad 1", + "data": { + "episodes": 3, + "year": 1991, + "actors": ["Peter", "Paul", "Ppacey"], + }, + }, + 2: { + "name": "Breaking Bad 2", + "data": { + "episodes": 6, + "year": 1994, + "actors": ["Weter", "Waul", "Wpacey"], + "spaceship": True, + }, + }, + 3: { + "name": "Breaking Bad 3", + "data": { + "episodes": 9, + "year": 1993, + "actors": ["Zeter", "Zaul", "Zpacey"], + }, + }, + } output = input_dict - add_keys_nested_dict(output, [2, 'data' ,'spaceship'], True) + add_keys_nested_dict(output, [2, "data", "spaceship"], True) assert expected_output == output def test_nested_get(): - input = { 1: [{'name': 'A', 'color': 1, 'stack': 1}, {'name': 'B', 'color': 2, 'stack': 2}, {'name': 'C', 
-    expected_output = ['C']
-    output = nested_get(input[1], 'name', {'color': 2, 'stack': 1})
+    input = {
+        1: [
+            {"name": "A", "color": 1, "stack": 1},
+            {"name": "B", "color": 2, "stack": 2},
+            {"name": "C", "color": 2, "stack": 1},
+        ]
+    }
+    expected_output = ["C"]
+    output = nested_get(input[1], "name", {"color": 2, "stack": 1})
     assert expected_output == output
-
diff --git a/tests/utils/remove_download/remove_download_utils.py b/tests/utils/remove_download/remove_download_utils.py
index 9831b72..69b1f82 100644
--- a/tests/utils/remove_download/remove_download_utils.py
+++ b/tests/utils/remove_download/remove_download_utils.py
@@ -1,5 +1,6 @@
 import os
-os.environ['IS_IN_PYTEST'] = 'true'
+
+os.environ["IS_IN_PYTEST"] = "true"
 import logging
 import json
 import pytest
@@ -8,45 +9,56 @@
 from src.utils.trackers import Deleted_Downloads
-
 
 # Utility function to load mock data
 def load_mock_data(file_name):
-    with open(file_name, 'r') as file:
+    with open(file_name, "r") as file:
         return json.load(file)
 
+
-async def mock_rest_delete() -> None:
-    logger.debug(f"Mock rest_delete called with URL")
+async def mock_rest_delete(*args, **kwargs) -> None:
+    # Accept whatever arguments the real rest_delete receives, and use the
+    # logging module (no `logger` is defined here, and the f-string had no placeholder)
+    logging.debug("Mock rest_delete called")
 
 
 async def run_test(
-    settingsDict: Dict[str, Any],
+    settingsDict: Dict[str, Any],
    expected_removal_messages: Set[str],
     failType: str,
     removeFromClient: bool,
     mock_data_file: str,
     monkeypatch: pytest.MonkeyPatch,
-    caplog: pytest.LogCaptureFixture
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     # Load mock data
     affectedItem = load_mock_data(mock_data_file)
 
     # Mock the `rest_delete` function
-    monkeypatch.setattr('src.utils.shared.rest_delete', mock_rest_delete)
-
+    monkeypatch.setattr("src.utils.shared.rest_delete", mock_rest_delete)
+
     # Call the function
     with caplog.at_level(logging.INFO):
         # Call the function and assert no exceptions
-        try:
-            deleted_downloads = Deleted_Downloads([])
-            await remove_download(settingsDict=settingsDict, BASE_URL='', API_KEY='', affectedItem=affectedItem, failType=failType, addToBlocklist=True, deleted_downloads=deleted_downloads, removeFromClient=removeFromClient)
+        try:
+            deleted_downloads = Deleted_Downloads([])
+            await remove_download(
+                settingsDict=settingsDict,
+                BASE_URL="",
+                API_KEY="",
+                affectedItem=affectedItem,
+                failType=failType,
+                addToBlocklist=True,
+                deleted_downloads=deleted_downloads,
+                removeFromClient=removeFromClient,
+            )
         except Exception as e:
             pytest.fail(f"remove_download raised an exception: {e}")
 
     # Assertions:
     # Check that expected log messages are in the captured log
-    log_messages = {record.message for record in caplog.records if record.levelname == 'INFO'}
+    log_messages = {
+        record.message for record in caplog.records if record.levelname == "INFO"
+    }
     assert expected_removal_messages == log_messages
 
     # Check that the affectedItem's downloadId was added to deleted_downloads
-    assert affectedItem['downloadId'] in deleted_downloads.dict
+    assert affectedItem["downloadId"] in deleted_downloads.dict
diff --git a/tests/utils/remove_download/test_remove_download_1.py b/tests/utils/remove_download/test_remove_download_1.py
index 4761a12..9eb434f 100644
--- a/tests/utils/remove_download/test_remove_download_1.py
+++ b/tests/utils/remove_download/test_remove_download_1.py
@@ -1,33 +1,50 @@
-
 import pytest
 from remove_download_utils import run_test
 
+
 # Parameters identical across all tests
-mock_data_file = 'tests/utils/remove_download/mock_data/mock_data_1.json'
-failType = 'failed import'
+mock_data_file = "tests/utils/remove_download/mock_data/mock_data_1.json"
+failType = "failed import"
+
 
 @pytest.mark.asyncio
 async def test_removal_with_removal_messages(monkeypatch, caplog):
-    settingsDict = {'TEST_RUN': True}
+    settingsDict = {"TEST_RUN": True}
     removeFromClient = True
     expected_removal_messages = {
-        '>>> Removing failed import download: Sonarr Title 1',
-        '>>>>> Tracked Download State: importBlocked',
-        '>>>>> Status Messages (matching specified patterns):',
-        '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
-        '>>>>> - And yet another message'
+        ">>> Removing failed import download: Sonarr Title 1",
+        ">>>>> Tracked Download State: importBlocked",
+        ">>>>> Status Messages (matching specified patterns):",
+        ">>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv",
+        ">>>>> - And yet another message",
     }
-    await run_test(settingsDict=settingsDict, expected_removal_messages=expected_removal_messages, failType=failType, removeFromClient=removeFromClient, mock_data_file=mock_data_file, monkeypatch=monkeypatch, caplog=caplog)
-
+    await run_test(
+        settingsDict=settingsDict,
+        expected_removal_messages=expected_removal_messages,
+        failType=failType,
+        removeFromClient=removeFromClient,
+        mock_data_file=mock_data_file,
+        monkeypatch=monkeypatch,
+        caplog=caplog,
+    )
+
 
 @pytest.mark.asyncio
 async def test_schizophrenic_removal_with_removal_messages(monkeypatch, caplog):
-    settingsDict = {'TEST_RUN': True}
+    settingsDict = {"TEST_RUN": True}
     removeFromClient = False
     expected_removal_messages = {
-        '>>> Removing failed import download (without removing from torrent client): Sonarr Title 1',
-        '>>>>> Tracked Download State: importBlocked',
-        '>>>>> Status Messages (matching specified patterns):',
-        '>>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv',
-        '>>>>> - And yet another message'
-    }
-    await run_test(settingsDict=settingsDict, expected_removal_messages=expected_removal_messages, failType=failType, removeFromClient=removeFromClient, mock_data_file=mock_data_file, monkeypatch=monkeypatch, caplog=caplog)
+        ">>> Removing failed import download (without removing from torrent client): Sonarr Title 1",
+        ">>>>> Tracked Download State: importBlocked",
+        ">>>>> Status Messages (matching specified patterns):",
+        ">>>>> - Episode XYZ was not found in the grabbed release: Sonarr Title 2.mkv",
+        ">>>>> - And yet another message",
+    }
+    await run_test(
+        settingsDict=settingsDict,
+        expected_removal_messages=expected_removal_messages,
+        failType=failType,
+        removeFromClient=removeFromClient,
+        mock_data_file=mock_data_file,
+        monkeypatch=monkeypatch,
+        caplog=caplog,
+    )
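
Note on the mocking pattern used throughout these test files: `monkeypatch.setattr` targets the attribute on the consuming module (e.g. "src.jobs.remove_failed_imports.get_queue", "src.utils.shared.rest_delete"), not on the module that defines the helper. That is deliberate: `from x import y` creates a separate binding in the importing module, so a patch only takes effect where the name is looked up at call time. Below is a minimal, self-contained sketch of the same pattern; the module `app_jobs` and helpers `fetch_queue`/`run_job` are illustrative assumptions, not names from this repository.

    # test_sketch.py -- patch-where-it-is-looked-up, with an async mock
    import pytest
    from unittest.mock import AsyncMock

    import app_jobs  # hypothetical module that did `from queue_api import fetch_queue`


    @pytest.mark.asyncio
    async def test_job_sees_mocked_queue(monkeypatch):
        fake_queue = [{"id": 1, "status": "stalled"}]
        mock_fetch = AsyncMock(return_value=fake_queue)
        # Patch the binding inside app_jobs; patching queue_api.fetch_queue
        # would leave app_jobs' local reference untouched.
        monkeypatch.setattr("app_jobs.fetch_queue", mock_fetch)
        result = await app_jobs.run_job()  # run_job awaits fetch_queue internally
        mock_fetch.assert_awaited_once()
        assert result == fake_queue

`AsyncMock` also records `call_args`, which is what the suites above rely on to pull `affectedItems` back out of the mocked `execute_checks` and assert on the expected IDs and removal messages.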