diff --git a/README.md b/README.md
index 8b3740f2..8fc2cef6 100644
--- a/README.md
+++ b/README.md
@@ -5,8 +5,8 @@ This repository contains plugin and utility scripts created by the Stash communi
 There is also [a list of third-party plugins on our wiki page](https://github.com/stashapp/stash/wiki/Plugins-&--Scripts).
 
 ## Please note: V24 now uses an installer
-# We recommend you use that to install plugins.
-Manual installs are discouraged, and you shouldn't do so unless you otherwise know what you are doing.
+# We recommend you use that to install (and update) plugins.
+Manual installs are not recommended, and you shouldn't attempt one unless you know what you are doing.
 
 ## How To Install
 To download a plugin in Stash v24, the CommunityScripts repo source is automatically installed by default.
@@ -14,25 +14,24 @@ To download a plugin in Stash v24, the CommunityScripts repo source is automatic
 This default source is located at https://stashapp.github.io/CommunityScripts/stable/index.yml
 
 # Plugin, Themes, and Scripts Directory
-We used to list all plugins, themes, and scripts in this repository...
-but with the changes in v24, ANY items installable by the plugin installer are no longer listed here.
+We used to list all community-supported plugins, themes, and scripts in this repository...
+but with the changes in v24, ANY items installable by the plugin installer will no longer be listed here.
 
 Use the Plugin Installer built into Stash.
 
-We will continue to list the items NOT otherwise installable this way below.
+We will continue to list the items NOT installable this way below.
 
 ## NOTE: BREAKING CHANGES
-The recent v24 release (and development branches) had major breaking changes to old schema, and plugin changes.
+The recent v24 release (and future development branches) introduced major breaking changes to the old schema and to plugins.
 We're beginning to review plugins and the rest and patch them to work, but it's an ongoing process...
 We'll update the table below as we do this...
 We will also be rearranging things a bit, and updating documentation (including this page)
 
-## Plugins and Themes are no longer listed here.
+## Plugins and Themes will no longer be listed here.
 
 Category|Triggers|Plugin Name|Description|Minimum Stash version|Updated for v24|
 --------|-----------|-----------|-----------|---------------------|-----
-Scraper|Task|[GHScraper_Checker](plugins/GHScraper_Checker)|Compare local file against github file from the community scraper repo.|v0.8|:x:
-Maintenance|Task<br/>Scene.Update|[renamerOnUpdate](plugins/renamerOnUpdate)|Rename/Move your file based on Stash metadata.|v0.7|:x:
+Maintenance|Task<br/>Scene.Update|[renamerOnUpdate](plugins/renamerOnUpdate)|Rename/Move your file based on Stash metadata.|v2.4|:white_check_mark: STOPGAP
 Maintenance|Set Scene Cover|[setSceneCoverFromFile](plugins/setSceneCoverFromFile)|Searchs Stash for Scenes with a cover image in the same folder and sets the cover image in stash to that image|v0.7|:x:
 Scenes|SceneMarker.Create<br/>SceneMarker.Update|[markerTagToScene](plugins/markerTagToScene)|Adds primary tag of Scene Marker to the Scene on marker create/update.|v0.8 ([46bbede](https://github.com/stashapp/stash/commit/46bbede9a07144797d6f26cf414205b390ca88f9))|:x:
 Scanning|Scene.Create<br/>Gallery.Create<br/>Image.Create|[defaultDataForPath](plugins/defaultDataForPath)|Adds configured Tags, Performers and/or Studio to all newly scanned Scenes, Images and Galleries..|v0.8|:x:
diff --git a/plugins/GHScraper_Checker/GHScraper_Checker.py b/plugins/GHScraper_Checker/GHScraper_Checker.py
deleted file mode 100644
index b1e08f66..00000000
--- a/plugins/GHScraper_Checker/GHScraper_Checker.py
+++ /dev/null
@@ -1,208 +0,0 @@
-import json
-import os
-import re
-import sys
-import zipfile
-from datetime import datetime
-
-import requests
-
-import log
-
-FRAGMENT = json.loads(sys.stdin.read())
-FRAGMENT_SERVER = FRAGMENT["server_connection"]
-FRAGMENT_ARG = FRAGMENT['args']['mode']
-log.LogDebug("Starting Plugin: Github Scraper Checker")
-
-CHECK_LOG = False
-GET_NEW_FILE = False
-OVERWRITE = False
-
-if FRAGMENT_ARG == "CHECK":
-    CHECK_LOG = True
-if FRAGMENT_ARG == "NEWFILE":
-    GET_NEW_FILE = True
-if FRAGMENT_ARG == "OVERWRITE":
-    OVERWRITE = True
-
-# Don't write in log if the file don't exist locally.
-IGNORE_MISS_LOCAL = False
-
-def graphql_getScraperPath():
-    query = """
-    query Configuration {
-        configuration {
-            general {
-                scrapersPath
-            }
-        }
-    }
-    """
-    result = callGraphQL(query)
-    return result["configuration"]["general"]["scrapersPath"]
-
-
-def callGraphQL(query, variables=None):
-    # Session cookie for authentication
-    graphql_port = FRAGMENT_SERVER['Port']
-    graphql_scheme = FRAGMENT_SERVER['Scheme']
-    graphql_cookies = {
-        'session': FRAGMENT_SERVER.get('SessionCookie').get('Value')
-    }
-    graphql_headers = {
-        "Accept-Encoding": "gzip, deflate, br",
-        "Content-Type": "application/json",
-        "Accept": "application/json",
-        "Connection": "keep-alive",
-        "DNT": "1"
-    }
-    if FRAGMENT_SERVER.get('Domain'):
-        graphql_domain = FRAGMENT_SERVER['Domain']
-    else:
-        if FRAGMENT_SERVER.get('Host'):
-            graphql_domain = FRAGMENT_SERVER['Host']
-        else:
-            graphql_domain = 'localhost'
-    # Because i don't understand how host work...
-    graphql_domain = 'localhost'
-    # Stash GraphQL endpoint
-    graphql_url = graphql_scheme + "://" + \
-        graphql_domain + ":" + str(graphql_port) + "/graphql"
-
-    json = {'query': query}
-    if variables is not None:
-        json['variables'] = variables
-    try:
-        response = requests.post(
-            graphql_url, json=json, headers=graphql_headers, cookies=graphql_cookies, timeout=10)
-    except:
-        sys.exit("[FATAL] Error with the graphql request, are you sure the GraphQL endpoint ({}) is correct.".format(
-            graphql_url))
-    if response.status_code == 200:
-        result = response.json()
-        if result.get("error"):
-            for error in result["error"]["errors"]:
-                raise Exception("GraphQL error: {}".format(error))
-        if result.get("data"):
-            return result.get("data")
-    elif response.status_code == 401:
-        sys.exit("HTTP Error 401, Unauthorised.")
-    else:
-        raise ConnectionError("GraphQL query failed:{} - {}. Query: {}. Variables: {}".format(
-            response.status_code, response.content, query, variables))
-
-
-def file_getlastline(path):
-    with open(path, 'r', encoding="utf-8") as f:
-        for line in f:
-            u_match = re.search(r"^\s*#\s*last updated", line.lower())
-            if u_match:
-                return line.strip()
-    return None
-
-
-def get_date(line):
-    try:
-        date = datetime.strptime(re.sub(r".*#.*Last Updated\s*", "", line), "%B %d, %Y")
-    except:
-        return None
-    return date
-
-
-scraper_folder_path = graphql_getScraperPath()
-GITHUB_LINK = "https://github.com/stashapp/CommunityScrapers/archive/refs/heads/master.zip"
-
-try:
-    r = requests.get(GITHUB_LINK, timeout=10)
-except:
-    sys.exit("Failing to download the zip file.")
-zip_path = os.path.join(scraper_folder_path, "github.zip")
-log.LogDebug(zip_path)
-with open(zip_path, "wb") as zip_file:
-    zip_file.write(r.content)
-
-with zipfile.ZipFile(zip_path) as z:
-    change_detected = False
-
-    for filename in z.namelist():
-        # Only care about the scrapers folders
-        if "/scrapers/" in filename and filename.endswith(".yml"):
-            # read the file
-            line = bytes()
-            # Filename abc.yml
-            gh_file = os.path.basename(filename)
-
-            # Filename /scrapers/<subdir>/abc.yml
-            if filename.endswith(f"/scrapers/{gh_file}") == False:
-                log.LogDebug("Subdirectory detected: " + filename)
-                subdir = re.findall('\/scrapers\/(.*)\/.*\.yml', filename)
-
-                if len(subdir) != 1:
-                    log.LogError(f"Unexpected number of matching subdirectories found. Expected 1. Found {len(subdir)}.")
-                    exit(1)
-
-                gh_file = subdir[0] + "/" + gh_file
-
-            log.LogDebug(gh_file)
-            path_local = os.path.join(scraper_folder_path, gh_file)
-            gh_line = None
-            yml_script = None
-            if OVERWRITE:
-                with z.open(filename) as f:
-                    scraper_content = f.read()
-                with open(path_local, 'wb') as yml_file:
-                    yml_file.write(scraper_content)
-                log.LogInfo("Replacing/Creating {}".format(gh_file))
-                continue
-            with z.open(filename) as f:
-                for line in f:
-                    script_match = re.search(r"action:\sscript", line.decode().lower())
-                    update_match = re.search(r"^\s*#\s*last updated", line.decode().lower())
-                    if script_match:
-                        yml_script = True
-                    if update_match:
-                        gh_line = line.decode().strip()
-                        break
-            # Got last line
-            if gh_line is None:
-                log.LogError("[Github] Line Error ({}) ".format(gh_file))
-                continue
-            gh_date = get_date(gh_line)
-            if gh_date is None:
-                log.LogError("[Github] Date Error ({}) ".format(gh_file))
-                continue
-            elif os.path.exists(path_local):
-                # Local Part
-                local_line = file_getlastline(path_local)
-                if local_line is None:
-                    log.LogError("[Local] Line Error ({}) ".format(gh_file))
-                    continue
-                local_date = get_date(local_line.strip())
-                if local_date is None:
-                    log.LogError("[Local] Date Error ({}) ".format(gh_file))
-                    continue
-                if gh_date > local_date and CHECK_LOG:
-                    change_detected = True
-
-                    if yml_script:
-                        log.LogInfo("[{}] New version on github (Can be any of the related files)".format(gh_file))
-                    else:
-                        log.LogInfo("[{}] New version on github".format(gh_file))
-            elif GET_NEW_FILE:
-                change_detected = True
-                # File don't exist local so we take the github version.
-                with z.open(filename) as f:
-                    scraper_content = f.read()
-                with open(path_local, 'wb') as yml_file:
-                    yml_file.write(scraper_content)
-                log.LogInfo("Creating {}".format(gh_file))
-                continue
-            elif CHECK_LOG and IGNORE_MISS_LOCAL == False:
-                change_detected = True
-
-                log.LogWarning("[{}] File don't exist locally".format(gh_file))
-
-if change_detected == False:
-    log.LogInfo("Scrapers appear to be in sync with GitHub version.")
-
-os.remove(zip_path)
diff --git a/plugins/GHScraper_Checker/GHScraper_Checker.yml b/plugins/GHScraper_Checker/GHScraper_Checker.yml
deleted file mode 100644
index 48188be4..00000000
--- a/plugins/GHScraper_Checker/GHScraper_Checker.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-name: GHScraper_Checker
-description: Check the community scraper repo.
-version: 0.1.1
-url: https://github.com/stashapp/CommunityScripts/tree/main/plugins/GHScraper_Checker
-exec:
-  - python
-  - "{pluginDir}/GHScraper_Checker.py"
-interface: raw
-tasks:
-  - name: 'Status Check'
-    description: "Show in log if you don't have the scraper or a new version is available."
-    defaultArgs:
-      mode: CHECK
-  - name: 'Getting new files'
-    description: "Download scraper that don't exist in your scraper folder."
-    defaultArgs:
-      mode: NEWFILE
-# - name: 'Overwrite everything'
-#   description: 'Replace your scraper by github version. Overwrite anything existing.'
-#   defaultArgs:
-#     mode: OVERWRITE
diff --git a/plugins/GHScraper_Checker/log.py b/plugins/GHScraper_Checker/log.py
deleted file mode 100644
index f3812522..00000000
--- a/plugins/GHScraper_Checker/log.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import sys
-
-
-# Log messages sent from a plugin instance are transmitted via stderr and are
-# encoded with a prefix consisting of special character SOH, then the log
-# level (one of t, d, i, w, e, or p - corresponding to trace, debug, info,
-# warning, error and progress levels respectively), then special character
-# STX.
-#
-# The LogTrace, LogDebug, LogInfo, LogWarning, and LogError methods, and their equivalent
-# formatted methods are intended for use by plugin instances to transmit log
-# messages. The LogProgress method is also intended for sending progress data.
-#
-
-def __prefix(level_char):
-    start_level_char = b'\x01'
-    end_level_char = b'\x02'
-
-    ret = start_level_char + level_char + end_level_char
-    return ret.decode()
-
-
-def __log(level_char, s):
-    if level_char == "":
-        return
-
-    print(__prefix(level_char) + s + "\n", file=sys.stderr, flush=True)
-
-
-def LogTrace(s):
-    __log(b't', s)
-
-
-def LogDebug(s):
-    __log(b'd', s)
-
-
-def LogInfo(s):
-    __log(b'i', s)
-
-
-def LogWarning(s):
-    __log(b'w', s)
-
-
-def LogError(s):
-    __log(b'e', s)
-
-
-def LogProgress(p):
-    progress = min(max(0, p), 1)
-    __log(b'p', str(progress))