diff --git a/data/example-config.py b/data/example-config.py
index 32bb850b..e7183b8d 100644
--- a/data/example-config.py
+++ b/data/example-config.py
@@ -86,6 +86,11 @@
         # Needs a 5 second wait to ensure the API is updated
         "get_permalink": False,
+        # How many trackers need to pass successful checking to continue with the upload process
+        # Default = 1. If at least this many trackers pass banned_group and dupe checking, uploading will continue
+        # If fewer trackers than this pass the checks, exit immediately.
+        "tracker_pass_checks": "1",
+
     },

     "TRACKERS": {
diff --git a/src/args.py b/src/args.py
index cf6b03aa..c18e7588 100644
--- a/src/args.py
+++ b/src/args.py
@@ -49,6 +49,7 @@ def parse(self, args, meta):
         parser.add_argument('--no-tag', dest='no_tag', action='store_true', required=False, help="Remove Group Tag from title")
         parser.add_argument('--no-edition', dest='no_edition', action='store_true', required=False, help="Remove Edition from title")
         parser.add_argument('--dual-audio', dest='dual_audio', action='store_true', required=False, help="Add Dual-Audio to the title")
+        parser.add_argument('-ol', '--original-language', dest='manual_language', nargs='*', required=False, help="Set original audio language")
         parser.add_argument('-ns', '--no-seed', action='store_true', required=False, help="Do not add torrent to the client")
         parser.add_argument('-year', '--year', dest='manual_year', nargs='?', required=False, help="Override the year found", type=int, default=0)
         parser.add_argument('-ptp', '--ptp', nargs='*', required=False, help="PTP torrent id/permalink", type=str)
diff --git a/src/prep.py b/src/prep.py
index a13d6e3b..85373bd2 100644
--- a/src/prep.py
+++ b/src/prep.py
@@ -12,6 +12,8 @@
 from src.trackers.COMMON import COMMON
 from src.clients import Clients
 from data.config import config
+from src.uphelper import UploadHelper
+from src.trackersetup import TRACKER_SETUP, tracker_class_map

 try:
     import traceback
@@ -429,8 +431,8 @@ async def gather_prep(self, meta, mode):
         base_dir = meta['base_dir']
         meta['saved_description'] = False

+        folder_id = os.path.basename(meta['path'])
         if meta.get('uuid', None) is None:
-            folder_id = os.path.basename(meta['path'])
             meta['uuid'] = folder_id
         if not os.path.exists(f"{base_dir}/tmp/{meta['uuid']}"):
             Path(f"{base_dir}/tmp/{meta['uuid']}").mkdir(parents=True, exist_ok=True)
@@ -618,46 +620,11 @@ async def process_tracker(tracker_name, meta):
             else:
                 console.print("Skipping existing search as meta already populated")

-        if 'manual_frames' not in meta:
-            meta['manual_frames'] = {}
-        manual_frames = meta['manual_frames']
-        # Take Screenshots
-        if meta['is_disc'] == "BDMV":
-            if meta.get('edit', False) is False:
-                if meta.get('vapoursynth', False) is True:
-                    use_vs = True
-                else:
-                    use_vs = False
-                try:
-                    ds = multiprocessing.Process(target=self.disc_screenshots, args=(meta, filename, bdinfo, meta['uuid'], base_dir, use_vs, meta.get('image_list', []), meta.get('ffdebug', False), None))
-                    ds.start()
-                    while ds.is_alive() is True:
-                        await asyncio.sleep(1)
-                except KeyboardInterrupt:
-                    ds.terminate()
-        elif meta['is_disc'] == "DVD":
-            if meta.get('edit', False) is False:
-                try:
-                    ds = multiprocessing.Process(target=self.dvd_screenshots, args=(meta, 0, None, None))
-                    ds.start()
-                    while ds.is_alive() is True:
-                        await asyncio.sleep(1)
-                except KeyboardInterrupt:
-                    ds.terminate()
-        else:
-            if meta.get('edit', False) is False:
-                try:
-                    s = multiprocessing.Process(
-                        target=self.screenshots,
-                        args=(videopath, filename, meta['uuid'], base_dir, meta),  # Positional arguments
-                        kwargs={'manual_frames': manual_frames}  # Keyword argument
-                    )
-                    s.start()
-                    while s.is_alive() is True:
-                        await asyncio.sleep(3)
-                except KeyboardInterrupt:
-                    s.terminate()
-
+        console.print("[yellow]Building metadata.....")
+        if meta['debug']:
+            meta_start_time = time.time()
+        if meta.get('manual_language'):
+            meta['original_language'] = meta.get('manual_language').lower()
         meta['tmdb'] = meta.get('tmdb_manual', None)
         meta['type'] = self.get_type(video, meta['scene'], meta['is_disc'], meta)
         if meta.get('category', None) is None:
@@ -680,18 +647,22 @@ async def process_tracker(tracker_name, meta):
         else:
             meta = await self.tmdb_other_meta(meta)
         # Search tvmaze
-        meta['tvmaze_id'], meta['imdb_id'], meta['tvdb_id'] = await self.search_tvmaze(filename, meta['search_year'], meta.get('imdb_id', '0'), meta.get('tvdb_id', 0), meta)
+        if meta['category'] == "TV":
+            meta['tvmaze_id'], meta['imdb_id'], meta['tvdb_id'] = await self.search_tvmaze(filename, meta['search_year'], meta.get('imdb_id', '0'), meta.get('tvdb_id', 0), meta)
+        else:
+            meta.setdefault('tvmaze_id', '0')
         # If no imdb, search for it
         if meta.get('imdb_id', None) is None:
             meta['imdb_id'] = await self.search_imdb(filename, meta['search_year'])
         if meta.get('imdb_info', None) is None and int(meta['imdb_id']) != 0:
-            meta['imdb_info'] = await self.get_imdb_info(meta['imdb_id'], meta)
+            meta['imdb_info'] = await self.get_imdb_info_api(meta['imdb_id'], meta)
         if meta.get('tag', None) is None:
             meta['tag'] = self.get_tag(video, meta)
         else:
             if not meta['tag'].startswith('-') and meta['tag'] != "":
                 meta['tag'] = f"-{meta['tag']}"
-        meta = await self.get_season_episode(video, meta)
+        if meta['category'] == "TV":
+            meta = await self.get_season_episode(video, meta)
         meta = await self.tag_override(meta)
         if meta.get('tag') == "-SubsPlease":  # SubsPlease-specific
             tracks = meta.get('mediainfo').get('media', {}).get('track', [])  # Get all tracks
@@ -711,9 +682,9 @@ async def process_tracker(tracker_name, meta):
             meta['3D'] = self.is_3d(mi, bdinfo)
         if meta.get('manual_source', None):
             meta['source'] = meta['manual_source']
-            _, meta['type'] = self.get_source(meta['type'], video, meta['path'], meta['is_disc'], meta)
+            _, meta['type'] = self.get_source(meta['type'], video, meta['path'], meta['is_disc'], meta, folder_id, base_dir)
         else:
-            meta['source'], meta['type'] = self.get_source(meta['type'], video, meta['path'], meta['is_disc'], meta)
+            meta['source'], meta['type'] = self.get_source(meta['type'], video, meta['path'], meta['is_disc'], meta, folder_id, base_dir)
         if meta.get('service', None) in (None, ''):
             meta['service'], meta['service_longname'] = self.get_service(video, meta.get('tag', ''), meta['audio'], meta['filename'])
         elif meta.get('service'):
@@ -725,6 +696,8 @@ async def process_tracker(tracker_name, meta):
         if meta.get('is_disc', None) == "BDMV":  # Blu-ray Specific
             meta['region'] = self.get_region(bdinfo, meta.get('region', None))
             meta['video_codec'] = self.get_video_codec(bdinfo)
+            if meta['tag'][1:].startswith(meta['region']):
+                meta['tag'] = meta['tag'].replace(f"-{meta['region']}", '')
         else:
             meta['video_encode'], meta['video_codec'], meta['has_encode_settings'], meta['bit_depth'] = self.get_video_encode(mi, meta['type'], bdinfo)
         if meta.get('no_edition') is False:
@@ -735,6 +708,157 @@ async def process_tracker(tracker_name, meta):
         else:
             meta['edition'] = ""

+        meta['name_notag'], meta['name'], meta['clean_name'], meta['potential_missing'] = await self.get_name(meta)
+        if meta['debug']:
+            meta_finish_time = time.time()
+            console.print(f"Metadata processed in {meta_finish_time - meta_start_time:.2f} seconds")
+        parser = Args(config)
+        helper = UploadHelper()
+        confirm = helper.get_confirmation(meta)
+        while confirm is False:
+            editargs = cli_ui.ask_string("Input args that need correction e.g. (--tag NTb --category tv --tmdb 12345)")
+            editargs = (meta['path'],) + tuple(editargs.split())
+            if meta.get('debug', False):
+                editargs += ("--debug",)
+            meta, help, before_args = parser.parse(editargs, meta)
+            meta['edit'] = True
+            meta = await self.gather_prep(meta=meta, mode='cli')
+            with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/meta.json", 'w') as f:
+                json.dump(meta, f, indent=4)
+            meta['name_notag'], meta['name'], meta['clean_name'], meta['potential_missing'] = await self.get_name(meta)
+            confirm = helper.get_confirmation(meta)
+
+        common = COMMON(config=config)
+        tracker_setup = TRACKER_SETUP(config=config)
+        enabled_trackers = tracker_setup.trackers_enabled(meta)
+
+        tracker_status = {}
+        successful_trackers = 0
+
+        for tracker_name in enabled_trackers:
+            disctype = meta.get('disctype', None)
+            tracker_name = tracker_name.replace(" ", "").upper().strip()
+
+            if meta['name'].endswith('DUPE?'):
+                meta['name'] = meta['name'].replace(' DUPE?', '')
+
+            if tracker_name in tracker_class_map:
+                tracker_class = tracker_class_map[tracker_name](config=config)
+                tracker_status[tracker_name] = {'banned': False, 'skipped': False, 'dupe': False, 'upload': False}
+
+                if tracker_name in {"THR", "PTP"}:
+                    if meta.get('imdb_id', '0') == '0':
+                        imdb_id = cli_ui.ask_string("Unable to find IMDB id, please enter e.g.(tt1234567)")
+                        meta['imdb_id'] = imdb_id.replace('tt', '').zfill(7)
+                    if tracker_name == "PTP":
+                        console.print("[yellow]Searching for Group ID")
+                        ptp = PTP(config=config)
+                        groupID = await ptp.get_group_by_imdb(meta['imdb_id'])
+                        if groupID is None:
+                            console.print("[yellow]No Existing Group found")
+                            if meta.get('youtube', None) is None or "youtube" not in str(meta.get('youtube', '')):
+                                youtube = cli_ui.ask_string("Unable to find youtube trailer, please link one e.g.(https://www.youtube.com/watch?v=dQw4w9WgXcQ)", default="")
+                                meta['youtube'] = youtube
+                        meta['ptp_groupID'] = groupID
+
+                    if tracker_name == "THR":
+                        youtube = cli_ui.ask_string("Unable to find youtube trailer, please link one e.g.(https://www.youtube.com/watch?v=dQw4w9WgXcQ)")
+                        meta['youtube'] = youtube
+
+                if tracker_setup.check_banned_group(tracker_class.tracker, tracker_class.banned_groups, meta):
+                    console.print(f"[red]Banned group detected on '{tracker_name}'. Skipping.[/red]")
+                    tracker_status[tracker_name]['banned'] = True
+                    continue
+
+                if tracker_name not in {"THR", "PTP"}:
+                    dupes = await tracker_class.search_existing(meta, disctype)
+                elif tracker_name == "PTP":
+                    dupes = await ptp.search_existing(groupID, meta, disctype)
+                if 'skipping' not in meta or meta['skipping'] is None:
+                    dupes = await common.filter_dupes(dupes, meta)
+                    meta, is_dupe = helper.dupe_check(dupes, meta)
+                    if is_dupe:
+                        console.print(f"[yellow]Tracker '{tracker_name}' has confirmed dupes.[/yellow]")
+                        tracker_status[tracker_name]['dupe'] = True
+                elif meta['skipping']:
+                    tracker_status[tracker_name]['skipped'] = True
+                if meta.get('skipping') is None and not is_dupe and tracker_name == "PTP":
+                    if meta.get('imdb_info', {}) == {}:
+                        meta['imdb_info'] = await self.get_imdb_info_api(meta['imdb_id'], meta)
+                meta['skipping'] = None
+
+                if not tracker_status[tracker_name]['banned'] and not tracker_status[tracker_name]['skipped'] and not tracker_status[tracker_name]['dupe']:
+                    console.print(f"[green]Tracker '{tracker_name}' passed all checks.[/green]")
+                    tracker_status[tracker_name]['upload'] = True
+                    successful_trackers += 1
+            else:
+                if tracker_name == "MANUAL":
+                    successful_trackers += 1
+
+        meta['tracker_status'] = tracker_status
+
+        if meta['debug']:
+            console.print("\n[bold]Tracker Processing Summary:[/bold]")
+            for t_name, status in tracker_status.items():
+                banned_status = 'Yes' if status['banned'] else 'No'
+                skipped_status = 'Yes' if status['skipped'] else 'No'
+                dupe_status = 'Yes' if status['dupe'] else 'No'
+                upload_status = 'Yes' if status['upload'] else 'No'
+                console.print(f"Tracker: {t_name} | Banned: {banned_status} | Skipped: {skipped_status} | Dupe: {dupe_status} | [yellow]Upload:[/yellow] {upload_status}")
+            console.print(f"\n[bold]Trackers Passed all Checks:[/bold] {successful_trackers}")
+
+        meta['skip_uploading'] = int(self.config['DEFAULT'].get('tracker_pass_checks', 1))
+        if successful_trackers < meta['skip_uploading']:
+            console.print(f"[red]Not enough successful trackers ({successful_trackers}/{meta['skip_uploading']}). 
EXITING........[/red]") + return + + meta['we_are_uploading'] = True + + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/meta.json", 'w') as f: + json.dump(meta, f, indent=4) + + if 'manual_frames' not in meta: + meta['manual_frames'] = {} + manual_frames = meta['manual_frames'] + # Take Screenshots + if meta['is_disc'] == "BDMV": + if meta.get('edit', False) is False: + if meta.get('vapoursynth', False) is True: + use_vs = True + else: + use_vs = False + try: + ds = multiprocessing.Process(target=self.disc_screenshots, args=(meta, filename, bdinfo, meta['uuid'], base_dir, use_vs, meta.get('image_list', []), meta.get('ffdebug', False), None)) + ds.start() + while ds.is_alive() is True: + await asyncio.sleep(1) + except KeyboardInterrupt: + ds.terminate() + elif meta['is_disc'] == "DVD": + if meta.get('edit', False) is False: + try: + ds = multiprocessing.Process(target=self.dvd_screenshots, args=(meta, 0, None, None)) + ds.start() + while ds.is_alive() is True: + await asyncio.sleep(1) + except KeyboardInterrupt: + ds.terminate() + else: + if meta.get('edit', False) is False: + try: + s = multiprocessing.Process( + target=self.screenshots, + args=(videopath, filename, meta['uuid'], base_dir, meta), # Positional arguments + kwargs={'manual_frames': manual_frames} # Keyword argument + ) + s.start() + while s.is_alive() is True: + await asyncio.sleep(3) + except KeyboardInterrupt: + s.terminate() + # WORK ON THIS meta.get('stream', False) meta['stream'] = self.stream_optimized(meta['stream']) @@ -1416,8 +1540,8 @@ def dvd_screenshots(self, meta, disc_num, num_screens=None, retry_cap=None): return if num_screens is None: - num_screens = self.screens - if num_screens == 0 or (len(meta.get('image_list', [])) >= num_screens and disc_num == 0): + num_screens = self.screens - len(existing_images) + if num_screens == 0 or (len(meta.get('image_list', [])) >= self.screens and disc_num == 0): return if len(glob.glob(f"{meta['base_dir']}/tmp/{meta['uuid']}/{meta['discs'][disc_num]['name']}-*.png")) >= num_screens: @@ -1615,7 +1739,11 @@ def capture_dvd_screenshot(self, task): if w_sar != 1 or h_sar != 1: ff = ff.filter('scale', int(round(width * w_sar)), int(round(height * h_sar))) - ff.output(image, vframes=1, pix_fmt="rgb24").overwrite_output().global_args('-loglevel', loglevel, '-accurate_seek').run() + try: + ff.output(image, vframes=1, pix_fmt="rgb24").overwrite_output().global_args('-loglevel', loglevel, '-accurate_seek').run() + except ffmpeg._run.Error as e: + stderr_output = e.stderr.decode() if e.stderr else "No stderr output available" + console.print(f"[red]Error capturing screenshot for {input_file} at {seek_time}s: {stderr_output}[/red]") if os.path.exists(image): return image else: @@ -1761,6 +1889,7 @@ def use_tqdm(): pool.join() valid_results = [] + remaining_retakes = [] for image_path in optimize_results: if "Error" in image_path: console.print(f"[red]{image_path}") @@ -1779,20 +1908,49 @@ def use_tqdm(): pass elif self.img_host in ["ptpimg", "lensdump", "ptscreens", "oeimg"] and not retake: pass - elif self.img_host == "freeimage.host": - console.print("[bold red]Support for freeimage.host has been removed. 
Please remove it from your config.") - exit() elif not retake: console.print("[red]Image too large for your image host, retaking.") retake = True time.sleep(1) if retake: - console.print(f"[yellow]Retaking screenshot for: {image_path}[/yellow]") - capture_tasks.append(image_path) + retry_attempts = 3 + for attempt in range(1, retry_attempts + 1): + console.print(f"[yellow]Retaking screenshot for: {image_path} (Attempt {attempt}/{retry_attempts})[/yellow]") + try: + os.remove(image_path) + random_time = random.uniform(0, length) + self.capture_screenshot((path, random_time, image_path, width, height, w_sar, h_sar, loglevel)) + self.optimize_image_task((image_path, config)) + new_size = os.path.getsize(image_path) + valid_image = False + + if new_size > 75000 and new_size <= 31000000 and self.img_host == "imgbb": + console.print(f"[green]Successfully retaken screenshot for: {image_path} ({new_size} bytes)[/green]") + valid_image = True + elif new_size > 75000 and new_size <= 10000000 and self.img_host in ["imgbox", "pixhost"]: + console.print(f"[green]Successfully retaken screenshot for: {image_path} ({new_size} bytes)[/green]") + valid_image = True + elif new_size > 75000 and self.img_host in ["ptpimg", "lensdump", "ptscreens", "oeimg"]: + console.print(f"[green]Successfully retaken screenshot for: {image_path} ({new_size} bytes)[/green]") + valid_image = True + + if valid_image: + valid_results.append(image_path) + break + else: + console.print(f"[red]Retaken image {image_path} does not meet the size requirements for {self.img_host}. Retrying...[/red]") + except Exception as e: + console.print(f"[red]Error retaking screenshot for {image_path}: {e}[/red]") + else: + console.print(f"[red]All retry attempts failed for {image_path}. Skipping.[/red]") + remaining_retakes.append(image_path) else: valid_results.append(image_path) + if remaining_retakes: + console.print(f"[red]The following images could not be retaken successfully: {remaining_retakes}[/red]") + for image_path in valid_results: img_dict = { 'img_url': image_path, @@ -1934,11 +2092,13 @@ async def get_tmdb_from_imdb(self, meta, filename): if len(info['movie_results']) >= 1: meta['category'] = "MOVIE" meta['tmdb'] = info['movie_results'][0]['id'] + meta['original_language'] = info['movie_results'][0].get('original_language') elif len(info['tv_results']) >= 1: meta['category'] = "TV" meta['tmdb'] = info['tv_results'][0]['id'] + meta['original_language'] = info['tv_results'][0].get('original_language') else: - imdb_info = await self.get_imdb_info(imdb_id.replace('tt', ''), meta) + imdb_info = await self.get_imdb_info_api(imdb_id.replace('tt', ''), meta) title = imdb_info.get("title") if title is None: title = filename @@ -2048,7 +2208,7 @@ async def tmdb_other_meta(self, meta): except Exception: console.print('[yellow]Unable to grab videos from TMDb.') - meta['aka'], original_language = await self.get_imdb_aka(meta['imdb_id']) + meta['aka'], original_language = await self.get_imdb_aka_api(meta['imdb_id'], meta) if original_language is not None: meta['original_language'] = original_language else: @@ -2102,7 +2262,7 @@ async def tmdb_other_meta(self, meta): console.print('[yellow]Unable to grab videos from TMDb.') # meta['aka'] = f" AKA {response['original_name']}" - meta['aka'], original_language = await self.get_imdb_aka(meta['imdb_id']) + meta['aka'], original_language = await self.get_imdb_aka_api(meta['imdb_id'], meta) if original_language is not None: meta['original_language'] = original_language else: @@ -2346,7 +2506,7 @@ def 
get_audio_v2(self, mi, meta, bdinfo):
         if meta.get('dual_audio', False):
             dual = "Dual-Audio"
         else:
-            if meta.get('original_language', '') != 'en':
+            if not meta.get('original_language', '').startswith('en'):
                 eng, orig = False, False
                 try:
                     for t in tracks:
@@ -2483,17 +2643,35 @@ def is_3d(self, mi, bdinfo):

     def get_tag(self, video, meta):
         try:
-            tag = guessit(video)['release_group']
-            tag = f"-{tag}"
-        except Exception:
+            parsed = guessit(video)
+            release_group = parsed.get('release_group')
+
+            if meta['is_disc'] == "BDMV":
+                if release_group:
+                    if f"-{release_group}" not in video:
+                        if meta['debug']:
+                            console.print(f"[yellow]Warning: invalid release group format: {release_group}")
+                        release_group = None
+
+            tag = f"-{release_group}" if release_group else ""
+        except Exception as e:
+            console.print(f"Error while parsing: {e}")
             tag = ""
+
         if tag == "-":
             tag = ""
-        if tag[1:].lower() in ["nogroup", 'nogrp']:
+        if tag[1:].lower() in ["nogroup", "nogrp"]:
             tag = ""
+
         return tag

-    def get_source(self, type, video, path, is_disc, meta):
+    def get_source(self, type, video, path, is_disc, meta, folder_id, base_dir):
+        try:
+            with open(f'{base_dir}/tmp/{folder_id}/MediaInfo.json', 'r', encoding='utf-8') as f:
+                mi = json.load(f)
+        except Exception:
+            mi = {}
+            if meta['debug']:
+                console.print("No mediainfo.json")
         resolution = meta['resolution']
         try:
             try:
@@ -2528,6 +2706,17 @@ def get_source(self, type, video, path, is_disc, meta):
                     system = "NTSC"
         except Exception:
             system = ""
+            if system == "":
+                try:
+                    framerate = mi['media']['track'][1].get('FrameRate', '')
+                    if framerate == "25":
+                        system = "PAL"
+                    elif framerate:
+                        system = "NTSC"
+                    else:
+                        system = ""
+                except Exception:
+                    system = ""
         finally:
             if system is None:
                 system = ""
@@ -4008,7 +4197,7 @@ async def package(self, meta):
                 generic.write(f"IMDb: https://www.imdb.com/title/tt{meta['imdb_id']}\n")
                 if meta['tvdb_id'] != "0":
                     generic.write(f"TVDB: https://www.thetvdb.com/?id={meta['tvdb_id']}&tab=series\n")
-                if meta['tvmaze_id'] != "0":
+                if "tvmaze_id" in meta and meta['tvmaze_id'] != "0":
                     generic.write(f"TVMaze: https://www.tvmaze.com/shows/{meta['tvmaze_id']}\n")
                 poster_img = f"{meta['base_dir']}/tmp/{meta['uuid']}/POSTER.png"
                 if meta.get('poster', None) not in ['', None] and not os.path.exists(poster_img):
@@ -4068,12 +4257,67 @@ async def package(self, meta):
             return False
         return

+    async def get_imdb_aka_api(self, imdb_id, meta):
+        if imdb_id == "0":
+            return "", None
+        if not imdb_id.startswith("tt"):
+            imdb_id = f"tt{imdb_id}"
+        url = "https://api.graphql.imdb.com/"
+        query = {
+            "query": f"""
+                query {{
+                    title(id: "{imdb_id}") {{
+                        id
+                        titleText {{
+                            text
+                            isOriginalTitle
+                        }}
+                        originalTitleText {{
+                            text
+                        }}
+                        countriesOfOrigin {{
+                            countries {{
+                                id
+                            }}
+                        }}
+                    }}
+                }}
+            """
+        }
+
+        headers = {
+            "Content-Type": "application/json",
+        }
+
+        response = requests.post(url, headers=headers, json=query)
+        data = response.json()
+
+        # Check if `data` and `title` exist
+        title_data = data.get("data", {}).get("title")
+        if title_data is None:
+            console.print("Title data is missing from response")
+            return "", None
+
+        # Extract relevant fields from the response
+        aka = title_data.get("originalTitleText", {}).get("text", "")
+        is_original = title_data.get("titleText", {}).get("isOriginalTitle", False)
+        if meta.get('manual_language'):
+            original_language = meta.get('manual_language')
+        else:
+            original_language = None
+
+        if not is_original and aka:
+            aka = f" AKA {aka}"
+
+        return aka, original_language
+
     async def get_imdb_aka(self, imdb_id):
         if imdb_id == "0":
             return "", None
+        if not imdb_id.startswith("tt"):
+            imdb_id = f"tt{imdb_id}"
         ia = Cinemagoer()
         result = ia.get_movie(imdb_id.replace('tt', ''))
-
         original_language = result.get('language codes')
         if isinstance(original_language, list):
             if len(original_language) > 1:
@@ -4138,6 +4382,138 @@ def daily_to_tmdb_season_episode(self, tmdbid, date):
             console.print(f"[yellow]Unable to map the date ([bold yellow]{str(date)}[/bold yellow]) to a Season/Episode number")
         return season, episode

+    async def get_imdb_info_api(self, imdbID, meta):
+        imdb_info = {}
+
+        if imdbID == "0":
+            return "", None
+        else:
+            if not imdbID.startswith("tt"):
+                imdbIDtt = f"tt{imdbID}"
+            else:
+                imdbIDtt = imdbID
+            query = {
+                "query": f"""
+                    query GetTitleInfo {{
+                        title(id: "{imdbIDtt}") {{
+                            id
+                            titleText {{
+                                text
+                                isOriginalTitle
+                            }}
+                            originalTitleText {{
+                                text
+                            }}
+                            releaseYear {{
+                                year
+                            }}
+                            titleType {{
+                                id
+                            }}
+                            plot {{
+                                plotText {{
+                                    plainText
+                                }}
+                            }}
+                            ratingsSummary {{
+                                aggregateRating
+                                voteCount
+                            }}
+                            primaryImage {{
+                                url
+                            }}
+                            runtime {{
+                                displayableProperty {{
+                                    value {{
+                                        plainText
+                                    }}
+                                }}
+                                seconds
+                            }}
+                            titleGenres {{
+                                genres {{
+                                    genre {{
+                                        text
+                                    }}
+                                }}
+                            }}
+                            principalCredits {{
+                                category {{
+                                    text
+                                    id
+                                }}
+                                credits {{
+                                    name {{
+                                        id
+                                        nameText {{
+                                            text
+                                        }}
+                                    }}
+                                }}
+                            }}
+                        }}
+                    }}
+                """
+            }
+
+            url = "https://api.graphql.imdb.com/"
+            headers = {"Content-Type": "application/json"}
+
+            response = requests.post(url, json=query, headers=headers)
+            data = response.json()
+
+        title_data = data.get("data", {}).get("title", {})
+        if not title_data:
+            return meta
+        imdb_info['imdbID'] = imdbID
+        imdb_info['title'] = title_data.get('titleText', {}).get('text', '') or ''
+        imdb_info['year'] = title_data.get('releaseYear', {}).get('year', '') or ''
+        original_title = title_data.get('originalTitleText', {}).get('text', '')
+        if not original_title or original_title == imdb_info['title']:
+            original_title = imdb_info['title']
+        imdb_info['aka'] = original_title
+        imdb_info['type'] = title_data.get('titleType', {}).get('id', '')
+        runtime_data = title_data.get('runtime', {})
+        runtime_seconds = runtime_data.get('seconds', 0)
+        runtime_minutes = runtime_seconds // 60 if runtime_seconds else 0
+        imdb_info['runtime'] = str(runtime_minutes)
+        imdb_info['cover'] = title_data.get('primaryImage', {}).get('url', '') or meta.get('poster', '')
+        imdb_info['plot'] = title_data.get('plot', {}).get('plotText', {}).get('plainText', '') or 'No plot available'
+        title_genres = title_data.get('titleGenres')
+        if title_genres and isinstance(title_genres, dict):
+            genres = title_genres.get('genres', [])
+        else:
+            genres = []
+        genre_list = [g.get('genre', {}).get('text', '') for g in genres if g.get('genre', {}).get('text')]
+        imdb_info['genres'] = ', '.join(genre_list) or ''
+        imdb_info['rating'] = title_data.get('ratingsSummary', {}).get('aggregateRating', 'N/A')
+        imdb_info['directors'] = []
+        principal_credits = title_data.get('principalCredits', [])
+        if principal_credits and isinstance(principal_credits, list):
+            for pc in principal_credits:
+                category_text = pc.get('category', {}).get('text', '')
+                if 'Direct' in category_text:
+                    credits = pc.get('credits', [])
+                    if credits and isinstance(credits, list):
+                        for c in credits:
+                            name_id = c.get('name', {}).get('id', '')
+                            if name_id.startswith('nm'):
+                                imdb_info['directors'].append(name_id)
+                    break
+        if meta.get('manual_language'):
+            imdb_info['original_language'] = meta.get('manual_language')

+        if not title_data:
+            imdb_info = {
+                'title': meta['title'],
+                'year': 
meta['year'], + 'aka': '', + 'type': None, + 'runtime': meta.get('runtime', '60'), + 'cover': meta.get('poster'), + } + if len(meta.get('tmdb_directors', [])) >= 1: + imdb_info['directors'] = meta['tmdb_directors'] + return imdb_info + async def get_imdb_info(self, imdbID, meta): imdb_info = {} if int(str(imdbID).replace('tt', '')) != 0: @@ -4176,7 +4552,6 @@ async def get_imdb_info(self, imdbID, meta): } if len(meta.get('tmdb_directors', [])) >= 1: imdb_info['directors'] = meta['tmdb_directors'] - return imdb_info async def search_imdb(self, filename, search_year): @@ -4190,7 +4565,7 @@ async def search_imdb(self, filename, search_year): return imdbID async def imdb_other_meta(self, meta): - imdb_info = meta['imdb_info'] = await self.get_imdb_info(meta['imdb_id'], meta) + imdb_info = meta['imdb_info'] = await self.get_imdb_info_api(meta['imdb_id'], meta) meta['title'] = imdb_info['title'] meta['year'] = imdb_info['year'] meta['aka'] = imdb_info['aka'] diff --git a/src/trackers/ACM.py b/src/trackers/ACM.py index 520ee3db..76fd3b9f 100644 --- a/src/trackers/ACM.py +++ b/src/trackers/ACM.py @@ -269,7 +269,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on ACM...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdb': meta['tmdb'], diff --git a/src/trackers/AITHER.py b/src/trackers/AITHER.py index 1a3f145c..942738ee 100644 --- a/src/trackers/AITHER.py +++ b/src/trackers/AITHER.py @@ -227,7 +227,7 @@ async def get_res_id(self, resolution): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on Aither...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/AL.py b/src/trackers/AL.py index 66e3949f..eea9eede 100644 --- a/src/trackers/AL.py +++ b/src/trackers/AL.py @@ -162,7 +162,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on AL...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/ANT.py b/src/trackers/ANT.py index 8a5cba97..f88ae379 100644 --- a/src/trackers/ANT.py +++ b/src/trackers/ANT.py @@ -147,7 +147,7 @@ async def search_existing(self, meta, disctype): meta['skipping'] = "ANT" return dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on ANT...") params = { 'apikey': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 't': 'search', diff --git a/src/trackers/BHD.py b/src/trackers/BHD.py index 3cdb8093..037f948f 100644 --- a/src/trackers/BHD.py +++ b/src/trackers/BHD.py @@ -410,7 +410,7 @@ async def search_existing(self, meta, disctype): meta['skipping'] = "BHD" return dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on BHD...") category = meta['category'] if category == 'MOVIE': tmdbID = "movie" diff --git a/src/trackers/BLU.py b/src/trackers/BLU.py index ec4a130a..c658adf1 100644 --- a/src/trackers/BLU.py +++ 
b/src/trackers/BLU.py @@ -208,7 +208,7 @@ async def derived_dv_layer(self, meta): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on BLU...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/FNP.py b/src/trackers/FNP.py index 498b8ea3..371e7e14 100644 --- a/src/trackers/FNP.py +++ b/src/trackers/FNP.py @@ -154,7 +154,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on FNP...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/HDB.py b/src/trackers/HDB.py index 5d703c7b..2279048f 100644 --- a/src/trackers/HDB.py +++ b/src/trackers/HDB.py @@ -320,7 +320,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on HDB...") url = "https://hdbits.org/api/torrents" data = { 'username': self.username, diff --git a/src/trackers/HP.py b/src/trackers/HP.py index 771949a9..d458c572 100644 --- a/src/trackers/HP.py +++ b/src/trackers/HP.py @@ -143,7 +143,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on HP...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/HUNO.py b/src/trackers/HUNO.py index a5b9702e..3d215259 100644 --- a/src/trackers/HUNO.py +++ b/src/trackers/HUNO.py @@ -297,7 +297,7 @@ async def search_existing(self, meta, disctype): meta['skipping'] = "HUNO" return dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on HUNO...") params = { 'api_token': self.config['TRACKERS']['HUNO']['api_key'].strip(), diff --git a/src/trackers/JPTV.py b/src/trackers/JPTV.py index 1b079e2b..4b1aae56 100644 --- a/src/trackers/JPTV.py +++ b/src/trackers/JPTV.py @@ -150,7 +150,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on JPTV...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdb': meta['tmdb'], diff --git a/src/trackers/LST.py b/src/trackers/LST.py index aace023d..4144343b 100644 --- a/src/trackers/LST.py +++ b/src/trackers/LST.py @@ -199,7 +199,7 @@ async def get_flag(self, meta, flag_name): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on LST...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/MTV.py b/src/trackers/MTV.py index 9bfa86eb..5c64aa54 100644 --- a/src/trackers/MTV.py +++ b/src/trackers/MTV.py @@ -652,7 +652,7 @@ async def login(self, cookiefile): async def 
search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on MTV...") params = { 't': 'search', 'apikey': self.config['TRACKERS'][self.tracker]['api_key'].strip(), diff --git a/src/trackers/NBL.py b/src/trackers/NBL.py index 4ee317b5..5eb39ea0 100644 --- a/src/trackers/NBL.py +++ b/src/trackers/NBL.py @@ -85,7 +85,7 @@ async def search_existing(self, meta, disctype): meta['skipping'] = "NBL" return dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on NBL...") if int(meta.get('tvmaze_id', 0)) != 0: search_term = {'tvmaze': int(meta['tvmaze_id'])} elif int(meta.get('imdb_id', '0').replace('tt', '')) == 0: diff --git a/src/trackers/OE.py b/src/trackers/OE.py index 59d3d035..63f1f55e 100644 --- a/src/trackers/OE.py +++ b/src/trackers/OE.py @@ -279,7 +279,6 @@ def process_languages(tracks): else: console.print("[red]No media information available in meta.[/red]") - # Existing disc metadata handling bbcode = BBCODE() if meta.get('discs', []) != []: discs = meta['discs'] @@ -322,7 +321,7 @@ async def search_existing(self, meta, disctype): meta['skipping'] = "OE" return dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on OE...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/OTW.py b/src/trackers/OTW.py index 3e925935..d1cc6915 100644 --- a/src/trackers/OTW.py +++ b/src/trackers/OTW.py @@ -154,7 +154,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on OTW...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/PSS.py b/src/trackers/PSS.py index 97f377bf..c4abe60f 100644 --- a/src/trackers/PSS.py +++ b/src/trackers/PSS.py @@ -156,7 +156,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on PSS...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/R4E.py b/src/trackers/R4E.py index deaf0b09..c340c17d 100644 --- a/src/trackers/R4E.py +++ b/src/trackers/R4E.py @@ -150,7 +150,7 @@ async def is_docu(self, genres): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on R4E...") url = "https://racing4everyone.eu/api/torrents/filter" params = { 'api_token': self.config['TRACKERS']['R4E']['api_key'].strip(), diff --git a/src/trackers/RF.py b/src/trackers/RF.py index 9e01187c..fe1e5a4f 100644 --- a/src/trackers/RF.py +++ b/src/trackers/RF.py @@ -172,7 +172,7 @@ async def search_existing(self, meta, disctype): meta['skipping'] = "RF" return dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on RF...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': 
meta['tmdb'], diff --git a/src/trackers/RTF.py b/src/trackers/RTF.py index cb146e58..4f1576c6 100644 --- a/src/trackers/RTF.py +++ b/src/trackers/RTF.py @@ -100,7 +100,7 @@ async def search_existing(self, meta, disctype): meta['skipping'] = "RTF" return dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on RTF...") headers = { 'accept': 'application/json', 'Authorization': self.config['TRACKERS'][self.tracker]['api_key'].strip(), diff --git a/src/trackers/SHRI.py b/src/trackers/SHRI.py index 7d79fc73..a4112021 100644 --- a/src/trackers/SHRI.py +++ b/src/trackers/SHRI.py @@ -154,7 +154,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on SHRI...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/SN.py b/src/trackers/SN.py index 199ff68e..5f10c7eb 100644 --- a/src/trackers/SN.py +++ b/src/trackers/SN.py @@ -123,7 +123,7 @@ async def edit_desc(self, meta): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on SN...") params = { 'api_key': self.config['TRACKERS'][self.tracker]['api_key'].strip() diff --git a/src/trackers/SPD.py b/src/trackers/SPD.py index b5893da1..6dfa2956 100644 --- a/src/trackers/SPD.py +++ b/src/trackers/SPD.py @@ -125,7 +125,7 @@ async def get_cat_id(self, category_name): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on SPD...") headers = { 'accept': 'application/json', 'Authorization': self.config['TRACKERS'][self.tracker]['api_key'].strip(), diff --git a/src/trackers/STC.py b/src/trackers/STC.py index f5e2b7ee..ff72fc63 100644 --- a/src/trackers/STC.py +++ b/src/trackers/STC.py @@ -170,7 +170,7 @@ async def get_res_id(self, resolution): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on STC...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/STT.py b/src/trackers/STT.py index 7da80175..076a0f50 100644 --- a/src/trackers/STT.py +++ b/src/trackers/STT.py @@ -148,7 +148,7 @@ async def get_res_id(self, resolution): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on STT...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/TIK.py b/src/trackers/TIK.py index 2fc467d0..9446d05f 100644 --- a/src/trackers/TIK.py +++ b/src/trackers/TIK.py @@ -569,7 +569,7 @@ def country_code_to_name(self, code): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on TIK...") disctype = meta.get('disctype', None) params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), diff --git 
a/src/trackers/TVC.py b/src/trackers/TVC.py index 3e184ae3..7de6e800 100644 --- a/src/trackers/TVC.py +++ b/src/trackers/TVC.py @@ -292,7 +292,7 @@ async def search_existing(self, meta, disctype): # https://tvchaosuk.com/api/torrents/filter?api_token=&tmdb=138108 dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on TVC...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdb': meta['tmdb'], diff --git a/src/trackers/ULCX.py b/src/trackers/ULCX.py index 67f13912..099fac36 100644 --- a/src/trackers/ULCX.py +++ b/src/trackers/ULCX.py @@ -159,7 +159,7 @@ async def search_existing(self, meta, disctype): meta['skipping'] = "ULCX" return dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on ULCX...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/UTP.py b/src/trackers/UTP.py index 86f645d7..4ac259d8 100644 --- a/src/trackers/UTP.py +++ b/src/trackers/UTP.py @@ -151,7 +151,7 @@ async def get_res_id(self, resolution): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on UTP...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackers/YOINK.py b/src/trackers/YOINK.py index 90003c13..7fca9736 100644 --- a/src/trackers/YOINK.py +++ b/src/trackers/YOINK.py @@ -154,7 +154,7 @@ async def upload(self, meta, disctype): async def search_existing(self, meta, disctype): dupes = [] - console.print("[yellow]Searching for existing torrents on site...") + console.print("[yellow]Searching for existing torrents on YOINK...") params = { 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), 'tmdbId': meta['tmdb'], diff --git a/src/trackersetup.py b/src/trackersetup.py new file mode 100644 index 00000000..8357a0a6 --- /dev/null +++ b/src/trackersetup.py @@ -0,0 +1,118 @@ +from src.trackers.HUNO import HUNO +from src.trackers.BLU import BLU +from src.trackers.BHD import BHD +from src.trackers.AITHER import AITHER +from src.trackers.STC import STC +from src.trackers.R4E import R4E +from src.trackers.THR import THR +from src.trackers.STT import STT +from src.trackers.HP import HP +from src.trackers.PTP import PTP +from src.trackers.SN import SN +from src.trackers.ACM import ACM +from src.trackers.HDB import HDB +from src.trackers.LCD import LCD +from src.trackers.TTG import TTG +from src.trackers.LST import LST +from src.trackers.FL import FL +from src.trackers.LT import LT +from src.trackers.NBL import NBL +from src.trackers.ANT import ANT +from src.trackers.PTER import PTER +from src.trackers.MTV import MTV +from src.trackers.JPTV import JPTV +from src.trackers.TL import TL +from src.trackers.HDT import HDT +from src.trackers.RF import RF +from src.trackers.OE import OE +from src.trackers.BHDTV import BHDTV +from src.trackers.RTF import RTF +from src.trackers.OTW import OTW +from src.trackers.FNP import FNP +from src.trackers.CBR import CBR +from src.trackers.UTP import UTP +from src.trackers.AL import AL +from src.trackers.SHRI import SHRI +from src.trackers.TIK import TIK +from src.trackers.TVC import TVC +from src.trackers.PSS import PSS +from src.trackers.ULCX import ULCX +from src.trackers.SPD 
import SPD
+from src.trackers.YOINK import YOINK
+import cli_ui
+from src.console import console
+
+
+class TRACKER_SETUP:
+    def __init__(self, config):
+        self.config = config
+
+    def trackers_enabled(self, meta):
+        from data.config import config
+        if meta.get('trackers', None) is not None:
+            trackers = meta['trackers']
+        else:
+            trackers = config['TRACKERS']['default_trackers']
+        if isinstance(trackers, str):
+            trackers = trackers.split(',')
+        trackers = [s.strip().upper() for s in trackers]
+        if meta.get('manual', False):
+            trackers.insert(0, "MANUAL")
+        return trackers
+
+    def check_banned_group(self, tracker, banned_group_list, meta):
+        if meta['tag'] == "":
+            return False
+        else:
+            q = False
+            for tag in banned_group_list:
+                if isinstance(tag, list):
+                    if meta['tag'][1:].lower() == tag[0].lower():
+                        console.print(f"[bold yellow]{meta['tag'][1:]}[/bold yellow][bold red] was found on [bold yellow]{tracker}'s[/bold yellow] list of banned groups.")
+                        console.print(f"[bold red]NOTE: [bold yellow]{tag[1]}")
+                        q = True
+                else:
+                    if meta['tag'][1:].lower() == tag.lower():
+                        console.print(f"[bold yellow]{meta['tag'][1:]}[/bold yellow][bold red] was found on [bold yellow]{tracker}'s[/bold yellow] list of banned groups.")
+                        q = True
+            if q:
+                if not meta['unattended'] or (meta['unattended'] and meta.get('unattended-confirm', False)):
+                    if not cli_ui.ask_yes_no(cli_ui.red, "Upload Anyways?", default=False):
+                        return True
+                else:
+                    return True
+        return False
+
+
+tracker_class_map = {
+    'ACM': ACM, 'AITHER': AITHER, 'AL': AL, 'ANT': ANT, 'BHD': BHD, 'BHDTV': BHDTV, 'BLU': BLU, 'CBR': CBR,
+    'FNP': FNP, 'FL': FL, 'HDB': HDB, 'HDT': HDT, 'HP': HP, 'HUNO': HUNO, 'JPTV': JPTV, 'LCD': LCD,
+    'LST': LST, 'LT': LT, 'MTV': MTV, 'NBL': NBL, 'OE': OE, 'OTW': OTW, 'PSS': PSS, 'PTP': PTP, 'PTER': PTER,
+    'R4E': R4E, 'RF': RF, 'RTF': RTF, 'SHRI': SHRI, 'SN': SN, 'SPD': SPD, 'STC': STC, 'STT': STT, 'THR': THR,
+    'TIK': TIK, 'TL': TL, 'TVC': TVC, 'TTG': TTG, 'ULCX': ULCX, 'UTP': UTP, 'YOINK': YOINK,
+}
+
+tracker_capabilities = {
+    'AITHER': {'mod_q': True, 'draft': False},
+    'BHD': {'draft_live': True},
+    'BLU': {'mod_q': True, 'draft': False},
+    'LST': {'mod_q': True, 'draft': True}
+}
+
+api_trackers = {
+    'ACM', 'AITHER', 'AL', 'BHD', 'BLU', 'CBR', 'FNP', 'HUNO', 'JPTV', 'LCD', 'LST', 'LT',
+    'OE', 'OTW', 'PSS', 'RF', 'R4E', 'SHRI', 'STC', 'STT', 'TIK', 'ULCX', 'UTP', 'YOINK'
+}
+
+other_api_trackers = {
+    'ANT', 'BHDTV', 'NBL', 'RTF', 'SN', 'SPD', 'TL', 'TVC'
+}
+
+http_trackers = {
+    'FL', 'HDB', 'HDT', 'MTV', 'PTER', 'TTG'
+}
diff --git a/src/uphelper.py b/src/uphelper.py
new file mode 100644
index 00000000..56e482ff
--- /dev/null
+++ b/src/uphelper.py
@@ -0,0 +1,119 @@
+import cli_ui
+from rich.console import Console
+from data.config import config
+
+console = Console()
+
+
+class UploadHelper:
+    def dupe_check(self, dupes, meta):
+        if not dupes:
+            console.print("[green]No dupes found")
+            meta['upload'] = True
+            return meta, False
+        else:
+            console.print()
+            dupe_text = "\n".join([d['name'] if isinstance(d, dict) else d for d in dupes])
+            console.print()
+            cli_ui.info_section(cli_ui.bold, "Check if these are actually dupes!")
+            cli_ui.info(dupe_text)
+            if not meta['unattended'] or (meta['unattended'] and meta.get('unattended-confirm', False)):
+                if meta.get('dupe', False) is False:
+                    upload = cli_ui.ask_yes_no("Upload Anyways?", default=False)
+                else:
+                    upload = True
+            else:
+                if meta.get('dupe', False) is False:
+                    console.print("[red]Found potential dupes. Aborting. If this is not a dupe, or you would like to upload anyways, pass --skip-dupe-check")
+                    upload = False
+                else:
+                    console.print("[yellow]Found potential dupes. --skip-dupe-check was passed. Uploading anyways")
+                    upload = True
+            console.print()
+            if upload is False:
+                meta['upload'] = False
+            else:
+                meta['upload'] = True
+                for each in dupes:
+                    each_name = each['name'] if isinstance(each, dict) else each
+                    if each_name == meta['name']:
+                        meta['name'] = f"{meta['name']} DUPE?"
+
+            return meta, not upload
+
+    def get_confirmation(self, meta):
+        if meta['debug'] is True:
+            console.print("[bold red]DEBUG: True")
+            console.print(f"Prep material saved to {meta['base_dir']}/tmp/{meta['uuid']}")
+        console.print()
+        console.print("[bold yellow]Database Info[/bold yellow]")
+        console.print(f"[bold]Title:[/bold] {meta['title']} ({meta['year']})")
+        console.print()
+        console.print(f"[bold]Overview:[/bold] {meta['overview']}")
+        console.print()
+        console.print(f"[bold]Category:[/bold] {meta['category']}")
+        if int(meta.get('tmdb', 0)) != 0:
+            console.print(f"[bold]TMDB:[/bold] https://www.themoviedb.org/{meta['category'].lower()}/{meta['tmdb']}")
+        if int(meta.get('imdb_id', '0')) != 0:
+            console.print(f"[bold]IMDB:[/bold] https://www.imdb.com/title/tt{meta['imdb_id']}")
+        if int(meta.get('tvdb_id', '0')) != 0:
+            console.print(f"[bold]TVDB:[/bold] https://www.thetvdb.com/?id={meta['tvdb_id']}&tab=series")
+        if int(meta.get('tvmaze_id', '0')) != 0:
+            console.print(f"[bold]TVMaze:[/bold] https://www.tvmaze.com/shows/{meta['tvmaze_id']}")
+        if int(meta.get('mal_id', 0)) != 0:
+            console.print(f"[bold]MAL:[/bold] https://myanimelist.net/anime/{meta['mal_id']}")
+        console.print()
+        if int(meta.get('freeleech', '0')) != 0:
+            console.print(f"[bold]Freeleech:[/bold] {meta['freeleech']}")
+        tag = "" if meta['tag'] == "" else f" / {meta['tag'][1:]}"
+        res = meta['source'] if meta['is_disc'] == "DVD" else meta['resolution']
+        console.print(f"{res} / {meta['type']}{tag}")
+        if meta.get('personalrelease', False) is True:
+            console.print("[bold green]Personal Release![/bold green]")
+        console.print()
+        if meta.get('unattended', False) is False:
+            self.get_missing(meta)
+            ring_the_bell = "\a" if config['DEFAULT'].get("sfx_on_prompt", True) is True else ""
+            if ring_the_bell:
+                console.print(ring_the_bell)
+
+            if meta.get('is_disc', False):
+                meta['keep_folder'] = False
+
+            if meta.get('keep_folder') and meta['isdir']:
+                console.print("[bold yellow]Uploading with --keep-folder[/bold yellow]")
+                kf_confirm = input("You specified --keep-folder. Uploading in folders might not be allowed. Proceed? [y/N]: ").strip().lower()
+                if kf_confirm != 'y':
+                    console.print("[bold red]Aborting...[/bold red]")
+                    exit()
+
+            console.print("[bold yellow]Is this correct?[/bold yellow]")
+            console.print(f"[bold]Name:[/bold] {meta['name']}")
+            confirm = input("Correct? 
[y/N]: ").strip().lower() == 'y' + else: + console.print(f"[bold]Name:[/bold] {meta['name']}") + confirm = True + + return confirm + + def get_missing(self, meta): + info_notes = { + 'edition': 'Special Edition/Release', + 'description': "Please include Remux/Encode Notes if possible", + 'service': "WEB Service e.g.(AMZN, NF)", + 'region': "Disc Region", + 'imdb': 'IMDb ID (tt1234567)', + 'distributor': "Disc Distributor e.g.(BFI, Criterion)" + } + missing = [] + if meta.get('imdb_id', '0') == '0': + meta['imdb_id'] = '0' + meta['potential_missing'].append('imdb_id') + for each in meta['potential_missing']: + if str(meta.get(each, '')).strip() in ["", "None", "0"]: + missing.append(f"--{each} | {info_notes.get(each, '')}") + if missing: + cli_ui.info_section(cli_ui.yellow, "Potentially missing information:") + for each in missing: + cli_ui.info(each) + console.print() diff --git a/upload.py b/upload.py index 5cba67e7..face85fb 100644 --- a/upload.py +++ b/upload.py @@ -4,48 +4,8 @@ from src.args import Args from src.clients import Clients from src.trackers.COMMON import COMMON -from src.trackers.HUNO import HUNO -from src.trackers.BLU import BLU -from src.trackers.BHD import BHD -from src.trackers.AITHER import AITHER -from src.trackers.STC import STC -from src.trackers.R4E import R4E from src.trackers.THR import THR -from src.trackers.STT import STT -from src.trackers.HP import HP from src.trackers.PTP import PTP -from src.trackers.SN import SN -from src.trackers.ACM import ACM -from src.trackers.HDB import HDB -from src.trackers.LCD import LCD -from src.trackers.TTG import TTG -from src.trackers.LST import LST -from src.trackers.FL import FL -from src.trackers.LT import LT -from src.trackers.NBL import NBL -from src.trackers.ANT import ANT -from src.trackers.PTER import PTER -from src.trackers.MTV import MTV -from src.trackers.JPTV import JPTV -from src.trackers.TL import TL -from src.trackers.HDT import HDT -from src.trackers.RF import RF -from src.trackers.OE import OE -from src.trackers.BHDTV import BHDTV -from src.trackers.RTF import RTF -from src.trackers.OTW import OTW -from src.trackers.FNP import FNP -from src.trackers.CBR import CBR -from src.trackers.UTP import UTP -from src.trackers.AL import AL -from src.trackers.SHRI import SHRI -from src.trackers.TIK import TIK -from src.trackers.TVC import TVC -from src.trackers.PSS import PSS -from src.trackers.ULCX import ULCX -from src.trackers.SPD import SPD -from src.trackers.YOINK import YOINK -from src.trackers.PTT import PTT import json from pathlib import Path import asyncio @@ -58,6 +18,8 @@ import traceback import click import re +from src.trackersetup import TRACKER_SETUP, tracker_class_map, api_trackers, other_api_trackers, http_trackers, tracker_capabilities +import time from src.console import console from rich.markdown import Markdown @@ -252,40 +214,40 @@ async def process_meta(meta, base_dir): prep = Prep(screens=meta['screens'], img_host=meta['imghost'], config=config) meta = await prep.gather_prep(meta=meta, mode='cli') - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/meta.json", 'w') as f: - json.dump(meta, f, indent=4) - meta['name_notag'], meta['name'], meta['clean_name'], meta['potential_missing'] = await prep.get_name(meta) - meta['cutoff'] = int(config['DEFAULT'].get('cutoff_screens', 3)) - if len(meta.get('image_list', [])) < meta.get('cutoff') and meta.get('skip_imghost_upload', False) is False: - if 'image_list' not in meta: - meta['image_list'] = [] - return_dict = {} - new_images, dummy_var = 
prep.upload_screens(meta, meta['screens'], 1, 0, meta['screens'], [], return_dict=return_dict) + if not meta: + return + else: + meta['cutoff'] = int(config['DEFAULT'].get('cutoff_screens', 3)) + if len(meta.get('image_list', [])) < meta.get('cutoff') and meta.get('skip_imghost_upload', False) is False: + if 'image_list' not in meta: + meta['image_list'] = [] + return_dict = {} + new_images, dummy_var = prep.upload_screens(meta, meta['screens'], 1, 0, meta['screens'], [], return_dict=return_dict) + + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/meta.json", 'w') as f: + json.dump(meta, f, indent=4) - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/meta.json", 'w') as f: - json.dump(meta, f, indent=4) + elif meta.get('skip_imghost_upload', False) is True and meta.get('image_list', False) is False: + meta['image_list'] = [] - elif meta.get('skip_imghost_upload', False) is True and meta.get('image_list', False) is False: - meta['image_list'] = [] + torrent_path = os.path.abspath(f"{meta['base_dir']}/tmp/{meta['uuid']}/BASE.torrent") + if not os.path.exists(torrent_path): + reuse_torrent = None + if meta.get('rehash', False) is False: + reuse_torrent = await client.find_existing_torrent(meta) + if reuse_torrent is not None: + prep.create_base_from_existing_torrent(reuse_torrent, meta['base_dir'], meta['uuid']) - torrent_path = os.path.abspath(f"{meta['base_dir']}/tmp/{meta['uuid']}/BASE.torrent") - if not os.path.exists(torrent_path): - reuse_torrent = None - if meta.get('rehash', False) is False: - reuse_torrent = await client.find_existing_torrent(meta) - if reuse_torrent is not None: - prep.create_base_from_existing_torrent(reuse_torrent, meta['base_dir'], meta['uuid']) + if meta['nohash'] is False and reuse_torrent is None: + prep.create_torrent(meta, Path(meta['path']), "BASE") + if meta['nohash']: + meta['client'] = "none" - if meta['nohash'] is False and reuse_torrent is None: + elif os.path.exists(torrent_path) and meta.get('rehash', False) is True and meta['nohash'] is False: prep.create_torrent(meta, Path(meta['path']), "BASE") - if meta['nohash']: - meta['client'] = "none" - elif os.path.exists(torrent_path) and meta.get('rehash', False) is True and meta['nohash'] is False: - prep.create_torrent(meta, Path(meta['path']), "BASE") - - if int(meta.get('randomized', 0)) >= 1: - prep.create_random_torrents(meta['base_dir'], meta['uuid'], meta['randomized'], meta['path']) + if int(meta.get('randomized', 0)) >= 1: + prep.create_random_torrents(meta['base_dir'], meta['uuid'], meta['randomized'], meta['path']) async def do_the_thing(base_dir): @@ -496,482 +458,180 @@ async def do_the_thing(base_dir): except Exception as e: console.print(f"[red]Failed to load metadata for path '{path}': {e}") - + if meta['debug']: + upload_start_time = time.time() console.print(f"[green]Gathering info for {os.path.basename(path)}") await process_meta(meta, base_dir) - prep = Prep(screens=meta['screens'], img_host=meta['imghost'], config=config) - if meta.get('trackers', None) is not None: - trackers = meta['trackers'] + if 'we_are_uploading' not in meta: + console.print("we are not uploading.......") + if meta.get('queue') is not None: + processed_files_count += 1 + console.print(f"[cyan]Processed {processed_files_count}/{total_files} files.") + if not meta['debug']: + if log_file: + save_processed_file(log_file, path) + else: - trackers = config['TRACKERS']['default_trackers'] - if "," in trackers: - trackers = trackers.split(',') - confirm = get_confirmation(meta) - while confirm is False: - editargs 
 
 async def do_the_thing(base_dir):
@@ -496,482 +458,180 @@ async def do_the_thing(base_dir):
     except Exception as e:
         console.print(f"[red]Failed to load metadata for path '{path}': {e}")
-
+    if meta['debug']:
+        upload_start_time = time.time()
     console.print(f"[green]Gathering info for {os.path.basename(path)}")
     await process_meta(meta, base_dir)
-    prep = Prep(screens=meta['screens'], img_host=meta['imghost'], config=config)
-    if meta.get('trackers', None) is not None:
-        trackers = meta['trackers']
+    if 'we_are_uploading' not in meta:
+        console.print("[yellow]Not uploading: no tracker passed the pre-upload checks.")
+        if meta.get('queue') is not None:
+            processed_files_count += 1
+            console.print(f"[cyan]Processed {processed_files_count}/{total_files} files.")
+            if not meta['debug']:
+                if log_file:
+                    save_processed_file(log_file, path)
+    else:
-        trackers = config['TRACKERS']['default_trackers']
-        if "," in trackers:
-            trackers = trackers.split(',')
-        confirm = get_confirmation(meta)
-        while confirm is False:
-            editargs = cli_ui.ask_string("Input args that need correction e.g. (--tag NTb --category tv --tmdb 12345)")
-            editargs = (meta['path'],) + tuple(editargs.split())
-            if meta.get('debug', False):
-                editargs += ("--debug",)
-            meta, help, before_args = parser.parse(editargs, meta)
-            meta['edit'] = True
-            meta = await prep.gather_prep(meta=meta, mode='cli')
-            with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/meta.json", 'w') as f:
-                json.dump(meta, f, indent=4)
-            meta['name_notag'], meta['name'], meta['clean_name'], meta['potential_missing'] = await prep.get_name(meta)
-            confirm = get_confirmation(meta)
-
-    if isinstance(trackers, str):
-        trackers = trackers.split(',')
-    trackers = [s.strip().upper() for s in trackers]
-    if meta.get('manual', False):
-        trackers.insert(0, "MANUAL")
-    ####################################
-    #######  Upload to Trackers  ####### # noqa #F266
-    ####################################
-    common = COMMON(config=config)
-    api_trackers = [
-        'ACM', 'AITHER', 'AL', 'BHD', 'BLU', 'CBR', 'FNP', 'HUNO', 'JPTV', 'LCD', 'LST', 'LT',
-        'OE', 'OTW', 'PSS', 'RF', 'R4E', 'SHRI', 'STC', 'STT', 'TIK', 'ULCX', 'UTP', 'YOINK', 'PTT'
-    ]
-    other_api_trackers = [
-        'ANT', 'BHDTV', 'NBL', 'RTF', 'SN', 'SPD', 'TL', 'TVC'
-    ]
-    http_trackers = [
-        'FL', 'HDB', 'HDT', 'MTV', 'PTER', 'TTG'
-    ]
-    tracker_class_map = {
-        'ACM': ACM, 'AITHER': AITHER, 'AL': AL, 'ANT': ANT, 'BHD': BHD, 'BHDTV': BHDTV, 'BLU': BLU, 'CBR': CBR,
-        'FNP': FNP, 'FL': FL, 'HDB': HDB, 'HDT': HDT, 'HP': HP, 'HUNO': HUNO, 'JPTV': JPTV, 'LCD': LCD,
-        'LST': LST, 'LT': LT, 'MTV': MTV, 'NBL': NBL, 'OE': OE, 'OTW': OTW, 'PSS': PSS, 'PTP': PTP, 'PTER': PTER,
-        'R4E': R4E, 'RF': RF, 'RTF': RTF, 'SHRI': SHRI, 'SN': SN, 'SPD': SPD, 'STC': STC, 'STT': STT, 'THR': THR,
-        'TIK': TIK, 'TL': TL, 'TVC': TVC, 'TTG': TTG, 'ULCX': ULCX, 'UTP': UTP, 'YOINK': YOINK, 'PTT': PTT,
-    }
-
-    tracker_capabilities = {
-        'AITHER': {'mod_q': True, 'draft': False},
-        'BHD': {'draft_live': True},
-        'BLU': {'mod_q': True, 'draft': False},
-        'LST': {'mod_q': True, 'draft': True}
-    }
-
-    async def check_mod_q_and_draft(tracker_class, meta, debug, disctype):
-        modq, draft = None, None
-
-        tracker_caps = tracker_capabilities.get(tracker_class.tracker, {})
-
-        # Handle BHD specific draft/live logic
-        if tracker_class.tracker == 'BHD' and tracker_caps.get('draft_live'):
-            draft_int = await tracker_class.get_live(meta)
-            draft = "Draft" if draft_int == 0 else "Live"
-
-        # Handle mod_q and draft for other trackers
-        else:
-            if tracker_caps.get('mod_q'):
-                modq = await tracker_class.get_flag(meta, 'modq')
-                modq = 'Yes' if modq else 'No'
-            if tracker_caps.get('draft'):
-                draft = await tracker_class.get_flag(meta, 'draft')
-                draft = 'Yes' if draft else 'No'
-
-        return modq, draft
-
-    for tracker in trackers:
-        disctype = meta.get('disctype', None)
-        tracker = tracker.replace(" ", "").upper().strip()
-        if meta['name'].endswith('DUPE?'):
-            meta['name'] = meta['name'].replace(' DUPE?', '')
-
-        if meta['debug']:
-            debug = "(DEBUG)"
-        else:
-            debug = ""
+        prep = Prep(screens=meta['screens'], img_host=meta['imghost'], config=config)
-
-        if tracker in api_trackers:
-            tracker_class = tracker_class_map[tracker](config=config)
+        ####################################
+        #######  Upload to Trackers  ####### # noqa #F266
+        ####################################
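
Note the shape of the gate this refactor introduces: the per-tracker confirmation prompts removed above are replaced by verdicts the preparation phase is expected to have written into `meta` before `do_the_thing` runs. A minimal sketch of the read side; the field names `we_are_uploading` and `tracker_status` are the ones this diff uses, the helper itself is illustrative:

```python
def should_upload(meta: dict, tracker: str) -> bool:
    """Illustrative reader for the per-tracker verdict consumed below."""
    return meta.get('tracker_status', {}).get(tracker, {}).get('upload', False)

meta = {
    'we_are_uploading': True,  # set upstream when enough trackers pass checks
    'tracker_status': {'BLU': {'upload': True}, 'AITHER': {'upload': False}},
}
assert should_upload(meta, 'BLU') is True
assert should_upload(meta, 'AITHER') is False
```
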
{debug}", - default=meta['unattended'] - ) - except (KeyboardInterrupt, EOFError): - sys.exit(1) # Exit immediately - - if upload_to_tracker: - # Get mod_q, draft, or draft/live depending on the tracker - modq, draft = await check_mod_q_and_draft(tracker_class, meta, debug, disctype) - - # Print mod_q and draft info if relevant - if modq is not None: - console.print(f"(modq: {modq})") - if draft is not None: - console.print(f"(draft: {draft})") - - console.print(f"Uploading to {tracker_class.tracker}") - - # Check if the group is banned for the tracker - if check_banned_group(tracker_class.tracker, tracker_class.banned_groups, meta): - continue - - dupes = await tracker_class.search_existing(meta, disctype) - if 'skipping' not in meta or meta['skipping'] is None: - dupes = await common.filter_dupes(dupes, meta) - meta = dupe_check(dupes, meta) - - # Proceed with upload if the meta is set to upload - if meta.get('upload', False): - await tracker_class.upload(meta, disctype) - perm = config['DEFAULT'].get('get_permalink', False) - if perm: - # need a wait so we don't race the api - await asyncio.sleep(5) - await tracker_class.search_torrent_page(meta, disctype) - await asyncio.sleep(0.5) - await client.add_to_client(meta, tracker_class.tracker) - meta['skipping'] = None + common = COMMON(config=config) + tracker_setup = TRACKER_SETUP(config=config) + enabled_trackers = tracker_setup.trackers_enabled(meta) - if tracker in other_api_trackers: - tracker_class = tracker_class_map[tracker](config=config) + async def check_mod_q_and_draft(tracker_class, meta, debug, disctype): + modq, draft = None, None - if meta['unattended']: - upload_to_tracker = True - else: - try: - upload_to_tracker = cli_ui.ask_yes_no( - f"Upload to {tracker_class.tracker}? {debug}", - default=meta['unattended'] - ) - except (KeyboardInterrupt, EOFError): - sys.exit(1) # Exit immediately - - if upload_to_tracker: - # Get mod_q, draft, or draft/live depending on the tracker - modq, draft = await check_mod_q_and_draft(tracker_class, meta, debug, disctype) - - # Print mod_q and draft info if relevant - if modq is not None: - console.print(f"(modq: {modq})") - if draft is not None: - console.print(f"(draft: {draft})") - - console.print(f"Uploading to {tracker_class.tracker}") - - # Check if the group is banned for the tracker - if check_banned_group(tracker_class.tracker, tracker_class.banned_groups, meta): - continue - - # Perform the existing checks for dupes except TL - if tracker != "TL": - if tracker == "RTF": - await tracker_class.api_test(meta) - - dupes = await tracker_class.search_existing(meta, disctype) - if 'skipping' not in meta or meta['skipping'] is None: - dupes = await common.filter_dupes(dupes, meta) - meta = dupe_check(dupes, meta) - - if 'skipping' not in meta or meta['skipping'] is None: - # Proceed with upload if the meta is set to upload - if tracker == "TL" or meta.get('upload', False): - await tracker_class.upload(meta, disctype) - if tracker == 'SN': - await asyncio.sleep(16) - await client.add_to_client(meta, tracker_class.tracker) - meta['skipping'] = None + tracker_caps = tracker_capabilities.get(tracker_class.tracker, {}) - if tracker in http_trackers: - tracker_class = tracker_class_map[tracker](config=config) + # Handle BHD specific draft/live logic + if tracker_class.tracker == 'BHD' and tracker_caps.get('draft_live'): + draft_int = await tracker_class.get_live(meta) + draft = "Draft" if draft_int == 0 else "Live" - if meta['unattended']: - upload_to_tracker = True + # Handle mod_q and draft for 
-
-        if tracker in other_api_trackers:
-            tracker_class = tracker_class_map[tracker](config=config)
+        async def check_mod_q_and_draft(tracker_class, meta, debug, disctype):
+            modq, draft = None, None
-
-            if meta['unattended']:
-                upload_to_tracker = True
-            else:
-                try:
-                    upload_to_tracker = cli_ui.ask_yes_no(
-                        f"Upload to {tracker_class.tracker}? {debug}",
-                        default=meta['unattended']
-                    )
-                except (KeyboardInterrupt, EOFError):
-                    sys.exit(1)  # Exit immediately
-
-            if upload_to_tracker:
-                # Get mod_q, draft, or draft/live depending on the tracker
-                modq, draft = await check_mod_q_and_draft(tracker_class, meta, debug, disctype)
-
-                # Print mod_q and draft info if relevant
-                if modq is not None:
-                    console.print(f"(modq: {modq})")
-                if draft is not None:
-                    console.print(f"(draft: {draft})")
-
-                console.print(f"Uploading to {tracker_class.tracker}")
-
-                # Check if the group is banned for the tracker
-                if check_banned_group(tracker_class.tracker, tracker_class.banned_groups, meta):
-                    continue
-
-                # Perform the existing checks for dupes except TL
-                if tracker != "TL":
-                    if tracker == "RTF":
-                        await tracker_class.api_test(meta)
-
-                    dupes = await tracker_class.search_existing(meta, disctype)
-                    if 'skipping' not in meta or meta['skipping'] is None:
-                        dupes = await common.filter_dupes(dupes, meta)
-                        meta = dupe_check(dupes, meta)
-
-                if 'skipping' not in meta or meta['skipping'] is None:
-                    # Proceed with upload if the meta is set to upload
-                    if tracker == "TL" or meta.get('upload', False):
-                        await tracker_class.upload(meta, disctype)
-                        if tracker == 'SN':
-                            await asyncio.sleep(16)
-                        await client.add_to_client(meta, tracker_class.tracker)
-            meta['skipping'] = None
+            tracker_caps = tracker_capabilities.get(tracker_class.tracker, {})
-
-        if tracker in http_trackers:
-            tracker_class = tracker_class_map[tracker](config=config)
+
+            # Handle BHD specific draft/live logic
+            if tracker_class.tracker == 'BHD' and tracker_caps.get('draft_live'):
+                draft_int = await tracker_class.get_live(meta)
+                draft = "Draft" if draft_int == 0 else "Live"
-
-            if meta['unattended']:
-                upload_to_tracker = True
+            # Handle mod_q and draft for other trackers
             else:
-                try:
-                    upload_to_tracker = cli_ui.ask_yes_no(
-                        f"Upload to {tracker_class.tracker}? {debug}",
-                        default=meta['unattended']
-                    )
-                except (KeyboardInterrupt, EOFError):
-                    sys.exit(1)  # Exit immediately
-
-            if upload_to_tracker:
-                console.print(f"Uploading to {tracker}")
-                if check_banned_group(tracker_class.tracker, tracker_class.banned_groups, meta):
-                    continue
-                if await tracker_class.validate_credentials(meta) is True:
-                    dupes = await tracker_class.search_existing(meta, disctype)
-                    dupes = await common.filter_dupes(dupes, meta)
-                    meta = dupe_check(dupes, meta)
-                    if meta['upload'] is True:
-                        await tracker_class.upload(meta, disctype)
-                        await client.add_to_client(meta, tracker_class.tracker)
+                if tracker_caps.get('mod_q'):
+                    modq = await tracker_class.get_flag(meta, 'modq')
+                    modq = 'Yes' if modq else 'No'
+                if tracker_caps.get('draft'):
+                    draft = await tracker_class.get_flag(meta, 'draft')
+                    draft = 'Yes' if draft else 'No'
-
-        if tracker == "MANUAL":
-            if meta['unattended']:
-                do_manual = True
-            else:
-                do_manual = cli_ui.ask_yes_no("Get files for manual upload?", default=True)
-            if do_manual:
-                for manual_tracker in trackers:
-                    if manual_tracker != 'MANUAL':
-                        manual_tracker = manual_tracker.replace(" ", "").upper().strip()
-                        tracker_class = tracker_class_map[manual_tracker](config=config)
-                        if manual_tracker in api_trackers:
-                            await common.unit3d_edit_desc(meta, tracker_class.tracker, tracker_class.signature)
-                        else:
-                            await tracker_class.edit_desc(meta)
-                url = await prep.package(meta)
-                if url is False:
-                    console.print(f"[yellow]Unable to upload prep files, they can be found at `tmp/{meta['uuid']}")
-                else:
-                    console.print(f"[green]{meta['name']}")
-                    console.print(f"[green]Files can be found at: [yellow]{url}[/yellow]")
+
+            return modq, draft
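
The reworked `check_mod_q_and_draft` still keys off a `tracker_capabilities` table, which is no longer defined in this file; it presumably moves to `src/trackersetup.py` alongside `tracker_class_map`, though that location is an assumption. Its contract in miniature:

```python
tracker_capabilities = {
    'AITHER': {'mod_q': True, 'draft': False},
    'BHD': {'draft_live': True},
    'BLU': {'mod_q': True, 'draft': False},
    'LST': {'mod_q': True, 'draft': True},
}

def describe_flags(tracker: str) -> str:
    """Illustrative: which optional upload flags a tracker supports."""
    caps = tracker_capabilities.get(tracker, {})
    if caps.get('draft_live'):
        return f"{tracker}: draft-or-live choice"
    flags = [name for name in ('mod_q', 'draft') if caps.get(name)]
    return f"{tracker}: {', '.join(flags) if flags else 'no optional flags'}"

print(describe_flags('BHD'))  # BHD: draft-or-live choice
print(describe_flags('LST'))  # LST: mod_q, draft
```
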
{debug}", - default=meta['unattended'] - ) - except (KeyboardInterrupt, EOFError): - sys.exit(1) # Exit immediately - - if upload_to_ptp: # Ensure the variable is defined before this check - console.print(f"Uploading to {tracker}") - if meta.get('imdb_id', '0') == '0': - imdb_id = cli_ui.ask_string("Unable to find IMDB id, please enter e.g.(tt1234567)") - meta['imdb_id'] = imdb_id.replace('tt', '').zfill(7) - ptp = PTP(config=config) - if check_banned_group("PTP", ptp.banned_groups, meta): - continue - try: - console.print("[yellow]Searching for Group ID") - groupID = await ptp.get_group_by_imdb(meta['imdb_id']) - if groupID is None: - console.print("[yellow]No Existing Group found") - if meta.get('youtube', None) is None or "youtube" not in str(meta.get('youtube', '')): - youtube = cli_ui.ask_string("Unable to find youtube trailer, please link one e.g.(https://www.youtube.com/watch?v=dQw4w9WgXcQ)", default="") - meta['youtube'] = youtube - meta['upload'] = True - else: - console.print("[yellow]Searching for Existing Releases") - dupes = await ptp.search_existing(groupID, meta, disctype) - dupes = await common.filter_dupes(dupes, meta) - meta = dupe_check(dupes, meta) - if meta.get('imdb_info', {}) == {}: - meta['imdb_info'] = await prep.get_imdb_info(meta['imdb_id'], meta) - if meta['upload'] is True: - ptpUrl, ptpData = await ptp.fill_upload_form(groupID, meta) - await ptp.upload(meta, ptpUrl, ptpData, disctype) + debug = "" + + if tracker in api_trackers: + tracker_class = tracker_class_map[tracker](config=config) + tracker_status = meta.get('tracker_status', {}) + upload_status = tracker_status.get(tracker, {}).get('upload', False) + console.print(f"[red]Tracker: {tracker}, Upload: {'Yes' if upload_status else 'No'}[/red]") + + if upload_status: + modq, draft = await check_mod_q_and_draft(tracker_class, meta, debug, disctype) + + if modq is not None: + console.print(f"(modq: {modq})") + if draft is not None: + console.print(f"(draft: {draft})") + + console.print(f"Uploading to {tracker_class.tracker}") + if meta['debug']: + upload_finish_time = time.time() + console.print(f"Upload from Audionut UA processed in {upload_finish_time - upload_start_time:.2f} seconds") + await tracker_class.upload(meta, disctype) + await asyncio.sleep(0.5) + perm = config['DEFAULT'].get('get_permalink', False) + if perm: + # need a wait so we don't race the api await asyncio.sleep(5) - await client.add_to_client(meta, "PTP") - except Exception: - console.print(traceback.format_exc()) - - if meta.get('queue') is not None: - processed_files_count += 1 - console.print(f"[cyan]Processed {processed_files_count}/{total_files} files.") - if not meta['debug']: - if log_file: - save_processed_file(log_file, path) - - -def get_confirmation(meta): - if meta['debug'] is True: - console.print("[bold red]DEBUG: True") - console.print(f"Prep material saved to {meta['base_dir']}/tmp/{meta['uuid']}") - console.print() - console.print("[bold yellow]Database Info[/bold yellow]") - console.print(f"[bold]Title:[/bold] {meta['title']} ({meta['year']})") - console.print() - console.print(f"[bold]Overview:[/bold] {meta['overview']}") - console.print() - console.print(f"[bold]Category:[/bold] {meta['category']}") - if int(meta.get('tmdb', 0)) != 0: - console.print(f"[bold]TMDB:[/bold] https://www.themoviedb.org/{meta['category'].lower()}/{meta['tmdb']}") - if int(meta.get('imdb_id', '0')) != 0: - console.print(f"[bold]IMDB:[/bold] https://www.imdb.com/title/tt{meta['imdb_id']}") - if int(meta.get('tvdb_id', '0')) != 0: - 
console.print(f"[bold]TVDB:[/bold] https://www.thetvdb.com/?id={meta['tvdb_id']}&tab=series") - if int(meta.get('tvmaze_id', '0')) != 0: - console.print(f"[bold]TVMaze:[/bold] https://www.tvmaze.com/shows/{meta['tvmaze_id']}") - if int(meta.get('mal_id', 0)) != 0: - console.print(f"[bold]MAL:[/bold] https://myanimelist.net/anime/{meta['mal_id']}") - console.print() - if int(meta.get('freeleech', '0')) != 0: - console.print(f"[bold]Freeleech:[/bold] {meta['freeleech']}") - if meta['tag'] == "": - tag = "" - else: - tag = f" / {meta['tag'][1:]}" - if meta['is_disc'] == "DVD": - res = meta['source'] - else: - res = meta['resolution'] - - console.print(f"{res} / {meta['type']}{tag}") - if meta.get('personalrelease', False) is True: - console.print("[bold green]Personal Release![/bold green]") - console.print() - if meta.get('unattended', False) is False: - get_missing(meta) - ring_the_bell = "\a" if config['DEFAULT'].get("sfx_on_prompt", True) is True else "" # \a rings the bell - if ring_the_bell: - console.print(ring_the_bell) - - # Handle the 'keep_folder' logic based on 'is disc' and 'isdir' - if meta.get('is disc', False) is True: - meta['keep_folder'] = False # Ensure 'keep_folder' is False if 'is disc' is True - - if meta.get('keep_folder'): - if meta['isdir']: - console.print("[bold yellow]Uploading with --keep-folder[/bold yellow]") - kf_confirm = input("You specified --keep-folder. Uploading in folders might not be allowed. Are you sure you want to proceed? [y/N]: ").strip().lower() - if kf_confirm != 'y': - console.print("[bold red]Aborting...[/bold red]") - exit() - - console.print("[bold yellow]Is this correct?[/bold yellow]") - console.print(f"[bold]Name:[/bold] {meta['name']}") - confirm_input = input("Correct? [y/N]: ").strip().lower() - confirm = confirm_input == 'y' - - else: - console.print(f"[bold]Name:[/bold] {meta['name']}") - confirm = True - - return confirm - - -def dupe_check(dupes, meta): - if not dupes: - console.print("[green]No dupes found") - meta['upload'] = True - return meta - else: - console.print() - dupe_text = "\n".join(dupes) - console.print() - cli_ui.info_section(cli_ui.bold, "Check if these are actually dupes!") - cli_ui.info(dupe_text) - if not meta['unattended'] or (meta['unattended'] and meta.get('unattended-confirm', False)): - if meta.get('dupe', False) is False: - upload = cli_ui.ask_yes_no("Upload Anyways?", default=False) - else: - upload = True - else: - if meta.get('dupe', False) is False: - console.print("[red]Found potential dupes. Aborting. If this is not a dupe, or you would like to upload anyways, pass --skip-dupe-check") - upload = False - else: - console.print("[yellow]Found potential dupes. --skip-dupe-check was passed. Uploading anyways") - upload = True - console.print() - if upload is False: - meta['upload'] = False - else: - meta['upload'] = True - for each in dupes: - if each == meta['name']: - meta['name'] = f"{meta['name']} DUPE?" 
+                        await tracker_class.search_torrent_page(meta, disctype)
+                        await asyncio.sleep(0.5)
+                    await client.add_to_client(meta, tracker_class.tracker)
+
+            if tracker in other_api_trackers:
+                tracker_class = tracker_class_map[tracker](config=config)
+                tracker_status = meta.get('tracker_status', {})
+                upload_status = tracker_status.get(tracker, {}).get('upload', False)
+                console.print(f"[yellow]Tracker: {tracker}, Upload: {'Yes' if upload_status else 'No'}[/yellow]")
+
+                if upload_status:
+                    console.print(f"Uploading to {tracker_class.tracker}")
+
+                    if tracker != "TL":
+                        if tracker == "RTF":
+                            await tracker_class.api_test(meta)
+                    if tracker == "TL" or upload_status:
+                        await tracker_class.upload(meta, disctype)
+                        if tracker == 'SN':
+                            await asyncio.sleep(16)
+                        await asyncio.sleep(0.5)
+                        await client.add_to_client(meta, tracker_class.tracker)
-
-    return meta
+
+            if tracker in http_trackers:
+                tracker_class = tracker_class_map[tracker](config=config)
+                tracker_status = meta.get('tracker_status', {})
+                upload_status = tracker_status.get(tracker, {}).get('upload', False)
+                console.print(f"[blue]Tracker: {tracker}, Upload: {'Yes' if upload_status else 'No'}[/blue]")
+                if upload_status:
+                    console.print(f"Uploading to {tracker}")
-
-# Return True if banned group
-def check_banned_group(tracker, banned_group_list, meta):
-    if meta['tag'] == "":
-        return False
-    else:
-        q = False
-        for tag in banned_group_list:
-            if isinstance(tag, list):
-                if meta['tag'][1:].lower() == tag[0].lower():
-                    console.print(f"[bold yellow]{meta['tag'][1:]}[/bold yellow][bold red] was found on [bold yellow]{tracker}'s[/bold yellow] list of banned groups.")
-                    console.print(f"[bold red]NOTE: [bold yellow]{tag[1]}")
-                    q = True
-            else:
-                if meta['tag'][1:].lower() == tag.lower():
-                    console.print(f"[bold yellow]{meta['tag'][1:]}[/bold yellow][bold red] was found on [bold yellow]{tracker}'s[/bold yellow] list of banned groups.")
-                    q = True
-        if q:
-            if not meta['unattended'] or (meta['unattended'] and meta.get('unattended-confirm', False)):
-                if not cli_ui.ask_yes_no(cli_ui.red, "Upload Anyways?", default=False):
-                    return True
-            else:
-                return True
-    return False
-
-
-def get_missing(meta):
-    info_notes = {
-        'edition': 'Special Edition/Release',
-        'description': "Please include Remux/Encode Notes if possible (either here or edit your upload)",
-        'service': "WEB Service e.g.(AMZN, NF)",
-        'region': "Disc Region",
-        'imdb': 'IMDb ID (tt1234567)',
-        'distributor': "Disc Distributor e.g.(BFI, Criterion, etc)"
-    }
-    missing = []
-    if meta.get('imdb_id', '0') == '0':
-        meta['imdb_id'] = '0'
-        meta['potential_missing'].append('imdb_id')
-    if len(meta['potential_missing']) > 0:
-        for each in meta['potential_missing']:
-            if str(meta.get(each, '')).replace(' ', '') in ["", "None", "0"]:
-                if each == "imdb_id":
-                    each = 'imdb'
-                missing.append(f"--{each} | {info_notes.get(each)}")
-    if missing != []:
-        cli_ui.info_section(cli_ui.yellow, "Potentially missing information:")
-        for each in missing:
-            if each.split('|')[0].replace('--', '').strip() in ["imdb"]:
-                cli_ui.info(cli_ui.red, each)
-            else:
-                cli_ui.info(each)
+                    if await tracker_class.validate_credentials(meta) is True:
+                        await tracker_class.upload(meta, disctype)
+                        await asyncio.sleep(0.5)
+                        await client.add_to_client(meta, tracker_class.tracker)
-
-    console.print()
-    return
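
`check_banned_group` (removed above, along with `get_missing`) accepted both bare group names and `[name, note]` pairs; that screening presumably now happens upstream and feeds `tracker_status`, which is an assumption based on this diff. The matching rule it applied, with illustrative group names:

```python
def is_banned(tag, banned_group_list):
    """Illustrative: case-insensitive group match; entries may carry a note."""
    for entry in banned_group_list:
        if isinstance(entry, list):  # ['group', 'reason'] form
            if tag.lower() == entry[0].lower():
                return True, entry[1]
        elif tag.lower() == entry.lower():
            return True, None
    return False, None

print(is_banned("EVO", ["EVO", ["RARBG", "example note"]]))  # (True, None)
```
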
+            if tracker == "MANUAL":
+                if meta['unattended']:
+                    do_manual = True
+                else:
+                    do_manual = cli_ui.ask_yes_no("Get files for manual upload?", default=True)
+                if do_manual:
+                    for manual_tracker in enabled_trackers:
+                        if manual_tracker != 'MANUAL':
+                            manual_tracker = manual_tracker.replace(" ", "").upper().strip()
+                            tracker_class = tracker_class_map[manual_tracker](config=config)
+                            if manual_tracker in api_trackers:
+                                await common.unit3d_edit_desc(meta, tracker_class.tracker, tracker_class.signature)
+                            else:
+                                await tracker_class.edit_desc(meta)
+                    url = await prep.package(meta)
+                    if url is False:
+                        console.print(f"[yellow]Unable to upload prep files, they can be found at `tmp/{meta['uuid']}`")
+                    else:
+                        console.print(f"[green]{meta['name']}")
+                        console.print(f"[green]Files can be found at: [yellow]{url}[/yellow]")
+
+            if tracker == "THR":
+                tracker_status = meta.get('tracker_status', {})
+                upload_status = tracker_status.get(tracker, {}).get('upload', False)
+                console.print(f"Tracker: {tracker}, Upload: {'Yes' if upload_status else 'No'}")
+
+                if upload_status:
+                    thr = THR(config=config)
+                    try:
+                        with requests.Session() as session:
+                            console.print("[yellow]Logging in to THR")
+                            session = thr.login(session)
+                            await thr.upload(session, meta, disctype)
+                            await asyncio.sleep(0.5)
+                            await client.add_to_client(meta, "THR")
+                    except Exception:
+                        console.print(traceback.format_exc())
+
+            if tracker == "PTP":
+                tracker_status = meta.get('tracker_status', {})
+                upload_status = tracker_status.get(tracker, {}).get('upload', False)
+                console.print(f"Tracker: {tracker}, Upload: {'Yes' if upload_status else 'No'}")
+
+                if upload_status:
+                    ptp = PTP(config=config)
+                    groupID = meta['ptp_groupID']
+                    ptpUrl, ptpData = await ptp.fill_upload_form(groupID, meta)
+                    await ptp.upload(meta, ptpUrl, ptpData, disctype)
+                    await asyncio.sleep(5)
+                    await client.add_to_client(meta, "PTP")
+
+        if meta.get('queue') is not None:
+            processed_files_count += 1
+            console.print(f"[cyan]Processed {processed_files_count}/{total_files} files.")
+            if not meta['debug']:
+                if log_file:
+                    save_processed_file(log_file, path)
 
 
 if __name__ == '__main__':