diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
index f3534c52..0a33e0f0 100644
--- a/.github/workflows/docker-image.yml
+++ b/.github/workflows/docker-image.yml
@@ -5,7 +5,7 @@ on:
    branches:
      - master
      - develop
-      - aither_language
+      - tik

env:
  REGISTRY: ghcr.io
diff --git a/data/example-config.py b/data/example-config.py
index 7ba72a87..ca13194b 100644
--- a/data/example-config.py
+++ b/data/example-config.py
@@ -36,9 +36,9 @@
    "TRACKERS": {
        # Which trackers do you want to upload to?
-        # Available tracker: BLU, BHD, AITHER, STC, STT, SN, THR, R4E, HP, ACM, PTP, LCD, LST, PTER, NBL, ANT, MTV, CBR, RTF, HUNO, BHDTV, LT, PTER, TL, TDC, HDT, OE, RF, OTW, FNP, UTP, AL, HDB, PSS, ULCX
+        # Available tracker: BLU, BHD, AITHER, STC, STT, SN, THR, R4E, HP, ACM, PTP, LCD, LST, PTER, NBL, ANT, MTV, CBR, RTF, HUNO, BHDTV, LT, PTER, TL, TDC, HDT, OE, RF, OTW, FNP, UTP, AL, HDB, TIK, PSS, ULCX
        # Remove the trackers from the default_trackers list that are not used, to save being asked everytime
-        "default_trackers": "BLU, BHD, AITHER, STC, STT, SN, THR, R4E, HP, ACM, PTP, LCD, LST, PTER, NBL, ANT, MTV, CBR, RTF, HUNO, BHDTV, LT, PTER, TL, TDC, HDT, OE, RF, OTW, FNP, UTP, AL, HDB, PSS, ULCX",
+        "default_trackers": "BLU, BHD, AITHER, STC, STT, SN, THR, R4E, HP, ACM, PTP, LCD, LST, PTER, NBL, ANT, MTV, CBR, RTF, HUNO, BHDTV, LT, PTER, TL, TDC, HDT, OE, RF, OTW, FNP, UTP, AL, HDB, TIK, PSS, ULCX",
        "BLU": {
            "useAPI": False, # Set to True if using BLU
@@ -238,6 +238,13 @@
            "announce_url": "https://shareisland.org/announce/customannounceurl",
            # "anon" : "False"
        },
+        "TIK": {
+            "useAPI": True, # Set to True if using TIK
+            "api_key": "",
+            "announce_url": "https://cinematik.net/announce/",
+            "anon": False,
+            "modq": True,
+        },
        "PSS": {
            "api_key": "PSS api key",
            "announce_url": "https://privatesilverscreen.cc/announce/customannounceurl",
diff --git a/src/args.py b/src/args.py
index 0db8fbe3..66286fd8 100644
--- a/src/args.py
+++ b/src/args.py
@@ -47,8 +47,15 @@ def parse(self, args, meta):
        parser.add_argument('-aither', '--aither', nargs='*', required=False, help="Aither torrent id/link", type=str)
        parser.add_argument('-lst', '--lst', nargs='*', required=False, help="LST torrent id/link", type=str)
        parser.add_argument('-oe', '--oe', nargs='*', required=False, help="OE torrent id/link", type=str)
+        parser.add_argument('-tik', '--tik', nargs='*', required=False, help="TIK torrent id/link", type=str)
        parser.add_argument('-hdb', '--hdb', nargs='*', required=False, help="HDB torrent id/link", type=str)
+        parser.add_argument('--foreign', dest='foreign', action='store_true', required=False, help="Set for TIK Foreign category")
+        parser.add_argument('--opera', dest='opera', action='store_true', required=False, help="Set for TIK Opera & Musical category")
+        parser.add_argument('--asian', dest='asian', action='store_true', required=False, help="Set for TIK Asian category")
+        parser.add_argument('-disctype', '--disctype', nargs='*', required=False, help="Type of disc for TIK (BD100, BD66, BD50, BD25, NTSC DVD9, NTSC DVD5, PAL DVD9, PAL DVD5, Custom, 3D)", type=str)
+        parser.add_argument('--untouched', dest='untouched', action='store_true', required=False, help="Set when uploading a completely untouched disc at TIK")
        parser.add_argument('-d', '--desc', nargs='*', required=False, help="Custom Description (string)")
+        parser.add_argument('-manual_dvds', '--manual_dvds', nargs='*', required=False, help="Override the default number of DVDs (e.g. use 2xDVD9+DVD5 instead)", type=str, dest='manual_dvds',
default="") parser.add_argument('-pb', '--desclink', nargs='*', required=False, help="Custom Description (link to hastebin/pastebin)") parser.add_argument('-df', '--descfile', nargs='*', required=False, help="Custom Description (path to file)") parser.add_argument('-ih', '--imghost', nargs='*', required=False, help="Image Host", choices=['imgbb', 'ptpimg', 'imgbox', 'pixhost', 'lensdump', 'ptscreens']) @@ -178,6 +185,19 @@ def parse(self, args, meta): console.print('[red]Continuing without --oe') else: meta['oe'] = value2 + elif key == 'tik': + if value2.startswith('http'): + parsed = urllib.parse.urlparse(value2) + try: + tikpath = parsed.path + if tikpath.endswith('/'): + tikpath = tikpath[:-1] + meta['tik'] = tikpath.split('/')[-1] + except Exception: + console.print('[red]Unable to parse id from url') + console.print('[red]Continuing without --tik') + else: + meta['tik'] = value2 elif key == 'hdb': if value2.startswith('http'): parsed = urllib.parse.urlparse(value2) @@ -195,6 +215,8 @@ def parse(self, args, meta): meta[key] = value elif key in ("manual_edition"): meta[key] = value + elif key in ("manual_dvds"): + meta[key] = value elif key in ("freeleech"): meta[key] = 100 elif key in ("tag") and value == []: diff --git a/src/prep.py b/src/prep.py index 5c9170b9..e5b53b3b 100644 --- a/src/prep.py +++ b/src/prep.py @@ -8,6 +8,7 @@ from src.trackers.LST import LST from src.trackers.OE import OE from src.trackers.HDB import HDB +from src.trackers.TIK import TIK from src.trackers.COMMON import COMMON try: @@ -156,7 +157,7 @@ async def update_metadata_from_tracker(self, tracker_name, tracker_instance, met manual_key = f"{tracker_key}_manual" found_match = False - if tracker_name in ["BLU", "AITHER", "LST", "OE"]: + if tracker_name in ["BLU", "AITHER", "LST", "OE", "TIK"]: if meta.get(tracker_key) is not None: console.print(f"[cyan]{tracker_name} ID found in meta, reusing existing ID: {meta[tracker_key]}[/cyan]") tracker_data = await COMMON(self.config).unit3d_torrent_info( @@ -376,7 +377,7 @@ async def gather_prep(self, meta, mode): else: mi = meta['mediainfo'] - meta['dvd_size'] = await self.get_dvd_size(meta['discs']) + meta['dvd_size'] = await self.get_dvd_size(meta['discs'], meta.get('manual_dvds')) meta['resolution'] = self.get_resolution(guessit(video), meta['uuid'], base_dir) meta['sd'] = self.is_sd(meta['resolution']) @@ -452,6 +453,8 @@ async def gather_prep(self, meta, mode): specific_tracker = 'LST' elif meta.get('oe'): specific_tracker = 'OE' + elif meta.get('tik'): + specific_tracker = 'TIK' # If a specific tracker is found, only process that one if specific_tracker: @@ -487,6 +490,12 @@ async def gather_prep(self, meta, mode): if match: found_match = True + elif specific_tracker == 'TIK' and str(self.config['TRACKERS'].get('TIK', {}).get('useAPI')).lower() == "true": + tik = TIK(config=self.config) + meta, match = await self.update_metadata_from_tracker('TIK', tik, meta, search_term, search_file_folder) + if match: + found_match = True + elif specific_tracker == 'HDB' and str(self.config['TRACKERS'].get('HDB', {}).get('useAPI')).lower() == "true": hdb = HDB(config=self.config) meta, match = await self.update_metadata_from_tracker('HDB', hdb, meta, search_term, search_file_folder) @@ -878,7 +887,6 @@ def filter_mediainfo(data): "@type": track["@type"], "extra": track.get("extra"), }) - return filtered if not os.path.exists(f"{base_dir}/tmp/{folder_id}/MEDIAINFO.txt") and export_text: @@ -1113,14 +1121,17 @@ def disc_screenshots(self, filename, bdinfo, folder_id, base_dir, 
use_vs, image_
                    time.sleep(1)
                    progress.advance(screen_task)
            # remove smallest image
-            smallest = ""
+            smallest = None
            smallestsize = 99 ** 99
            for screens in glob.glob1(f"{base_dir}/tmp/{folder_id}/", f"{filename}-*"):
-                screensize = os.path.getsize(screens)
+                screen_path = os.path.join(f"{base_dir}/tmp/{folder_id}/", screens)
+                screensize = os.path.getsize(screen_path)
                if screensize < smallestsize:
                    smallestsize = screensize
-                    smallest = screens
-            os.remove(smallest)
+                    smallest = screen_path
+
+            if smallest is not None:
+                os.remove(smallest)

    def dvd_screenshots(self, meta, disc_num, num_screens=None):
        if num_screens is None:
@@ -1264,14 +1275,17 @@ def _is_vob_good(n, loops, num_screens):
                    looped += 1
                    progress.advance(screen_task)
            # remove smallest image
-            smallest = ""
+            smallest = None
            smallestsize = 99**99
            for screens in glob.glob1(f"{meta['base_dir']}/tmp/{meta['uuid']}/", f"{meta['discs'][disc_num]['name']}-*"):
-                screensize = os.path.getsize(screens)
+                screen_path = os.path.join(f"{meta['base_dir']}/tmp/{meta['uuid']}/", screens)
+                screensize = os.path.getsize(screen_path)
                if screensize < smallestsize:
                    smallestsize = screensize
-                    smallest = screens
-            os.remove(smallest)
+                    smallest = screen_path
+
+            if smallest is not None:
+                os.remove(smallest)

    def screenshots(self, path, filename, folder_id, base_dir, meta, num_screens=None, force_screenshots=False):
        # Ensure the image list is initialized and preserve existing images
@@ -1603,6 +1617,7 @@ async def tmdb_other_meta(self, meta):
            if meta.get('anime', False) is False:
                meta['mal_id'], meta['aka'], meta['anime'] = self.get_anime(response, meta)
            meta['poster'] = response.get('poster_path', "")
+            meta['tmdb_poster'] = response.get('poster_path', "")
            meta['overview'] = response['overview']
            meta['tmdb_type'] = 'Movie'
            meta['runtime'] = response.get('episode_run_time', 60)
@@ -3126,7 +3141,7 @@ def clean_text(text):
        ptp_desc = ""
        imagelist = []
-        desc_sources = ['ptp', 'blu', 'aither', 'lst', 'oe']
+        desc_sources = ['ptp', 'blu', 'aither', 'lst', 'oe', 'tik']
        desc_source = [source.upper() for source in desc_sources if meta.get(source)]
        desc_source = desc_source[0] if len(desc_source) == 1 else None
@@ -3146,26 +3161,26 @@ def clean_text(text):
                meta['description'] = 'PTP'
                meta['imagelist'] = imagelist
-        # Handle BLU description
        if not ptp_desc and clean_text(meta.get('blu_desc', '')) and desc_source in ['BLU', None]:
            description.write(meta['blu_desc'] + "\n")
            meta['description'] = 'BLU'
-        # Handle LST description
        if not ptp_desc and clean_text(meta.get('lst_desc', '')) and desc_source in ['LST', None]:
            description.write(meta['lst_desc'] + "\n")
            meta['description'] = 'LST'
-        # Handle AITHER description
        if not ptp_desc and clean_text(meta.get('aither_desc', '')) and desc_source in ['AITHER', None]:
            description.write(meta['aither_desc'] + "\n")
            meta['description'] = 'AITHER'
-        # Handle OE description
        if not ptp_desc and clean_text(meta.get('oe_desc', '')) and desc_source in ['OE', None]:
            description.write(meta['oe_desc'] + "\n")
            meta['description'] = 'OE'
+        if not ptp_desc and clean_text(meta.get('tik_desc', '')) and desc_source in ['TIK', None]:
+            description.write(meta['tik_desc'] + "\n")
+            meta['description'] = 'TIK'
+
        if meta.get('desc_template'):
            from jinja2 import Template
            try:
@@ -3336,7 +3351,7 @@ async def get_imdb_aka(self, imdb_id):
            aka = f" AKA {aka}"
        return aka, original_language

-    async def get_dvd_size(self, discs):
+    async def get_dvd_size(self, discs, manual_dvds):
        sizes = []
        dvd_sizes = []
        for each in discs:
@@ -3349,6 +3364,10 @@
async def get_dvd_size(self, discs): dvd_sizes.append(each[0]) dvd_sizes.sort() compact = " ".join(dvd_sizes) + + if manual_dvds: + compact = str(manual_dvds) + return compact def get_tmdb_imdb_from_mediainfo(self, mediainfo, category, is_disc, tmdbid, imdbid): @@ -3399,6 +3418,7 @@ async def get_imdb_info(self, imdbID, meta): imdb_info['cover'] = info.get('full-size cover url', '').replace(".jpg", "._V1_FMjpg_UX750_.jpg") imdb_info['plot'] = info.get('plot', [''])[0] imdb_info['genres'] = ', '.join(info.get('genres', '')) + imdb_info['rating'] = info.get('rating', 'N/A') imdb_info['original_language'] = info.get('language codes') if isinstance(imdb_info['original_language'], list): if len(imdb_info['original_language']) > 1: @@ -3443,6 +3463,7 @@ async def imdb_other_meta(self, meta): meta['poster'] = imdb_info['cover'] meta['original_language'] = imdb_info['original_language'] meta['overview'] = imdb_info['plot'] + meta['imdb_rating'] = imdb_info['rating'] difference = SequenceMatcher(None, meta['title'].lower(), meta['aka'][5:].lower()).ratio() if difference >= 0.9 or meta['aka'][5:].strip() == "" or meta['aka'][5:].strip().lower() in meta['title'].lower(): diff --git a/src/trackers/ACM.py b/src/trackers/ACM.py index cbf19147..1970a8e1 100644 --- a/src/trackers/ACM.py +++ b/src/trackers/ACM.py @@ -186,7 +186,7 @@ def get_subs_tag(self, subs): return ' [No Eng subs]' return f" [{subs[0]} subs only]" - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -210,7 +210,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -266,7 +266,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { @@ -334,8 +334,8 @@ async def edit_name(self, meta): return name async def edit_desc(self, meta): - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as descfile: + base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', encoding='utf-8') as descfile: from src.bbcode import BBCODE # Add This line for all web-dls if meta['type'] == 'WEBDL' and meta.get('service_longname', '') != '': diff --git a/src/trackers/AITHER.py b/src/trackers/AITHER.py index d2c2f290..693379d0 100644 --- a/src/trackers/AITHER.py +++ b/src/trackers/AITHER.py @@ -31,7 +31,7 @@ def __init__(self, config): 'Will1869', 'x0r', 'YIFY'] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, 
self.tracker, self.signature, comparison=True) @@ -52,7 +52,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -204,7 +204,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/AL.py b/src/trackers/AL.py index cadd0dbc..dcc7b677 100644 --- a/src/trackers/AL.py +++ b/src/trackers/AL.py @@ -69,7 +69,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, self.tracker, self.signature) @@ -90,7 +90,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -148,7 +148,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/ANT.py b/src/trackers/ANT.py index 1929172a..9e06f931 100644 --- a/src/trackers/ANT.py +++ b/src/trackers/ANT.py @@ -63,7 +63,7 @@ async def get_flags(self, meta): flags.append('Remux') return flags - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) torrent_filename = "BASE" torrent = Torrent.read(f"{meta['base_dir']}/tmp/{meta['uuid']}/BASE.torrent") @@ -173,7 +173,7 @@ def calculate_pieces_and_file_size(total_size, pathname_bytes, piece_size): async def edit_desc(self, meta): return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/BHD.py b/src/trackers/BHD.py index 7723934d..910ff6e8 100644 --- a/src/trackers/BHD.py +++ b/src/trackers/BHD.py @@ -28,7 +28,7 @@ def __init__(self, config): self.banned_groups = ['Sicario', 'TOMMY', 'x0r', 'nikt0', 'FGT', 'd3g', 'MeGusta', 'YIFY', 'tigole', 'TEKNO3D', 'C4K', 'RARBG', '4K4U', 'EASports', 'ReaLHD'] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -213,7 +213,7 @@ async def edit_desc(self, meta): 
desc.close() return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") category = meta['category'] diff --git a/src/trackers/BHDTV.py b/src/trackers/BHDTV.py index d675fdaa..7dd05ed7 100644 --- a/src/trackers/BHDTV.py +++ b/src/trackers/BHDTV.py @@ -30,7 +30,7 @@ def __init__(self, config): self.banned_groups = [] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await self.edit_desc(meta) @@ -59,7 +59,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO_CLEANPATH.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'file': open_torrent} @@ -185,8 +185,8 @@ async def get_res_id(self, resolution): return resolution_id async def edit_desc(self, meta): - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as desc: + base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', encoding='utf-8') as desc: desc.write(base.replace("[img=250]", "[img=250x250]")) images = meta['image_list'] if len(images) > 0: @@ -198,7 +198,7 @@ async def edit_desc(self, meta): desc.close() return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): console.print("[red]Dupes must be checked Manually") return ['Dupes must be checked Manually'] # hopefully someone else has the time to implement this. 
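Two changes recur across every tracker module in this diff: the new disctype argument is threaded through each upload() and search_existing() signature, and description files are now opened with an explicit encoding='utf-8'. Without the explicit encoding, open() falls back to the platform's default codec (for example cp1252 on Windows), which can mis-decode non-ASCII descriptions or raise UnicodeDecodeError outright. A minimal sketch of the read pattern, with a hypothetical helper name and an illustrative path:

def read_description(path: str) -> str:
    # Explicit UTF-8 so descriptions containing non-ASCII characters
    # are read the same way on every platform, matching this diff.
    with open(path, 'r', encoding='utf-8') as f:
        return f.read()

desc = read_description("tmp/example-uuid/[TRACKER]DESCRIPTION.txt")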
diff --git a/src/trackers/BLU.py b/src/trackers/BLU.py index 9af559cf..6ce0dba5 100644 --- a/src/trackers/BLU.py +++ b/src/trackers/BLU.py @@ -35,7 +35,7 @@ def __init__(self, config): pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) blu_name = meta['name'] desc_header = "" @@ -60,7 +60,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[BLU]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[BLU]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[BLU]{meta['clean_name']}.torrent", 'rb') files = {'torrent': ("placeholder.torrent", open_torrent, "application/x-bittorrent")} data = { @@ -190,7 +190,7 @@ async def derived_dv_layer(self, meta): name = name.replace(meta['resolution'], f"Hybrid {meta['resolution']}") return name, desc_header - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/CBR.py b/src/trackers/CBR.py index 8a58fd0b..c090e80a 100644 --- a/src/trackers/CBR.py +++ b/src/trackers/CBR.py @@ -28,7 +28,7 @@ def __init__(self, config): self.banned_groups = [""] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, self.tracker, self.signature) @@ -49,7 +49,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[CBR]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[CBR]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[CBR]{meta['clean_name']}.torrent", 'rb') files = {'torrent': ("placeholder.torrent", open_torrent, "application/x-bittorrent")} data = { @@ -143,7 +143,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Buscando por duplicatas no tracker...") params = { diff --git a/src/trackers/FL.py b/src/trackers/FL.py index a7c67d02..5d376c91 100644 --- a/src/trackers/FL.py +++ b/src/trackers/FL.py @@ -100,11 +100,7 @@ async def edit_name(self, meta): fl_name = fl_name.replace(' ', '.').replace('..', '.') return fl_name - ############################################################### - ###### STOP HERE UNLESS EXTRA MODIFICATION IS NEEDED ###### # noqa E266 - ############################################################### - - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await self.edit_desc(meta) @@ -137,7 +133,7 @@ async def upload(self, meta): torrentFileName = meta.get('uuid') # Download new .torrent from site - fl_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', newline='').read() + fl_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', 
newline='', encoding='utf-8').read() torrent_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent" if meta['bdinfo'] is not None: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/BD_SUMMARY_00.txt", 'r', encoding='utf-8').read() @@ -194,7 +190,7 @@ async def upload(self, meta): raise UploadException(f"Upload to FL Failed: result URL {up.url} ({up.status_code}) was not expected", 'red') # noqa F405 return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] with requests.Session() as session: cookiefile = os.path.abspath(f"{meta['base_dir']}/data/cookies/FL.pkl") @@ -299,8 +295,8 @@ async def download_new_torrent(self, session, id, torrent_path): return async def edit_desc(self, meta): - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', newline='') as descfile: + base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', newline='', encoding='utf-8') as descfile: from src.bbcode import BBCODE bbcode = BBCODE() diff --git a/src/trackers/FNP.py b/src/trackers/FNP.py index 4608925c..eb6ebaa4 100644 --- a/src/trackers/FNP.py +++ b/src/trackers/FNP.py @@ -62,7 +62,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -82,7 +82,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -140,7 +140,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/HDB.py b/src/trackers/HDB.py index 9a4777a9..a59c42e8 100644 --- a/src/trackers/HDB.py +++ b/src/trackers/HDB.py @@ -196,7 +196,7 @@ async def edit_name(self, meta): return hdb_name - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await self.edit_desc(meta) @@ -215,7 +215,7 @@ async def upload(self, meta): return # Download new .torrent from site - hdb_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + hdb_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() torrent_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent" torrent = Torrent.read(torrent_path) @@ -317,7 +317,7 @@ async def upload(self, meta): raise UploadException(f"Upload to HDB Failed: result URL {up.url} ({up.status_code}) 
was not expected", 'red') # noqa F405 return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") url = "https://hdbits.org/api/torrents" @@ -415,8 +415,8 @@ async def download_new_torrent(self, id, torrent_path): return async def edit_desc(self, meta): - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as descfile: + base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', encoding='utf-8') as descfile: from src.bbcode import BBCODE # Add This line for all web-dls if meta['type'] == 'WEBDL' and meta.get('service_longname', '') != '' and meta.get('description', None) is None: diff --git a/src/trackers/HDT.py b/src/trackers/HDT.py index a46abb83..a1117b67 100644 --- a/src/trackers/HDT.py +++ b/src/trackers/HDT.py @@ -102,11 +102,7 @@ async def edit_name(self, meta): hdt_name = hdt_name.replace(':', '').replace('..', ' ').replace(' ', ' ') return hdt_name - ############################################################### - ###### STOP HERE UNLESS EXTRA MODIFICATION IS NEEDED ###### # noqa E266 - ############################################################### - - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await self.edit_desc(meta) @@ -127,7 +123,7 @@ async def upload(self, meta): hdt_name = hdt_name_manually # Upload - hdt_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', newline='').read() + hdt_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', newline='', encoding='utf-8').read() torrent_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent" with open(torrent_path, 'rb') as torrentFile: @@ -196,7 +192,7 @@ async def upload(self, meta): raise UploadException(f"Upload to HDT Failed: result URL {up.url} ({up.status_code}) was not expected", 'red') # noqa F405 return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] with requests.Session() as session: common = COMMON(config=self.config) @@ -296,7 +292,7 @@ async def get_csrfToken(self, session, url): async def edit_desc(self, meta): # base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', newline='') as descfile: + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', newline='', encoding='utf-8') as descfile: if meta['is_disc'] != 'BDMV': # Beautify MediaInfo for HDT using custom template video = meta['filelist'][0] diff --git a/src/trackers/HP.py b/src/trackers/HP.py index 3250213b..76acbb83 100644 --- a/src/trackers/HP.py +++ b/src/trackers/HP.py @@ -62,7 +62,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -82,7 +82,7 @@ async def upload(self, meta): else: mi_dump 
= open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -140,7 +140,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/HUNO.py b/src/trackers/HUNO.py index 9d80b2c2..c1a5298f 100644 --- a/src/trackers/HUNO.py +++ b/src/trackers/HUNO.py @@ -29,7 +29,7 @@ def __init__(self, config): self.banned_groups = ["4K4U, Bearfish, BiTOR, BONE, D3FiL3R, d3g, DTR, ELiTE, EVO, eztv, EzzRips, FGT, HashMiner, HETeam, HEVCBay, HiQVE, HR-DR, iFT, ION265, iVy, JATT, Joy, LAMA, m3th, MeGusta, MRN, Musafirboy, OEPlus, Pahe.in, PHOCiS, PSA, RARBG, RMTeam, ShieldBearer, SiQ, TBD, Telly, TSP, VXT, WKS, YAWNiX, YIFY, YTS"] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.unit3d_edit_desc(meta, self.tracker, self.signature) await common.edit_torrent(meta, self.tracker, self.source_flag) @@ -52,7 +52,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[HUNO]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[HUNO]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[HUNO]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -256,7 +256,7 @@ async def is_plex_friendly(self, meta): return 0 - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") diff --git a/src/trackers/JPTV.py b/src/trackers/JPTV.py index 1502d878..76e8e78f 100644 --- a/src/trackers/JPTV.py +++ b/src/trackers/JPTV.py @@ -67,7 +67,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta) @@ -89,7 +89,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() # bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -147,7 +147,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git 
a/src/trackers/LCD.py b/src/trackers/LCD.py index 6ca5cac6..5b7397d3 100644 --- a/src/trackers/LCD.py +++ b/src/trackers/LCD.py @@ -28,7 +28,7 @@ def __init__(self, config): self.banned_groups = [""] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, self.tracker, self.signature) @@ -49,7 +49,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[LCD]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[LCD]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[LCD]{meta['clean_name']}.torrent", 'rb') files = {'torrent': ("placeholder.torrent", open_torrent, "application/x-bittorrent")} data = { @@ -145,7 +145,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Buscando por duplicatas no tracker...") params = { diff --git a/src/trackers/LST.py b/src/trackers/LST.py index 7278f7ed..83fc5e1b 100644 --- a/src/trackers/LST.py +++ b/src/trackers/LST.py @@ -70,7 +70,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category'], meta.get('keywords', ''), meta.get('service', '')) @@ -93,7 +93,7 @@ async def upload(self, meta): mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() if meta.get('service') == "hentai": desc = "[center]" + "[img]" + str(meta['poster']) + "[/img][/center]" + "\n[center]" + "https://www.themoviedb.org/tv/" + str(meta['tmdb']) + "\nhttps://myanimelist.net/anime/" + str(meta['mal']) + "[/center]" + desc @@ -164,7 +164,7 @@ async def get_flag(self, meta, flag_name): return 1 if meta.get(flag_name, False) else 0 - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/LT.py b/src/trackers/LT.py index 36742863..c6e0e4be 100644 --- a/src/trackers/LT.py +++ b/src/trackers/LT.py @@ -86,7 +86,7 @@ async def edit_name(self, meta): lt_name = lt_name.replace(meta['tag'], f" [SUBS]{meta['tag']}") return lt_name - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category'], meta) @@ -107,7 +107,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = 
open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -163,7 +163,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/MTV.py b/src/trackers/MTV.py index 656a59f0..9e6ea24f 100644 --- a/src/trackers/MTV.py +++ b/src/trackers/MTV.py @@ -37,7 +37,7 @@ def __init__(self, config): ] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) cookiefile = os.path.abspath(f"{meta['base_dir']}/data/cookies/MTV.pkl") @@ -130,7 +130,7 @@ async def upload_with_retry(self, meta, cookiefile, common, img_host_index=1): anon = 1 if meta['anon'] != 0 or bool(str2bool(str(self.config['TRACKERS'][self.tracker].get('anon', "False")))) else 0 desc_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt" - desc = open(desc_path, 'r').read() + desc = open(desc_path, 'r', encoding='utf-8').read() torrent_file_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent" with open(torrent_file_path, 'rb') as f: @@ -226,8 +226,8 @@ async def handle_image_upload(self, meta, img_host_index=1, approved_image_hosts return meta['image_list'], False # No need to retry, successful upload async def edit_desc(self, meta): - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as desc: + base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', encoding='utf-8') as desc: # adding bd_dump to description if it exits and adding empty string to mediainfo if meta['bdinfo'] is not None: mi_dump = None @@ -540,7 +540,7 @@ async def login(self, cookiefile): console.print(resp.url) return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/NBL.py b/src/trackers/NBL.py index 35dd0fc5..3711c54c 100644 --- a/src/trackers/NBL.py +++ b/src/trackers/NBL.py @@ -43,7 +43,7 @@ async def edit_desc(self, meta): # Leave this in so manual works return - async def upload(self, meta): + async def upload(self, meta, disctype): if meta['category'] != 'TV': console.print("[red]Only TV Is allowed at NBL") return @@ -82,7 +82,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") if int(meta.get('tvmaze_id', 0)) != 0: diff --git a/src/trackers/OE.py b/src/trackers/OE.py index e1b7453e..ec332dc9 100644 --- a/src/trackers/OE.py +++ b/src/trackers/OE.py @@ -38,7 +38,7 @@ def __init__(self, config): 'YTS', 'YuiSubs', 'ZKBL', 'ZmN', 'ZMNT'] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await 
common.unit3d_edit_desc(meta, self.tracker, self.signature) @@ -56,7 +56,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -167,7 +167,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/OTW.py b/src/trackers/OTW.py index 71f85276..766ebd76 100644 --- a/src/trackers/OTW.py +++ b/src/trackers/OTW.py @@ -62,7 +62,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -82,7 +82,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -140,7 +140,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/PSS.py b/src/trackers/PSS.py index 66ca5b0a..0f0fde00 100644 --- a/src/trackers/PSS.py +++ b/src/trackers/PSS.py @@ -64,7 +64,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -84,7 +84,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -142,7 +142,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/PTER.py b/src/trackers/PTER.py index 50e44367..88fea3a8 100644 --- 
a/src/trackers/PTER.py +++ b/src/trackers/PTER.py @@ -59,7 +59,7 @@ async def validate_cookies(self, meta): console.print("[bold red]Missing Cookie File. (data/cookies/PTER.txt)") return False - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] common = COMMON(config=self.config) cookiefile = f"{meta['base_dir']}/data/cookies/PTER.txt" @@ -149,8 +149,8 @@ async def get_type_medium_id(self, meta): return medium_id async def edit_desc(self, meta): - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as descfile: + base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', encoding='utf-8') as descfile: from src.bbcode import BBCODE from src.trackers.COMMON import COMMON common = COMMON(config=self.config) @@ -317,7 +317,7 @@ async def is_zhongzi(self, meta): return 'yes' return None - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) diff --git a/src/trackers/PTP.py b/src/trackers/PTP.py index a2f992e9..dee15cc8 100644 --- a/src/trackers/PTP.py +++ b/src/trackers/PTP.py @@ -201,7 +201,7 @@ async def get_ptp_description(self, ptp_torrent_id, is_disc): await asyncio.sleep(1) ptp_desc = response.text - # console.print(f"[yellow]Raw description received:\n{ptp_desc[:3800]}...") # Show first 500 characters for brevity + # console.print(f"[yellow]Raw description received:\n{ptp_desc[:6800]}...") # Show first 500 characters for brevity bbcode = BBCODE() desc, imagelist = bbcode.clean_ptp_description(ptp_desc, is_disc) @@ -306,7 +306,7 @@ async def get_tags(self, check_against): tags.append(each) return tags - async def search_existing(self, groupID, meta): + async def search_existing(self, groupID, meta, disctype): # Map resolutions to SD / HD / UHD quality = None if meta.get('sd', 0) == 1: # 1 is SD @@ -765,7 +765,7 @@ async def fill_upload_form(self, groupID, meta): await common.edit_torrent(meta, self.tracker, self.source_flag) resolution, other_resolution = self.get_resolution(meta) await self.edit_desc(meta) - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", "r").read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", "r", encoding='utf-8').read() ptp_subtitles = self.get_subtitles(meta) ptp_trumpable = None if not any(x in [3, 50] for x in ptp_subtitles) or meta['hardcoded-subs']: @@ -841,7 +841,7 @@ async def fill_upload_form(self, groupID, meta): return url, data - async def upload(self, meta, url, data): + async def upload(self, meta, url, data, disctype): torrent_filename = f"[{self.tracker}]{meta['clean_name']}.torrent" torrent_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/{torrent_filename}" torrent = Torrent.read(torrent_path) diff --git a/src/trackers/R4E.py b/src/trackers/R4E.py index c3ba5abe..82a8c808 100644 --- a/src/trackers/R4E.py +++ b/src/trackers/R4E.py @@ -27,7 +27,7 @@ def __init__(self, config): self.banned_groups = [""] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category'], meta['tmdb']) @@ -44,7 +44,7 @@ 
async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[R4E]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[R4E]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[R4E]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -136,7 +136,7 @@ async def is_docu(self, genres): is_docu = True return is_docu - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") url = "https://racing4everyone.eu/api/torrents/filter" diff --git a/src/trackers/RF.py b/src/trackers/RF.py index 1019c225..c90f8b4d 100644 --- a/src/trackers/RF.py +++ b/src/trackers/RF.py @@ -28,7 +28,7 @@ def __init__(self, config): self.banned_groups = [""] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, self.tracker, self.forum_link) @@ -48,7 +48,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -142,7 +142,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/RTF.py b/src/trackers/RTF.py index 07b78d7e..b5ddf485 100644 --- a/src/trackers/RTF.py +++ b/src/trackers/RTF.py @@ -29,7 +29,7 @@ def __init__(self, config): self.banned_groups = [] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, self.tracker, self.forum_link) @@ -93,7 +93,7 @@ async def upload(self, meta): console.print("[cyan]Request Data:") console.print(json_data) - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") headers = { diff --git a/src/trackers/SHRI.py b/src/trackers/SHRI.py index 689bfc62..6862b431 100644 --- a/src/trackers/SHRI.py +++ b/src/trackers/SHRI.py @@ -18,12 +18,6 @@ class SHRI(): Upload """ - ############################################################### - ######## EDIT ME ######## # noqa #E266 - ############################################################### - - # ALSO EDIT CLASS NAME ABOVE - def __init__(self, config): self.config = config self.tracker = 'SHRI' @@ -68,11 +62,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - ############################################################### - ###### STOP HERE UNLESS EXTRA MODIFICATION IS NEEDED ###### # noqa #E266 - 
############################################################### - - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -92,7 +82,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -150,7 +140,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/SN.py b/src/trackers/SN.py index 04547ce8..199ff68e 100644 --- a/src/trackers/SN.py +++ b/src/trackers/SN.py @@ -34,7 +34,7 @@ async def get_type_id(self, type): }.get(type, '0') return type_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) # await common.unit3d_edit_desc(meta, self.tracker, self.forum_link) @@ -60,7 +60,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') as f: tfile = f.read() @@ -106,8 +106,8 @@ async def upload(self, meta): console.print(data) async def edit_desc(self, meta): - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as desc: + base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', encoding='utf-8') as desc: desc.write(base) images = meta['image_list'] if len(images) > 0: @@ -121,7 +121,7 @@ async def edit_desc(self, meta): desc.close() return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") diff --git a/src/trackers/STC.py b/src/trackers/STC.py index 8e8c9ef5..fb17b2c0 100644 --- a/src/trackers/STC.py +++ b/src/trackers/STC.py @@ -26,7 +26,7 @@ def __init__(self, config): self.banned_groups = [""] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, self.tracker, self.signature) @@ -44,7 +44,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = 
open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -156,7 +156,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/STT.py b/src/trackers/STT.py index 8117e144..2f8ee800 100644 --- a/src/trackers/STT.py +++ b/src/trackers/STT.py @@ -27,7 +27,7 @@ def __init__(self, config): self.banned_groups = [""] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, self.tracker, self.signature) @@ -45,7 +45,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -134,7 +134,7 @@ async def get_res_id(self, resolution): }.get(resolution, '11') return resolution_id - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/TDC.py b/src/trackers/TDC.py index cd795249..b2dd45c8 100644 --- a/src/trackers/TDC.py +++ b/src/trackers/TDC.py @@ -61,7 +61,7 @@ async def get_res_id(self, resolution): }.get(resolution, '10') return resolution_id - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -81,7 +81,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -139,7 +139,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/THR.py b/src/trackers/THR.py index 84a97ad3..4a91b66e 100644 --- a/src/trackers/THR.py +++ b/src/trackers/THR.py @@ -28,7 +28,7 @@ def __init__(self, config): self.banned_groups = [""] pass - async def upload(self, session, meta): + async def upload(self, session, meta, disctype): await 
self.edit_torrent(meta) cat_id = await self.get_cat_id(meta) subs = self.get_subtitles(meta) @@ -59,7 +59,7 @@ async def upload(self, session, meta): f.close() # bd_file = None - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[THR]DESCRIPTION.txt", 'r') as f: + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[THR]DESCRIPTION.txt", 'r', encoding='utf-8') as f: desc = f.read() f.close() @@ -169,7 +169,7 @@ async def edit_torrent(self, meta): async def edit_desc(self, meta): pronfo = False - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() + base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[THR]DESCRIPTION.txt", 'w', encoding='utf-8') as desc: if meta['tag'] == "": tag = "" @@ -257,7 +257,7 @@ async def edit_desc(self, meta): desc.close() return pronfo - def search_existing(self, session, imdb_id): + def search_existing(self, session, imdb_id, disctype): from bs4 import BeautifulSoup imdb_id = imdb_id.replace('tt', '') search_url = f"https://www.torrenthr.org/browse.php?search={imdb_id}&blah=2&incldead=1" diff --git a/src/trackers/TIK.py b/src/trackers/TIK.py new file mode 100644 index 00000000..6a8b9d98 --- /dev/null +++ b/src/trackers/TIK.py @@ -0,0 +1,595 @@ +# -*- coding: utf-8 -*- +# import discord +import asyncio +import requests +import os +import re +import platform +import sys +import cli_ui +import urllib.request +import click +from str2bool import str2bool + +from src.trackers.COMMON import COMMON +from src.console import console + + +class TIK(): + """ + Edit for Tracker: + Edit BASE.torrent with announce and source + Check for duplicates + Set type/category IDs + Upload + """ + + def __init__(self, config): + self.config = config + self.tracker = 'TIK' + self.source_flag = 'TIK' + self.search_url = 'https://cinematik.net/api/torrents/filter' + self.upload_url = 'https://cinematik.net/api/torrents/upload' + self.torrent_url = 'https://cinematik.net/api/torrents/' + self.signature = "\n[center][url=https://github.com/Audionut/Upload-Assistant]Created by testing 123, Audionuts Upload Assistant[/url][/center]" + self.banned_groups = [""] + pass + + async def upload(self, meta, disctype): + common = COMMON(config=self.config) + await common.edit_torrent(meta, self.tracker, self.source_flag) + await common.unit3d_edit_desc(meta, self.tracker, self.signature, comparison=True) + cat_id = await self.get_cat_id(meta['category'], meta.get('foreign'), meta.get('opera'), meta.get('asian')) + type_id = await self.get_type_id(disctype) + resolution_id = await self.get_res_id(meta['resolution']) + modq = await self.get_flag(meta, 'modq') + region_id = await common.unit3d_region_ids(meta.get('region')) + distributor_id = await common.unit3d_distributor_ids(meta.get('distributor')) + if meta['anon'] == 0 and bool(str2bool(str(self.config['TRACKERS'][self.tracker].get('anon', "False")))) is False: + anon = 0 + else: + anon = 1 + + if not meta['is_disc']: + console.print("[red]Only disc-based content allowed at TIK") + return + elif meta['bdinfo'] is not None: + mi_dump = None + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/BD_SUMMARY_00.txt", 'r', encoding='utf-8') as bd_file: + bd_dump = bd_file.read() + else: + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8') as mi_file: + mi_dump = mi_file.read() + bd_dump = None + + if meta.get('desclink'): + desc = 
open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", "r", encoding='utf-8').read() + print(f"Custom Description Link: {desc}") + + elif meta.get('descfile'): + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", "r", encoding='utf-8').read() + print(f"Custom Description File Path: {desc}") + + else: + await self.edit_desc(meta) + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", "r", encoding='utf-8').read() + + open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') + files = {'torrent': open_torrent} + data = { + 'name': await self.get_name(meta, disctype), + 'description': desc, + 'mediainfo': mi_dump, + 'bdinfo': bd_dump, + 'category_id': cat_id, + 'type_id': type_id, + 'resolution_id': resolution_id, + 'region_id': region_id, + 'distributor_id': distributor_id, + 'tmdb': meta['tmdb'], + 'imdb': meta['imdb_id'].replace('tt', ''), + 'tvdb': meta['tvdb_id'], + 'mal': meta['mal_id'], + 'igdb': 0, + 'anonymous': anon, + 'stream': meta['stream'], + 'sd': meta['sd'], + 'keywords': meta['keywords'], + 'personal_release': 0, + 'internal': 0, + 'featured': 0, + 'free': 0, + 'doubleup': 0, + 'sticky': 0, + 'mod_queue_opt_in': modq, + } + # Internal + if self.config['TRACKERS'][self.tracker].get('internal', False) is True: + if meta['tag'] != "" and (meta['tag'][1:] in self.config['TRACKERS'][self.tracker].get('internal_groups', [])): + data['internal'] = 1 + if self.config['TRACKERS'][self.tracker].get('personal', False) is True: + if meta['tag'] != "" and (meta['tag'][1:] in self.config['TRACKERS'][self.tracker].get('personal_group', [])): + data['personal_release'] = 1 + + if region_id != 0: + data['region_id'] = region_id + if distributor_id != 0: + data['distributor_id'] = distributor_id + if meta.get('category') == "TV": + data['season_number'] = meta.get('season_int', '0') + data['episode_number'] = meta.get('episode_int', '0') + headers = { + 'User-Agent': f'Upload Assistant/2.1 ({platform.system()} {platform.release()})' + } + params = { + 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip() + } + + if meta['debug'] is False: + response = requests.post(url=self.upload_url, files=files, data=data, headers=headers, params=params) + console.print(data) + console.print(f"TIK response: {response}") + try: + console.print(response.json()) + except Exception: + console.print("It may have uploaded, go check") + return + else: + console.print("[cyan]Request Data:") + console.print(data) + open_torrent.close() + + def get_basename(self, meta): + path = next(iter(meta['filelist']), meta['path']) + return os.path.basename(path) + + async def get_name(self, meta, disctype): + disctype = meta.get('disctype', None) + basename = self.get_basename(meta) + type = meta.get('type', "") + title = meta.get('title', "").replace('AKA', '/').strip() + alt_title = meta.get('aka', "").replace('AKA', '/').strip() + year = meta.get('year', "") + resolution = meta.get('resolution', "") + season = meta.get('season', "") + repack = meta.get('repack', "") + if repack.strip(): + repack = f"[{repack}]" + three_d = meta.get('3D', "") + three_d_tag = f"[{three_d}]" if three_d else "" + tag = meta.get('tag', "").replace("-", "- ") + if tag == "": + tag = "- NOGRP" + source = meta.get('source', "") + uhd = meta.get('uhd', "") # noqa #841 + hdr = meta.get('hdr', "") + if not hdr.strip(): + hdr = "SDR" + distributor = meta.get('distributor', "") # noqa F841 + 
video_codec = meta.get('video_codec', "") + video_encode = meta.get('video_encode', "").replace(".", "") + if 'x265' in basename: + video_encode = video_encode.replace('H', 'x') + dvd_size = meta.get('dvd_size', "") + search_year = meta.get('search_year', "") + if not str(search_year).strip(): + search_year = year + + category_name = meta.get('category', "") + foreign = meta.get('foreign') + opera = meta.get('opera') + asian = meta.get('asian') + meta['category_id'] = await self.get_cat_id(category_name, foreign, opera, asian) + + name = "" + alt_title_part = f" / {alt_title}" if alt_title else "" + if meta['category_id'] in ("1", "3", "5", "6"): + if meta['is_disc'] == 'BDMV': + name = f"{title}{alt_title_part} ({year}) {disctype} {resolution} {video_codec} {three_d_tag}" + elif meta['is_disc'] == 'DVD': + name = f"{title}{alt_title_part} ({year}) {source} {dvd_size}" + elif meta['category'] == "TV": # TV SPECIFIC + if type == "DISC": # Disk + if meta['is_disc'] == 'BDMV': + name = f"{title}{alt_title_part} ({search_year}) {season} {disctype} {resolution} {video_codec}" + if meta['is_disc'] == 'DVD': + name = f"{title}{alt_title_part} ({search_year}) {season} {source} {dvd_size}" + + # User confirmation + console.print(f"[yellow]Final generated name: [green]{name}") + confirmation = cli_ui.ask_yes_no("Do you want to use this name?", default=False) # Default is 'No' + + if confirmation: + return name + else: + console.print("[red]Sorry, this seems to be an edge case, please report at (insert_link)") + sys.exit(1) + + async def get_cat_id(self, category_name, foreign, opera, asian): + category_id = { + 'FILM': '1', + 'TV': '2', + 'Foreign Film': '3', + 'Foreign TV': '4', + 'Opera & Musical': '5', + 'Asian Film': '6', + }.get(category_name, '0') + + if category_name == 'MOVIE': + if foreign: + category_id = '3' + elif opera: + category_id = '5' + elif asian: + category_id = '6' + else: + category_id = '1' + elif category_name == 'TV': + if foreign: + category_id = '4' + elif opera: + category_id = '5' + else: + category_id = '2' + + return category_id + + async def get_type_id(self, disctype): + type_id_map = { + 'Custom': '1', + 'BD100': '3', + 'BD66': '4', + 'BD50': '5', + 'BD25': '6', + 'NTSC DVD9': '7', + 'NTSC DVD5': '8', + 'PAL DVD9': '9', + 'PAL DVD5': '10', + '3D': '11' + } + + if not disctype: + console.print("[red]You must specify a --disctype") + return None + + disctype_value = disctype[0] if isinstance(disctype, list) else disctype + type_id = type_id_map.get(disctype_value, '1') # '1' is the default fallback + + return type_id + + async def get_res_id(self, resolution): + resolution_id = { + 'Other': '10', + '4320p': '1', + '2160p': '2', + '1440p': '3', + '1080p': '3', + '1080i': '4', + '720p': '5', + '576p': '6', + '576i': '7', + '480p': '8', + '480i': '9' + }.get(resolution, '10') + return resolution_id + + async def get_flag(self, meta, flag_name): + config_flag = self.config['TRACKERS'][self.tracker].get(flag_name) + if config_flag is not None: + return 1 if config_flag else 0 + + return 1 if meta.get(flag_name, False) else 0 + + async def edit_desc(self, meta): + from src.prep import Prep + prep = Prep(screens=meta['screens'], img_host=meta['imghost'], config=self.config) + + # Fetch additional IMDb metadata + meta_imdb = await prep.imdb_other_meta(meta) # noqa #F841 + + if len(meta.get('discs', [])) > 0: + summary = meta['discs'][0].get('summary', '') + else: + summary = None + + # Proceed with matching Total Bitrate if the summary exists + if summary: + match = 
re.search(r"Total Bitrate: ([\d.]+ Mbps)", summary) + if match: + total_bitrate = match.group(1) + else: + total_bitrate = "Unknown" + else: + total_bitrate = "Unknown" + + country_name = self.country_code_to_name(meta.get('region')) + + # Rehost poster if tmdb_poster is available + poster_url = f"https://image.tmdb.org/t/p/original{meta.get('tmdb_poster', '')}" + + # Define the paths for both jpg and png poster images + poster_jpg_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/poster.jpg" + poster_png_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/poster.png" + + # Check if either poster.jpg or poster.png already exists + if os.path.exists(poster_jpg_path): + poster_path = poster_jpg_path + console.print("[green]Poster already exists as poster.jpg, skipping download.[/green]") + elif os.path.exists(poster_png_path): + poster_path = poster_png_path + console.print("[green]Poster already exists as poster.png, skipping download.[/green]") + else: + # No poster file exists, download the poster image + poster_path = poster_jpg_path # Default to saving as poster.jpg + try: + urllib.request.urlretrieve(poster_url, poster_path) + console.print(f"[green]Poster downloaded to {poster_path}[/green]") + except Exception as e: + console.print(f"[red]Error downloading poster: {e}[/red]") + + # Upload the downloaded or existing poster image once + if os.path.exists(poster_path): + try: + console.print("Uploading standard poster to image host....") + new_poster_url, _ = prep.upload_screens(meta, 1, 1, 0, 1, [poster_path], {}) + + # Ensure that the new poster URL is assigned only once + if len(new_poster_url) > 0: + poster_url = new_poster_url[0]['raw_url'] + except Exception as e: + console.print(f"[red]Error uploading poster: {e}[/red]") + else: + console.print("[red]Poster file not found, cannot upload.[/red]") + + # Generate the description text + desc_text = [] + + images = meta['image_list'] + discs = meta.get('discs', []) # noqa #F841 + + if len(images) >= 6: + image_link_1 = images[0]['raw_url'] + image_link_2 = images[1]['raw_url'] + image_link_3 = images[2]['raw_url'] + image_link_4 = images[3]['raw_url'] + image_link_5 = images[4]['raw_url'] + image_link_6 = images[5]['raw_url'] + else: + image_link_1 = image_link_2 = image_link_3 = image_link_4 = image_link_5 = image_link_6 = "" + + # Write the cover section with rehosted poster URL + desc_text.append("[h3]Cover[/h3] [color=red]A stock poster has been automatically added, but you'll get more love if you include a proper cover, see rule 6.6[/color]\n") + desc_text.append("[center]\n") + desc_text.append(f"[IMG=500]{poster_url}[/IMG]\n") + desc_text.append("[/center]\n\n") + + # Write screenshots section + desc_text.append("[h3]Screenshots[/h3]\n") + desc_text.append("[center]\n") + desc_text.append(f"[URL={image_link_1}][IMG=300]{image_link_1}[/IMG][/URL] ") + desc_text.append(f"[URL={image_link_2}][IMG=300]{image_link_2}[/IMG][/URL] ") + desc_text.append(f"[URL={image_link_3}][IMG=300]{image_link_3}[/IMG][/URL]\n ") + desc_text.append(f"[URL={image_link_4}][IMG=300]{image_link_4}[/IMG][/URL] ") + desc_text.append(f"[URL={image_link_5}][IMG=300]{image_link_5}[/IMG][/URL] ") + desc_text.append(f"[URL={image_link_6}][IMG=300]{image_link_6}[/IMG][/URL]\n") + desc_text.append("[/center]\n\n") + + # Write synopsis section with the custom title + desc_text.append("[h3]Synopsis/Review/Personal Thoughts (edit as needed)[/h3]\n") + desc_text.append("[color=red]Default TMDB synopsis added, more love if you use a synopsis from credible film institutions 
such as the BFI or directly quoting well-known film critics, see rule 6.3[/color]\n") + desc_text.append("[quote]\n") + desc_text.append(f"{meta.get('overview', 'No synopsis available.')}\n") + desc_text.append("[/quote]\n\n") + + # Write technical info section + desc_text.append("[h3]Technical Info[/h3]\n") + desc_text.append("[code]\n") + if meta['is_disc'] == 'BDMV': + desc_text.append(f" Disc Label.........:{meta.get('bdinfo', {}).get('label', '')}\n") + desc_text.append(f" IMDb...............: [url=https://www.imdb.com/title/tt{meta.get('imdb_id')}]{meta.get('imdb_rating', '')}[/url]\n") + desc_text.append(f" Year...............: {meta.get('year', '')}\n") + desc_text.append(f" Country............: {country_name}\n") + if meta['is_disc'] == 'BDMV': + desc_text.append(f" Runtime............: {meta.get('bdinfo', {}).get('length', '')} hrs [color=red](double check this is actual runtime)[/color]\n") + else: + desc_text.append(" Runtime............: [color=red]Insert the actual runtime[/color]\n") + + if meta['is_disc'] == 'BDMV': + audio_languages = ', '.join([f"{track.get('language', 'Unknown')} {track.get('codec', 'Unknown')} {track.get('channels', 'Unknown')}" for track in meta.get('bdinfo', {}).get('audio', [])]) + desc_text.append(f" Audio..............: {audio_languages}\n") + desc_text.append(f" Subtitles..........: {', '.join(meta.get('bdinfo', {}).get('subtitles', []))}\n") + else: + # Process each disc's `vob_mi` or `ifo_mi` to extract audio and subtitles separately + for disc in meta.get('discs', []): + vob_mi = disc.get('vob_mi', '') + ifo_mi = disc.get('ifo_mi', '') + + unique_audio = set() # Store unique audio strings + + audio_section = vob_mi.split('\n\nAudio\n')[1].split('\n\n')[0] if 'Audio\n' in vob_mi else None + if audio_section: + if "AC-3" in audio_section: + codec = "AC-3" + elif "DTS" in audio_section: + codec = "DTS" + elif "MPEG Audio" in audio_section: + codec = "MPEG Audio" + elif "PCM" in audio_section: + codec = "PCM" + elif "AAC" in audio_section: + codec = "AAC" + else: + codec = "Unknown" + + channels = audio_section.split("Channel(s)")[1].split(":")[1].strip().split(" ")[0] if "Channel(s)" in audio_section else "Unknown" + # Convert 6 channels to 5.1, otherwise leave as is + channels = "5.1" if channels == "6" else channels + language = disc.get('ifo_mi_full', '').split('Language')[1].split(":")[1].strip().split('\n')[0] if "Language" in disc.get('ifo_mi_full', '') else "Unknown" + audio_info = f"{language} {codec} {channels}" + unique_audio.add(audio_info) + + # Append audio information to the description + if unique_audio: + desc_text.append(f" Audio..............: {', '.join(sorted(unique_audio))}\n") + + # Subtitle extraction using the helper function + unique_subtitles = self.parse_subtitles(ifo_mi) + + # Append subtitle information to the description + if unique_subtitles: + desc_text.append(f" Subtitles..........: {', '.join(sorted(unique_subtitles))}\n") + + if meta['is_disc'] == 'BDMV': + video_info = meta.get('bdinfo', {}).get('video', []) + video_codec = video_info[0].get('codec', 'Unknown') + video_bitrate = video_info[0].get('bitrate', 'Unknown') + desc_text.append(f" Video Format.......: {video_codec} / {video_bitrate}\n") + else: + desc_text.append(f" DVD Format.........: {meta.get('source', 'Unknown')}\n") + desc_text.append(" Film Aspect Ratio..: [color=red]The actual aspect ratio of the content, not including the black bars[/color]\n") + if meta['is_disc'] == 'BDMV': + desc_text.append(f" Source.............: {meta.get('disctype', 
'Unknown')}\n") + else: + desc_text.append(f" Source.............: {meta.get('dvd_size', 'Unknown')}\n") + desc_text.append(f" Film Distributor...: [url={meta.get('distributor_link', '')}]{meta.get('distributor', 'Unknown')}[/url] [color=red]Don't forget the actual distributor link[/color]\n") + desc_text.append(f" Average Bitrate....: {total_bitrate}\n") + desc_text.append(" Ripping Program....: [color=red]Specify - if it's your rip or custom version, otherwise 'Not my rip'[/color]\n") + desc_text.append("\n") + if meta.get('untouched') is True: + desc_text.append(" Menus......: [X] Untouched\n") + desc_text.append(" Video......: [X] Untouched\n") + desc_text.append(" Extras.....: [X] Untouched\n") + desc_text.append(" Audio......: [X] Untouched\n") + else: + desc_text.append(" Menus......: [ ] Untouched\n") + desc_text.append(" [ ] Stripped\n") + desc_text.append(" Video......: [ ] Untouched\n") + desc_text.append(" [ ] Re-encoded\n") + desc_text.append(" Extras.....: [ ] Untouched\n") + desc_text.append(" [ ] Stripped\n") + desc_text.append(" [ ] Re-encoded\n") + desc_text.append(" [ ] None\n") + desc_text.append(" Audio......: [ ] Untouched\n") + desc_text.append(" [ ] Stripped tracks\n") + + desc_text.append("[/code]\n\n") + + # Extras + desc_text.append("[h4]Extras[/h4]\n") + desc_text.append("[*] Insert special feature 1 here\n") + desc_text.append("[*] Insert special feature 2 here\n") + desc_text.append("... (add more special features as needed)\n\n") + + # Uploader Comments + desc_text.append("[h4]Uploader Comments[/h4]\n") + desc_text.append(f" - {meta.get('uploader_comments', 'No comments.')}\n") + + # Convert the list to a single string for the description + description = ''.join(desc_text) + + # Ask user if they want to edit or keep the description + console.print(f"Current description: {description}", markup=False) + console.print("[cyan]Do you want to edit or keep the description?[/cyan]") + edit_choice = input("Enter 'e' to edit, or press Enter to keep it as is: ") + + if edit_choice.lower() == 'e': + edited_description = click.edit(description) + if edited_description: + description = edited_description.strip() + console.print(f"Final description after editing: {description}", markup=False) + else: + console.print("[green]Keeping the original description.[/green]") + + # Write the final description to the file + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', encoding="utf-8") as desc_file: + desc_file.write(description) + + def parse_subtitles(self, disc_mi): + unique_subtitles = set() # Store unique subtitle strings + lines = disc_mi.splitlines() # Split the multiline text into individual lines + current_block = None + + for line in lines: + # Detect the start of a subtitle block (Text #) + if line.startswith("Text #"): + current_block = "subtitle" + continue + + # Extract language information for subtitles + if current_block == "subtitle" and "Language" in line: + language = line.split(":")[1].strip() + unique_subtitles.add(language) + + return unique_subtitles + + def country_code_to_name(self, code): + country_mapping = { + 'AFG': 'Afghanistan', 'ALB': 'Albania', 'DZA': 'Algeria', 'AND': 'Andorra', 'AGO': 'Angola', + 'ARG': 'Argentina', 'ARM': 'Armenia', 'AUS': 'Australia', 'AUT': 'Austria', 'AZE': 'Azerbaijan', + 'BHS': 'Bahamas', 'BHR': 'Bahrain', 'BGD': 'Bangladesh', 'BRB': 'Barbados', 'BLR': 'Belarus', + 'BEL': 'Belgium', 'BLZ': 'Belize', 'BEN': 'Benin', 'BTN': 'Bhutan', 'BOL': 'Bolivia', + 'BIH': 'Bosnia and Herzegovina', 'BWA': 
'Botswana', 'BRA': 'Brazil', 'BRN': 'Brunei', + 'BGR': 'Bulgaria', 'BFA': 'Burkina Faso', 'BDI': 'Burundi', 'CPV': 'Cabo Verde', 'KHM': 'Cambodia', + 'CMR': 'Cameroon', 'CAN': 'Canada', 'CAF': 'Central African Republic', 'TCD': 'Chad', 'CHL': 'Chile', + 'CHN': 'China', 'COL': 'Colombia', 'COM': 'Comoros', 'COG': 'Congo', 'CRI': 'Costa Rica', + 'HRV': 'Croatia', 'CUB': 'Cuba', 'CYP': 'Cyprus', 'CZE': 'Czech Republic', 'DNK': 'Denmark', + 'DJI': 'Djibouti', 'DMA': 'Dominica', 'DOM': 'Dominican Republic', 'ECU': 'Ecuador', 'EGY': 'Egypt', + 'SLV': 'El Salvador', 'GNQ': 'Equatorial Guinea', 'ERI': 'Eritrea', 'EST': 'Estonia', + 'SWZ': 'Eswatini', 'ETH': 'Ethiopia', 'FJI': 'Fiji', 'FIN': 'Finland', 'FRA': 'France', + 'GAB': 'Gabon', 'GMB': 'Gambia', 'GEO': 'Georgia', 'DEU': 'Germany', 'GHA': 'Ghana', + 'GRC': 'Greece', 'GRD': 'Grenada', 'GTM': 'Guatemala', 'GIN': 'Guinea', 'GNB': 'Guinea-Bissau', + 'GUY': 'Guyana', 'HTI': 'Haiti', 'HND': 'Honduras', 'HUN': 'Hungary', 'ISL': 'Iceland', 'IND': 'India', + 'IDN': 'Indonesia', 'IRN': 'Iran', 'IRQ': 'Iraq', 'IRL': 'Ireland', 'ISR': 'Israel', 'ITA': 'Italy', + 'JAM': 'Jamaica', 'JPN': 'Japan', 'JOR': 'Jordan', 'KAZ': 'Kazakhstan', 'KEN': 'Kenya', + 'KIR': 'Kiribati', 'KOR': 'Korea', 'KWT': 'Kuwait', 'KGZ': 'Kyrgyzstan', 'LAO': 'Laos', 'LVA': 'Latvia', + 'LBN': 'Lebanon', 'LSO': 'Lesotho', 'LBR': 'Liberia', 'LBY': 'Libya', 'LIE': 'Liechtenstein', + 'LTU': 'Lithuania', 'LUX': 'Luxembourg', 'MDG': 'Madagascar', 'MWI': 'Malawi', 'MYS': 'Malaysia', + 'MDV': 'Maldives', 'MLI': 'Mali', 'MLT': 'Malta', 'MHL': 'Marshall Islands', 'MRT': 'Mauritania', + 'MUS': 'Mauritius', 'MEX': 'Mexico', 'FSM': 'Micronesia', 'MDA': 'Moldova', 'MCO': 'Monaco', + 'MNG': 'Mongolia', 'MNE': 'Montenegro', 'MAR': 'Morocco', 'MOZ': 'Mozambique', 'MMR': 'Myanmar', + 'NAM': 'Namibia', 'NRU': 'Nauru', 'NPL': 'Nepal', 'NLD': 'Netherlands', 'NZL': 'New Zealand', + 'NIC': 'Nicaragua', 'NER': 'Niger', 'NGA': 'Nigeria', 'MKD': 'North Macedonia', 'NOR': 'Norway', + 'OMN': 'Oman', 'PAK': 'Pakistan', 'PLW': 'Palau', 'PAN': 'Panama', 'PNG': 'Papua New Guinea', + 'PRY': 'Paraguay', 'PER': 'Peru', 'PHL': 'Philippines', 'POL': 'Poland', 'PRT': 'Portugal', + 'QAT': 'Qatar', 'ROU': 'Romania', 'RUS': 'Russia', 'RWA': 'Rwanda', 'KNA': 'Saint Kitts and Nevis', + 'LCA': 'Saint Lucia', 'VCT': 'Saint Vincent and the Grenadines', 'WSM': 'Samoa', 'SMR': 'San Marino', + 'STP': 'Sao Tome and Principe', 'SAU': 'Saudi Arabia', 'SEN': 'Senegal', 'SRB': 'Serbia', + 'SYC': 'Seychelles', 'SLE': 'Sierra Leone', 'SGP': 'Singapore', 'SVK': 'Slovakia', 'SVN': 'Slovenia', + 'SLB': 'Solomon Islands', 'SOM': 'Somalia', 'ZAF': 'South Africa', 'SSD': 'South Sudan', + 'ESP': 'Spain', 'LKA': 'Sri Lanka', 'SDN': 'Sudan', 'SUR': 'Suriname', 'SWE': 'Sweden', + 'CHE': 'Switzerland', 'SYR': 'Syria', 'TWN': 'Taiwan', 'TJK': 'Tajikistan', 'TZA': 'Tanzania', + 'THA': 'Thailand', 'TLS': 'Timor-Leste', 'TGO': 'Togo', 'TON': 'Tonga', 'TTO': 'Trinidad and Tobago', + 'TUN': 'Tunisia', 'TUR': 'Turkey', 'TKM': 'Turkmenistan', 'TUV': 'Tuvalu', 'UGA': 'Uganda', + 'UKR': 'Ukraine', 'ARE': 'United Arab Emirates', 'GBR': 'United Kingdom', 'USA': 'United States', + 'URY': 'Uruguay', 'UZB': 'Uzbekistan', 'VUT': 'Vanuatu', 'VEN': 'Venezuela', 'VNM': 'Vietnam', + 'YEM': 'Yemen', 'ZMB': 'Zambia', 'ZWE': 'Zimbabwe' + } + return country_mapping.get(code.upper(), 'Unknown Country') + + async def search_existing(self, meta, disctype): + dupes = [] + console.print("[yellow]Searching for existing torrents on site...") + disctype = meta.get('disctype', 
None) + params = { + 'api_token': self.config['TRACKERS'][self.tracker]['api_key'].strip(), + 'tmdbId': meta['tmdb'], + 'categories[]': await self.get_cat_id(meta['category'], meta.get('foreign'), meta.get('opera'), meta.get('asian')), + 'types[]': await self.get_type_id(disctype), + 'resolutions[]': await self.get_res_id(meta['resolution']), + 'name': "" + } + if meta.get('edition', "") != "": + params['name'] = params['name'] + f" {meta['edition']}" + try: + response = requests.get(url=self.search_url, params=params) + response = response.json() + for each in response['data']: + result = [each][0]['attributes']['name'] + # difference = SequenceMatcher(None, meta['clean_name'], result).ratio() + # if difference >= 0.05: + dupes.append(result) + except Exception: + console.print('[bold red]Unable to search for existing torrents on site. Either the site is down or your API key is incorrect') + await asyncio.sleep(5) + + return dupes diff --git a/src/trackers/TL.py b/src/trackers/TL.py index 15d6935b..f563a683 100644 --- a/src/trackers/TL.py +++ b/src/trackers/TL.py @@ -75,13 +75,13 @@ async def get_cat_id(self, common, meta): raise NotImplementedError('Failed to determine TL category!') - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(common, meta) await common.unit3d_edit_desc(meta, self.tracker, self.signature) - open_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'a+') + open_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'a+', encoding='utf-8') info_filename = 'BD_SUMMARY_00' if meta['bdinfo'] is not None else 'MEDIAINFO_CLEANPATH' open_info = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/{info_filename}.txt", 'r', encoding='utf-8') diff --git a/src/trackers/TTG.py b/src/trackers/TTG.py index 94b27bc7..9337e8a8 100644 --- a/src/trackers/TTG.py +++ b/src/trackers/TTG.py @@ -105,7 +105,7 @@ async def get_anon(self, anon): anon = 'yes' return anon - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await self.edit_desc(meta) @@ -126,7 +126,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8') - ttg_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + ttg_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() torrent_path = f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent" with open(torrent_path, 'rb') as torrentFile: if len(meta['filelist']) == 1: @@ -177,7 +177,7 @@ async def upload(self, meta): raise UploadException(f"Upload to TTG Failed: result URL {up.url} ({up.status_code}) was not expected", 'red') # noqa #F405 return - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] with requests.Session() as session: cookiefile = os.path.abspath(f"{meta['base_dir']}/data/cookies/TTG.pkl") @@ -277,8 +277,8 @@ async def login(self, cookiefile): return async def edit_desc(self, meta): - base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() - with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as descfile: + base = 
open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r', encoding='utf-8').read() + with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w', encoding='utf-8') as descfile: from src.bbcode import BBCODE from src.trackers.COMMON import COMMON common = COMMON(config=self.config) diff --git a/src/trackers/ULCX.py b/src/trackers/ULCX.py index de5c146f..39555dfd 100644 --- a/src/trackers/ULCX.py +++ b/src/trackers/ULCX.py @@ -90,7 +90,7 @@ async def upload(self, meta): bd_dump = None desc = open( f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", - "r", + "r", encoding='utf-8', ).read() open_torrent = open( f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", diff --git a/src/trackers/UNIT3D_TEMPLATE.py b/src/trackers/UNIT3D_TEMPLATE.py index e778b6df..d3bc0677 100644 --- a/src/trackers/UNIT3D_TEMPLATE.py +++ b/src/trackers/UNIT3D_TEMPLATE.py @@ -72,7 +72,7 @@ async def get_res_id(self, resolution): ###### STOP HERE UNLESS EXTRA MODIFICATION IS NEEDED ###### noqa E266 ############################################################### - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) cat_id = await self.get_cat_id(meta['category']) @@ -92,7 +92,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'torrent': open_torrent} data = { @@ -150,7 +150,7 @@ async def upload(self, meta): console.print(data) open_torrent.close() - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/src/trackers/UTP.py b/src/trackers/UTP.py index d6bf86d6..30d16fea 100644 --- a/src/trackers/UTP.py +++ b/src/trackers/UTP.py @@ -28,7 +28,7 @@ def __init__(self, config): self.banned_groups = [] pass - async def upload(self, meta): + async def upload(self, meta, disctype): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) await common.unit3d_edit_desc(meta, self.tracker, self.signature, comparison=True) @@ -48,7 +48,7 @@ async def upload(self, meta): else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read() bd_dump = None - desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[UTOPIA]DESCRIPTION.txt", 'r').read() + desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[UTOPIA]DESCRIPTION.txt", 'r', encoding='utf-8').read() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[UTOPIA]{meta['clean_name']}.torrent", 'rb') files = {'torrent': ("placeholder.torrent", open_torrent, "application/x-bittorrent")} data = { @@ -137,7 +137,7 @@ async def get_res_id(self, resolution): }.get(resolution, '1') return resolution_id - async def search_existing(self, meta): + async def search_existing(self, meta, disctype): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { diff --git a/upload.py b/upload.py index 
5efd25d3..2338d6f7 100644 --- a/upload.py +++ b/upload.py @@ -39,6 +39,7 @@ from src.trackers.UTP import UTP from src.trackers.AL import AL from src.trackers.SHRI import SHRI +from src.trackers.TIK import TIK from src.trackers.PSS import PSS from src.trackers.ULCX import ULCX import json @@ -249,12 +250,14 @@ async def do_the_thing(base_dir): ####### Upload to Trackers ####### # noqa #F266 #################################### common = COMMON(config=config) - api_trackers = ['BLU', 'AITHER', 'STC', 'R4E', 'STT', 'RF', 'ACM', 'LCD', 'HUNO', 'SN', 'LT', 'NBL', 'ANT', 'JPTV', 'TDC', 'OE', 'BHDTV', 'RTF', 'OTW', 'FNP', 'CBR', 'UTP', 'AL', 'SHRI', 'LST', 'BHD', 'TL', 'PSS', 'ULCX'] + api_trackers = ['BLU', 'AITHER', 'STC', 'R4E', 'STT', 'RF', 'ACM', 'LCD', 'HUNO', 'SN', 'LT', 'NBL', 'ANT', 'JPTV', 'TDC', 'OE', 'BHDTV', 'RTF', + 'OTW', 'FNP', 'CBR', 'UTP', 'AL', 'SHRI', 'LST', 'BHD', 'TL', 'TIK', 'PSS', 'ULCX'] http_trackers = ['HDB', 'TTG', 'FL', 'PTER', 'HDT', 'MTV'] tracker_class_map = { - 'BLU': BLU, 'BHD': BHD, 'AITHER': AITHER, 'STC': STC, 'R4E': R4E, 'THR': THR, 'STT': STT, 'HP': HP, 'PTP': PTP, 'RF': RF, 'SN': SN, + 'BLU': BLU, 'BHD': BHD, 'AITHER': AITHER, 'STC': STC, 'R4E': R4E, 'THR': THR, 'STT': STT, 'HP': HP, 'PTP': PTP, 'RF': RF, 'SN': SN, 'TIK': TIK, 'ACM': ACM, 'HDB': HDB, 'LCD': LCD, 'TTG': TTG, 'LST': LST, 'HUNO': HUNO, 'FL': FL, 'LT': LT, 'NBL': NBL, 'ANT': ANT, 'PTER': PTER, 'JPTV': JPTV, - 'TL': TL, 'TDC': TDC, 'HDT': HDT, 'MTV': MTV, 'OE': OE, 'BHDTV': BHDTV, 'RTF': RTF, 'OTW': OTW, 'FNP': FNP, 'CBR': CBR, 'UTP': UTP, 'AL': AL, 'SHRI': SHRI, 'PSS': PSS, 'ULCX': ULCX} + 'TL': TL, 'TDC': TDC, 'HDT': HDT, 'MTV': MTV, 'OE': OE, 'BHDTV': BHDTV, 'RTF': RTF, 'OTW': OTW, 'FNP': FNP, 'CBR': CBR, 'UTP': UTP, 'AL': AL, + 'SHRI': SHRI, 'PSS': PSS, 'ULCX': ULCX} tracker_capabilities = { 'LST': {'mod_q': True, 'draft': True}, @@ -264,7 +267,7 @@ async def do_the_thing(base_dir): 'ULCX': {'mod_q': True} } - async def check_mod_q_and_draft(tracker_class, meta, debug): + async def check_mod_q_and_draft(tracker_class, meta, debug, disctype): modq, draft = None, None tracker_caps = tracker_capabilities.get(tracker_class.tracker, {}) @@ -286,6 +289,7 @@ async def check_mod_q_and_draft(tracker_class, meta, debug): return modq, draft for tracker in trackers: + disctype = meta.get('disctype', None) tracker = tracker.replace(" ", "").upper().strip() if meta['name'].endswith('DUPE?'): meta['name'] = meta['name'].replace(' DUPE?', '') @@ -311,7 +315,7 @@ async def check_mod_q_and_draft(tracker_class, meta, debug): if upload_to_tracker: # Get mod_q, draft, or draft/live depending on the tracker - modq, draft = await check_mod_q_and_draft(tracker_class, meta, debug) + modq, draft = await check_mod_q_and_draft(tracker_class, meta, debug, disctype) # Print mod_q and draft info if relevant if modq is not None: @@ -330,13 +334,13 @@ async def check_mod_q_and_draft(tracker_class, meta, debug): if tracker == "RTF": await tracker_class.api_test(meta) - dupes = await tracker_class.search_existing(meta) + dupes = await tracker_class.search_existing(meta, disctype) dupes = await common.filter_dupes(dupes, meta) meta = dupe_check(dupes, meta) # Proceed with upload if the meta is set to upload if tracker == "TL" or meta.get('upload', False): - await tracker_class.upload(meta) + await tracker_class.upload(meta, disctype) if tracker == 'SN': await asyncio.sleep(16) await client.add_to_client(meta, tracker_class.tracker) @@ -360,11 +364,11 @@ async def check_mod_q_and_draft(tracker_class, meta, debug): if 
check_banned_group(tracker_class.tracker, tracker_class.banned_groups, meta): continue if await tracker_class.validate_credentials(meta) is True: - dupes = await tracker_class.search_existing(meta) + dupes = await tracker_class.search_existing(meta, disctype) dupes = await common.filter_dupes(dupes, meta) meta = dupe_check(dupes, meta) if meta['upload'] is True: - await tracker_class.upload(meta) + await tracker_class.upload(meta, disctype) await client.add_to_client(meta, tracker_class.tracker) if tracker == "MANUAL": @@ -414,11 +418,11 @@ async def check_mod_q_and_draft(tracker_class, meta, debug): console.print("[yellow]Logging in to THR") session = thr.login(session) console.print("[yellow]Searching for Dupes") - dupes = thr.search_existing(session, meta.get('imdb_id')) + dupes = thr.search_existing(session, meta.get('imdb_id'), disctype) dupes = await common.filter_dupes(dupes, meta) meta = dupe_check(dupes, meta) if meta['upload'] is True: - await thr.upload(session, meta) + await thr.upload(session, meta, disctype) await client.add_to_client(meta, "THR") except Exception: console.print(traceback.print_exc()) @@ -454,14 +458,14 @@ async def check_mod_q_and_draft(tracker_class, meta, debug): meta['upload'] = True else: console.print("[yellow]Searching for Existing Releases") - dupes = await ptp.search_existing(groupID, meta) + dupes = await ptp.search_existing(groupID, meta, disctype) dupes = await common.filter_dupes(dupes, meta) meta = dupe_check(dupes, meta) if meta.get('imdb_info', {}) == {}: meta['imdb_info'] = await prep.get_imdb_info(meta['imdb_id'], meta) if meta['upload'] is True: ptpUrl, ptpData = await ptp.fill_upload_form(groupID, meta) - await ptp.upload(meta, ptpUrl, ptpData) + await ptp.upload(meta, ptpUrl, ptpData, disctype) await asyncio.sleep(5) await client.add_to_client(meta, "PTP") except Exception:
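
Note: the signature change above is applied uniformly, so every tracker module now accepts the extra disctype argument even when it never uses it. A minimal, purely hypothetical stub showing the interface the upload loop now expects (the name EXAMPLE_TRACKER is illustrative, not from the patch):

class EXAMPLE_TRACKER():
    """Hypothetical stub; real tracker classes also define source_flag, URLs, etc."""
    tracker = 'EXAMPLE'
    banned_groups = []

    async def search_existing(self, meta, disctype):
        # disctype is accepted for interface compatibility; most trackers ignore it
        return []

    async def upload(self, meta, disctype):
        # only disc-oriented trackers such as TIK actually consume disctype
        pass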
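
Because THR is session-based rather than UNIT3D-API-based, its dupe search keeps the positional order declared by THR.search_existing(self, session, imdb_id, disctype) and is not awaited. A short sketch of the expected call, assuming thr is a THR instance, session is the logged-in requests session, and meta already holds the parsed CLI values:

def thr_dupe_check(thr, session, meta):
    # illustrative only; mirrors the THR branch of upload.py above
    imdb_id = meta.get('imdb_id')            # IMDb id as stored in meta
    disctype = meta.get('disctype', None)    # e.g. ['BD25'] or None
    # THR.search_existing is a plain (synchronous) method
    return thr.search_existing(session, imdb_id, disctype)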
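
The category, type and resolution IDs that TIK.upload() sends are produced by the helper methods added in TIK.py. A small standalone sketch of how they resolve; the demo wrapper and the example inputs are assumptions, only the method names and mappings come from the patch:

import asyncio

from src.trackers.TIK import TIK


async def demo(config):
    tik = TIK(config=config)
    # MOVIE with the foreign flag maps to category '3'; opera -> '5', asian -> '6', plain MOVIE -> '1'
    cat_id = await tik.get_cat_id('MOVIE', foreign=True, opera=None, asian=None)  # '3'
    # get_type_id accepts either the list form the CLI produces or a bare string
    type_id = await tik.get_type_id(['BD50'])  # '5'
    res_id = await tik.get_res_id('1080p')     # '3'
    return cat_id, type_id, res_id

# asyncio.run(demo(config)) -> ('3', '5', '3') for these example inputs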
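
The mod_queue_opt_in value in the TIK payload comes from get_flag, which lets the tracker's config entry override the per-run meta flag. A minimal sketch, assuming a stripped-down config dict rather than a full configuration:

import asyncio

from src.trackers.TIK import TIK

# config entry present: it decides, regardless of meta
tik = TIK(config={'TRACKERS': {'TIK': {'modq': True}}})
print(asyncio.run(tik.get_flag({'modq': False}, 'modq')))   # 1

# no config entry: the meta flag decides
tik = TIK(config={'TRACKERS': {'TIK': {}}})
print(asyncio.run(tik.get_flag({'modq': True}, 'modq')))    # 1
print(asyncio.run(tik.get_flag({}, 'modq')))                # 0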
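
parse_subtitles only scans the "Text #" blocks of an IFO mediainfo dump and collects their Language fields. A tiny illustration with an invented excerpt (the sample text is not from a real disc):

from src.trackers.TIK import TIK

ifo_mi_sample = (
    "Text #1\n"
    "Format       : RLE\n"
    "Language     : English\n"
    "\n"
    "Text #2\n"
    "Language     : French\n"
)
# plain (non-async) helper, so no event loop is needed
print(TIK(config={}).parse_subtitles(ifo_mi_sample))   # {'English', 'French'}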