From b7ed255f0815c154e684aafc978b7e160fe9756d Mon Sep 17 00:00:00 2001
From: Audionut
Date: Sat, 31 Aug 2024 19:24:48 +1000
Subject: [PATCH] More linting

---
 cogs/commands.py                |   2 +-
 discordbot.py                   |   1 +
 src/args.py                     |   6 +-
 src/prep.py                     |   4 +-
 src/trackers/ACM.py             |   4 +-
 src/trackers/CBR.py             |   2 +-
 src/trackers/HDB.py             |  12 +-
 src/trackers/MTV.py             |  89 +++++-----
 src/trackers/NBL.py             |  59 +++----
 src/trackers/PTP.py             | 302 ++++++++++++++++----------------
 src/trackers/RTF.py             |  33 ++--
 src/trackers/SN.py              |  24 ++-
 src/trackers/TL.py              |  24 +--
 src/trackers/UNIT3D_TEMPLATE.py |  10 +-
 upload.py                       |  10 +-
 15 files changed, 277 insertions(+), 305 deletions(-)

diff --git a/cogs/commands.py b/cogs/commands.py
index 647c4787b..7bcd8e7ef 100644
--- a/cogs/commands.py
+++ b/cogs/commands.py
@@ -313,7 +313,7 @@ async def send_embed_and_upload(self, ctx, meta):
         res = meta['resolution']
         missing = await self.get_missing(meta)
 
-        embed=discord.Embed(title=f"Upload: {meta['title']}", url=f"https://www.themoviedb.org/{meta['category'].lower()}/{meta['tmdb']}", description=meta['overview'], color=0x0080ff, timestamp=datetime.utcnow())
+        embed = discord.Embed(title=f"Upload: {meta['title']}", url=f"https://www.themoviedb.org/{meta['category'].lower()}/{meta['tmdb']}", description=meta['overview'], color=0x0080ff, timestamp=datetime.utcnow())
         embed.add_field(name="Links", value=f"[TMDB](https://www.themoviedb.org/{meta['category'].lower()}/{meta['tmdb']}){imdb}{tvdb}")
         embed.add_field(name=f"{res} / {meta['type']}{tag}", value=f"```{meta['name']}```", inline=False)
         if missing != []:
diff --git a/discordbot.py b/discordbot.py
index 6fadac007..297c29713 100644
--- a/discordbot.py
+++ b/discordbot.py
@@ -97,6 +97,7 @@ async def on_message(self, message):
             return  # ignore all bots
         await self.process_commands(message)
 
+
 if __name__ == '__main__':
     logging.basicConfig(level=logging.INFO)
diff --git a/src/args.py b/src/args.py
index e3bf8651b..0db2f521e 100644
--- a/src/args.py
+++ b/src/args.py
@@ -118,7 +118,7 @@ def parse(self, args, meta):
                     parsed = urllib.parse.urlparse(value2)
                     try:
                         meta['ptp'] = urllib.parse.parse_qs(parsed.query)['torrentid'][0]
-                    except:
+                    except Exception:
                         console.print('[red]Your terminal ate part of the url, please surround in quotes next time, or pass only the torrentid')
                         console.print('[red]Continuing without -ptp')
                 else:
@@ -131,7 +131,7 @@ def parse(self, args, meta):
                     if blupath.endswith('/'):
                         blupath = blupath[:-1]
                     meta['blu'] = blupath.split('/')[-1]
-                except:
+                except Exception:
                     console.print('[red]Unable to parse id from url')
                     console.print('[red]Continuing without --blu')
             else:
@@ -141,7 +141,7 @@ def parse(self, args, meta):
                 parsed = urllib.parse.urlparse(value2)
                 try:
                     meta['hdb'] = urllib.parse.parse_qs(parsed.query)['id'][0]
-                except:
+                except Exception:
                     console.print('[red]Your terminal ate part of the url, please surround in quotes next time, or pass only the torrentid')
                     console.print('[red]Continuing without -hdb')
                 else:
diff --git a/src/prep.py b/src/prep.py
index 390ef334a..912410e5a 100644
--- a/src/prep.py
+++ b/src/prep.py
@@ -1441,7 +1441,7 @@ def get_keywords(self, tmdb_info):
             keywords = [f"{keyword['name'].replace(',', ' ')}" for keyword in tmdb_keywords.get('keywords')]
         elif tmdb_keywords.get('results') is not None:
             keywords = [f"{keyword['name'].replace(',', ' ')}" for keyword in tmdb_keywords.get('results')]
-        return(', '.join (keywords))
+        return (', '.join(keywords))
         else:
             return ''
 
@@ -1450,7 +1450,7 @@ def get_genres(self, tmdb_info):
         tmdb_genres = tmdb_info.get('genres', [])
         if tmdb_genres is not []:
             genres = [f"{genre['name'].replace(',', ' ')}" for genre in tmdb_genres]
-        return(', '.join (genres))
+        return (', '.join(genres))
         else:
             return ''
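
The args.py hunks above, like most of this patch, tighten bare `except:` clauses into `except Exception:` (the flake8 E722 fix). A minimal standalone sketch of why the narrower form matters; this is illustrative code, not part of the repository:

    def parse_torrent_id(query: dict) -> str:
        # Stand-in for the urllib.parse.parse_qs lookups done in args.py.
        return query['torrentid'][0]

    try:
        parse_torrent_id({})  # raises KeyError
    except Exception:
        # Catches ordinary runtime errors such as KeyError, but still lets
        # SystemExit and KeyboardInterrupt propagate. A bare "except:" would
        # swallow those too, making a long-running upload hard to interrupt.
        print('Continuing without -ptp')
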
diff --git a/src/trackers/ACM.py b/src/trackers/ACM.py
index 99fecbe80..18a5fc7df 100644
--- a/src/trackers/ACM.py
+++ b/src/trackers/ACM.py
@@ -196,7 +196,7 @@ async def upload(self, meta):
         region_id = await common.unit3d_region_ids(meta.get('region'))
         distributor_id = await common.unit3d_distributor_ids(meta.get('distributor'))
         acm_name = await self.edit_name(meta)
-        if meta['anon'] == 0 and bool(str2bool(str(self.config['TRACKERS'][self.tracker].get('anon', "False")))) == False:
+        if meta['anon'] == 0 and bool(str2bool(str(self.config['TRACKERS'][self.tracker].get('anon', "False")))) is False:
             anon = 0
         else:
             anon = 1
@@ -262,7 +262,7 @@ async def upload(self, meta):
             console.print("It may have uploaded, go check")
             return
         else:
-            console.print(f"[cyan]Request Data:")
+            console.print("[cyan]Request Data:")
             console.print(data)
         open_torrent.close()
diff --git a/src/trackers/CBR.py b/src/trackers/CBR.py
index 255636538..2c26c0897 100644
--- a/src/trackers/CBR.py
+++ b/src/trackers/CBR.py
@@ -130,7 +130,7 @@ async def get_type_id(self, type):
 
     async def get_res_id(self, resolution):
         resolution_id = {
-            '4320p': '1', 
+            '4320p': '1',
             '2160p': '2',
             '1080p': '3',
             '1080i': '4',
diff --git a/src/trackers/HDB.py b/src/trackers/HDB.py
index 1d76ef09a..14a6ca5a1 100644
--- a/src/trackers/HDB.py
+++ b/src/trackers/HDB.py
@@ -132,7 +132,7 @@ async def get_tags(self, meta):
                 "MASTERS OF CINEMA": 19, "MOC": 19,
                 "KINO LORBER": 55, "KINO": 55,
                 "BFI VIDEO": 63, "BFI": 63, "BRITISH FILM INSTITUTE": 63,
-                "STUDIO CANAL":65,
+                "STUDIO CANAL": 65,
                 "ARROW": 64
             }
             if meta.get('distributor') in distributor_dict.keys():
@@ -330,16 +330,16 @@ async def search_existing(self, meta):
             'search': meta['resolution']
         }
         if int(meta.get('imdb_id', '0').replace('tt', '0')) != 0:
-            data['imdb'] = {'id' : meta['imdb_id']}
+            data['imdb'] = {'id': meta['imdb_id']}
         if int(meta.get('tvdb_id', '0')) != 0:
-            data['tvdb'] = {'id' : meta['tvdb_id']}
+            data['tvdb'] = {'id': meta['tvdb_id']}
         try:
             response = requests.get(url=url, data=json.dumps(data))
             response = response.json()
             for each in response['data']:
                 result = each['name']
                 dupes.append(result)
-        except:
+        except Exception:
             console.print('[bold red]Unable to search for existing torrents on site. Either the site is down or your passkey is incorrect')
             await asyncio.sleep(5)
 
@@ -367,7 +367,7 @@ async def validate_api(self):
                 if r.get('status', 5) == 0:
                     return True
                 return False
-        except:
+        except Exception:
             return False
 
     async def validate_cookies(self, meta):
@@ -503,7 +503,7 @@ async def get_info_from_torrent_id(self, hdb_id):
                     hdb_name = response['data'][0]['name']
                     hdb_torrenthash = response['data'][0]['hash']
-                except:
+                except Exception:
                     console.print_exception()
             else:
                 console.print("Failed to get info from HDB ID. Either the site is down or your credentials are invalid")
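
Besides the bare excepts, the other change recurring through the ACM and HDB hunks above (and in nearly every file below) is swapping equality against singletons for identity checks, the flake8 E711/E712 fixes. A small hypothetical sketch of the rule, not taken from the repository:

    response_status = None

    # E711: "== None" usually works, but "is None" is the canonical test,
    # because "==" can be overloaded by the operand's class while "is" cannot.
    if response_status is None:
        response_status = 5

    # E712: the same reasoning covers the True/False singletons, hence
    # "== False" becoming "is False" in the ACM anon check above.
    valid = response_status == 0
    if valid is False:
        print('API key did not validate')
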
Either the site is down or your credentials are invalid") diff --git a/src/trackers/MTV.py b/src/trackers/MTV.py index 56c071f11..fb5b3f601 100644 --- a/src/trackers/MTV.py +++ b/src/trackers/MTV.py @@ -8,11 +8,11 @@ import cli_ui import pickle import re -import traceback from pathlib import Path from str2bool import str2bool from src.trackers.COMMON import COMMON -from datetime import datetime, date +from datetime import datetime + class MTV(): """ @@ -43,7 +43,7 @@ async def upload(self, meta): # Initiate the upload with retry logic await self.upload_with_retry(meta, cookiefile, common) - + async def upload_with_retry(self, meta, cookiefile, common, img_host_index=1): approved_image_hosts = ['ptpimg', 'imgbox'] @@ -102,16 +102,16 @@ async def upload_with_retry(self, meta, cookiefile, common, img_host_index=1): comment="Created by L4G's Upload Assistant", created_by="L4G's Upload Assistant" ) - + # Explicitly set the piece size and update metainfo new_torrent.piece_size = 8388608 # 8 MiB in bytes new_torrent.metainfo['info']['piece length'] = 8388608 # Ensure 'piece length' is set - + # Validate and write the new torrent new_torrent.validate_piece_size() new_torrent.generate(callback=prep.torf_cb, interval=5) new_torrent.write(f"{meta['base_dir']}/tmp/{meta['uuid']}/MTV.torrent", overwrite=True) - + torrent_filename = "MTV" await common.edit_torrent(meta, self.tracker, self.source_flag, torrent_filename=torrent_filename) @@ -170,17 +170,17 @@ async def upload_with_retry(self, meta, cookiefile, common, img_host_index=1): console.print(response.url) else: if "authkey.php" in response.url: - console.print(f"[red]No DL link in response, It may have uploaded, check manually.") + console.print("[red]No DL link in response, It may have uploaded, check manually.") else: - console.print(f"[red]Upload Failed. It doesn't look like you are logged in.") - except: - console.print(f"[red]It may have uploaded, check manually.") + console.print("[red]Upload Failed. 
It doesn't look like you are logged in.") + except Exception: + console.print("[red]It may have uploaded, check manually.") print(traceback.print_exc()) else: - console.print(f"[cyan]Request Data:") + console.print("[cyan]Request Data:") console.print(data) return - + async def handle_image_upload(self, meta, img_host_index=1, approved_image_hosts=None): if approved_image_hosts is None: approved_image_hosts = ['ptpimg', 'imgbox'] @@ -229,7 +229,7 @@ async def edit_desc(self, meta): base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read() with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as desc: # adding bd_dump to description if it exits and adding empty string to mediainfo - if meta['bdinfo'] != None: + if meta['bdinfo'] is not None: mi_dump = None bd_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/BD_SUMMARY_00.txt", 'r', encoding='utf-8').read() else: @@ -241,19 +241,19 @@ async def edit_desc(self, meta): desc.write("[mediainfo]" + mi_dump + "[/mediainfo]\n\n") images = meta['image_list'] if len(images) > 0: - desc.write(f"[spoiler=Screenshots]") + desc.write("[spoiler=Screenshots]") for each in range(len(images)): raw_url = images[each]['raw_url'] img_url = images[each]['img_url'] desc.write(f"[url={raw_url}][img=250]{img_url}[/img][/url]") - desc.write(f"[/spoiler]") + desc.write("[/spoiler]") desc.write(f"\n\n{base}") desc.close() return async def edit_group_desc(self, meta): description = "" - if meta['imdb_id'] not in ("0", "", None): + if meta['imdb_id'] not in ("0", "", None): description += f"https://www.imdb.com/title/tt{meta['imdb_id']}" if meta['tmdb'] != 0: description += f"\nhttps://www.themoviedb.org/{str(meta['category'].lower())}/{str(meta['tmdb'])}" @@ -289,15 +289,15 @@ async def edit_name(self, meta): mtv_name = re.sub(r"[^0-9a-zA-ZÀ-ÿ. &+'\-\[\]]+", "", mtv_name) mtv_name = mtv_name.replace(' ', '.').replace('..', '.') return mtv_name - + async def get_res_id(self, resolution): resolution_id = { - '8640p':'0', + '8640p': '0', '4320p': '4000', '2160p': '2160', - '1440p' : '1440', + '1440p': '1440', '1080p': '1080', - '1080i':'1080', + '1080i': '1080', '720p': '720', '576p': '0', '576i': '0', @@ -355,6 +355,7 @@ async def get_origin_id(self, meta): # returning P2P else: return '3' + async def get_tags(self, meta): tags = [] # Genres @@ -369,7 +370,7 @@ async def get_tags(self, meta): tags.append('hd') # Streaming Service if str(meta['service_longname']) != "": - tags.append(f"{meta['service_longname'].lower().replace(' ', '.')}.source") + tags.append(f"{meta['service_longname'].lower().replace(' ', '.')}.source") # Release Type/Source for each in ['remux', 'WEB.DL', 'WEBRip', 'HDTV', 'BluRay', 'DVD', 'HDDVD']: if (each.lower().replace('.', '') in meta['type'].lower()) or (each.lower().replace('-', '') in meta['source']): @@ -388,14 +389,14 @@ async def get_tags(self, meta): tags.append('sd.season') else: tags.append('hd.season') - + # movie tags if meta['category'] == 'MOVIE': if meta['sd'] == 1: tags.append('sd.movie') else: tags.append('hd.movie') - + # Audio tags audio_tag = "" for each in ['dd', 'ddp', 'aac', 'truehd', 'mp3', 'mp2', 'dts', 'dts.hd', 'dts.x']: @@ -436,10 +437,10 @@ async def validate_credentials(self, meta): if not os.path.exists(cookiefile): await self.login(cookiefile) vcookie = await self.validate_cookies(meta, cookiefile) - if vcookie != True: + if vcookie is not True: console.print('[red]Failed to validate cookies. 
Please confirm that the site is up and your username and password is valid.') recreate = cli_ui.ask_yes_no("Log in again and create new session?") - if recreate == True: + if recreate is True: if os.path.exists(cookiefile): os.remove(cookiefile) await self.login(cookiefile) @@ -448,14 +449,14 @@ async def validate_credentials(self, meta): else: return False vapi = await self.validate_api() - if vapi != True: + if vapi is not True: console.print('[red]Failed to validate API. Please confirm that the site is up and your API key is valid.') return True async def validate_api(self): url = self.search_url params = { - 'apikey' : self.config['TRACKERS'][self.tracker]['api_key'].strip(), + 'apikey': self.config['TRACKERS'][self.tracker]['api_key'].strip(), } try: r = requests.get(url, params=params) @@ -464,7 +465,7 @@ async def validate_api(self): console.print("[red]Invalid API Key") return False return True - except: + except Exception: return False async def validate_cookies(self, meta, cookiefile): @@ -499,12 +500,12 @@ async def login(self, cookiefile): with requests.Session() as session: url = 'https://www.morethantv.me/login' payload = { - 'username' : self.config['TRACKERS'][self.tracker].get('username'), - 'password' : self.config['TRACKERS'][self.tracker].get('password'), - 'keeploggedin' : 1, - 'cinfo' : '1920|1080|24|0', - 'submit' : 'login', - 'iplocked' : 1, + 'username': self.config['TRACKERS'][self.tracker].get('username'), + 'password': self.config['TRACKERS'][self.tracker].get('password'), + 'keeploggedin': 1, + 'cinfo': '1920|1080|24|0', + 'submit': 'login', + 'iplocked': 1, # 'ssl' : 'yes' } res = session.get(url="https://www.morethantv.me/login") @@ -521,11 +522,11 @@ async def login(self, cookiefile): mfa_code = pyotp.parse_uri(otp_uri).now() else: mfa_code = console.input('[yellow]MTV 2FA Code: ') - + two_factor_payload = { - 'token' : resp.text.rsplit('name="token" value="', 1)[1][:48], - 'code' : mfa_code, - 'submit' : 'login' + 'token': resp.text.rsplit('name="token" value="', 1)[1][:48], + 'code': mfa_code, + 'submit': 'login' } resp = session.post(url="https://www.morethantv.me/twofactor/login", data=two_factor_payload) # checking if logged in @@ -543,9 +544,9 @@ async def search_existing(self, meta): dupes = [] console.print("[yellow]Searching for existing torrents on site...") params = { - 't' : 'search', + 't': 'search', 'apikey' : self.config['TRACKERS'][self.tracker]['api_key'].strip(), - 'q' : "" + 'q': "" } if meta['imdb_id'] not in ("0", "", None): params['imdbid'] = "tt" + meta['imdb_id'] @@ -569,11 +570,11 @@ async def search_existing(self, meta): console.print(f"[yellow]{rr.get('status_message')}") await asyncio.sleep(5) else: - console.print(f"[red]Site Seems to be down or not responding to API") - except: - console.print(f"[red]Unable to search for existing torrents on site. Most likely the site is down.") + console.print("[red]Site Seems to be down or not responding to API") + except Exception: + console.print("[red]Unable to search for existing torrents on site. 
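For context on the piece-size block kept in the MTV hunks above: the tracker caps pieces at 8 MiB, so the code rebuilds the .torrent instead of reusing the client-created one. A rough sketch of the same torf calls with hypothetical paths; `validate_piece_size()` in the hunk is a helper on the project's own torrent wrapper, so plain torf (which validates on assignment) is shown instead:

    from torf import Torrent

    # Paths and announce URL here are placeholders.
    new_torrent = Torrent(
        path='/path/to/content',
        trackers=['https://tracker.example/announce'],
        private=True,
        comment="Created by L4G's Upload Assistant",
    )

    # Force an 8 MiB piece size; torf would otherwise pick one from the
    # total content size, possibly exceeding the tracker's limit.
    new_torrent.piece_size = 8388608
    new_torrent.metainfo['info']['piece length'] = 8388608

    new_torrent.generate()  # hash all pieces
    new_torrent.write('/path/to/MTV.torrent', overwrite=True)
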
Most likely the site is down.") dupes.append("FAILED SEARCH") print(traceback.print_exc()) await asyncio.sleep(5) - return dupes \ No newline at end of file + return dupes diff --git a/src/trackers/NBL.py b/src/trackers/NBL.py index 56e01a671..a870918d3 100644 --- a/src/trackers/NBL.py +++ b/src/trackers/NBL.py @@ -2,9 +2,7 @@ # import discord import asyncio import requests -import os -from guessit import guessit -from str2bool import str2bool +from guessit import guessit from src.trackers.COMMON import COMMON from src.console import console @@ -18,13 +16,6 @@ class NBL(): Set type/category IDs Upload """ - - ############################################################### - ######## EDIT ME ######## - ############################################################### - - # ALSO EDIT CLASS NAME ABOVE - def __init__(self, config): self.config = config self.tracker = 'NBL' @@ -38,9 +29,8 @@ def __init__(self, config): 'PlaySD', 'playXD', 'project-gxs', 'PSA', 'QaS', 'Ranger', 'RAPiDCOWS', 'Raze', 'Reaktor', 'REsuRRecTioN', 'RMTeam', 'ROBOTS', 'SpaceFish', 'SPASM', 'SSA', 'Telly', 'Tenrai-Sensei', 'TM', 'Trix', 'URANiME', 'VipapkStudios', 'ViSiON', 'Wardevil', 'xRed', 'XS', 'YakuboEncodes', 'YuiSubs', 'ZKBL', 'ZmN', 'ZMNT'] - + pass - async def get_cat_id(self, meta): if meta.get('tv_pack', 0) == 1: @@ -49,9 +39,6 @@ async def get_cat_id(self, meta): cat_id = 1 return cat_id - ############################################################### - ###### STOP HERE UNLESS EXTRA MODIFICATION IS NEEDED ###### - ############################################################### async def edit_desc(self, meta): # Leave this in so manual works return @@ -63,21 +50,21 @@ async def upload(self, meta): common = COMMON(config=self.config) await common.edit_torrent(meta, self.tracker, self.source_flag) - if meta['bdinfo'] != None: + if meta['bdinfo'] is not None: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/BD_SUMMARY_00.txt", 'r', encoding='utf-8').read() else: mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read()[:-65].strip() open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') files = {'file_input': open_torrent} data = { - 'api_key' : self.api_key, - 'tvmazeid' : int(meta.get('tvmaze_id', 0)), - 'mediainfo' : mi_dump, - 'category' : await self.get_cat_id(meta), - 'ignoredupes' : 'on' + 'api_key': self.api_key, + 'tvmazeid': int(meta.get('tvmaze_id', 0)), + 'mediainfo': mi_dump, + 'category': await self.get_cat_id(meta), + 'ignoredupes': 'on' } - - if meta['debug'] == False: + + if meta['debug'] is False: response = requests.post(url=self.upload_url, files=files, data=data) try: if response.ok: @@ -86,34 +73,30 @@ async def upload(self, meta): else: console.print(response) console.print(response.text) - except: + except Exception: console.print_exception() console.print("[bold yellow]It may have uploaded, go check") - return + return else: console.print(f"[cyan]Request Data:") console.print(data) open_torrent.close() - - - - async def search_existing(self, meta): dupes = [] console.print("[yellow]Searching for existing torrents on site...") if int(meta.get('tvmaze_id', 0)) != 0: - search_term = {'tvmaze' : int(meta['tvmaze_id'])} + search_term = {'tvmaze': int(meta['tvmaze_id'])} elif int(meta.get('imdb_id', '0').replace('tt', '')) == 0: - search_term = {'imdb' : meta.get('imdb_id', '0').replace('tt', '')} + search_term = {'imdb': meta.get('imdb_id', '0').replace('tt', '')} else: - search_term = 
diff --git a/src/trackers/PTP.py b/src/trackers/PTP.py
index 758b87d6c..9bd25fa2d 100644
--- a/src/trackers/PTP.py
+++ b/src/trackers/PTP.py
@@ -5,8 +5,6 @@
 import os
 from pathlib import Path
 from str2bool import str2bool
-import time
-import traceback
 import json
 import glob
 import multiprocessing
@@ -18,7 +16,8 @@
 from src.exceptions import *
 from src.console import console
 from torf import Torrent
-from datetime import datetime, date
+from datetime import datetime
+
 
 class PTP():
 
@@ -28,69 +27,69 @@ def __init__(self, config):
         self.source_flag = 'PTP'
         self.api_user = config['TRACKERS']['PTP'].get('ApiUser', '').strip()
         self.api_key = config['TRACKERS']['PTP'].get('ApiKey', '').strip()
-        self.announce_url = config['TRACKERS']['PTP'].get('announce_url', '').strip() 
-        self.username = config['TRACKERS']['PTP'].get('username', '').strip() 
+        self.announce_url = config['TRACKERS']['PTP'].get('announce_url', '').strip()
+        self.username = config['TRACKERS']['PTP'].get('username', '').strip()
         self.password = config['TRACKERS']['PTP'].get('password', '').strip()
         self.web_source = str2bool(str(config['TRACKERS']['PTP'].get('add_web_source_to_desc', True)))
         self.user_agent = f'Upload Assistant/2.1 ({platform.system()} {platform.release()})'
         self.banned_groups = ['aXXo', 'BMDru', 'BRrip', 'CM8', 'CrEwSaDe', 'CTFOH', 'd3g', 'DNL', 'FaNGDiNG0', 'HD2DVD', 'HDTime', 'ION10', 'iPlanet', 'KiNGDOM', 'mHD',
                               'mSD', 'nHD', 'nikt0', 'nSD', 'NhaNc3', 'OFT', 'PRODJi', 'SANTi', 'SPiRiT', 'STUTTERSHIT', 'ViSION', 'VXT', 'WAF', 'x0r', 'YIFY',]
-    
+
         self.sub_lang_map = {
-            ("Arabic", "ara", "ar") : 22,
-            ("Brazilian Portuguese", "Brazilian", "Portuguese-BR", 'pt-br') : 49,
-            ("Bulgarian", "bul", "bg") : 29,
-            ("Chinese", "chi", "zh", "Chinese (Simplified)", "Chinese (Traditional)") : 14,
-            ("Croatian", "hrv", "hr", "scr") : 23,
-            ("Czech", "cze", "cz", "cs") : 30,
-            ("Danish", "dan", "da") : 10,
-            ("Dutch", "dut", "nl") : 9,
-            ("English", "eng", "en", "English (CC)", "English - SDH") : 3,
-            ("English - Forced", "English (Forced)", "en (Forced)") : 50,
-            ("English Intertitles", "English (Intertitles)", "English - Intertitles", "en (Intertitles)") : 51,
-            ("Estonian", "est", "et") : 38,
-            ("Finnish", "fin", "fi") : 15,
-            ("French", "fre", "fr") : 5,
-            ("German", "ger", "de") : 6,
-            ("Greek", "gre", "el") : 26,
-            ("Hebrew", "heb", "he") : 40,
-            ("Hindi" "hin", "hi") : 41,
-            ("Hungarian", "hun", "hu") : 24,
-            ("Icelandic", "ice", "is") : 28,
-            ("Indonesian", "ind", "id") : 47,
-            ("Italian", "ita", "it") : 16,
-            ("Japanese", "jpn", "ja") : 8,
-            ("Korean", "kor", "ko") : 19,
-            ("Latvian", "lav", "lv") : 37,
-            ("Lithuanian", "lit", "lt") : 39,
-            ("Norwegian", "nor", "no") : 12,
-            ("Persian", "fa", "far") : 52,
-            ("Polish", "pol", "pl") : 17,
-            ("Portuguese", "por", "pt") : 21,
-            ("Romanian", "rum", "ro") : 13,
-            ("Russian", "rus", "ru") : 7,
-            ("Serbian", "srp", "sr", "scc") : 31,
-            ("Slovak", "slo", "sk") : 42,
-            ("Slovenian", "slv", "sl") : 43,
-            ("Spanish", "spa", "es") : 4,
-            ("Swedish", "swe", "sv") : 11,
-            ("Thai", "tha", "th") : 20,
-            ("Turkish", "tur", "tr") : 18,
-            ("Ukrainian", "ukr", "uk") : 34,
-            ("Vietnamese", "vie", "vi") : 25,
+            ("Arabic", "ara", "ar"): 22,
+            ("Brazilian Portuguese", "Brazilian", "Portuguese-BR", 'pt-br'): 49,
+            ("Bulgarian", "bul", "bg"): 29,
+            ("Chinese", "chi", "zh", "Chinese (Simplified)", "Chinese (Traditional)"): 14,
+            ("Croatian", "hrv", "hr", "scr"): 23,
+            ("Czech", "cze", "cz", "cs"): 30,
+            ("Danish", "dan", "da"): 10,
+            ("Dutch", "dut", "nl"): 9,
+            ("English", "eng", "en", "English (CC)", "English - SDH"): 3,
+            ("English - Forced", "English (Forced)", "en (Forced)"): 50,
+            ("English Intertitles", "English (Intertitles)", "English - Intertitles", "en (Intertitles)"): 51,
+            ("Estonian", "est", "et"): 38,
+            ("Finnish", "fin", "fi"): 15,
+            ("French", "fre", "fr"): 5,
+            ("German", "ger", "de"): 6,
+            ("Greek", "gre", "el"): 26,
+            ("Hebrew", "heb", "he"): 40,
+            ("Hindi", "hin", "hi"): 41,
+            ("Hungarian", "hun", "hu"): 24,
+            ("Icelandic", "ice", "is"): 28,
+            ("Indonesian", "ind", "id"): 47,
+            ("Italian", "ita", "it"): 16,
+            ("Japanese", "jpn", "ja"): 8,
+            ("Korean", "kor", "ko"): 19,
+            ("Latvian", "lav", "lv"): 37,
+            ("Lithuanian", "lit", "lt"): 39,
+            ("Norwegian", "nor", "no"): 12,
+            ("Persian", "fa", "far"): 52,
+            ("Polish", "pol", "pl"): 17,
+            ("Portuguese", "por", "pt"): 21,
+            ("Romanian", "rum", "ro"): 13,
+            ("Russian", "rus", "ru"): 7,
+            ("Serbian", "srp", "sr", "scc"): 31,
+            ("Slovak", "slo", "sk"): 42,
+            ("Slovenian", "slv", "sl"): 43,
+            ("Spanish", "spa", "es"): 4,
+            ("Swedish", "swe", "sv"): 11,
+            ("Thai", "tha", "th"): 20,
+            ("Turkish", "tur", "tr"): 18,
+            ("Ukrainian", "ukr", "uk"): 34,
+            ("Vietnamese", "vie", "vi"): 25,
         }
 
     async def get_ptp_id_imdb(self, search_term, search_file_folder):
         imdb_id = ptp_torrent_id = None
         filename = str(os.path.basename(search_term))
         params = {
-            'filelist' : filename
+            'filelist': filename
         }
         headers = {
-            'ApiUser' : self.api_user,
-            'ApiKey' : self.api_key,
-            'User-Agent' : self.user_agent
+            'ApiUser': self.api_user,
+            'ApiKey': self.api_key,
+            'User-Agent': self.user_agent
         }
         url = 'https://passthepopcorn.me/torrents.php'
         response = requests.get(url, params=params, headers=headers)
@@ -133,15 +132,15 @@ async def get_ptp_id_imdb(self, search_term, search_file_folder):
             pass
         console.print(f'[yellow]Could not find any release matching [bold yellow]{filename}[/bold yellow] on PTP')
         return None, None, None
-    
+
     async def get_imdb_from_torrent_id(self, ptp_torrent_id):
         params = {
-            'torrentid' : ptp_torrent_id
+            'torrentid': ptp_torrent_id
         }
         headers = {
-            'ApiUser' : self.api_user,
-            'ApiKey' : self.api_key,
-            'User-Agent' : self.user_agent
+            'ApiUser': self.api_user,
+            'ApiKey': self.api_key,
+            'User-Agent': self.user_agent
         }
         url = 'https://passthepopcorn.me/torrents.php'
         response = requests.get(url, params=params, headers=headers)
@@ -164,16 +163,16 @@ async def get_imdb_from_torrent_id(self, ptp_torrent_id):
                 return None, None
         except Exception:
             return None, None
-    
+
     async def get_ptp_description(self, ptp_torrent_id, is_disc):
         params = {
-            'id' : ptp_torrent_id,
-            'action' : 'get_description'
+            'id': ptp_torrent_id,
+            'action': 'get_description'
         }
         headers = {
-            'ApiUser' : self.api_user,
-            'ApiKey' : self.api_key,
-            'User-Agent' : self.user_agent
+            'ApiUser': self.api_user,
+            'ApiKey': self.api_key,
+            'User-Agent': self.user_agent
         }
         url = 'https://passthepopcorn.me/torrents.php'
         response = requests.get(url, params=params, headers=headers)
         await asyncio.sleep(1)
         ptp_desc = response.text
         bbcode = BBCODE()
         desc = bbcode.clean_ptp_description(ptp_desc, is_disc)
-        console.print(f"[bold green]Successfully grabbed description from PTP")
+        console.print("[bold green]Successfully grabbed description from PTP")
         return desc
-    
+
     async def get_group_by_imdb(self, imdb):
         params = {
-            'imdb' : imdb,
+            'imdb': imdb,
         }
         headers = {
-            'ApiUser' : self.api_user,
-            'ApiKey' : self.api_key,
-            'User-Agent' : self.user_agent
+            'ApiUser': self.api_user,
+            'ApiKey': self.api_key,
+            'User-Agent': self.user_agent
         }
         url = 'https://passthepopcorn.me/torrents.php'
         response = requests.get(url=url, headers=headers, params=params)
         await asyncio.sleep(1)
         try:
             response = response.json()
-            if response.get("Page") == "Browse": # No Releases on Site with ID
+            if response.get("Page") == "Browse":  # No Releases on Site with ID
                 return None
-            elif response.get('Page') == "Details": # Group Found
+            elif response.get('Page') == "Details":  # Group Found
                 groupID = response.get('GroupId')
                 console.print(f"[green]Matched IMDb: [yellow]tt{imdb}[/yellow] to Group ID: [yellow]{groupID}[/yellow][/green]")
                 console.print(f"[green]Title: [yellow]{response.get('Name')}[/yellow] ([yellow]{response.get('Year')}[/yellow])")
@@ -212,14 +211,14 @@ async def get_group_by_imdb(self, imdb):
 
     async def get_torrent_info(self, imdb, meta):
         params = {
-            'imdb' : imdb,
-            'action' : 'torrent_info',
-            'fast' : 1
+            'imdb': imdb,
+            'action': 'torrent_info',
+            'fast': 1
         }
         headers = {
-            'ApiUser' : self.api_user,
-            'ApiKey' : self.api_key,
-            'User-Agent' : self.user_agent
+            'ApiUser': self.api_user,
+            'ApiKey': self.api_key,
+            'User-Agent': self.user_agent
         }
         url = "https://passthepopcorn.me/ajax.php"
         response = requests.get(url=url, params=params, headers=headers)
@@ -240,9 +239,9 @@ async def get_torrent_info(self, imdb, meta):
 
     async def get_torrent_info_tmdb(self, meta):
         tinfo = {
-            "title" : meta.get("title", ""),
-            "year" : meta.get("year", ""),
-            "album_desc" : meta.get("overview", ""),
+            "title": meta.get("title", ""),
+            "year": meta.get("year", ""),
+            "album_desc": meta.get("overview", ""),
         }
         tags = await self.get_tags([meta.get("genres", ""), meta.get("keywords", "")])
         tinfo['tags'] = ", ".join(tags)
@@ -266,21 +265,20 @@ async def get_tags(self, check_against):
 
     async def search_existing(self, groupID, meta):
         # Map resolutions to SD / HD / UHD
         quality = None
-        if meta.get('sd', 0) == 1: # 1 is SD
+        if meta.get('sd', 0) == 1:  # 1 is SD
             quality = "Standard Definition"
         elif meta['resolution'] in ["1440p", "1080p", "1080i", "720p"]:
             quality = "High Definition"
         elif meta['resolution'] in ["2160p", "4320p", "8640p"]:
             quality = "Ultra High Definition"
-        
+
         params = {
-            'id' : groupID,
+            'id': groupID,
         }
         headers = {
-            'ApiUser' : self.api_user,
-            'ApiKey' : self.api_key,
-            'User-Agent' : self.user_agent
+            'ApiUser': self.api_user,
+            'ApiKey': self.api_key,
+            'User-Agent': self.user_agent
         }
         url = 'https://passthepopcorn.me/torrents.php'
         response = requests.get(url=url, headers=headers, params=params)
@@ -291,7 +289,7 @@ async def search_existing(self, groupID, meta):
             torrents = response.get('Torrents', [])
             if len(torrents) != 0:
                 for torrent in torrents:
-                    if torrent.get('Quality') == quality and quality != None:
+                    if torrent.get('Quality') == quality and quality is not None:
                         existing.append(f"[{torrent.get('Resolution')}] {torrent.get('ReleaseName', 'RELEASE NAME NOT FOUND')}")
         except Exception:
             console.print("[red]An error has occured trying to find existing releases")
@@ -299,9 +297,9 @@ async def search_existing(self, groupID, meta):
 
     async def ptpimg_url_rehost(self, image_url):
         payload = {
-            'format' : 'json',
-            'api_key' : self.config["DEFAULT"]["ptpimg_api"],
-            'link-upload' : image_url
+            'format': 'json',
+            'api_key': self.config["DEFAULT"]["ptpimg_api"],
+            'link-upload': image_url
         }
         headers = { 'referer': 'https://ptpimg.me/index.php'}
         url = "https://ptpimg.me/upload.php"
@@ -312,7 +310,7 @@ async def ptpimg_url_rehost(self, image_url):
             ptpimg_code = response[0]['code']
             ptpimg_ext = response[0]['ext']
             img_url = f"https://ptpimg.me/{ptpimg_code}.{ptpimg_ext}"
-        except:
+        except Exception:
             console.print("[red]PTPIMG image rehost failed")
             img_url = image_url
             # img_url = ptpimg_upload(image_url, ptpimg_api)
@@ -351,7 +349,7 @@ def get_type(self, imdb_info, meta):
                 ptpType = "Stand-up Comedy"
             elif "concert" in keywords:
                 ptpType = "Concert"
-        if ptpType == None:
+        if ptpType is None:
             if meta.get('mode', 'discord') == 'cli':
                 ptpTypeList = ["Feature Film", "Short Film", "Miniseries", "Stand-up Comedy", "Concert", "Movie Collection"]
                 ptpType = cli_ui.ask_choice("Select the proper type", choices=ptpTypeList)
@@ -372,14 +370,14 @@ def get_codec(self, meta):
             codec = "DVD9"
         else:
             codecmap = {
-                "AVC" : "H.264",
-                "H.264" : "H.264",
-                "HEVC" : "H.265",
-                "H.265" : "H.265",
+                "AVC": "H.264",
+                "H.264": "H.264",
+                "HEVC": "H.265",
+                "H.265": "H.265",
             }
             searchcodec = meta.get('video_codec', meta.get('video_encode'))
             codec = codecmap.get(searchcodec, searchcodec)
-            if meta.get('has_encode_settings') == True:
+            if meta.get('has_encode_settings') is True:
                 codec = codec.replace("H.", "x")
         return codec
 
@@ -403,23 +401,23 @@ def get_container(self, meta):
         else:
             ext = os.path.splitext(meta['filelist'][0])[1]
             containermap = {
-                '.mkv' : "MKV",
-                '.mp4' : 'MP4'
+                '.mkv': "MKV",
+                '.mp4': 'MP4'
             }
             container = containermap.get(ext, 'Other')
         return container
 
     def get_source(self, source):
         sources = {
-            "Blu-ray" : "Blu-ray",
-            "BluRay" : "Blu-ray",
-            "HD DVD" : "HD-DVD",
-            "HDDVD" : "HD-DVD",
-            "Web" : "WEB",
-            "HDTV" : "HDTV",
-            'UHDTV' : 'HDTV',
-            "NTSC" : "DVD",
-            "PAL" : "DVD"
+            "Blu-ray": "Blu-ray",
+            "BluRay": "Blu-ray",
+            "HD DVD": "HD-DVD",
+            "HDDVD": "HD-DVD",
+            "Web": "WEB",
+            "HDTV": "HDTV",
+            'UHDTV': 'HDTV',
+            "NTSC": "DVD",
+            "PAL": "DVD"
         }
         source_id = sources.get(source, "OtherR")
         return source_id
@@ -451,27 +449,27 @@ def get_subtitles(self, meta):
                     sub_langs.append(subID)
 
         if sub_langs == []:
-            sub_langs = [44] # No Subtitle
+            sub_langs = [44]  # No Subtitle
         return sub_langs
 
     def get_trumpable(self, sub_langs):
         trumpable_values = {
-            "English Hardcoded Subs (Full)" : 4,
-            "English Hardcoded Subs (Forced)" : 50,
-            "No English Subs" : 14,
-            "English Softsubs Exist (Mislabeled)" : None,
-            "Hardcoded Subs (Non-English)" : "OTHER"
+            "English Hardcoded Subs (Full)": 4,
+            "English Hardcoded Subs (Forced)": 50,
+            "No English Subs": 14,
+            "English Softsubs Exist (Mislabeled)": None,
+            "Hardcoded Subs (Non-English)": "OTHER"
         }
         opts = cli_ui.select_choices("English subtitles not found. Please select any/all applicable options:", choices=list(trumpable_values.keys()))
         trumpable = []
         if opts:
             for t, v in trumpable_values.items():
                 if t in ''.join(opts):
-                    if v == None:
+                    if v is None:
                         break
-                    elif v != 50: # Hardcoded, Forced
+                    elif v != 50:  # Hardcoded, Forced
                         trumpable.append(v)
-                    elif v == "OTHER": #Hardcoded, Non-English
+                    elif v == "OTHER":  # Hardcoded, Non-English
                         trumpable.append(14)
                         hc_sub_langs = cli_ui.ask_string("Enter language code for HC Subtitle languages")
                         for lang, subID in self.sub_lang_map.items():
@@ -480,7 +478,7 @@ def get_trumpable(self, sub_langs):
                     else:
                         sub_langs.append(v)
                         trumpable.append(4)
-    
+
         sub_langs = list(set(sub_langs))
         trumpable = list(set(trumpable))
         if trumpable == []:
@@ -497,7 +495,7 @@ def get_remaster_title(self, meta):
             remaster_title.append('The Criterion Collection')
         elif meta.get('distributor') in ('MASTERS OF CINEMA', 'MOC'):
             remaster_title.append('Masters of Cinema')
-        
+
         # Editions
         # Director's Cut, Extended Edition, Rifftrax, Theatrical Cut, Uncut, Unrated
         if "director's cut" in meta.get('edition', '').lower():
@@ -518,7 +516,7 @@ def get_remaster_title(self, meta):
 
         # Features
         # 2-Disc Set, 2in1, 2D/3D Edition, 3D Anaglyph, 3D Full SBS, 3D Half OU, 3D Half SBS,
-        # 4K Restoration, 4K Remaster, 
+        # 4K Restoration, 4K Remaster,
         # Extras, Remux,
         if meta.get('type') == "REMUX":
             remaster_title.append("Remux")
@@ -532,10 +530,10 @@ def get_remaster_title(self, meta):
             remaster_title.append('Dual Audio')
         if "Dubbed" in meta['audio']:
             remaster_title.append('English Dub')
-        if meta.get('has_commentary', False) == True:
+        if meta.get('has_commentary', False) is True:
             remaster_title.append('With Commentary')
 
-        # HDR10, HDR10+, Dolby Vision, 10-bit, 
+        # HDR10, HDR10+, Dolby Vision, 10-bit,
         # if "Hi10P" in meta.get('video_encode', ''):
         #     remaster_title.append('10-bit')
         if meta.get('hdr', '').strip() == '' and meta.get('bit_depth') == '10':
@@ -585,16 +583,16 @@ async def edit_desc(self, meta):
                         mi_dump = each['summary']
                     else:
                         mi_dump = each['summary']
-                    if meta.get('vapoursynth', False) == True:
+                    if meta.get('vapoursynth', False) is True:
                         use_vs = True
                     else:
                         use_vs = False
                     ds = multiprocessing.Process(target=prep.disc_screenshots, args=(f"FILE_{i}", each['bdinfo'], meta['uuid'], meta['base_dir'], use_vs, [], meta.get('ffdebug', False), 2))
                     ds.start()
-                    while ds.is_alive() == True:
+                    while ds.is_alive() is True:
                         await asyncio.sleep(1)
-                    new_screens = glob.glob1(f"{meta['base_dir']}/tmp/{meta['uuid']}",f"FILE_{i}-*.png")
-                    images, dummy = prep.upload_screens(meta, 2, 1, 0, 2, new_screens, {}) 
+                    new_screens = glob.glob1(f"{meta['base_dir']}/tmp/{meta['uuid']}", f"FILE_{i}-*.png")
+                    images, dummy = prep.upload_screens(meta, 2, 1, 0, 2, new_screens, {})
 
                 if each['type'] == "DVD":
                     desc.write(f"[b][size=3]{each['name']}:[/size][/b]\n")
@@ -609,12 +607,12 @@ async def edit_desc(self, meta):
                     else:
                         ds = multiprocessing.Process(target=prep.dvd_screenshots, args=(meta, i, 2))
                         ds.start()
-                        while ds.is_alive() == True:
+                        while ds.is_alive() is True:
                             await asyncio.sleep(1)
                         new_screens = glob.glob1(f"{meta['base_dir']}/tmp/{meta['uuid']}", f"{meta['discs'][i]['name']}-*.png")
-                        images, dummy = prep.upload_screens(meta, 2, 1, 0, 2, new_screens, {}) 
-                
-                if len(images) > 0: 
+                        images, dummy = prep.upload_screens(meta, 2, 1, 0, 2, new_screens, {})
+
+                if len(images) > 0:
                     for each in range(len(images[:int(meta['screens'])])):
                         raw_url = images[each]['raw_url']
                         desc.write(f"[img]{raw_url}[/img]\n")
                file = meta['filelist'][i]
                if i == 0:
                    # Add This line for all web-dls
-            if meta['type'] == 'WEBDL' and meta.get('service_longname', '') != '' and meta.get('description', None) == None and self.web_source == True:
+            if meta['type'] == 'WEBDL' and meta.get('service_longname', '') != '' and meta.get('description', None) is None and self.web_source is True:
                         desc.write(f"[quote][align=center]This release is sourced from {meta['service_longname']}[/align][/quote]")
                     mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read()
                 else:
                     # Export Mediainfo
-                    mi_dump = MediaInfo.parse(file, output="STRING", full=False, mediainfo_options={'inform_version' : '1'})
+                    mi_dump = MediaInfo.parse(file, output="STRING", full=False, mediainfo_options={'inform_version': '1'})
                     # mi_dump = mi_dump.replace(file, os.path.basename(file))
                     with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/TEMP_PTP_MEDIAINFO.txt", "w", newline="", encoding="utf-8") as f:
                         f.write(mi_dump)
@@ -638,9 +636,9 @@ async def edit_desc(self, meta):
                     # Generate and upload screens for other files
                     s = multiprocessing.Process(target=prep.screenshots, args=(file, f"FILE_{i}", meta['uuid'], meta['base_dir'], meta, 2))
                     s.start()
-                    while s.is_alive() == True:
+                    while s.is_alive() is True:
                         await asyncio.sleep(3)
-                    new_screens = glob.glob1(f"{meta['base_dir']}/tmp/{meta['uuid']}",f"FILE_{i}-*.png")
+                    new_screens = glob.glob1(f"{meta['base_dir']}/tmp/{meta['uuid']}", f"FILE_{i}-*.png")
                     images, dummy = prep.upload_screens(meta, 2, 1, 0, 2, new_screens, {})
 
                 desc.write(f"[mediainfo]{mi_dump}[/mediainfo]\n")
@@ -648,8 +646,8 @@ async def edit_desc(self, meta):
                     base2ptp = self.convert_bbcode(base)
                     if base2ptp.strip() != "":
                         desc.write(base2ptp)
-                        desc.write("\n\n") 
-                if len(images) > 0: 
+                        desc.write("\n\n")
+                if len(images) > 0:
                     for each in range(len(images[:int(meta['screens'])])):
                         raw_url = images[each]['raw_url']
                         desc.write(f"[img]{raw_url}[/img]\n")
@@ -668,7 +666,7 @@ async def get_AntiCsrfToken(self, meta):
                 loggedIn = await self.validate_login(uploadresponse)
         else:
             console.print("[yellow]PTP Cookies not found. Creating new session.")
-        if loggedIn == True:
+        if loggedIn is True:
             AntiCsrfToken = re.search(r'data-AntiCsrfToken="(.*)"', uploadresponse.text).group(1)
         else:
             passKey = re.match(r"https?://please\.passthepopcorn\.me:?\d*/(.+)/announce",self.announce_url).group(1)
@@ -678,7 +676,7 @@ async def get_AntiCsrfToken(self, meta):
                 "passkey": passKey,
                 "keeplogged": "1",
             }
-            headers = {"User-Agent" : self.user_agent}
+            headers = {"User-Agent": self.user_agent}
             loginresponse = session.post("https://passthepopcorn.me/ajax.php?action=login", data=data, headers=headers)
             await asyncio.sleep(2)
             try:
@@ -724,26 +722,26 @@ async def fill_upload_form(self, groupID, meta):
         data = {
             "submit": "true",
             "remaster_year": "",
-            "remaster_title": self.get_remaster_title(meta), #Eg.: Hardcoded English
+            "remaster_title": self.get_remaster_title(meta),  # Eg.: Hardcoded English
             "type": self.get_type(meta['imdb_info'], meta),
-            "codec": "Other", # Sending the codec as custom.
+            "codec": "Other",  # Sending the codec as custom.
             "other_codec": self.get_codec(meta),
             "container": "Other",
             "other_container": self.get_container(meta),
             "resolution": resolution,
-            "source": "Other", # Sending the source as custom.
+            "source": "Other",  # Sending the source as custom.
"other_source": self.get_source(meta['source']), "release_desc": desc, "nfo_text": "", - "subtitles[]" : ptp_subtitles, - "trumpable[]" : ptp_trumpable, - "AntiCsrfToken" : await self.get_AntiCsrfToken(meta) + "subtitles[]": ptp_subtitles, + "trumpable[]": ptp_trumpable, + "AntiCsrfToken": await self.get_AntiCsrfToken(meta) } if data["remaster_year"] != "" or data["remaster_title"] != "": data["remaster"] = "on" if resolution == "Other": data["other_resolution"] = other_resolution - if meta.get('personalrelease', False) == True: + if meta.get('personalrelease', False) is True: data["internalrip"] = "on" # IF SPECIAL (idk how to check for this automatically) # data["special"] = "on" @@ -752,18 +750,18 @@ async def fill_upload_form(self, groupID, meta): else: data["imdb"] = meta["imdb_id"] - if groupID == None: # If need to make new group + if groupID is None: # If need to make new group url = "https://passthepopcorn.me/upload.php" if data["imdb"] == "0": tinfo = await self.get_torrent_info_tmdb(meta) else: tinfo = await self.get_torrent_info(meta.get("imdb_id", "0"), meta) cover = meta["imdb_info"].get("cover") - if cover == None: + if cover is None: cover = meta.get('poster') - if cover != None and "ptpimg" not in cover: + if cover is not None and "ptpimg" not in cover: cover = await self.ptpimg_url_rehost(cover) - while cover == None: + while cover is None: cover = cli_ui.ask_string("No Poster was found. Please input a link to a poster: \n", default="") if "ptpimg" not in str(cover) and str(cover).endswith(('.jpg', '.png')): cover = await self.ptpimg_url_rehost(cover) @@ -778,15 +776,15 @@ async def fill_upload_form(self, groupID, meta): if new_data['year'] in ['', '0', 0, None] and meta.get('manual_year') not in [0, '', None]: new_data['year'] = meta['manual_year'] while new_data["tags"] == "": - if meta.get('mode', 'discord') == 'cli': + if meta.get('mode', 'discord') == 'cli': console.print('[yellow]Unable to match any tags') console.print("Valid tags can be found on the PTP upload form") new_data["tags"] = console.input("Please enter at least one tag. 
                    new_data["tags"] = console.input("Please enter at least one tag. Comma separated (action, animation, short):")
             data.update(new_data)
-            if meta["imdb_info"].get("directors", None) != None:
+            if meta["imdb_info"].get("directors", None) is not None:
                 data["artist[]"] = tuple(meta['imdb_info'].get('directors'))
                 data["importance[]"] = "1"
-        else: # Upload on existing group
+        else:  # Upload on existing group
             url = f"https://passthepopcorn.me/upload.php?groupid={groupID}"
             data["groupid"] = groupID
 
@@ -826,7 +824,7 @@ async def upload(self, meta, url, data):
                 comment="Created by L4G's Upload Assistant",
                 created_by="L4G's Upload Assistant"
             )
-    
+
             # Explicitly set the piece size and update metainfo
             new_torrent.piece_size = 16777216  # 16 MiB in bytes
             new_torrent.metainfo['info']['piece length'] = 16777216  # Ensure 'piece length' is set
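
Worth a note on the `sub_lang_map` cleaned up earlier in this file: each PTP subtitle ID is keyed by a tuple of language aliases, and lookups walk the keys testing membership. A small hypothetical sketch of the pattern, which also shows why the missing comma fixed in the Hindi entry mattered:

    sub_lang_map = {
        ("English", "eng", "en"): 3,
        ("Hindi", "hin", "hi"): 41,  # note the comma after "Hindi"
    }

    def lookup(language):
        # Mirrors how get_subtitles()/get_trumpable() scan the map.
        for aliases, sub_id in sub_lang_map.items():
            if language in aliases:
                return sub_id
        return None

    print(lookup("eng"))  # 3

    # Adjacent string literals concatenate, so without the comma the key
    # ("Hindi" "hin", "hi") is really ("Hindihin", "hi") and lookups for
    # "Hindi" or "hin" silently fail.
    print(("Hindi" "hin", "hi"))  # ('Hindihin', 'hi')
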
diff --git a/src/trackers/RTF.py b/src/trackers/RTF.py
index 28ce55924..232cbda2c 100644
--- a/src/trackers/RTF.py
+++ b/src/trackers/RTF.py
@@ -5,11 +5,11 @@
 import base64
 import re
 import datetime
-import json
 
 from src.trackers.COMMON import COMMON
 from src.console import console
 
+
 class RTF():
     """
     Edit for Tracker:
@@ -18,10 +18,6 @@ class RTF():
         Set type/category IDs
         Upload
     """
-
-    ###############################################################
-    ########                    EDIT ME                    ########
-    ###############################################################
     def __init__(self, config):
         self.config = config
         self.tracker = 'RTF'
@@ -36,7 +32,7 @@ async def upload(self, meta):
         common = COMMON(config=self.config)
         await common.edit_torrent(meta, self.tracker, self.source_flag)
         await common.unit3d_edit_desc(meta, self.tracker, self.forum_link)
-        if meta['bdinfo'] != None:
+        if meta['bdinfo'] is not None:
             mi_dump = None
             bd_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/BD_SUMMARY_00.txt", 'r', encoding='utf-8').read()
         else:
@@ -45,21 +41,21 @@ async def upload(self, meta):
 
         screenshots = []
         for image in meta['image_list']:
-            if image['raw_url'] != None:
+            if image['raw_url'] is not None:
                 screenshots.append(image['raw_url'])
 
         json_data = {
-            'name' : meta['name'],
+            'name': meta['name'],
             # description does not work for some reason
             # 'description' : meta['overview'] + "\n\n" + desc + "\n\n" + "Uploaded by L4G Upload Assistant",
             'description': "this is a description",
             # editing mediainfo so that instead of '1 080p' it is '1,080p', as the site mediainfo parser won't work otherwise.
-            'mediaInfo': re.sub(r"(\d+)\s+(\d+)", r"\1,\2", mi_dump) if bd_dump == None else f"{bd_dump}",
+            'mediaInfo': re.sub(r"(\d+)\s+(\d+)", r"\1,\2", mi_dump) if bd_dump is None else f"{bd_dump}",
             "nfo": "",
             "url": "https://www.imdb.com/title/" + (meta['imdb_id'] if str(meta['imdb_id']).startswith("tt") else "tt" + meta['imdb_id']) + "/",
             # auto pulled from IMDB
             "descr": "This is short description",
-            "poster": meta["poster"] if meta["poster"] != None else "",
-            "type": "401" if meta['category'] == 'MOVIE'else "402",
+            "poster": meta["poster"] if meta["poster"] is not None else "",
+            "type": "401" if meta['category'] == 'MOVIE' else "402",
             "screenshots": screenshots,
             'isAnonymous': self.config['TRACKERS'][self.tracker]["anon"],
@@ -77,13 +73,11 @@ async def upload(self, meta):
             'Authorization': self.config['TRACKERS'][self.tracker]['api_key'].strip(),
         }
 
-        
         if datetime.date.today().year - meta['year'] <= 9:
-            console.print(f"[red]ERROR: Not uploading!\nMust be older than 10 Years as per rules")
+            console.print("[red]ERROR: Not uploading!\nMust be older than 10 Years as per rules")
             return
-        
-        if meta['debug'] == False:
+
+        if meta['debug'] is False:
             response = requests.post(url=self.upload_url, json=json_data, headers=headers)
             try:
                 console.print(response.json())
@@ -91,14 +85,13 @@ async def upload(self, meta):
                     t_id = response.json()['torrent']['id']
                     await common.add_tracker_torrent(meta, self.tracker, self.source_flag, self.config['TRACKERS'][self.tracker].get('announce_url'), "https://retroflix.club/browse/t/" + str(t_id))
 
-            except:
+            except Exception:
                 console.print("It may have uploaded, go check")
                 return
         else:
-            console.print(f"[cyan]Request Data:")
+            console.print("[cyan]Request Data:")
             console.print(json_data)
 
-
     async def search_existing(self, meta):
         dupes = []
         console.print("[yellow]Searching for existing torrents on site...")
@@ -108,7 +101,7 @@ async def search_existing(self, meta):
         }
 
         params = {
-            'includingDead' : '1'
+            'includingDead': '1'
         }
 
         if meta['imdb_id'] != "0":
@@ -122,7 +115,7 @@ async def search_existing(self, meta):
             for each in response:
                 result = [each][0]['name']
                 dupes.append(result)
-        except:
+        except Exception:
             console.print('[bold red]Unable to search for existing torrents on site. Either the site is down or your API key is incorrect')
             await asyncio.sleep(5)
 
@@ -157,7 +150,7 @@ async def generate_new_api(self, meta):
 
         if response.status_code == 201:
             console.print('[bold green]Using New API key generated for this upload')
-            console.print(f'[bold green]Please update your L4G config with the below RTF API Key for future uploads')
+            console.print('[bold green]Please update your L4G config with the below RTF API Key for future uploads')
             console.print(f'[bold yellow]{response.json()["token"]}')
             self.config['TRACKERS'][self.tracker]['api_key'] = response.json()["token"]
         else:
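
The regex in the RTF `mediaInfo` line above deserves a quick illustration: MediaInfo renders large numbers with spaces as group separators, which RTF's parser rejects, so digit-space-digit runs are rewritten with commas. A minimal demo of the same substitution:

    import re

    mi_line = "Width : 1 920 pixels"
    print(re.sub(r"(\d+)\s+(\d+)", r"\1,\2", mi_line))
    # -> "Width : 1,920 pixels"
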
diff --git a/src/trackers/SN.py b/src/trackers/SN.py
index 54f13d64d..b987d3f37 100644
--- a/src/trackers/SN.py
+++ b/src/trackers/SN.py
@@ -15,7 +15,6 @@ class SN():
         Set type/category IDs
         Upload
     """
-
     def __init__(self, config):
         self.config = config
         self.tracker = 'SN'
@@ -31,7 +30,7 @@ async def get_type_id(self, type):
             'BluRay': '3',
             'Web': '1',
             # boxset is 4
-            #'NA': '4',
+            # 'NA': '4',
             'DVD': '2'
         }.get(type, '0')
         return type_id
@@ -39,11 +38,11 @@ async def get_type_id(self, type):
     async def upload(self, meta):
         common = COMMON(config=self.config)
         await common.edit_torrent(meta, self.tracker, self.source_flag)
-        #await common.unit3d_edit_desc(meta, self.tracker, self.forum_link)
+        # await common.unit3d_edit_desc(meta, self.tracker, self.forum_link)
         await self.edit_desc(meta)
         cat_id = ""
         sub_cat_id = ""
-        #cat_id = await self.get_cat_id(meta)
+        # cat_id = await self.get_cat_id(meta)
         if meta['category'] == 'MOVIE':
             cat_id = 1
             # sub cat is source so using source to get
@@ -56,8 +55,7 @@ async def upload(self, meta):
             sub_cat_id = 5
         # todo need to do a check for docs and add as subcat
 
-
-        if meta['bdinfo'] != None:
+        if meta['bdinfo'] is not None:
             mi_dump = None
             bd_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/BD_SUMMARY_00.txt", 'r', encoding='utf-8').read()
         else:
@@ -90,7 +88,7 @@ async def upload(self, meta):
 
         }
 
-        if meta['debug'] == False:
+        if meta['debug'] is False:
             response = requests.request("POST", url=self.upload_url, data=data, files=files)
 
             try:
@@ -99,16 +97,15 @@ async def upload(self, meta):
                 else:
                     console.print("[red]Did not upload successfully")
                     console.print(response.json())
-            except:
+            except Exception:
                 console.print("[red]Error! It may have uploaded, go check")
                 console.print(data)
                 console.print_exception()
                 return
         else:
-            console.print(f"[cyan]Request Data:")
+            console.print("[cyan]Request Data:")
             console.print(data)
 
-
     async def edit_desc(self, meta):
         base = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/DESCRIPTION.txt", 'r').read()
         with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'w') as desc:
@@ -125,13 +122,12 @@ async def edit_desc(self, meta):
         desc.close()
         return
 
-
     async def search_existing(self, meta):
         dupes = []
         console.print("[yellow]Searching for existing torrents on site...")
 
         params = {
-            'api_key' : self.config['TRACKERS'][self.tracker]['api_key'].strip()
+            'api_key': self.config['TRACKERS'][self.tracker]['api_key'].strip()
         }
 
         # using title if IMDB id does not exist to search
@@ -141,7 +137,7 @@ async def search_existing(self, meta):
             else:
                 params['filter'] = meta['title']
         else:
-            #using IMDB_id to search if it exists.
+            # using IMDB_id to search if it exists.
             if meta['category'] == 'TV':
                 params['media_ref'] = f"tt{meta['imdb_id']}"
                 params['filter'] = f"{meta.get('season', '')}{meta.get('episode', '')}" + " " + meta['resolution']
             else:
                 params['media_ref'] = f"tt{meta['imdb_id']}"
                 params['filter'] = meta['resolution']
@@ -155,7 +151,7 @@ async def search_existing(self, meta):
             for i in response['data']:
                 result = i['name']
                 dupes.append(result)
-        except:
+        except Exception:
             console.print('[red]Unable to search for existing torrents on site. Either the site is down or your API key is incorrect')
             await asyncio.sleep(5)
diff --git a/src/trackers/TL.py b/src/trackers/TL.py
index 9b98f602f..15d6935b3 100644
--- a/src/trackers/TL.py
+++ b/src/trackers/TL.py
@@ -35,13 +35,13 @@ def __init__(self, config):
         self.upload_url = 'https://www.torrentleech.org/torrents/upload/apiupload'
         self.signature = None
         self.banned_groups = [""]
-    
+
         self.announce_key = self.config['TRACKERS'][self.tracker]['announce_key']
         self.config['TRACKERS'][self.tracker]['announce_url'] = f"https://tracker.torrentleech.org/a/{self.announce_key}/announce"
         pass
-    
+
     async def get_cat_id(self, common, meta):
-        if meta.get('anime', 0): 
+        if meta.get('anime', 0):
             return self.CATEGORIES['Anime']
 
         if meta['category'] == 'MOVIE':
@@ -64,7 +64,7 @@ async def get_cat_id(self, common, meta):
             elif meta['type'] == 'HDTV':
                 return self.CATEGORIES['MovieHdRip']
         elif meta['category'] == 'TV':
-            if meta['original_language'] != 'en': 
+            if meta['original_language'] != 'en':
                 return self.CATEGORIES['TvForeign']
             elif meta.get('tv_pack', 0):
                 return self.CATEGORIES['TvBoxsets']
@@ -82,13 +82,13 @@ async def upload(self, meta):
         await common.unit3d_edit_desc(meta, self.tracker, self.signature)
 
         open_desc = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]DESCRIPTION.txt", 'a+')
-        
-        info_filename = 'BD_SUMMARY_00' if meta['bdinfo'] != None else 'MEDIAINFO_CLEANPATH'
+
+        info_filename = 'BD_SUMMARY_00' if meta['bdinfo'] is not None else 'MEDIAINFO_CLEANPATH'
         open_info = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/{info_filename}.txt", 'r', encoding='utf-8')
         open_desc.write('\n\n')
         open_desc.write(open_info.read())
         open_info.close()
-        
+
         open_desc.seek(0)
         open_torrent = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb')
         files = {
@@ -96,19 +96,19 @@ async def upload(self, meta):
             'torrent': (self.get_name(meta) + '.torrent', open_torrent)
         }
         data = {
-            'announcekey' : self.announce_key,
-            'category' : cat_id
+            'announcekey': self.announce_key,
+            'category': cat_id
         }
         headers = {
             'User-Agent': f'Upload Assistant/2.1 ({platform.system()} {platform.release()})'
         }
-        
-        if meta['debug'] == False:
+
+        if meta['debug'] is False:
             response = requests.post(url=self.upload_url, files=files, data=data, headers=headers)
             if not response.text.isnumeric():
                 console.print(f'[red]{response.text}')
         else:
-            console.print(f"[cyan]Request Data:")
+            console.print("[cyan]Request Data:")
             console.print(data)
         open_torrent.close()
         open_desc.close()
diff --git a/src/trackers/UNIT3D_TEMPLATE.py b/src/trackers/UNIT3D_TEMPLATE.py
index 9d84b6dae..996bab254 100644
--- a/src/trackers/UNIT3D_TEMPLATE.py
+++ b/src/trackers/UNIT3D_TEMPLATE.py
@@ -54,7 +54,7 @@ async def get_type_id(self, type):
 
     async def get_res_id(self, resolution):
         resolution_id = {
-            '8640p':'10',
+            '8640p': '10',
             '4320p': '1',
             '2160p': '2',
             '1440p': '3',
@@ -99,7 +99,7 @@ async def upload(self, meta):
             'name': meta['name'],
             'description': desc,
             'mediainfo': mi_dump,
-            'bdinfo': bd_dump, 
+            'bdinfo': bd_dump,
             'category_id': cat_id,
             'type_id': type_id,
             'resolution_id': resolution_id,
@@ -142,11 +142,11 @@ async def upload(self, meta):
             response = requests.post(url=self.upload_url, files=files, data=data, headers=headers, params=params)
             try:
                 console.print(response.json())
-            except:
+            except Exception:
                 console.print("It may have uploaded, go check")
                 return
         else:
-            console.print(f"[cyan]Request Data:")
+            console.print("[cyan]Request Data:")
             console.print(data)
         open_torrent.close()
@@ -171,7 +171,7 @@ async def search_existing(self, meta):
                 # difference = SequenceMatcher(None, meta['clean_name'], result).ratio()
                 # if difference >= 0.05:
                 dupes.append(result)
-        except:
+        except Exception:
             console.print('[bold red]Unable to search for existing torrents on site. Either the site is down or your API key is incorrect')
             await asyncio.sleep(5)
diff --git a/upload.py b/upload.py
index 1bbb4fb7f..33c83499c 100644
--- a/upload.py
+++ b/upload.py
@@ -47,6 +47,7 @@
 import shutil
 import glob
 import cli_ui
+import traceback
 
 from src.console import console
 from rich.markdown import Markdown
@@ -54,13 +55,12 @@
 
 cli_ui.setup(color='always', title="L4G's Upload Assistant")
 
-import traceback
 
 base_dir = os.path.abspath(os.path.dirname(__file__))
 
 try:
     from data.config import config
-except:
+except Exception:
     if not os.path.exists(os.path.abspath(f"{base_dir}/data/config.py")):
         try:
             if os.path.exists(os.path.abspath(f"{base_dir}/data/config.json")):
@@ -75,7 +75,7 @@
                 from data.config import config
             else:
                 raise NotImplementedError
-        except:
+        except Exception:
             cli_ui.info(cli_ui.red, "We have switched from .json to .py for config to have a much more lenient experience")
             cli_ui.info(cli_ui.red, "Looks like the auto updater didn't work though")
             cli_ui.info(cli_ui.red, "Updating is just 2 easy steps:")
@@ -369,7 +369,7 @@ async def do_the_thing(base_dir):
                         if meta['upload'] is True:
                             await thr.upload(session, meta)
                             await client.add_to_client(meta, "THR")
-                except:
+                except Exception:
                     console.print(traceback.print_exc())
 
             if tracker == "PTP":
@@ -406,7 +406,7 @@ async def do_the_thing(base_dir):
                             await ptp.upload(meta, ptpUrl, ptpData)
                             await asyncio.sleep(5)
                             await client.add_to_client(meta, "PTP")
-                    except:
+                    except Exception:
                         console.print(traceback.print_exc())
 
             if tracker == "TL":