
Commit

adding retroflix to supported sites (#253)
swannie-eire authored Oct 3, 2023
1 parent 03e8a03 commit b64d345
Showing 4 changed files with 143 additions and 5 deletions.
6 changes: 6 additions & 0 deletions data/example-config.py
@@ -177,6 +177,12 @@
            "announce_url" : "https://onlyencodes.cc/announce/customannounceurl",
            # "anon" : False
        },
        "RTF": {
            "api_key": 'get_it_by_running_/api/ login command from https://retroflix.club/api/doc',
            "announce_url": "get from upload page",
            # "tag": "RetroFlix, nd",
            "anon": True
        },
        "RF" : {
            "api_key" : "RF api key",
            "announce_url" : "https://reelflix.xyz/announce/customannounceurl",
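For reference, the new RTF block is consumed the same way as the other tracker entries: src/trackers/RTF.py (added below) sends api_key as the Authorization header and forwards anon as the isAnonymous field of the upload payload. A minimal sketch of those lookups, with a made-up stand-in for data/example-config.py:

# Minimal sketch of how src/trackers/RTF.py reads this config block.
# The dict below is a made-up stand-in for the real data/example-config.py.
config = {
    "TRACKERS": {
        "RTF": {
            "api_key": "  paste-your-retroflix-api-key-here  ",  # hypothetical value
            "announce_url": "get from upload page",
            "anon": True,
        },
    },
}

tracker = "RTF"
headers = {
    'accept': 'application/json',
    'Content-Type': 'application/json',
    # RTF.py strips stray whitespace before sending the key
    'Authorization': config['TRACKERS'][tracker]['api_key'].strip(),
}
is_anonymous = config['TRACKERS'][tracker]["anon"]  # sent as isAnonymous in the upload payload
print(headers['Authorization'], is_anonymous)
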
8 changes: 8 additions & 0 deletions src/trackers/HUNO.py
@@ -42,6 +42,11 @@ async def upload(self, meta):
        else:
            anon = 1

        # only HEVC encodes and WEBRips are allowed, so reject ENCODE/WEBRIP uploads that are not HEVC
        if meta['video_codec'] != "HEVC" and (meta['type'] == "ENCODE" or meta['type'] == "WEBRIP"):
            console.print('[bold red]Only x265/HEVC encodes are allowed')
            return

        if meta['bdinfo'] != None:
            mi_dump = None
            bd_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/BD_SUMMARY_00.txt", 'r', encoding='utf-8').read()
@@ -94,6 +99,9 @@ async def upload(self, meta):
            response = requests.post(url=self.upload_url, files=files, data=data, headers=headers, params=params)
            try:
                console.print(response.json())
                # add the uploaded torrent's page URL to the comment field of the .torrent file
                t_id = response.json()['data'].split(".")[1].split("/")[3]
                await common.add_tracker_torrent(meta, self.tracker, self.source_flag, self.config['TRACKERS'][self.tracker].get('announce_url'), "https://hawke.uno/torrents/" + t_id)
            except:
                console.print("It may have uploaded, go check")
                return
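The new comment logic above derives t_id from the download URL returned in the response's data field: the second dot-separated chunk, then its fourth slash-separated segment. A standalone illustration with a made-up URL of the shape that indexing assumes:

# Hypothetical response value; the real one comes back from HUNO's upload API.
data = "https://hawke.uno/torrent/download/12345.f00ba4passkey"

# Same indexing as in HUNO.upload():
# split(".")[1]  -> "uno/torrent/download/12345"
# split("/")[3]  -> "12345"
t_id = data.split(".")[1].split("/")[3]
comment_url = "https://hawke.uno/torrents/" + t_id
print(t_id, comment_url)   # 12345 https://hawke.uno/torrents/12345
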
125 changes: 125 additions & 0 deletions src/trackers/RTF.py
@@ -0,0 +1,125 @@
# -*- coding: utf-8 -*-
# import discord
import asyncio
import requests
import base64
import re
import datetime

from src.trackers.COMMON import COMMON
from src.console import console

class RTF():
    """
    Edit for Tracker:
        Edit BASE.torrent with announce and source
        Check for duplicates
        Set type/category IDs
        Upload
    """

    ###############################################################
    ######## EDIT ME ########
    ###############################################################
    def __init__(self, config):
        self.config = config
        self.tracker = 'RTF'
        self.source_flag = 'sunshine'
        self.upload_url = 'https://retroflix.club/api/upload'
        self.search_url = 'https://retroflix.club/api/torrent'
        self.forum_link = 'https://retroflix.club/forums.php?action=viewtopic&topicid=3619'
        self.banned_groups = []
        pass

    async def upload(self, meta):
        common = COMMON(config=self.config)
        await common.edit_torrent(meta, self.tracker, self.source_flag)
        await common.unit3d_edit_desc(meta, self.tracker, self.forum_link)
        if meta['bdinfo'] != None:
            mi_dump = None
            bd_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/BD_SUMMARY_00.txt", 'r', encoding='utf-8').read()
        else:
            mi_dump = open(f"{meta['base_dir']}/tmp/{meta['uuid']}/MEDIAINFO.txt", 'r', encoding='utf-8').read()
            bd_dump = None

        screenshots = []
        for image in meta['image_list']:
            if image['raw_url'] != None:
                screenshots.append(image['raw_url'])

        json_data = {
            'name' : meta['name'],
            # description does not work for some reason
            # 'description' : meta['overview'] + "\n\n" + desc + "\n\n" + "Uploaded by L4G Upload Assistant",
            'description': "this is a description",
            # rewrite MediaInfo numbers so "1 080" becomes "1,080"; the site's MediaInfo parser won't work otherwise
            'mediaInfo': re.sub(r"(\d+)\s+(\d+)", r"\1,\2", mi_dump) if bd_dump == None else f"{bd_dump}",
            "nfo": "",
            "url": "https://www.imdb.com/title/" + (meta['imdb_id'] if str(meta['imdb_id']).startswith("tt") else "tt" + meta['imdb_id']) + "/",
            # auto pulled from IMDb
            "descr": "This is short description",
            "poster": meta["poster"] if meta["poster"] != None else "",
            "type": "401" if meta['category'] == 'MOVIE' else "402",
            "screenshots": screenshots,
            'isAnonymous': self.config['TRACKERS'][self.tracker]["anon"],
        }

        with open(f"{meta['base_dir']}/tmp/{meta['uuid']}/[{self.tracker}]{meta['clean_name']}.torrent", 'rb') as binary_file:
            binary_file_data = binary_file.read()
            base64_encoded_data = base64.b64encode(binary_file_data)
            base64_message = base64_encoded_data.decode('utf-8')
            json_data['file'] = base64_message

        headers = {
            'accept': 'application/json',
            'Content-Type': 'application/json',
            'Authorization': self.config['TRACKERS'][self.tracker]['api_key'].strip(),
        }


        if datetime.date.today().year - meta['year'] <= 9:
            console.print(f"[red]ERROR: Not uploading!\nMust be older than 10 Years as per rules")
            return


        if meta['debug'] == False:
            response = requests.post(url=self.upload_url, json=json_data, headers=headers)
            try:
                console.print(response.json())
            except:
                console.print("It may have uploaded, go check")
                return
        else:
            console.print(f"[cyan]Request Data:")
            console.print(json_data)


    async def search_existing(self, meta):
        dupes = []
        console.print("[yellow]Searching for existing torrents on site...")
        headers = {
            'accept': 'application/json',
            'Authorization': self.config['TRACKERS'][self.tracker]['api_key'].strip(),
        }

        params = {
            'includingDead' : '1'
        }

        # the search is intentionally vague and just uses the IMDb id when available, as many releases are not named properly on site
        if meta['imdb_id'] != "0":
            params['imdbId'] = meta['imdb_id'] if str(meta['imdb_id']).startswith("tt") else "tt" + meta['imdb_id']
        else:
            params['search'] = meta['title'].replace(':', '').replace("'", '').replace(",", '')

        try:
            response = requests.get(url=self.search_url, params=params, headers=headers)
            response = response.json()
            for each in response:
                result = each['name']
                dupes.append(result)
        except:
            console.print('[bold red]Unable to search for existing torrents on site. Either the site is down or your API key is incorrect')
            await asyncio.sleep(5)

        return dupes
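A few of the quirks handled in RTF.py above are easy to verify in isolation: the MediaInfo rewrite (the site's parser needs "1,080" rather than "1 080"), the IMDb id normalisation used for both the url field and the imdbId search parameter, and the 10-year age gate. A small self-contained sketch with made-up inputs:

import datetime
import re

# MediaInfo rewrite: insert a comma between digit groups separated by whitespace.
mi_line = "Width : 1 920 pixels"                      # made-up MediaInfo excerpt
print(re.sub(r"(\d+)\s+(\d+)", r"\1,\2", mi_line))    # Width : 1,920 pixels

# IMDb id normalisation: prefix "tt" only when it is missing.
for imdb_id in ("0133093", "tt0133093"):              # made-up ids
    normalised = imdb_id if str(imdb_id).startswith("tt") else "tt" + imdb_id
    print("https://www.imdb.com/title/" + normalised + "/")

# Age gate: anything released within the last 9 calendar years is rejected.
year = 2016                                           # made-up release year
if datetime.date.today().year - year <= 9:
    print("rejected: must be roughly ten years old per site rules")
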
9 changes: 4 additions & 5 deletions upload.py
@@ -32,6 +32,7 @@
from src.trackers.RF import RF
from src.trackers.OE import OE
from src.trackers.BHDTV import BHDTV
from src.trackers.RTF import RTF
import json
from pathlib import Path
import asyncio
@@ -243,13 +244,12 @@ async def do_the_thing(base_dir):
    ####### Upload to Trackers #######
    ####################################
    common = COMMON(config=config)
    api_trackers = ['BLU', 'AITHER', 'STC', 'R4E', 'STT', 'RF', 'ACM','LCD','LST','HUNO', 'SN', 'LT', 'NBL', 'ANT', 'JPTV', 'TDC', 'OE', 'BHDTV']
    api_trackers = ['BLU', 'AITHER', 'STC', 'R4E', 'STT', 'RF', 'ACM','LCD','LST','HUNO', 'SN', 'LT', 'NBL', 'ANT', 'JPTV', 'TDC', 'OE', 'BHDTV', 'RTF']
    http_trackers = ['HDB', 'TTG', 'FL', 'PTER', 'HDT', 'MTV']
    tracker_class_map = {
        'BLU' : BLU, 'BHD': BHD, 'AITHER' : AITHER, 'STC' : STC, 'R4E' : R4E, 'THR' : THR, 'STT' : STT, 'HP' : HP, 'PTP' : PTP, 'RF' : RF, 'SN' : SN,
        'ACM' : ACM, 'HDB' : HDB, 'LCD': LCD, 'TTG' : TTG, 'LST' : LST, 'HUNO': HUNO, 'FL' : FL, 'LT' : LT, 'NBL' : NBL, 'ANT' : ANT, 'PTER': PTER, 'JPTV' : JPTV,
        'TL' : TL, 'TDC' : TDC, 'HDT' : HDT, 'MTV': MTV, 'OE': OE, 'BHDTV': BHDTV
    }
        'TL' : TL, 'TDC' : TDC, 'HDT' : HDT, 'MTV': MTV, 'OE': OE, 'BHDTV': BHDTV, 'RTF':RTF}

    for tracker in trackers:
        if meta['name'].endswith('DUPE?'):
@@ -418,8 +418,7 @@ async def do_the_thing(base_dir):
if check_banned_group(tracker_class.tracker, tracker_class.banned_groups, meta):
continue
await tracker_class.upload(meta)
await client.add_to_client(meta, tracker_class.tracker)

await client.add_to_client(meta, tracker_class.tracker)


def get_confirmation(meta):
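Taken together, the upload.py changes follow the repository's usual registration pattern: import the new tracker class, add its short name to api_trackers, and map the name to the class in tracker_class_map. A condensed, runnable sketch of that wiring; the stub class and the instantiation line are illustrative assumptions rather than the real upload.py code:

# Stand-in for `from src.trackers.RTF import RTF`; the real class lives in src/trackers/RTF.py.
class RTF:
    def __init__(self, config):
        self.config = config
        self.tracker = 'RTF'

api_trackers = ['BLU', 'HUNO', 'RTF']   # shortened list; the real one carries every API tracker
tracker_class_map = {'RTF': RTF}        # the real map holds every tracker class

for tracker in ['RTF']:
    if tracker in api_trackers:
        # assumed instantiation; do_the_thing() builds the class from the map with the loaded config
        tracker_class = tracker_class_map[tracker](config={})
        print(tracker_class.tracker, "registered")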
