From 4c7be2d203726b13f8155bb4550e366367e0a42a Mon Sep 17 00:00:00 2001
From: Anton Soroko
Date: Thu, 3 Oct 2024 19:40:29 +0300
Subject: [PATCH] Remove unused "id" field from parser and api_format

---
 burst/burst.py    | 18 ++++++------------
 burst/provider.py |  3 +--
 2 files changed, 7 insertions(+), 14 deletions(-)

diff --git a/burst/burst.py b/burst/burst.py
index 5cd15fa5..92410209 100644
--- a/burst/burst.py
+++ b/burst/burst.py
@@ -282,7 +282,6 @@ def extract_torrents(provider, client):
 
     dom = Html().feed(client.content)
 
-    id_search = get_search_query(definition, "id")
     key_search = get_search_query(definition, "key")
     row_search = get_search_query(definition, "row")
     name_search = get_search_query(definition, "name")
@@ -301,7 +300,7 @@ def extract_torrents(provider, client):
     needs_subpage = 'subpage' in definition and definition['subpage']
 
     if needs_subpage:
-        def extract_subpage(q, id, name, torrent, size, seeds, peers, info_hash, referer):
+        def extract_subpage(q, name, torrent, size, seeds, peers, info_hash, referer):
             try:
                 log.debug("[%s] Getting subpage at %s" % (provider, repr(torrent)))
             except Exception as e:
@@ -340,7 +339,7 @@ def extract_subpage(q, id, name, torrent, size, seeds, peers, info_hash, referer
                 map(log.debug, traceback.format_exc().split("\n"))
 
             log.debug("[%s] Subpage torrent for %s: %s" % (provider, repr(uri[0]), torrent))
-            ret = (id, name, info_hash, torrent, size, seeds, peers)
+            ret = (name, info_hash, torrent, size, seeds, peers)
 
             # Cache this subpage result if another query would need to request same url.
             provider_cache[uri[0]] = torrent
@@ -373,7 +372,6 @@ def extract_subpage(q, id, name, torrent, size, seeds, peers, info_hash, referer
                 continue
 
             try:
-                id = eval(id_search) if id_search else ""
                 name = eval(name_search) if name_search else ""
                 description = eval(description_search) if description_search else ""
                 torrent = eval(torrent_search) if torrent_search else ""
@@ -387,7 +385,6 @@ def extract_subpage(q, id, name, torrent, size, seeds, peers, info_hash, referer
                     torrent = torrent[torrent.find('magnet:?'):]
 
                 if debug_parser:
-                    log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'id', id_search, id))
                     log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'name', name_search, name))
                     log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'description', description_search, description))
                     log.debug("[%s] Parser debug | Matched '%s' iteration for query '%s': %s" % (provider, 'torrent', torrent_search, torrent))
@@ -459,13 +456,13 @@ def extract_subpage(q, id, name, torrent, size, seeds, peers, info_hash, referer
                     # Check if this url was previously requested, to avoid doing same job again.
                     uri = torrent.split('|')
                     if uri and uri[0] and uri[0] in provider_cache and provider_cache[uri[0]]:
-                        yield (id, name, info_hash, provider_cache[uri[0]], size, seeds, peers)
+                        yield (name, info_hash, provider_cache[uri[0]], size, seeds, peers)
                         continue
 
-                    t = Thread(target=extract_subpage, args=(q, id, name, torrent, size, seeds, peers, info_hash, referer))
+                    t = Thread(target=extract_subpage, args=(q, name, torrent, size, seeds, peers, info_hash, referer))
                     threads.append(t)
                 else:
-                    yield (id, name, info_hash, torrent, size, seeds, peers)
+                    yield (name, info_hash, torrent, size, seeds, peers)
 
         except Exception as e:
             log.error("[%s] Got an exception while parsing results: %s" % (provider, repr(e)))
@@ -546,15 +543,12 @@ def get_nested_value(result, key, default):
     for result in results:
         if not result or not isinstance(result, dict):
             continue
-        id = ''
         name = ''
         info_hash = ''
         torrent = ''
         size = ''
         seeds = ''
         peers = ''
-        if 'id' in api_format:
-            id = result[api_format['id']]
         if 'name' in api_format:
             name = get_nested_value(result, api_format['name'], "")
         if 'description' in api_format:
@@ -589,7 +583,7 @@ def get_nested_value(result, key, default):
                 peers = result[api_format['peers']]
                 if isinstance(peers, basestring) and peers.isdigit():
                     peers = int(peers)
-        yield (id, name, info_hash, torrent, size, seeds, peers)
+        yield (name, info_hash, torrent, size, seeds, peers)
 
 
 def extract_from_page(provider, content):
diff --git a/burst/provider.py b/burst/provider.py
index 605d5cab..f6ff9bbd 100644
--- a/burst/provider.py
+++ b/burst/provider.py
@@ -42,7 +42,7 @@ def generate_payload(provider, generator, filtering, verify_name=True, verify_si
     definition = definitions[provider]
     definition = get_alias(definition, get_setting("%s_alias" % provider))
 
-    for id, name, info_hash, uri, size, seeds, peers in generator:
+    for name, info_hash, uri, size, seeds, peers in generator:
         size = clean_size(size)
         # uri, info_hash = clean_magnet(uri, info_hash)
         v_name = name if verify_name else filtering.title
@@ -53,7 +53,6 @@ def generate_payload(provider, generator, filtering, verify_name=True, verify_si
         sort_balance = (sort_seeds + 1) * 3 * sort_resolution
 
         results.append({
-            "id": id,
             "name": name,
             "uri": uri,
             "info_hash": info_hash,