♻️ Simple logging module.
Rhilip committed Jun 9, 2018
1 parent 217ab47 commit 71bb846
Showing 6 changed files with 31 additions and 31 deletions.
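Every file below gets the same treatment: the module-level `logging` calls are swapped for the shared `rootLogger` configured in utils/load/handler.py, so all extractors log through one pre-configured set of handlers. A minimal sketch of the resulting call pattern, using the import path and alias from this commit (the model name and site URL in the messages are illustrative only):

```python
from utils.load.handler import rootLogger as Logger

# Illustrative usage -- "SomeSite" and the URL are made-up values.
Logger.info("Model \"{}\" is activation now.".format("SomeSite"))
Logger.warning("Site: {si} is Offline now.".format(si="https://example.org"))
```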
24 changes: 10 additions & 14 deletions extractors/base/site.py
@@ -2,7 +2,6 @@
# Copyright (c) 2017-2020 Rhilip <[email protected]>
# Licensed under the GNU General Public License v3.0

- import logging
import os
import re
import time
@@ -15,13 +14,10 @@
from utils.constants import Video_Containers
from utils.cookie import cookies_raw2jar
from utils.load.config import setting
+ from utils.load.handler import rootLogger as Logger
from utils.load.submodules import tc, db
from utils.pattern import pattern_group as search_ptn

- # Disable log messages from the Requests library
- logging.getLogger("urllib3").setLevel(logging.WARNING)
- logging.getLogger("requests").setLevel(logging.WARNING)

REQUESTS_TIMEOUT = 5


@@ -40,7 +36,7 @@ def __init__(self, status: bool, cookies: dict or str, **kwargs):
try:
self.cookies = cookies_raw2jar(cookies) if isinstance(cookies, str) else cookies
except ValueError: # Empty raw_cookies will raise ValueError (,see utils.cookie )
- logging.critical("Empty cookies, Not allowed to active Model \"{}\"".format(self.name))
+ Logger.critical("Empty cookies, Not allowed to active Model \"{}\"".format(self.name))
self.status = False

# -*- Assign Enhanced Features : Site -*-
@@ -63,10 +59,10 @@ def __init__(self, status: bool, cookies: dict or str, **kwargs):

# Check Site Online Status
if self.status:
- logging.debug("Model \"{}\" is activation now.".format(self.name))
+ Logger.debug("Model \"{}\" is activation now.".format(self.name))
self.online_check()
else:
- logging.info("Model \"{}\" isn't active due to your settings.".format(self.name))
+ Logger.info("Model \"{}\" isn't active due to your settings.".format(self.name))

def online_check(self) -> bool:
"""
@@ -80,11 +76,11 @@ def online_check(self) -> bool:
requests.head(self.url_host, timeout=REQUESTS_TIMEOUT)
except OSError: # requests.exceptions.RequestException
if self.suspended == 0:
- logging.warning("Site: {si} is Offline now.".format(si=self.url_host))
+ Logger.warning("Site: {si} is Offline now.".format(si=self.url_host))
self.suspended += 1
else:
if self.suspended != 0:
- logging.info("The Site: {si} is Online now,after {count} times tries."
+ Logger.info("The Site: {si} is Online now,after {count} times tries."
"Will check the session soon.".format(si=self.url_host, count=self.suspended))
self.suspended = 0 # Set self.suspended as 0 first, then session_check()
self.session_check()
@@ -125,7 +121,7 @@ def _descr_html2ubb(string: str) -> str:

def _assist_delay(self):
if self._ASSIST_ONLY:
- logging.info("Autoseed-{mo} only allowed to assist."
+ Logger.info("Autoseed-{mo} only allowed to assist."
"it will sleep {sl} Seconds to wait the reseed site "
"to have this torrent".format(mo=self.name, sl=self._ASSIST_DELAY_TIME))
time.sleep(self._ASSIST_DELAY_TIME)
@@ -138,7 +134,7 @@ def _get_torrent_ptn(self, torrent):
for ptn in search_ptn:
search = re.search(ptn, tname)
if search:
- logging.debug("The search group dict of Torrent: {tn} is {gr}".format(tn=tname, gr=search.groupdict()))
+ Logger.debug("The search group dict of Torrent: {tn} is {gr}".format(tn=tname, gr=search.groupdict()))
break

return search
@@ -182,14 +178,14 @@ def torrent_feed(self, torrent):
# It means that the pre-reseed torrent in this site is not reseed before,
# And this torrent not marked as an un-reseed torrent.
self._assist_delay()
- logging.info("Autoseed-{mo} Get A feed torrent: {na}".format(mo=self.name, na=torrent.name))
+ Logger.info("Autoseed-{mo} Get A feed torrent: {na}".format(mo=self.name, na=torrent.name))

reseed_tag = -1
try:
reseed_tag = self.torrent_reseed(torrent)
except Exception as e: # TODO 针对不同的Error情况做不同的更新(e.g. 因为网络问题则置0,其他情况置1)
err_name = type(e).__name__
- logging.error(
+ Logger.error(
"Reseed not success in Site: {} for torrent: {}, "
"With Exception: {}, {}".format(self.name, torrent.name, err_name, e)
)
14 changes: 7 additions & 7 deletions extractors/byrbt.py
@@ -2,12 +2,12 @@
# Copyright (c) 2017-2020 Rhilip <[email protected]>
# Licensed under the GNU General Public License v3.0

- import logging
import re
from html import unescape
from urllib.parse import unquote

from extractors.base.nexusphp import NexusPHP
+ from utils.load.handler import rootLogger as Logger

type_dict = {
"电影": {
@@ -152,7 +152,7 @@ def sort_title_info(raw_title, raw_type, raw_sec_type) -> dict:

len_split = len(type_dict[raw_type]["split"])
if len_split != len(raw_title_group):
- logging.warning("The raw title \"{raw}\" may lack of tag (now: {no},ask: {co}),"
+ Logger.warning("The raw title \"{raw}\" may lack of tag (now: {no},ask: {co}),"
"The split may wrong.".format(raw=raw_title, no=len(raw_title_group), co=len_split))
while len_split > len(raw_title_group):
raw_title_group.append("")
@@ -166,7 +166,7 @@ def sort_title_info(raw_title, raw_type, raw_sec_type) -> dict:
title_split = "" # type_dict[raw_type]["limit"][i][0]
raw_title_group.append(j)
return_dict.update({i: title_split})
- logging.debug("the title split success.The title dict:{dic}".format(dic=return_dict))
+ Logger.debug("the title split success.The title dict:{dic}".format(dic=return_dict))
return return_dict


@@ -195,18 +195,18 @@ def torrent_clone(self, tid) -> dict:
title_search = re.search("种子详情 \"(?P<title>.*)\" - Powered", str(details_bs.title))
if title_search:
title = unescape(title_search.group("title"))
- logging.info("Get clone torrent's info,id: {tid},title: \"{ti}\"".format(tid=tid, ti=title))
+ Logger.info("Get clone torrent's info,id: {tid},title: \"{ti}\"".format(tid=tid, ti=title))
title_dict = sort_title_info(raw_title=title, raw_type=details_bs.find("span", id="type").text.strip(),
raw_sec_type=details_bs.find("span", id="sec_type").text.strip())
return_dict.update(title_dict)
body = details_bs.body
imdb_url = dburl = ""
if body.find(class_="imdbRatingPlugin"):
imdb_url = 'http://www.imdb.com/title/' + body.find(class_="imdbRatingPlugin")["data-title"]
- logging.debug("Found imdb link:{link} for this torrent.".format(link=imdb_url))
+ Logger.debug("Found imdb link:{link} for this torrent.".format(link=imdb_url))
if body.find("a", href=re.compile("://movie.douban.com/subject")):
dburl = body.find("a", href=re.compile("://movie.douban.com/subject")).text
- logging.debug("Found douban link:{link} for this torrent.".format(link=dburl))
+ Logger.debug("Found douban link:{link} for this torrent.".format(link=dburl))
# Update description
descr = body.find(id="kdescr")

@@ -230,7 +230,7 @@ def torrent_clone(self, tid) -> dict:
"clone_id": tid
})
else:
- logging.error("Error,this torrent may not exist or ConnectError")
+ Logger.error("Error,this torrent may not exist or ConnectError")
return return_dict

def date_raw_update(self, torrent_name_search, raw_info: dict) -> dict:
6 changes: 3 additions & 3 deletions extractors/npubits.py
@@ -4,11 +4,11 @@
# Licensed under the GNU General Public License v3.0

import base64
- import logging
import re

from extractors.base.nexusphp import NexusPHP
from utils.constants import ubb_clean, episode_eng2chs
+ from utils.load.handler import rootLogger as Logger


def string2base64(raw):
@@ -45,12 +45,12 @@ def torrent_clone(self, tid) -> dict:
try:
res_dic = self.get_data(url=self.url_host + "/transfer.php", params={"url": transferred_url}, json=True)
except ValueError:
- logging.error("Error,this torrent may not exist or ConnectError")
+ Logger.error("Error,this torrent may not exist or ConnectError")
else:
res_dic.update({"transferred_url": transferred_url, "clone_id": tid})
res_dic["descr"] = ubb_clean(res_dic["descr"])

- logging.info("Get clone torrent's info,id: {tid},title:\"{ti}\"".format(tid=tid, ti=res_dic["name"]))
+ Logger.info("Get clone torrent's info,id: {tid},title:\"{ti}\"".format(tid=tid, ti=res_dic["name"]))
return res_dic

def date_raw_update(self, torrent_name_search, raw_info: dict) -> dict:
8 changes: 4 additions & 4 deletions extractors/nwsuaf6.py
@@ -3,11 +3,11 @@
# Copyright (c) 2017-2020 Rhilip <[email protected]>
# Licensed under the GNU General Public License v3.0

- import logging
import re

from extractors.base.nexusphp import NexusPHP
from utils.constants import ubb_clean
+ from utils.load.handler import rootLogger as Logger

filetype_list = ["MKV", "RMVB", "MP4", "AVI", "MPEG", "ts", "ISO", "其他文件类型"]
resolution_list = ["1080P", "720P", "480P", "其他"]
@@ -120,13 +120,13 @@ def torrent_clone(self, tid) -> dict:
try:
res_dic = self.get_data(url=self.url_host + "/citetorrent.php", params={"torrent_id": tid}, json=True)
except ValueError:
- logging.error("Error,this torrent may not exist or ConnectError")
+ Logger.error("Error,this torrent may not exist or ConnectError")
else:
res_dic["clone_id"] = tid
res_dic["descr"] = ubb_clean(res_dic["descr"])
res_dic["type"] = res_dic["category"]

- logging.info("Get clone torrent's info,id: {tid},title:\"{ti}\"".format(tid=tid, ti=res_dic["name"]))
+ Logger.info("Get clone torrent's info,id: {tid},title:\"{ti}\"".format(tid=tid, ti=res_dic["name"]))
return res_dic

def date_raw_update(self, torrent_name_search, raw_info: dict) -> dict:
@@ -141,7 +141,7 @@ def date_raw_update(self, torrent_name_search, raw_info: dict) -> dict:
len_split = len(title_split_dict[cat]["order"])
# TODO if len_split == 0:
if len_split != len(raw_title_group):
- logging.warning("The raw title \"{raw}\" may lack of tag (now: {no},ask: {co}),"
+ Logger.warning("The raw title \"{raw}\" may lack of tag (now: {no},ask: {co}),"
"The split may wrong.".format(raw=raw_title, no=len(raw_title_group), co=len_split))
while len_split > len(raw_title_group):
raw_title_group.append("")
6 changes: 3 additions & 3 deletions extractors/tjupt.py
@@ -3,13 +3,13 @@
# Copyright (c) 2017-2020 Rhilip <[email protected]>
# Licensed under the GNU General Public License v3.0

- import logging
import re

import requests

from extractors.base.nexusphp import NexusPHP
from utils.constants import ubb_clean
+ from utils.load.handler import rootLogger as Logger

ask_dict = {
"401": ["cname", "ename", "issuedate", "language", "format", "subsinfo", "district"], # 电影
@@ -54,7 +54,7 @@ def exist_torrent_title(self, tag):
else: # Due to HIGH Authority (Ultimate User) asked to view this page.
torrent_file_info_table = torrent_file_page.find("ul", id="colapse")
torrent_title = re.search("\\[name\] \(\d+\): (?P<name>.+?) -", torrent_file_info_table.text).group("name")
- logging.info("The torrent name for id({id}) is \"{name}\"".format(id=tag, name=torrent_title))
+ Logger.info("The torrent name for id({id}) is \"{name}\"".format(id=tag, name=torrent_title))
return torrent_title

def torrent_clone(self, tid):
@@ -69,7 +69,7 @@ def torrent_clone(self, tid):
page_clone = self.get_data(url=self.url_host + "/upsimilartorrent.php", params={"id": tid}, bs=True)

if not re.search(r"<h2>错误!</h2>", str(page_clone)):
- logging.info("Got clone torrent's info,id: {tid}".format(tid=tid))
+ Logger.info("Got clone torrent's info,id: {tid}".format(tid=tid))

type_select = page_clone.find("select", id="oricat")
type_value = type_select.find("option", selected="selected")["value"]
4 changes: 4 additions & 0 deletions utils/load/handler.py
@@ -27,3 +27,7 @@
rootLogger.handlers.pop()
rootLogger.addHandler(fileHandler)
rootLogger.addHandler(consoleHandler)

+ # Disable log messages from the Requests library
+ logging.getLogger("urllib3").setLevel(logging.WARNING)
+ logging.getLogger("requests").setLevel(logging.WARNING)
