Refactor eps complete.
Fix API bugs.

Add a new module to handle path parsing.
EstrellaXD committed May 19, 2023
1 parent c26b669 commit 8f548d8
Showing 16 changed files with 257 additions and 296 deletions.
16 changes: 8 additions & 8 deletions src/module/api/download.py
@@ -2,17 +2,17 @@
 
 from module.models.api import *
 from module.models import BangumiData
-from module.manager import FullSeasonGet
-from module.rss import RSSAnalyser
+from module.manager import SeasonCollector
+from module.rss import analyser
 
 
 def link_process(link):
-    return RSSAnalyser().rss_to_data(link, full_parse=False)
+    return analyser.rss_to_data(link, full_parse=False)
 
 
 @router.post("/api/v1/download/analysis", tags=["download"])
 async def analysis(link: RssLink):
-    data = link_process(link)
+    data = link_process(link.rss_link)
     if data:
         return data[0]
     else:
@@ -22,8 +22,8 @@ async def analysis(link: RssLink):
 @router.post("/api/v1/download/collection", tags=["download"])
 async def download_collection(data: BangumiData):
     if data:
-        with FullSeasonGet() as season:
-            season.download_collection(data, data.rss_link)
+        with SeasonCollector() as collector:
+            collector.collect_season(data, data.rss_link[0])
         return {"status": "Success"}
     else:
         return {"status": "Failed to parse link"}
@@ -32,8 +32,8 @@ async def download_collection(data: BangumiData):
 @router.post("/api/v1/download/subscribe", tags=["download"])
 async def subscribe(data: BangumiData):
     if data:
-        with FullSeasonGet() as season:
-            season.add_subscribe(data)
+        with SeasonCollector() as collector:
+            collector.subscribe_season(data)
         return {"status": "Success"}
     else:
         return {"status": "Failed to parse link"}
10 changes: 4 additions & 6 deletions src/module/core/sub_thread.py
@@ -2,8 +2,8 @@
 
 from .status import ProgramStatus
 
-from module.rss import RSSAnalyser, add_rules
-from module.manager import Renamer, FullSeasonGet
+from module.rss import analyser, add_rules
+from module.manager import Renamer, eps_complete
 from module.conf import settings
 
 
@@ -13,15 +13,13 @@ def __init__(self):
         self._rss_thread = threading.Thread(
             target=self.rss_loop,
         )
-        self._rss_analyser = RSSAnalyser()
 
     def rss_loop(self):
         while not self.stop_event.is_set():
-            self._rss_analyser.run()
+            analyser.run()
             add_rules()
             if settings.bangumi_manage.eps_complete:
-                with FullSeasonGet() as full_season_get:
-                    full_season_get.eps_complete()
+                eps_complete()
             self.stop_event.wait(settings.program.rss_time)
 
     def rss_start(self):
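Editor's note: the loop relies on threading.Event for both pacing and shutdown — stop_event.wait(timeout) sleeps for up to timeout seconds but returns immediately once the event is set. A self-contained sketch of that pattern (the names below are illustrative, not from the codebase):

    import threading
    import time

    stop_event = threading.Event()

    def rss_loop():
        while not stop_event.is_set():
            print("tick")        # stand-in for analyser.run(), add_rules(), eps_complete()
            stop_event.wait(2)   # sleeps up to 2 s, but wakes immediately on stop

    thread = threading.Thread(target=rss_loop)
    thread.start()
    time.sleep(5)
    stop_event.set()             # the loop exits at its next check
    thread.join()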
31 changes: 28 additions & 3 deletions src/module/database/bangumi.py
@@ -44,11 +44,18 @@ def __fetch_data(self) -> list[BangumiData]:
         return [self.__db_to_data(x) for x in dict_data]
 
     def insert(self, data: BangumiData):
-        db_data = self.__data_to_db(data)
-        self._insert(db_data=db_data, table_name=self.__table_name)
-        logger.debug(f"Insert {data.official_title} into database.")
+        if self.__check_exist(data):
+            self.update_one(data)
+        else:
+            db_data = self.__data_to_db(data)
+            db_data["id"] = self.gen_id()
+            self._insert(db_data=db_data, table_name=self.__table_name)
+            logger.debug(f"Insert {data.official_title} into database.")
 
     def insert_list(self, data: list[BangumiData]):
+        _id = self.gen_id()
+        for i, item in enumerate(data):
+            item.id = _id + i
         data_list = [self.__data_to_db(x) for x in data]
         self._insert_list(data_list=data_list, table_name=self.__table_name)
         logger.debug(f"Insert {len(data)} bangumi into database.")
@@ -208,3 +215,21 @@ def gen_id(self) -> int:
         if data is None:
             return 1
         return data[0] + 1
+
+    def __check_exist(self, data: BangumiData):
+        self._cursor.execute(
+            """
+            SELECT * FROM bangumi WHERE official_title = :official_title
+            """,
+            {"official_title": data.official_title},
+        )
+        values = self._cursor.fetchone()
+        if values is None:
+            return False
+        return True
+
+    def __check_list_exist(self, data_list: list[BangumiData]):
+        for data in data_list:
+            if self.__check_exist(data):
+                return True
+        return False
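Editor's note: the net effect is insert-or-update keyed on official_title. A hedged sketch of the new behaviour (the exact BangumiData constructor fields are an assumption):

    from module.database import BangumiDatabase
    from module.models import BangumiData

    data = BangumiData(official_title="Example Show", season=1)  # field set assumed

    with BangumiDatabase() as db:
        db.insert(data)  # no row with this official_title yet: inserts with id = gen_id()
        data.season = 2
        db.insert(data)  # official_title already present: routes to update_one(data)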
46 changes: 14 additions & 32 deletions src/module/downloader/download_client.py
@@ -1,20 +1,16 @@
-import re
 import logging
 
+from .path import TorrentPath
+
 from module.models import BangumiData
 from module.conf import settings
 
 
 logger = logging.getLogger(__name__)
 
-if ":\\" in settings.downloader.path:
-    import ntpath as path
-else:
-    import os.path as path
-
-
-class DownloadClient:
+class DownloadClient(TorrentPath):
     def __init__(self):
+        super().__init__()
         self.client = self.__getClient()
         self.authed = False
 
@@ -62,41 +58,27 @@ def init_downloader(self):
             logger.debug(e)
         if settings.downloader.path == "":
             prefs = self.client.get_app_prefs()
-            settings.downloader.path = path.join(prefs["save_path"], "Bangumi")
+            settings.downloader.path = self._join_path(prefs["save_path"], "Bangumi")
 
-    def set_rule(self, info: BangumiData):
-        official_name = f"{info.official_title}({info.year})" if info.year else info.official_title
-        raw_name, season, group = (
-            info.title_raw,
-            info.season,
-            info.group_name,
-        )
+    def set_rule(self, data: BangumiData):
         rule = {
             "enable": True,
-            "mustContain": raw_name,
-            "mustNotContain": "|".join(info.filter),
+            "mustContain": data.title_raw,
+            "mustNotContain": "|".join(data.filter),
             "useRegex": True,
             "episodeFilter": "",
             "smartFilter": False,
             "previouslyMatchedEpisodes": [],
-            "affectedFeeds": info.rss_link,
+            "affectedFeeds": data.rss_link,
             "ignoreDays": 0,
             "lastMatch": "",
             "addPaused": False,
             "assignedCategory": "Bangumi",
-            "savePath": str(
-                path.join(
-                    settings.downloader.path,
-                    re.sub(r"[:/.]", " ", official_name).strip(),
-                    f"Season {season}",
-                )
-            ),
+            "savePath": self._gen_save_path(data),
         }
-        rule_name = f"[{group}] {official_name}" \
-            if settings.bangumi_manage.group_tag \
-            else official_name
-        self.client.rss_set_rule(rule_name=f"{rule_name} S{season}", rule_def=rule)
-        logger.info(f"Add {official_name} Season {season} to auto download rules.")
+        rule_name = self._rule_name(data)
+        self.client.rss_set_rule(rule_name=rule_name, rule_def=rule)
+        logger.info(f"Add {data.official_title} Season {data.season} to auto download rules.")
 
     def add_collection_feed(self, rss_link, item_path):
         self.client.rss_add_feed(url=rss_link, item_path=item_path)
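Editor's note: set_rule now delegates path and name formatting to the TorrentPath helpers; the rule dict itself is a qBittorrent RSS auto-download rule. Assuming the underlying client is qbittorrent-api's Client (which exposes rss_set_rule(rule_name, rule_def) with the shape used above — an assumption, since the diff never names the library), the same rule could be registered standalone:

    import qbittorrentapi  # assumption: the wrapped client is qbittorrent-api

    qbt = qbittorrentapi.Client(
        host="localhost:8080", username="admin", password="adminadmin"  # placeholders
    )
    rule = {
        "enable": True,
        "mustContain": "Example Show",
        "affectedFeeds": ["https://example.com/feed.xml"],  # placeholder feed
        "assignedCategory": "Bangumi",
        "savePath": "/downloads/Bangumi/Example Show(2023)/Season 1",
    }
    qbt.rss_set_rule(rule_name="Example Show S1", rule_def=rule)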
68 changes: 68 additions & 0 deletions src/module/downloader/path.py
@@ -0,0 +1,68 @@
+import re
+import logging
+
+from module.conf import settings
+from module.models import BangumiData
+
+if ":\\" in settings.downloader.path:
+    import ntpath as path
+else:
+    import os.path as path
+
+logger = logging.getLogger(__name__)
+
+
+class TorrentPath:
+    def __init__(self):
+        self.download_path = settings.downloader.path
+
+    @staticmethod
+    def check_files(info):
+        media_list = []
+        subtitle_list = []
+        for f in info.files:
+            file_name = f.name
+            suffix = path.splitext(file_name)[-1]
+            if suffix.lower() in [".mp4", ".mkv"]:
+                media_list.append(file_name)
+            elif suffix.lower() in [".ass", ".srt"]:
+                subtitle_list.append(file_name)
+        return media_list, subtitle_list
+
+    def _path_to_bangumi(self, save_path):
+        # Split save path and download path
+        save_parts = save_path.split(path.sep)
+        download_parts = self.download_path.split(path.sep)
+        # Get bangumi name and season
+        bangumi_name = ""
+        season = 1
+        for part in save_parts:
+            if re.match(r"S\d+|[Ss]eason \d+", part):
+                season = int(re.findall(r"\d+", part)[0])
+            elif part not in download_parts:
+                bangumi_name = part
+        return bangumi_name, season
+
+    @staticmethod
+    def _file_depth(file_path):  # parameter renamed from "path" so it cannot shadow the path module
+        return len(file_path.split(path.sep))
+
+    @staticmethod
+    def is_ep(file_path):  # a staticmethod takes no self; reach the helper through the class
+        return TorrentPath._file_depth(file_path) <= 2
+
+    def _gen_save_path(self, data: BangumiData):
+        folder = f"{data.official_title}({data.year})" if data.year else data.official_title
+        save_path = path.join(self.download_path, folder, f"Season {data.season}")
+        return save_path
+
+    @staticmethod
+    def _rule_name(data: BangumiData):
+        rule_name = f"[{data.group_name}] {data.official_title} S{data.season}" \
+            if settings.bangumi_manage.group_tag \
+            else f"{data.official_title} S{data.season}"
+        return rule_name
+
+    @staticmethod
+    def _join_path(*args):
+        return path.join(*args)
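Editor's note: two small fixes were applied above — _file_depth's parameter shadowed the imported path module, and is_ep was declared @staticmethod yet took self. A quick sketch of the round trip these helpers provide, where _gen_save_path builds the folder layout and _path_to_bangumi parses it back (the configured download path and the BangumiData fields are assumptions):

    from module.downloader.path import TorrentPath
    from module.models import BangumiData

    tp = TorrentPath()  # reads settings.downloader.path, e.g. "/downloads/Bangumi"

    data = BangumiData(official_title="Example Show", year="2023", season=2)  # fields assumed
    save_path = tp._gen_save_path(data)
    print(save_path)  # /downloads/Bangumi/Example Show(2023)/Season 2

    # Parsing back: the season comes from the "Season 2" component, the title
    # from the remaining component that is not part of the download path.
    print(tp._path_to_bangumi(save_path))  # ("Example Show(2023)", 2)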
2 changes: 1 addition & 1 deletion src/module/manager/__init__.py
@@ -1,3 +1,3 @@
-from .eps_complete import FullSeasonGet
+from .collector import SeasonCollector, eps_complete
 from .renamer import Renamer
 from .torrent import TorrentManager
55 changes: 55 additions & 0 deletions src/module/manager/collector.py
@@ -0,0 +1,55 @@
+import logging
+
+from module.downloader import DownloadClient
+from module.models import BangumiData
+from module.database import BangumiDatabase
+from module.searcher import SearchTorrent
+
+logger = logging.getLogger(__name__)
+
+
+class SeasonCollector(DownloadClient):
+    def add_season_torrents(self, data: BangumiData, torrents):
+        for torrent in torrents:
+            download_info = {
+                "url": torrent.torrent_link,
+                "save_path": self._gen_save_path(data),
+            }
+            self.add_torrent(download_info)
+
+    def collect_season(self, data: BangumiData, link: str = None):
+        logger.info(f"Start collecting {data.official_title} Season {data.season}...")
+        with SearchTorrent() as st:
+            if not link:
+                torrents = st.search_season(data)
+            else:
+                torrents = st.get_torrents(link)
+        self.add_season_torrents(data, torrents)
+        logger.info("Completed!")
+
+    def subscribe_season(self, data: BangumiData):
+        data.added = True
+        data.eps_collect = True
+        with BangumiDatabase() as db:
+            db.insert(data)
+        self.add_rss_feed(data.rss_link[0], item_path=data.official_title)
+        self.set_rule(data)
+
+
+def eps_complete():
+    with BangumiDatabase() as bd:
+        datas = bd.not_complete()
+        if datas:
+            logger.info("Start collecting full season...")
+            for data in datas:
+                if not data.eps_collect:
+                    with SeasonCollector() as sc:
+                        sc.collect_season(data)
+                    data.eps_collect = True
+            bd.update_list(datas)
+
+
+if __name__ == '__main__':
+    from module.conf import setup_logger
+    setup_logger()
+    eps_complete()
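Editor's note: a hedged sketch of driving the collector directly — constructing BangumiData this way and having a reachable download client are assumptions; the call itself mirrors what the /api/v1/download/collection endpoint does:

    from module.manager import SeasonCollector
    from module.models import BangumiData

    data = BangumiData(  # field set assumed from the usage above
        official_title="Example Show",
        season=1,
        eps_collect=False,
        rss_link=["https://example.com/feed.xml"],  # placeholder feed
    )

    with SeasonCollector() as collector:
        collector.collect_season(data, data.rss_link[0])  # fetch every torrent in the feed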
