Skip to content

Commit

Permalink
修复AB当作Proxy无法获取海报的问题。
Browse files Browse the repository at this point in the history
移除 RSS 检查器
  • Loading branch information
EstrellaXD committed May 18, 2023
1 parent 4b50f21 commit 9aa6b09
Show file tree
Hide file tree
Showing 8 changed files with 33 additions and 19 deletions.
2 changes: 1 addition & 1 deletion src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,4 +24,4 @@
uvicorn.run(
router, host="0.0.0.0", port=settings.program.webui_port,
log_config=uvicorn_logging_config,
)
)
14 changes: 13 additions & 1 deletion src/module/api/proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,4 +69,16 @@ async def get_rss(full_path: str):
@router.get("/Download/{full_path:path}", tags=["proxy"])
async def download(full_path: str):
    """Proxy endpoint that fetches the torrent identified by *full_path*
    and serves it back as a BitTorrent payload.
    """
    # get_torrent is a project helper; presumably returns raw torrent bytes — confirm.
    return Response(get_torrent(full_path), media_type="application/x-bittorrent")


@router.get("/Home/Episode/{full_path:path}", tags=["proxy"])
async def get_ep_info(full_path: str):
    """Proxy the mikanani.me episode page for *full_path* and return its HTML.

    Args:
        full_path: Remainder of the request path, appended to the upstream
            episode URL.

    Returns:
        Response: The upstream page body served as ``text/html``.

    Raises:
        HTTPException: 500 when the upstream fetch fails for any reason.
    """
    url = f"https://mikanani.me/Home/Episode/{full_path}"
    try:
        # RequestContent is the project's HTTP client; the context manager
        # presumably closes the underlying session — confirm in module.network.
        with RequestContent() as request:
            return Response(request.get_html(url), media_type="text/html")
    except Exception as e:
        # Boundary handler: record details at debug level, warn, map to HTTP 500.
        logger.debug(e)
        logger.warning("Failed to get ep info")
        # Chain the original exception so tracebacks preserve the root cause
        # (the original re-raise dropped it into __context__ only).
        raise HTTPException(status_code=500, detail="Failed to get ep info") from e
3 changes: 1 addition & 2 deletions src/module/checker/checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,7 @@ def check_renamer(self) -> bool:
return False

def check_analyser(self) -> bool:
if self.check_torrents() and\
self.check_downloader() and\
if self.check_downloader() and\
settings.rss_parser.enable:
return True
else:
Expand Down
3 changes: 1 addition & 2 deletions src/module/core/program.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,9 @@ def start(self):
settings.load()
if self.enable_renamer:
self.rename_start()
logger.info("Renamer started.")
if self.enable_rss:
self.rss_start()
logger.info("RSS started.")
logger.info("Program running.")
return {"status": "Program started."}

def stop(self):
Expand Down
4 changes: 2 additions & 2 deletions src/module/core/sub_thread.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,11 @@ def __init__(self):
self._rss_thread = threading.Thread(
target=self.rss_loop,
)
self._rss_analyser = RSSAnalyser()

def rss_loop(self):
rss_analyser = RSSAnalyser()
while not self.stop_event.is_set():
rss_analyser.run()
self._rss_analyser.run()
add_rules()
if settings.bangumi_manage.eps_complete:
with FullSeasonGet() as full_season_get:
Expand Down
4 changes: 0 additions & 4 deletions src/module/database/bangumi.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,6 @@ class BangumiDatabase(DataConnector):
def __init__(self):
    """Open the underlying data connection and bind this instance to the
    ``bangumi`` table.
    """
    super().__init__()
    # Name-mangled private attribute holding the table this class operates on.
    # NOTE(review): schema maintenance (update_table) is no longer run here;
    # callers are expected to invoke it explicitly — confirm all call sites do.
    self.__table_name = "bangumi"
self.__updated = False
if not self.__updated:
self.update_table()
self.__updated = True

def update_table(self):
db_data = self.__data_to_db(BangumiData())
Expand Down
12 changes: 8 additions & 4 deletions src/module/network/request_url.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def __init__(self):

def get_url(self, url, retry=3):
try_time = 0
while try_time < retry:
while True:
try:
req = self.session.get(url=url, headers=self.header, timeout=5)
req.raise_for_status()
Expand All @@ -26,16 +26,18 @@ def get_url(self, url, retry=3):
logger.debug(e)
logger.warning(f"Cannot connect to {url}. Wait for 5 seconds.")
logger.warning("Please check DNS/Connection settings")
time.sleep(5)
try_time += 1
if try_time >= retry:
break
time.sleep(5)
except Exception as e:
logger.debug(f"URL: {url}")
logger.debug(e)
break

def post_url(self, url: str, data: dict, retry=3):
try_time = 0
while try_time < retry:
while True:
try:
req = self.session.post(url=url, headers=self.header, data=data, timeout=5)
req.raise_for_status()
Expand All @@ -44,8 +46,10 @@ def post_url(self, url: str, data: dict, retry=3):
logger.debug(e)
logger.warning(f"Cannot connect to {url}.")
logger.warning("Please check DNS/Connection settings")
time.sleep(5)
try_time += 1
if try_time >= retry:
break
time.sleep(5)
except Exception as e:
logger.debug(f"URL: {url}")
logger.debug(e)
Expand Down
10 changes: 7 additions & 3 deletions src/module/rss/rss_analyser.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,12 @@
class RSSAnalyser:
def __init__(self):
    """Set up the title parser and make sure the bangumi table schema is current."""
    # Parser used to derive official titles from raw torrent names.
    self._title_analyser = TitleParser()
    # Refresh the bangumi table schema once, up front, before any RSS analysis.
    # NOTE(review): BangumiDatabase is used as a context manager — presumably it
    # opens/closes the DB connection around update_table(); confirm in DataConnector.
    with BangumiDatabase() as db:
        db.update_table()

def official_title_parser(self, data: BangumiData, mikan_title: str):
if settings.rss_parser.parser_type == "mikan":
data.official_title = mikan_title
data.official_title = mikan_title if mikan_title else data.official_title
elif settings.rss_parser.parser_type == "tmdb":
tmdb_title, season, year = self._title_analyser.tmdb_parser(
data.official_title,
Expand Down Expand Up @@ -44,7 +46,10 @@ def get_new_data_list(self, torrents: list, rss_link: str, _id: int, full_parse:
raw=torrent.name, rss_link=rss_link, _id=_id
)
if data and data.title_raw not in [i.title_raw for i in new_data]:
poster_link, mikan_title = torrent.poster_link, torrent.official_title
try:
poster_link, mikan_title = torrent.poster_link, torrent.official_title
except AttributeError:
poster_link, mikan_title = None, None
data.poster_link = poster_link
self.official_title_parser(data, mikan_title)
if not full_parse:
Expand Down Expand Up @@ -73,5 +78,4 @@ def run(self, rss_link: str = settings.rss_link):
self.rss_to_data(rss_link)
except Exception as e:
logger.debug(e)
print(e)
logger.error("Failed to collect RSS info.")

0 comments on commit 9aa6b09

Please sign in to comment.