main -> 2023.03.01 ohli24 bug fix (.01. code cleanup)
logic_linkkf.py: 145 lines changed
@@ -39,6 +39,7 @@ from plugin import (
# Separated because of a misspelling
#
from .lib.plugin import FfmpegQueue, FfmpegQueueEntity
from .lib.utils import yommi_timeit

packages = ["beautifulsoup4", "requests-cache", "cloudscraper"]

@@ -241,6 +242,7 @@ class LogicLinkkf(LogicModuleBase):
P.logger.error(traceback.format_exc())

@staticmethod
@yommi_timeit
def get_html(url: str, timeout: int = 10, cached=False):

try:
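The hunk above lists requests-cache and cloudscraper in the plugin's package list and shows get_html taking a cached flag plus the @yommi_timeit decorator (presumably a timing helper, judging by the name). A minimal sketch of how such a helper could combine the two libraries; the session names, cache TTL, and fallback order are assumptions, not the plugin's actual implementation (CachedSession and create_scraper are the libraries' real entry points):

# Hedged sketch only: one plausible shape for a cached / Cloudflare-aware fetch helper.
import cloudscraper
import requests_cache

_cached_session = requests_cache.CachedSession("linkkf_cache", expire_after=300)  # assumed cache name/TTL
_scraper = cloudscraper.create_scraper()

def get_html_sketch(url: str, timeout: int = 10, cached: bool = False) -> str:
    # use the on-disk cache when cached=True, otherwise the Cloudflare-capable scraper
    session = _cached_session if cached else _scraper
    res = session.get(url, timeout=timeout)
    res.raise_for_status()
    return res.text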
@@ -310,7 +312,7 @@ class LogicLinkkf(LogicModuleBase):
def add_whitelist(*args):
ret = {}

logger.debug(f"args: {args}")
# logger.debug(f"args: {args}")
try:

if len(args) == 0:
@@ -710,7 +712,7 @@ class LogicLinkkf(LogicModuleBase):
data = {"ret": "success", "page": page}
response_data = LogicLinkkf.get_html(url, timeout=10)
# P.logger.debug(response_data)
P.logger.debug("debug.....................")
# P.logger.debug("debug.....................")
tree = html.fromstring(response_data)
tmp_items = tree.xpath(items_xpath)

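This listing hunk fetches the page via get_html and parses it with lxml's html.fromstring before applying an XPath expression. A standalone sketch of that pattern; the URL and the XPath below are invented examples, not the plugin's real items_xpath:

import requests
from lxml import html

response_data = requests.get("https://example.org/list", timeout=10).text  # placeholder URL
tree = html.fromstring(response_data)
# hypothetical expression standing in for items_xpath
tmp_items = tree.xpath('//div[@class="item"]/a/@href')
for href in tmp_items:
    print(href)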
@@ -758,7 +760,7 @@ class LogicLinkkf(LogicModuleBase):
url = "%s/%s" % (P.ModelSetting.get("linkkf_url"), code)
logger.info(url)

logger.debug(LogicLinkkf.headers)
# logger.debug(LogicLinkkf.headers)
html_content = LogicLinkkf.get_html(url, cached=False)
# html_content = LogicLinkkf.get_html_playwright(url)
# html_content = LogicLinkkf.get_html_cloudflare(url, cached=False)
@@ -862,7 +864,7 @@ class LogicLinkkf(LogicModuleBase):
else:
tags = soup.select("ul > a")

logger.debug(len(tags))
logger.debug(f"count: {len(tags)}")

# logger.info("tags", tags)
# re1 = re.compile(r'\/(?P<code>\d+)')
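Here the episode anchors are pulled with a BeautifulSoup CSS selector ("ul > a") and the tag count is logged. A small self-contained example of the same selection; the markup is made up:

from bs4 import BeautifulSoup

sample_html = '<ul><a href="/player/123">1화</a><a href="/player/124">2화</a></ul>'  # invented markup
soup = BeautifulSoup(sample_html, "html.parser")
tags = soup.select("ul > a")
print(f"count: {len(tags)}")           # count: 2
print(tags[0]["href"], tags[0].text)   # /player/123 1화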
@@ -878,14 +880,16 @@ class LogicLinkkf(LogicModuleBase):
idx = 1
for t in tags:
entity = {
"_id": data["code"],
"code": data["code"],
"program_code": data["code"],
"program_title": data["title"],
"day": "",
"save_folder": Util.change_text_for_use_filename(
data["save_folder"]
),
"title": t.text.strip(),
# "title": t.text_content().strip(),
"episode_no": t.text.strip()
# "title": data["title"],
}
# entity['code'] = re1.search(t.attrib['href']).group('code')

@@ -902,9 +906,9 @@ class LogicLinkkf(LogicModuleBase):
# logger.debug(f"m_obj::> {m_obj}")
if m_obj is not None:
episode_code = m_obj.group(1)
entity["code"] = data["code"] + episode_code.zfill(4)
entity["_id"] = data["code"] + episode_code.zfill(4)
else:
entity["code"] = data["code"]
entity["_id"] = data["code"]

aa = t["href"]
if "/player" in aa:
@@ -932,6 +936,7 @@ class LogicLinkkf(LogicModuleBase):
data["episode"].append(entity)
idx = idx + 1

# logger.debug(f"{data}")
data["ret"] = True
# logger.info('data', data)
self.current_data = data
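Each episode entity appended above gets its code and _id by joining the program code with the episode number zero-padded to four digits, which keeps the per-episode IDs unique and sortable. A tiny illustration with invented values:

program_code = "1234"      # hypothetical program code
episode_code = "7"         # episode number captured by the regex
entity_code = program_code + episode_code.zfill(4)
print(entity_code)         # 12340007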
@@ -977,7 +982,7 @@ class LogicLinkkf(LogicModuleBase):
ret = "%s.S%sE%s.720p-LK.mp4" % (maintitle, season, epi_no)
else:
logger.debug("NOT MATCH")
ret = "%s.720p-SA.mp4" % maintitle
ret = "%s.720p-LK.mp4" % maintitle

return Util.change_text_for_use_filename(ret)
except Exception as e:
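When the title regex matches, the hunk above formats a season/episode filename; otherwise it falls back to a plain 720p name (both the -SA and -LK suffix variants appear in the hunk). A quick check with invented values:

maintitle, season, epi_no = "Some Title", "1", "03"  # hypothetical values
print("%s.S%sE%s.720p-LK.mp4" % (maintitle, season, epi_no))  # Some Title.S1E03.720p-LK.mp4
print("%s.720p-LK.mp4" % maintitle)                           # Some Title.720p-LK.mp4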
@@ -985,22 +990,22 @@ class LogicLinkkf(LogicModuleBase):
logger.error(traceback.format_exc())

def add(self, episode_info):
logger.debug("episode_info")
logger.debug(episode_info)
# logger.debug("episode_info")
# logger.debug(episode_info)

if self.is_exist(episode_info):
return "queue_exist"
else:

db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["_id"])
db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["code"])

logger.debug("db_entity:::> %s", db_entity)
# logger.debug("db_entity:::> %s", db_entity)
# logger.debug("db_entity.status ::: %s", db_entity.status)
if db_entity is None:

entity = LinkkfQueueEntity(P, self, episode_info)

logger.debug("entity:::> %s", entity.as_dict())
# logger.debug("entity:::> %s", entity.as_dict())
ModelLinkkfItem.append(entity.as_dict())
# # logger.debug("entity:: type >> %s", type(entity))
#
@@ -1036,7 +1041,7 @@ class LogicLinkkf(LogicModuleBase):
logger.debug(f"linkkf scheduler_function:: =========================")

content_code_list = P.ModelSetting.get_list("linkkf_auto_code_list", "|")
logger.debug(content_code_list)
# logger.debug(content_code_list)

if "all" in content_code_list:
url = f'{P.ModelSetting.get("linkkf_url")}/dailyani'
@@ -1050,7 +1055,7 @@ class LogicLinkkf(LogicModuleBase):
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
content_info = self.get_series_info(item)

logger.debug(content_info["episode"])
# logger.debug(content_info["episode"])
# exit()

for episode_info in content_info["episode"]:
@@ -1127,28 +1132,33 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None:
db_entity.status = "completed"
db_entity.complated_time = datetime.now()
db_entity.completed_time = datetime.now()
db_entity.save()

def donwload_completed(self):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None:
db_entity.status = "completed"
db_entity.complated_time = datetime.now()
db_entity.completed_time = datetime.now()
db_entity.save()

# Get episode info from site
def make_episode_info(self):
logger.debug("call make_episode_info(): ")

url2s = []
url = None
logger.debug(self.info)
logger.debug(f'self.info:: {self.info["url"]}')

# logger.debug(self)
# print("")
# logger.debug(self.info)
# logger.debug(f'self.info:: {self.info["url"]}')
# exit()
try:
# logger.debug(self)
# logger.debug(self.url)
data = LogicLinkkf.get_html_episode_content(self.info["url"])
# logger.debug(f"data:: {data}")
# exit()
tree = html.fromstring(data)

xpath_select_query = '//*[@id="body"]/div/span/center/select/option'
@@ -1198,7 +1208,7 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
continue
# logger.debug(f"url: {url}, url2: {url2}")
ret = LogicLinkkf.get_video_url_from_url(url, url2)
logger.debug(f"ret::::> {ret}")
# logger.debug(f"ret::::> {ret}")

if ret is not None:
video_url = ret
@@ -1209,11 +1219,93 @@ class LinkkfQueueEntity(FfmpegQueueEntity):

# logger.info(video_url)
# return [video_url, referer_url]
return video_url
# return video_url

logger.debug(video_url)

logger.info("dx: urls2:: %s", url2s)

video_url = None
referer_url = None # dx
self.url = video_url[0]
base_url = "https://kfani.me"
self.srt_url = base_url + video_url[2]

match = re.compile(
r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)"
% ("기", "화")
).search(self.info["program_title"])
#
# epi_no initial value
epi_no = 1
#

logger.debug(match)

if match:
self.content_title = match.group("title").strip()
# if "season" in match.groupdict() and match.group("season") is not None:
# self.season = int(match.group("season"))
#
# # epi_no = 1
# epi_no = int(match.group("epi_no"))
# ret = "%s.S%sE%s.%s-LK.mp4" % (
# self.content_title,
# "0%s" % self.season if self.season < 10 else self.season,
# "0%s" % epi_no if epi_no < 10 else epi_no,
# self.quality,
# )
else:
self.content_title = self.info["program_title"]
# P.logger.debug("NOT MATCH")
# ret = "%s.720p-LK.mp4" % self.info["program_title"]

# logger.info('self.content_title:: %s', self.content_title)
self.epi_queue = epi_no
# self.filename = Util.change_text_for_use_filename(ret)
self.filename = self.info["filename"]
logger.info(f"self.filename::> {self.filename}")
self.savepath = P.ModelSetting.get("linkkf_download_path")
logger.info(f"self.savepath::> {self.savepath}")

# TODO: handle finished series

folder_name = None

if P.ModelSetting.get_bool("linkkf_auto_make_folder"):
if self.info["day"].find("완결") != -1:
folder_name = "%s %s" % (
P.ModelSetting.get("linkkf_finished_insert"),
self.content_title,
)
else:
folder_name = self.content_title

# logger.debug(f"folder_name:: {folder_name}")
# logger.debug(f"self.content_title:: {self.content_title}")

folder_name = Util.change_text_for_use_filename(folder_name.strip())
self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("linkkf_auto_make_season_folder"):
self.savepath = os.path.join(
self.savepath, "Season %s" % int(self.season)
)

self.filepath = os.path.join(self.savepath, self.filename)
if not os.path.exists(self.savepath):
os.makedirs(self.savepath)

from framework.common.util import write_file, convert_vtt_to_srt

srt_filepath = os.path.join(
self.savepath, self.filename.replace(".mp4", ".ko.srt")
)

if (
self.srt_url is not None
and not os.path.exists(srt_filepath)
and not ("thumbnails.vtt" in self.srt_url)
):
srt_data = requests.get(self.srt_url, headers=headers).text
write_file(srt_data, srt_filepath)

except Exception as e:
logger.error(f"Exception: {str(e)}")
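make_episode_info parses the program title with a regex built around the Korean counters 기 (season) and 화 (episode). A standalone check of that pattern; the sample title is invented:

import re

pattern = re.compile(r"(?P<title>.*?)\s*((?P<season>\d+)기)?\s*((?P<epi_no>\d+)화)")
m = pattern.search("어떤 애니 2기 5화")  # hypothetical title
if m:
    print(m.group("title"), m.group("season"), m.group("epi_no"))  # 어떤 애니 2 5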
@@ -1234,6 +1326,7 @@ class ModelLinkkfItem(db.Model):
title = db.Column(db.String)
episode_title = db.Column(db.String)
# linkkf_va = db.Column(db.String)
linkkf_code = db.Column(db.String)
linkkf_vi = db.Column(db.String)
linkkf_id = db.Column(db.String)
quality = db.Column(db.String)
@@ -1329,7 +1422,7 @@ class ModelLinkkfItem(db.Model):

@classmethod
def append(cls, q):
logger.debug(q)
# logger.debug(q)
item = ModelLinkkfItem()
item.content_code = q["program_code"]
item.season = q["season"]
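ModelLinkkfItem.append fills a new row from the queue entity dict, and the rest of the plugin looks rows up through get_by_linkkf_id. A hedged sketch of what such a lookup could look like under the usual Flask-SQLAlchemy pattern; the real method lives elsewhere in this model and may differ:

@classmethod
def get_by_linkkf_id(cls, linkkf_id):
    # assumption: a simple filter on the linkkf_id column, first match wins
    return db.session.query(cls).filter_by(linkkf_id=linkkf_id).first()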