main -> 2023.03.01 ohli24 버그 픽스 (.01. code cleanup)

This commit is contained in:
2023-03-01 18:01:39 +09:00
parent 3162911f1e
commit 10bd5e7412
6 changed files with 201 additions and 34 deletions

View File

@@ -142,8 +142,11 @@ class FfmpegQueue(object):
# os.makedirs(save_path)
# except:
# logger.debug('program path make fail!!')
# 파일 존재여부 체크
filepath = entity.get_video_filepath()
filepath = str(entity.get_video_filepath())
self.P.logger.debug(filepath)
self.P.logger.debug(entity.get_video_filepath())
if os.path.exists(filepath):
entity.ffmpeg_status_kor = "파일 있음"
entity.ffmpeg_percent = 100
@@ -151,6 +154,8 @@ class FfmpegQueue(object):
# plugin.socketio_list_refresh()
continue
dirname = os.path.dirname(filepath)
self.P.logger.debug(type(dirname))
self.P.logger.debug(dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
f = ffmpeg.Ffmpeg(

View File

@@ -18,7 +18,7 @@ def yommi_timeit(func):
total_time = end_time - start_time
# print(f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs")
logger.opt(colors=True).debug(
f"<red>{func.__name__}{args} {kwargs}</red> function took <green>{total_time:.4f}</green>secs"
f"<red>{func.__name__}{args} {kwargs}</red> function took <green>{total_time:.4f}</green> secs"
)
return result

View File

@@ -39,6 +39,7 @@ from plugin import (
# 철자가 틀린 부분이 있어서 분리함
#
from .lib.plugin import FfmpegQueue, FfmpegQueueEntity
from .lib.utils import yommi_timeit
packages = ["beautifulsoup4", "requests-cache", "cloudscraper"]
@@ -241,6 +242,7 @@ class LogicLinkkf(LogicModuleBase):
P.logger.error(traceback.format_exc())
@staticmethod
@yommi_timeit
def get_html(url: str, timeout: int = 10, cached=False):
try:
@@ -310,7 +312,7 @@ class LogicLinkkf(LogicModuleBase):
def add_whitelist(*args):
ret = {}
logger.debug(f"args: {args}")
# logger.debug(f"args: {args}")
try:
if len(args) == 0:
@@ -710,7 +712,7 @@ class LogicLinkkf(LogicModuleBase):
data = {"ret": "success", "page": page}
response_data = LogicLinkkf.get_html(url, timeout=10)
# P.logger.debug(response_data)
P.logger.debug("debug.....................")
# P.logger.debug("debug.....................")
tree = html.fromstring(response_data)
tmp_items = tree.xpath(items_xpath)
@@ -758,7 +760,7 @@ class LogicLinkkf(LogicModuleBase):
url = "%s/%s" % (P.ModelSetting.get("linkkf_url"), code)
logger.info(url)
logger.debug(LogicLinkkf.headers)
# logger.debug(LogicLinkkf.headers)
html_content = LogicLinkkf.get_html(url, cached=False)
# html_content = LogicLinkkf.get_html_playwright(url)
# html_content = LogicLinkkf.get_html_cloudflare(url, cached=False)
@@ -862,7 +864,7 @@ class LogicLinkkf(LogicModuleBase):
else:
tags = soup.select("ul > a")
logger.debug(len(tags))
logger.debug(f"count: {len(tags)}")
# logger.info("tags", tags)
# re1 = re.compile(r'\/(?P<code>\d+)')
@@ -878,14 +880,16 @@ class LogicLinkkf(LogicModuleBase):
idx = 1
for t in tags:
entity = {
"_id": data["code"],
"code": data["code"],
"program_code": data["code"],
"program_title": data["title"],
"day": "",
"save_folder": Util.change_text_for_use_filename(
data["save_folder"]
),
"title": t.text.strip(),
# "title": t.text_content().strip(),
"episode_no": t.text.strip()
# "title": data["title"],
}
# entity['code'] = re1.search(t.attrib['href']).group('code')
@@ -902,9 +906,9 @@ class LogicLinkkf(LogicModuleBase):
# logger.debug(f"m_obj::> {m_obj}")
if m_obj is not None:
episode_code = m_obj.group(1)
entity["code"] = data["code"] + episode_code.zfill(4)
entity["_id"] = data["code"] + episode_code.zfill(4)
else:
entity["code"] = data["code"]
entity["_id"] = data["code"]
aa = t["href"]
if "/player" in aa:
@@ -932,6 +936,7 @@ class LogicLinkkf(LogicModuleBase):
data["episode"].append(entity)
idx = idx + 1
# logger.debug(f"{data}")
data["ret"] = True
# logger.info('data', data)
self.current_data = data
@@ -977,7 +982,7 @@ class LogicLinkkf(LogicModuleBase):
ret = "%s.S%sE%s.720p-LK.mp4" % (maintitle, season, epi_no)
else:
logger.debug("NOT MATCH")
ret = "%s.720p-SA.mp4" % maintitle
ret = "%s.720p-LK.mp4" % maintitle
return Util.change_text_for_use_filename(ret)
except Exception as e:
@@ -985,22 +990,22 @@ class LogicLinkkf(LogicModuleBase):
logger.error(traceback.format_exc())
def add(self, episode_info):
logger.debug("episode_info")
logger.debug(episode_info)
# logger.debug("episode_info")
# logger.debug(episode_info)
if self.is_exist(episode_info):
return "queue_exist"
else:
db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["_id"])
db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["code"])
logger.debug("db_entity:::> %s", db_entity)
# logger.debug("db_entity:::> %s", db_entity)
# logger.debug("db_entity.status ::: %s", db_entity.status)
if db_entity is None:
entity = LinkkfQueueEntity(P, self, episode_info)
logger.debug("entity:::> %s", entity.as_dict())
# logger.debug("entity:::> %s", entity.as_dict())
ModelLinkkfItem.append(entity.as_dict())
# # logger.debug("entity:: type >> %s", type(entity))
#
@@ -1036,7 +1041,7 @@ class LogicLinkkf(LogicModuleBase):
logger.debug(f"linkkf scheduler_function:: =========================")
content_code_list = P.ModelSetting.get_list("linkkf_auto_code_list", "|")
logger.debug(content_code_list)
# logger.debug(content_code_list)
if "all" in content_code_list:
url = f'{P.ModelSetting.get("linkkf_url")}/dailyani'
@@ -1050,7 +1055,7 @@ class LogicLinkkf(LogicModuleBase):
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
content_info = self.get_series_info(item)
logger.debug(content_info["episode"])
# logger.debug(content_info["episode"])
# exit()
for episode_info in content_info["episode"]:
@@ -1127,28 +1132,33 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None:
db_entity.status = "completed"
db_entity.complated_time = datetime.now()
db_entity.completed_time = datetime.now()
db_entity.save()
def donwload_completed(self):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None:
db_entity.status = "completed"
db_entity.complated_time = datetime.now()
db_entity.completed_time = datetime.now()
db_entity.save()
# Get episode info from site
def make_episode_info(self):
logger.debug("call make_episode_info(): ")
url2s = []
url = None
logger.debug(self.info)
logger.debug(f'self.info:: {self.info["url"]}')
# logger.debug(self)
# print("")
# logger.debug(self.info)
# logger.debug(f'self.info:: {self.info["url"]}')
# exit()
try:
# logger.debug(self)
# logger.debug(self.url)
data = LogicLinkkf.get_html_episode_content(self.info["url"])
# logger.debug(f"data:: {data}")
# exit()
tree = html.fromstring(data)
xpath_select_query = '//*[@id="body"]/div/span/center/select/option'
@@ -1198,7 +1208,7 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
continue
# logger.debug(f"url: {url}, url2: {url2}")
ret = LogicLinkkf.get_video_url_from_url(url, url2)
logger.debug(f"ret::::> {ret}")
# logger.debug(f"ret::::> {ret}")
if ret is not None:
video_url = ret
@@ -1209,11 +1219,93 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
# logger.info(video_url)
# return [video_url, referer_url]
return video_url
# return video_url
logger.debug(video_url)
logger.info("dx: urls2:: %s", url2s)
video_url = None
referer_url = None # dx
self.url = video_url[0]
base_url = "https://kfani.me"
self.srt_url = base_url + video_url[2]
match = re.compile(
r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)"
% ("", "")
).search(self.info["program_title"])
#
# epi_no 초기값
epi_no = 1
#
logger.debug(match)
if match:
self.content_title = match.group("title").strip()
# if "season" in match.groupdict() and match.group("season") is not None:
# self.season = int(match.group("season"))
#
# # epi_no = 1
# epi_no = int(match.group("epi_no"))
# ret = "%s.S%sE%s.%s-LK.mp4" % (
# self.content_title,
# "0%s" % self.season if self.season < 10 else self.season,
# "0%s" % epi_no if epi_no < 10 else epi_no,
# self.quality,
# )
else:
self.content_title = self.info["program_title"]
# P.logger.debug("NOT MATCH")
# ret = "%s.720p-LK.mp4" % self.info["program_title"]
# logger.info('self.content_title:: %s', self.content_title)
self.epi_queue = epi_no
# self.filename = Util.change_text_for_use_filename(ret)
self.filename = self.info["filename"]
logger.info(f"self.filename::> {self.filename}")
self.savepath = P.ModelSetting.get("linkkf_download_path")
logger.info(f"self.savepath::> {self.savepath}")
# TODO: 완결 처리
folder_name = None
if P.ModelSetting.get_bool("linkkf_auto_make_folder"):
if self.info["day"].find("완결") != -1:
folder_name = "%s %s" % (
P.ModelSetting.get("linkkf_finished_insert"),
self.content_title,
)
else:
folder_name = self.content_title
# logger.debug(f"folder_name:: {folder_name}")
# logger.debug(f"self.content_title:: {self.content_title}")
folder_name = Util.change_text_for_use_filename(folder_name.strip())
self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("linkkf_auto_make_season_folder"):
self.savepath = os.path.join(
self.savepath, "Season %s" % int(self.season)
)
self.filepath = os.path.join(self.savepath, self.filename)
if not os.path.exists(self.savepath):
os.makedirs(self.savepath)
from framework.common.util import write_file, convert_vtt_to_srt
srt_filepath = os.path.join(
self.savepath, self.filename.replace(".mp4", ".ko.srt")
)
if (
self.srt_url is not None
and not os.path.exists(srt_filepath)
and not ("thumbnails.vtt" in self.srt_url)
):
srt_data = requests.get(self.srt_url, headers=headers).text
write_file(srt_data, srt_filepath)
except Exception as e:
logger.error(f"Exception: {str(e)}")
@@ -1234,6 +1326,7 @@ class ModelLinkkfItem(db.Model):
title = db.Column(db.String)
episode_title = db.Column(db.String)
# linkkf_va = db.Column(db.String)
linkkf_code = db.Column(db.String)
linkkf_vi = db.Column(db.String)
linkkf_id = db.Column(db.String)
quality = db.Column(db.String)
@@ -1329,7 +1422,7 @@ class ModelLinkkfItem(db.Model):
@classmethod
def append(cls, q):
logger.debug(q)
# logger.debug(q)
item = ModelLinkkfItem()
item.content_code = q["program_code"]
item.season = q["season"]

View File

@@ -7,12 +7,16 @@
# @Software: PyCharm
import os, sys, traceback, re, json, threading
import time
from datetime import datetime, date
import copy
import hashlib
import discord
# third-party
import requests
from discord_webhook import DiscordWebhook, DiscordEmbed
from lxml import html
from urllib import parse
import urllib
@@ -62,7 +66,7 @@ logger = P.logger
class LogicOhli24(LogicModuleBase):
db_default = {
"ohli24_db_version": "1",
"ohli24_db_version": "1.1",
"ohli24_url": "https://ohli24.org",
"ohli24_download_path": os.path.join(path_data, P.package_name, "ohli24"),
"ohli24_auto_make_folder": "True",
@@ -105,6 +109,11 @@ class LogicOhli24(LogicModuleBase):
super(LogicOhli24, self).__init__(P, "setting", scheduler_desc="ohli24 자동 다운로드")
self.name = "ohli24"
self.queue = None
self.last_post_title = ""
self.discord_webhook_url = "REDACTED-DISCORD-WEBHOOK-URL"  # NOTE(review): a live webhook URL with its token was committed here — revoke that webhook and load the URL from plugin settings, never from source control
self.discord_color = "242424"
self.discord_title = "새로운 애니"
self.DISCORD_CHANNEL_ID = "1071430054023798958"
default_route_socketio(P, self)
@staticmethod
@@ -503,10 +512,7 @@ class LogicOhli24(LogicModuleBase):
# print(code)
whitelist_program = P.ModelSetting.get("ohli24_auto_code_list")
# whitelist_programs = [
# str(x.strip().replace(" ", ""))
# for x in whitelist_program.replace("\n", "|").split("|")
# ]
whitelist_programs = [
str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")
]
@@ -596,6 +602,8 @@ class LogicOhli24(LogicModuleBase):
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
content_info = self.get_series_info(item, "", "")
logger.debug(content_info)
for episode_info in content_info["episode"]:
add_ret = self.add(episode_info)
if add_ret.startswith("enqueue"):
@@ -666,7 +674,7 @@ class LogicOhli24(LogicModuleBase):
else:
pass
logger.debug("url:::> %s", url)
# logger.debug("url:::> %s", url)
# self.current_headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)
# AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36',
@@ -933,6 +941,59 @@ class LogicOhli24(LogicModuleBase):
P.logger.error(traceback.format_exc())
return {"ret": "exception", "log": str(e)}
def check_for_new_post(self):
# Get the HTML content of the page
res = requests.get("https://ohli24.org/bbs/board.php?bo_table=ing")
soup = BeautifulSoup(res.content, "html.parser")
# Find the latest post on the page
latest_post = soup.find("div", class_="post-title").text
latest_post_image = (
soup.find("div", class_="img-item")
.find("img", class_="wr-img")
.get("src")
.replace("..", "https://ohli24.org")
)
logger.debug(f"latest_post:: {latest_post}")
logger.debug(f"self.last_post_title:: {self.last_post_title}")
logger.debug(f"latest_post_image:: {latest_post_image}")
# Compare the latest post with the last recorded post
if latest_post != self.last_post_title:
# If there is a new post, update the last recorded post
self.last_post_title = latest_post
# Send a notification to Discord channel
# discord_client = discord.Client()
# discord_client.run(self.DISCORD_BOT_TOKEN)
#
# async def on_ready():
# channel = discord_client.get_channel(self.DISCORD_CHANNEL_ID)
# await channel.send(f"A new post has been added: {latest_post}")
#
# discord_client.close()
webhook = DiscordWebhook(url=self.discord_webhook_url)
embed = DiscordEmbed(title=self.discord_title, color=self.discord_color)
embed.set_timestamp()
path = self.last_post_title
embed.set_image(url=latest_post_image)
embed.add_embed_field(name="", value=path, inline=True)
embed.set_timestamp()
webhook.add_embed(embed)
response = webhook.execute()
return self.last_post_title
return self.last_post_title
def send_notify(self):
logger.debug("send_notify() routine")
while True:
self.last_post_title = self.check_for_new_post()
logger.debug(self.last_post_title)
time.sleep(600)
# @staticmethod
def plugin_load(self):
try:
@@ -943,6 +1004,10 @@ class LogicOhli24(LogicModuleBase):
self.current_data = None
self.queue.queue_start()
logger.debug(P.ModelSetting.get_bool("ohli24_discord_notify"))
if P.ModelSetting.get_bool("ohli24_discord_notify"):
self.send_notify()
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())

View File

@@ -243,7 +243,9 @@
$("#analysis_btn").unbind("click").bind('click', function (e) {
e.preventDefault();
e.stopPropagation()
const button = document.getElementById('analysis_btn');
const code = document.getElementById("code").value
button.setAttribute("disabled", "disabled");
console.log(code)
$.ajax({
url: '/' + package_name + '/ajax/' + sub + '/analysis',
@@ -256,6 +258,7 @@
// {#console.log(ret.code)#}
console.log(ret.data)
make_program(ret.data)
button.removeAttribute("disabled");
} else {
$.notify('<strong>분석 실패</strong><br>' + ret.log, {type: 'warning'});
}

View File

@@ -24,6 +24,7 @@
{{ macros.setting_checkbox('ohli24_auto_make_season_folder', '시즌 폴더 생성', value=arg['ohli24_auto_make_season_folder'], desc=['On : Season 번호 폴더를 만듭니다.']) }}
</div>
{{ macros.setting_checkbox('ohli24_uncompleted_auto_enqueue', '자동으로 다시 받기', value=arg['ohli24_uncompleted_auto_enqueue'], desc=['On : 플러그인 로딩시 미완료인 항목은 자동으로 다시 받습니다.']) }}
{{ macros.setting_checkbox('ohli24_discord_notify', '디스코드 알림 받기', value=arg['ohli24_discord_notify'], desc=['On : 새로운 글이 올라올때 디스코드 알림을 보냅니다.']) }}
{{ macros.m_tab_content_end() }}
{{ macros.m_tab_content_start('auto', false) }}