Compare commits

..

36 Commits

SHA1 Message Date
a8486726f6 2024.08.21 patch.03 2024-08-21 19:39:50 +09:00
391a0ee861 2024.08.21 patch.02 2024-08-21 19:24:29 +09:00
408be433f2 2024.08.21 patch.01 2024-08-21 19:13:43 +09:00
c87e29f085 2024.08.13 19:22 patch.01 2024-08-13 19:26:16 +09:00
b27cd39aa4 2024.08.13 19:22 2024-08-13 19:22:47 +09:00
205c17ae4e edit fix3 2024-02-16 19:00:01 +09:00
e101a02886 error fix2 2024-02-16 15:07:14 +09:00
04c0e34db5 error fix 2024-02-13 15:23:57 +09:00
f1d5f1db68 main -> 2023.10.8 fix "작화" 2023-10-08 22:29:02 +09:00
f0eda8ef87 main -> 2023.10.5 fix 2023-10-05 21:20:18 +09:00
d4fcc9a633 main -> 2023.09.20 ohli24 bug fix (.01. download issue resolved) 2023-09-23 22:48:21 +09:00
9ca8dcc3da main -> 2023.09.20 ohli24 bug fix (.01. download issue resolved) 2023-09-20 21:57:21 +09:00
301806a906 main -> 2023.09.20 ohli24 bug fix (.01. download issue resolved) 2023-09-20 14:40:13 +09:00
eca29b6947 main -> 2023.09.20 ohli24 bug fix (.01. download issue resolved) 2023-09-20 14:32:56 +09:00
d07cc820dc main -> 2023.09.20 ohli24 bug fix (.01. download issue resolved) 2023-09-19 23:47:52 +09:00
710d70dbfd main -> 2023.09.20 ohli24 bug fix (.01. download issue resolved) 2023-09-19 23:42:16 +09:00
6f2edeaf89 main -> 2023.09.20 ohli24 bug fix (.01. download issue resolved) 2023-09-19 23:37:50 +09:00
769d40e5bb main -> 2023.08.31 ohli24 bug fix (.01. download issue resolved) 2023-08-31 00:20:57 +09:00
c53f1f50c9 main -> 2023.08.07 ohli24 bug fix (.01. download issue resolved) 2023-08-12 21:22:28 +09:00
9cae04584d main -> 2023.08.07 ohli24 bug fix (.01. download issue resolved) 2023-08-07 19:16:59 +09:00
efcadde111 main -> 2023.08.07 ohli24 bug fix (.01. download issue resolved) 2023-08-07 19:08:46 +09:00
145e277895 main -> 2023.08.07 ohli24 bug fix (.01. download issue resolved) 2023-08-07 19:06:08 +09:00
1b76d36352 main -> 2023.07.01 ohli24 bug fix (.01. download issue resolved) 2023-07-01 00:13:09 +09:00
a7cf43e0cc main -> 2023.07.01 ohli24 bug fix (.01. download issue resolved) 2023-07-01 00:09:52 +09:00
dd8a68a267 main -> 2023.05.11 ohli24 bug fix (.01. todo: setting_save_after) 2023-05-11 19:41:39 +09:00
6bf816db10 main -> 2023.05.09 ohli24 bug fix (.01. referer url fix) 2023-05-09 20:56:57 +09:00
becfc7feef main -> 2023.05.09 ohli24 bug fix (.01. referer url fix) 2023-05-09 20:54:59 +09:00
25cddecfe9 main -> 2023.04.22 ohli24 bug fix (.01. img xpath fix) 2023-04-22 23:26:19 +09:00
292a3fd388 main -> 2023.03.07 ohli24 bug fix (.01. img xpath fix) 2023-03-07 21:11:49 +09:00
87461cce4a main -> 2023.03.07 ohli24 bug fix (.01. img xpath fix) 2023-03-07 20:35:22 +09:00
080ae6ab0c main -> 2023.03.07 ohli24 bug fix (.01. img xpath fix) 2023-03-07 20:26:38 +09:00
c8284f86b7 main -> 2023.03.03 ohli24 bug fix (.01. img xpath fix) 2023-03-03 18:54:10 +09:00
f4717c74e4 main -> 2023.03.01 ohli24 bug fix (.02. code cleanup) 2023-03-01 19:34:53 +09:00
c6940bbca5 main -> 2023.03.01 ohli24 bug fix (.01. code cleanup) 2023-03-01 18:05:58 +09:00
5506cc2e7f main -> 2023.03.01 ohli24 bug fix (.01. code cleanup) 2023-03-01 18:03:56 +09:00
10bd5e7412 main -> 2023.03.01 ohli24 bug fix (.01. code cleanup) 2023-03-01 18:01:39 +09:00
7 changed files with 381 additions and 105 deletions

View File

@@ -142,8 +142,11 @@ class FfmpegQueue(object):
# os.makedirs(save_path)
# except:
# logger.debug('program path make fail!!')
# 파일 존재여부 체크
filepath = entity.get_video_filepath()
filepath = str(entity.get_video_filepath())
self.P.logger.debug(filepath)
self.P.logger.debug(entity.get_video_filepath())
if os.path.exists(filepath):
entity.ffmpeg_status_kor = "파일 있음"
entity.ffmpeg_percent = 100
@@ -151,6 +154,8 @@ class FfmpegQueue(object):
# plugin.socketio_list_refresh()
continue
dirname = os.path.dirname(filepath)
self.P.logger.debug(type(dirname))
self.P.logger.debug(dirname)
if not os.path.exists(dirname):
os.makedirs(dirname)
f = ffmpeg.Ffmpeg(

View File

@@ -18,7 +18,7 @@ def yommi_timeit(func):
total_time = end_time - start_time
# print(f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs")
logger.opt(colors=True).debug(
f"<red>{func.__name__}{args} {kwargs}</red> function took <green>{total_time:.4f}</green>secs"
f"<red>{func.__name__}{args} {kwargs}</red> function took <green>{total_time:.4f}</green> secs"
)
return result

View File

@@ -1033,12 +1033,14 @@ class LogicAniLife(LogicModuleBase):
return ret
def setting_save_after(self):
if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
"anilife_max_ffmpeg_process_count"
):
self.queue.set_max_ffmpeg_count(
P.ModelSetting.get_int("anilife_max_ffmpeg_process_count")
)
pass
# Todo: 버그 고쳐야함
# if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
# "anilife_max_ffmpeg_process_count"
# ):
# self.queue.set_max_ffmpeg_count(
# P.ModelSetting.get_int("anilife_max_ffmpeg_process_count")
# )
def scheduler_function(self):
logger.debug(f"anilife scheduler_function:: =========================")

View File

@@ -39,6 +39,7 @@ from plugin import (
# 철자가 틀린 부분이 있어서 분리함
#
from .lib.plugin import FfmpegQueue, FfmpegQueueEntity
from .lib.utils import yommi_timeit
packages = ["beautifulsoup4", "requests-cache", "cloudscraper"]
@@ -241,6 +242,7 @@ class LogicLinkkf(LogicModuleBase):
P.logger.error(traceback.format_exc())
@staticmethod
@yommi_timeit
def get_html(url: str, timeout: int = 10, cached=False):
try:
@@ -310,7 +312,7 @@ class LogicLinkkf(LogicModuleBase):
def add_whitelist(*args):
ret = {}
logger.debug(f"args: {args}")
# logger.debug(f"args: {args}")
try:
if len(args) == 0:
@@ -360,12 +362,14 @@ class LogicLinkkf(LogicModuleBase):
return ret
def setting_save_after(self):
if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
"linkkf_max_ffmpeg_process_count"
):
self.queue.set_max_ffmpeg_count(
P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
)
# Todo:
pass
# if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
# "linkkf_max_ffmpeg_process_count"
# ):
# self.queue.set_max_ffmpeg_count(
# P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
# )
def get_video_url_from_url(url, url2):
video_url = None
@@ -710,7 +714,7 @@ class LogicLinkkf(LogicModuleBase):
data = {"ret": "success", "page": page}
response_data = LogicLinkkf.get_html(url, timeout=10)
# P.logger.debug(response_data)
P.logger.debug("debug.....................")
# P.logger.debug("debug.....................")
tree = html.fromstring(response_data)
tmp_items = tree.xpath(items_xpath)
@@ -758,7 +762,7 @@ class LogicLinkkf(LogicModuleBase):
url = "%s/%s" % (P.ModelSetting.get("linkkf_url"), code)
logger.info(url)
logger.debug(LogicLinkkf.headers)
# logger.debug(LogicLinkkf.headers)
html_content = LogicLinkkf.get_html(url, cached=False)
# html_content = LogicLinkkf.get_html_playwright(url)
# html_content = LogicLinkkf.get_html_cloudflare(url, cached=False)
@@ -862,7 +866,7 @@ class LogicLinkkf(LogicModuleBase):
else:
tags = soup.select("ul > a")
logger.debug(len(tags))
logger.debug(f"count: {len(tags)}")
# logger.info("tags", tags)
# re1 = re.compile(r'\/(?P<code>\d+)')
@@ -878,14 +882,16 @@ class LogicLinkkf(LogicModuleBase):
idx = 1
for t in tags:
entity = {
"_id": data["code"],
"code": data["code"],
"program_code": data["code"],
"program_title": data["title"],
"day": "",
"save_folder": Util.change_text_for_use_filename(
data["save_folder"]
),
"title": t.text.strip(),
# "title": t.text_content().strip(),
"episode_no": t.text.strip()
# "title": data["title"],
}
# entity['code'] = re1.search(t.attrib['href']).group('code')
@@ -902,9 +908,9 @@ class LogicLinkkf(LogicModuleBase):
# logger.debug(f"m_obj::> {m_obj}")
if m_obj is not None:
episode_code = m_obj.group(1)
entity["code"] = data["code"] + episode_code.zfill(4)
entity["_id"] = data["code"] + episode_code.zfill(4)
else:
entity["code"] = data["code"]
entity["_id"] = data["code"]
aa = t["href"]
if "/player" in aa:
@@ -932,6 +938,7 @@ class LogicLinkkf(LogicModuleBase):
data["episode"].append(entity)
idx = idx + 1
# logger.debug(f"{data}")
data["ret"] = True
# logger.info('data', data)
self.current_data = data
@@ -977,7 +984,7 @@ class LogicLinkkf(LogicModuleBase):
ret = "%s.S%sE%s.720p-LK.mp4" % (maintitle, season, epi_no)
else:
logger.debug("NOT MATCH")
ret = "%s.720p-SA.mp4" % maintitle
ret = "%s.720p-LK.mp4" % maintitle
return Util.change_text_for_use_filename(ret)
except Exception as e:
@@ -985,22 +992,22 @@ class LogicLinkkf(LogicModuleBase):
logger.error(traceback.format_exc())
def add(self, episode_info):
logger.debug("episode_info")
logger.debug(episode_info)
# logger.debug("episode_info")
# logger.debug(episode_info)
if self.is_exist(episode_info):
return "queue_exist"
else:
db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["_id"])
db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["code"])
logger.debug("db_entity:::> %s", db_entity)
# logger.debug("db_entity:::> %s", db_entity)
# logger.debug("db_entity.status ::: %s", db_entity.status)
if db_entity is None:
entity = LinkkfQueueEntity(P, self, episode_info)
logger.debug("entity:::> %s", entity.as_dict())
# logger.debug("entity:::> %s", entity.as_dict())
ModelLinkkfItem.append(entity.as_dict())
# # logger.debug("entity:: type >> %s", type(entity))
#
@@ -1036,7 +1043,7 @@ class LogicLinkkf(LogicModuleBase):
logger.debug(f"linkkf scheduler_function:: =========================")
content_code_list = P.ModelSetting.get_list("linkkf_auto_code_list", "|")
logger.debug(content_code_list)
# logger.debug(content_code_list)
if "all" in content_code_list:
url = f'{P.ModelSetting.get("linkkf_url")}/dailyani'
@@ -1050,7 +1057,7 @@ class LogicLinkkf(LogicModuleBase):
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
content_info = self.get_series_info(item)
logger.debug(content_info["episode"])
# logger.debug(content_info["episode"])
# exit()
for episode_info in content_info["episode"]:
@@ -1127,28 +1134,33 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None:
db_entity.status = "completed"
db_entity.complated_time = datetime.now()
db_entity.completed_time = datetime.now()
db_entity.save()
def donwload_completed(self):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None:
db_entity.status = "completed"
db_entity.complated_time = datetime.now()
db_entity.completed_time = datetime.now()
db_entity.save()
# Get episode info from site
def make_episode_info(self):
logger.debug("call make_episode_info(): ")
url2s = []
url = None
logger.debug(self.info)
logger.debug(f'self.info:: {self.info["url"]}')
# logger.debug(self)
# print("")
# logger.debug(self.info)
# logger.debug(f'self.info:: {self.info["url"]}')
# exit()
try:
# logger.debug(self)
# logger.debug(self.url)
data = LogicLinkkf.get_html_episode_content(self.info["url"])
# logger.debug(f"data:: {data}")
# exit()
tree = html.fromstring(data)
xpath_select_query = '//*[@id="body"]/div/span/center/select/option'
@@ -1198,7 +1210,7 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
continue
# logger.debug(f"url: {url}, url2: {url2}")
ret = LogicLinkkf.get_video_url_from_url(url, url2)
logger.debug(f"ret::::> {ret}")
# logger.debug(f"ret::::> {ret}")
if ret is not None:
video_url = ret
@@ -1209,11 +1221,93 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
# logger.info(video_url)
# return [video_url, referer_url]
return video_url
# return video_url
logger.debug(video_url)
logger.info("dx: urls2:: %s", url2s)
video_url = None
referer_url = None # dx
self.url = video_url[0]
base_url = "https://kfani.me"
self.srt_url = base_url + video_url[2]
match = re.compile(
r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)"
% ("", "")
).search(self.info["program_title"])
#
# epi_no 초기값
epi_no = 1
#
logger.debug(match)
if match:
self.content_title = match.group("title").strip()
# if "season" in match.groupdict() and match.group("season") is not None:
# self.season = int(match.group("season"))
#
# # epi_no = 1
# epi_no = int(match.group("epi_no"))
# ret = "%s.S%sE%s.%s-LK.mp4" % (
# self.content_title,
# "0%s" % self.season if self.season < 10 else self.season,
# "0%s" % epi_no if epi_no < 10 else epi_no,
# self.quality,
# )
else:
self.content_title = self.info["program_title"]
# P.logger.debug("NOT MATCH")
# ret = "%s.720p-LK.mp4" % self.info["program_title"]
# logger.info('self.content_title:: %s', self.content_title)
self.epi_queue = epi_no
# self.filename = Util.change_text_for_use_filename(ret)
self.filename = self.info["filename"]
logger.info(f"self.filename::> {self.filename}")
self.savepath = P.ModelSetting.get("linkkf_download_path")
logger.info(f"self.savepath::> {self.savepath}")
# TODO: 완결 처리
folder_name = None
if P.ModelSetting.get_bool("linkkf_auto_make_folder"):
if self.info["day"].find("완결") != -1:
folder_name = "%s %s" % (
P.ModelSetting.get("linkkf_finished_insert"),
self.content_title,
)
else:
folder_name = self.content_title
# logger.debug(f"folder_name:: {folder_name}")
# logger.debug(f"self.content_title:: {self.content_title}")
folder_name = Util.change_text_for_use_filename(folder_name.strip())
self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("linkkf_auto_make_season_folder"):
self.savepath = os.path.join(
self.savepath, "Season %s" % int(self.season)
)
self.filepath = os.path.join(self.savepath, self.filename)
if not os.path.exists(self.savepath):
os.makedirs(self.savepath)
from framework.common.util import write_file, convert_vtt_to_srt
srt_filepath = os.path.join(
self.savepath, self.filename.replace(".mp4", ".ko.srt")
)
if (
self.srt_url is not None
and not os.path.exists(srt_filepath)
and not ("thumbnails.vtt" in self.srt_url)
):
srt_data = requests.get(self.srt_url, headers=headers).text
write_file(srt_data, srt_filepath)
except Exception as e:
logger.error(f"Exception: {str(e)}")
@@ -1234,6 +1328,7 @@ class ModelLinkkfItem(db.Model):
title = db.Column(db.String)
episode_title = db.Column(db.String)
# linkkf_va = db.Column(db.String)
linkkf_code = db.Column(db.String)
linkkf_vi = db.Column(db.String)
linkkf_id = db.Column(db.String)
quality = db.Column(db.String)
@@ -1329,7 +1424,7 @@ class ModelLinkkfItem(db.Model):
@classmethod
def append(cls, q):
logger.debug(q)
# logger.debug(q)
item = ModelLinkkfItem()
item.content_code = q["program_code"]
item.season = q["season"]

View File

@@ -7,12 +7,16 @@
# @Software: PyCharm
import os, sys, traceback, re, json, threading
import time
from datetime import datetime, date
import copy
import hashlib
import discord
# third-party
import requests
from discord_webhook import DiscordWebhook, DiscordEmbed
from lxml import html
from urllib import parse
import urllib
@@ -62,9 +66,11 @@ logger = P.logger
class LogicOhli24(LogicModuleBase):
db_default = {
"ohli24_db_version": "1",
"ohli24_url": "https://ohli24.org",
"ohli24_download_path": os.path.join(path_data, P.package_name, "ohli24"),
"ohli24_db_version": "1.1",
"ohli24_url": "https://a21.ohli24.com",
"ohli24_download_path": os.path.join(
path_data, P.package_name, "ohli24"
),
"ohli24_auto_make_folder": "True",
"ohli24_auto_make_season_folder": "True",
"ohli24_finished_insert": "[완결]",
@@ -102,9 +108,16 @@ class LogicOhli24(LogicModuleBase):
}
def __init__(self, P):
super(LogicOhli24, self).__init__(P, "setting", scheduler_desc="ohli24 자동 다운로드")
super(LogicOhli24, self).__init__(
P, "setting", scheduler_desc="ohli24 자동 다운로드"
)
self.name = "ohli24"
self.queue = None
self.last_post_title = ""
self.discord_webhook_url = "https://discord.com/api/webhooks/1071430127860334663/viCiM5ssS-U1_ONWgdWa-64KgvPfU5jJ8WQAym-4vkiyASB0e8IcnlLnxG4F40nj10kZ"
self.discord_color = "242424"
self.discord_title = "새로운 애니"
self.DISCORD_CHANNEL_ID = "1071430054023798958"
default_route_socketio(P, self)
@staticmethod
@@ -207,7 +220,9 @@ class LogicOhli24(LogicModuleBase):
try:
if engine == "chrome":
browser = await p.chromium.launch(
channel="chrome", args=browser_args, headless=headless
channel="chrome",
args=browser_args,
headless=headless,
)
elif engine == "webkit":
browser = await p.webkit.launch(
@@ -223,9 +238,9 @@ class LogicOhli24(LogicModuleBase):
# user_agent=ua,
# )
LogicOhli24.headers[
"Referer"
] = "https://anilife.live/detail/id/471"
LogicOhli24.headers["Referer"] = (
"https://anilife.com/detail/id/471"
)
# print(LogicAniLife.headers)
LogicOhli24.headers["Referer"] = LogicOhli24.episode_url
@@ -235,7 +250,8 @@ class LogicOhli24(LogicModuleBase):
# logger.debug(f"LogicAniLife.headers::: {LogicOhli24.headers}")
context = await browser.new_context(
extra_http_headers=LogicOhli24.headers, ignore_https_errors=True
extra_http_headers=LogicOhli24.headers,
ignore_https_errors=True,
)
# await context.add_cookies(LogicOhli24.cookies)
@@ -329,7 +345,9 @@ class LogicOhli24(LogicModuleBase):
),
arg=arg,
)
return render_template("sample.html", title="%s - %s" % (P.package_name, sub))
return render_template(
"sample.html", title="%s - %s" % (P.package_name, sub)
)
# @staticmethod
def process_ajax(self, sub, req):
@@ -414,7 +432,8 @@ class LogicOhli24(LogicModuleBase):
count += 1
notify = {
"type": "success",
"msg": "%s 개의 에피소드를 큐에 추가 하였습니다." % count,
"msg": "%s 개의 에피소드를 큐에 추가 하였습니다."
% count,
}
socketio.emit(
"notify", notify, namespace="/framework", broadcast=True
@@ -503,12 +522,10 @@ class LogicOhli24(LogicModuleBase):
# print(code)
whitelist_program = P.ModelSetting.get("ohli24_auto_code_list")
# whitelist_programs = [
# str(x.strip().replace(" ", ""))
# for x in whitelist_program.replace("\n", "|").split("|")
# ]
whitelist_programs = [
str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")
str(x.strip())
for x in whitelist_program.replace("\n", "|").split("|")
]
if code not in whitelist_programs:
@@ -535,7 +552,7 @@ class LogicOhli24(LogicModuleBase):
ret["ret"] = False
ret["log"] = "이미 추가되어 있습니다."
except Exception as e:
logger.error("Exception:%s", e)
logger.error(f"Exception: {str(e)}")
logger.error(traceback.format_exc())
ret["ret"] = False
ret["log"] = str(e)
@@ -553,15 +570,25 @@ class LogicOhli24(LogicModuleBase):
# Todo: 스케쥴링 함수 미구현
logger.debug(f"ohli24 scheduler_function::=========================")
content_code_list = P.ModelSetting.get_list("ohli24_auto_code_list", "|")
content_code_list = P.ModelSetting.get_list(
"ohli24_auto_code_list", "|"
)
logger.debug(f"content_code_list::: {content_code_list}")
url_list = ["https://www.naver.com/", "https://www.daum.net/"]
week = ["월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"]
week = [
"월요일",
"화요일",
"수요일",
"목요일",
"금요일",
"토요일",
"일요일",
]
today = date.today()
print(today)
print()
print(today.weekday())
# print(today)
# print()
# print(today.weekday())
url = f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing&sca={week[today.weekday()]}'
@@ -592,10 +619,14 @@ class LogicOhli24(LogicModuleBase):
elif len(content_code_list) > 0:
for item in content_code_list:
url = P.ModelSetting.get("ohli24_url") + "/c/" + item
print("scheduling url: %s", url)
logger.debug(f"scheduling url: {url}")
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
print("debug===")
print(item)
content_info = self.get_series_info(item, "", "")
# logger.debug(content_info)
for episode_info in content_info["episode"]:
add_ret = self.add(episode_info)
if add_ret.startswith("enqueue"):
@@ -620,6 +651,7 @@ class LogicOhli24(LogicModuleBase):
def get_series_info(self, code, wr_id, bo_table):
code_type = "c"
code = urllib.parse.quote(code)
try:
if (
@@ -666,7 +698,7 @@ class LogicOhli24(LogicModuleBase):
else:
pass
logger.debug("url:::> %s", url)
# logger.debug("url:::> %s", url)
# self.current_headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)
# AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36',
@@ -691,6 +723,8 @@ class LogicOhli24(LogicModuleBase):
"_total_chapter",
"_show_time",
"_release_year",
"_drawing",
"_character_design"
]
description_dict = {
"원제": "_otit",
@@ -711,8 +745,11 @@ class LogicOhli24(LogicModuleBase):
"개봉년도": "_release_year",
"개봉일": "_opening_date",
"런타임": "_run_time",
"작화": "_drawing",
"캐릭터디자인": "_character_design"
}
list_body_li = tree.xpath('//ul[@class="list-body"]/li')
# logger.debug(f"list_body_li:: {list_body_li}")
episodes = []
@@ -844,12 +881,24 @@ class LogicOhli24(LogicModuleBase):
entity = {}
entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[
0
].strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
0
].replace("..", P.ModelSetting.get("ohli24_url"))
entity["title"] = item.xpath(
".//div[@class='post-title']/text()"
)[0].strip()
# logger.debug(item.xpath(".//div[@class='img-item']/img/@src")[0])
# logger.debug(item.xpath(".//div[@class='img-item']/img/@data-ezsrc")[0])
# entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
# 0
# ].replace("..", P.ModelSetting.get("ohli24_url"))
if len(item.xpath(".//div[@class='img-item']/img/@src")) > 0:
entity["image_link"] = item.xpath(
".//div[@class='img-item']/img/@src"
)[0].replace("..", P.ModelSetting.get("ohli24_url"))
else:
entity["image_link"] = item.xpath(
".//div[@class='img-item']/img/@data-ezsrc"
)[0]
data["ret"] = "success"
data["anime_list"].append(entity)
@@ -874,12 +923,12 @@ class LogicOhli24(LogicModuleBase):
entity = {}
entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[
0
].strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
0
].replace("..", P.ModelSetting.get("ohli24_url"))
entity["title"] = item.xpath(
".//div[@class='post-title']/text()"
)[0].strip()
entity["image_link"] = item.xpath(
".//div[@class='img-item']/img/@src"
)[0].replace("..", P.ModelSetting.get("ohli24_url"))
data["ret"] = "success"
data["anime_list"].append(entity)
@@ -918,21 +967,80 @@ class LogicOhli24(LogicModuleBase):
entity["title"] = "".join(
item.xpath(".//div[@class='post-title']/text()")
).strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
0
].replace("..", P.ModelSetting.get("ohli24_url"))
entity["image_link"] = item.xpath(
".//div[@class='img-item']/img/@src"
)[0].replace("..", P.ModelSetting.get("ohli24_url"))
entity["code"] = item.xpath(".//div[@class='img-item']/img/@alt")[0]
entity["code"] = item.xpath(
".//div[@class='img-item']/img/@alt"
)[0]
data["ret"] = "success"
data["anime_list"].append(entity)
return data
except Exception as e:
P.logger.error("Exception:%s", e)
P.logger.error(f"Exception: {str(e)}")
P.logger.error(traceback.format_exc())
return {"ret": "exception", "log": str(e)}
def check_for_new_post(self):
# Get the HTML content of the page
res = requests.get(
f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing'
)
soup = BeautifulSoup(res.content, "html.parser")
# Find the latest post on the page
latest_post = soup.find("div", class_="post-title").text
latest_post_image = (
soup.find("div", class_="img-item")
.find("img", class_="wr-img")
.get("src")
.replace("..", P.ModelSetting.get("ohli24_url"))
)
logger.debug(f"latest_post:: {latest_post}")
logger.debug(f"self.last_post_title:: {self.last_post_title}")
logger.debug(f"latest_post_image:: {latest_post_image}")
# Compare the latest post with the last recorded post
if latest_post != self.last_post_title:
# If there is a new post, update the last recorded post
self.last_post_title = latest_post
# Send a notification to Discord channel
# discord_client = discord.Client()
# discord_client.run(self.DISCORD_BOT_TOKEN)
#
# async def on_ready():
# channel = discord_client.get_channel(self.DISCORD_CHANNEL_ID)
# await channel.send(f"A new post has been added: {latest_post}")
#
# discord_client.close()
webhook = DiscordWebhook(url=self.discord_webhook_url)
embed = DiscordEmbed(
title=self.discord_title, color=self.discord_color
)
embed.set_timestamp()
path = self.last_post_title
embed.set_image(url=latest_post_image)
embed.add_embed_field(name="", value=path, inline=True)
embed.set_timestamp()
webhook.add_embed(embed)
response = webhook.execute()
return self.last_post_title
return self.last_post_title
def send_notify(self):
logger.debug("send_notify() routine")
while True:
self.last_post_title = self.check_for_new_post()
logger.debug(self.last_post_title)
time.sleep(600)
# @staticmethod
def plugin_load(self):
try:
@@ -943,6 +1051,10 @@ class LogicOhli24(LogicModuleBase):
self.current_data = None
self.queue.queue_start()
logger.debug(P.ModelSetting.get_bool("ohli24_discord_notify"))
if P.ModelSetting.get_bool("ohli24_discord_notify"):
self.send_notify()
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
@@ -965,11 +1077,12 @@ class LogicOhli24(LogicModuleBase):
@staticmethod
@yommi_timeit
def get_html(
url, headers=None, referer=None, stream=False, timeout=5, stealth=False
url, headers=None, referer=None, stream=False, timeout=10, stealth=False
):
# global response_data
data = ""
# response_date = ""
logger.debug(f"url: {url}")
try:
print("cloudflare protection bypass ==================P")
@@ -997,6 +1110,7 @@ class LogicOhli24(LogicModuleBase):
if LogicOhli24.session is None:
LogicOhli24.session = requests.session()
LogicOhli24.session.verify = False
# logger.debug('get_html :%s', url)
# LogicOhli24.headers["Referer"] = "" if referer is None else referer
# logger.debug(f"referer:: {referer}")
@@ -1005,8 +1119,17 @@ class LogicOhli24(LogicModuleBase):
# logger.info(headers)
# logger.debug(f"LogicOhli24.headers:: {LogicOhli24.headers}")
proxies = {
"http": "http://192.168.0.2:3138",
"https": "http://192.168.0.2:3138",
}
page_content = LogicOhli24.session.get(
url, headers=LogicOhli24.headers, timeout=timeout
url,
headers=LogicOhli24.headers,
timeout=timeout,
proxies=proxies,
)
response_data = page_content.text
# logger.debug(response_data)
@@ -1088,7 +1211,8 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
# Get episode info from OHLI24 site
def make_episode_info(self):
try:
base_url = "https://ohli24.org"
base_url = "https://a24.ohli24.com"
base_url = P.ModelSetting.get("ohli24_url")
iframe_url = ""
# https://ohli24.org/e/%EB%85%B9%EC%9D%84%20%EB%A8%B9%EB%8A%94%20%EB%B9%84%EC%8A%A4%EC%BD%94%206%ED%99%94
@@ -1111,24 +1235,43 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
)
# logger.debug(text)
soup1 = BeautifulSoup(text, "lxml")
pattern = re.compile(r"url : \"\.\.(.*)\"")
script = soup1.find("script", text=pattern)
# pattern = re.compile(r"url : \"\.\.(.*)\"")
# script = soup1.find("script", text=pattern)
#
# if script:
# match = pattern.search(script.text)
# if match:
# iframe_url = match.group(1)
# logger.info("iframe_url::> %s", iframe_url)
pattern = r"<iframe src=\"(.*?)\" allowfullscreen>"
if script:
match = pattern.search(script.text)
if match:
iframe_url = match.group(1)
logger.info("iframe_url::> %s", iframe_url)
match = re.search(pattern, text)
if match:
iframe_src = match.group(1)
logger.debug(f"iframe_src:::> {iframe_src}")
iframe_url = soup1.find("iframe")["src"]
iframe_src = iframe_url
# iframe_src = f'{P.ModelSetting.get("ohli24_url")}{iframe_url}'
iframe_html = LogicOhli24.get_html(
iframe_src, headers=headers, timeout=600
)
# print(iframe_html)
pattern = r"<iframe src=\"(.*?)\" allowfullscreen>"
match = re.search(pattern, iframe_html)
if match:
iframe_src = match.group(1)
print(iframe_src)
logger.debug(f"iframe_src:::> {iframe_src}")
# resp1 = requests.get(iframe_src, headers=headers, timeout=600).text
resp1 = LogicOhli24.get_html(iframe_src, headers=headers, timeout=600)
logger.info("resp1::>> %s", resp1)
resp1 = LogicOhli24.get_html(
iframe_src, headers=headers, timeout=600
)
# logger.info("resp1::>> %s", resp1)
soup3 = BeautifulSoup(resp1, "lxml")
# packed_pattern = re.compile(r'\\{*(eval.+)*\\}', re.MULTILINE | re.DOTALL)
s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL)
@@ -1149,7 +1292,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
logger.debug(type(packed_script))
unpack_script = jsbeautifier.beautify(str(packed_script))
p1 = re.compile(r"(\"tracks\".*\])\,\"captions\"", re.MULTILINE | re.DOTALL)
p1 = re.compile(
r"(\"tracks\".*\])\,\"captions\"", re.MULTILINE | re.DOTALL
)
m2 = re.search(
r"(\"tracks\".*\]).*\"captions\"",
unpack_script,
@@ -1167,7 +1312,10 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
video_hash = iframe_src.split("/")
video_hashcode = re.sub(r"index\.php\?data=", "", video_hash[-1])
self._vi = video_hashcode
logger.debug(f"video_hash::> {video_hash}")
video_info_url = f"{video_hash[0]}//{video_hash[2]}/player/index.php?data={video_hashcode}&do=getVideo"
# video_info_url = f"{video_hash[0]}//michealcdn.com/player/index.php?data={video_hashcode}&do=getVideo"
# print('hash:::', video_hash)
logger.debug(f"video_info_url::: {video_info_url}")
@@ -1176,10 +1324,11 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36"
"Mozilla/5.0 (Macintosh; Intel "
"Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 "
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/116.0.0.0 Safari/537.36"
"Whale/3.12.129.46 Safari/537.36",
"X-Requested-With": "XMLHttpRequest",
"Cookie": "PHPSESSID=hhhnrora8o9omv1tljq4efv216; 2a0d2363701f23f8a75028924a3af643=NDkuMTYzLjExMS4xMDk=; e1192aefb64683cc97abb83c71057733=aW5n",
"Cookie": "PHPSESSID=b6hnl2crfvtg36sm6rjjkso4p0; 2a0d2363701f23f8a75028924a3af643=MTgwLjY2LjIyMi4xODk%3D; _ga=GA1.1.586565509.1695135593; __gads=ID=60e47defb3337e02-227f0fc9e3e3009a:T=1695135593:RT=1695135593:S=ALNI_MagY46XGCbx9E4Et2DRzfUHdTAKsg; __gpi=UID=00000c4bb3d077c8:T=1695135593:RT=1695135593:S=ALNI_MYvj_8OjdhtGPEGoXhPsQWq1qye8Q; _ga_MWWDFMDJR0=GS1.1.1695135593.1.1.1695135599.0.0.0",
}
payload = {
@@ -1209,7 +1358,15 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
}
self.url = stream_info[1].strip()
match = re.compile(r'NAME="(?P<quality>.*?)"').search(stream_info[0])
logger.info(self.url)
if "anibeast.com" in self.url:
self.headers["Referer"] = iframe_src
if "crazypatutu.com" in self.url:
self.headers["Referer"] = iframe_src
match = re.compile(r'NAME="(?P<quality>.*?)"').search(
stream_info[0]
)
self.quality = "720P"
if match is not None:
self.quality = match.group("quality")
@@ -1225,7 +1382,10 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
if match:
self.content_title = match.group("title").strip()
if "season" in match.groupdict() and match.group("season") is not None:
if (
"season" in match.groupdict()
and match.group("season") is not None
):
self.season = int(match.group("season"))
# epi_no = 1
@@ -1258,7 +1418,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
)
else:
folder_name = self.content_title
folder_name = Util.change_text_for_use_filename(folder_name.strip())
folder_name = Util.change_text_for_use_filename(
folder_name.strip()
)
self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"):
self.savepath = os.path.join(
@@ -1288,7 +1450,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
class ModelOhli24Item(db.Model):
__tablename__ = "{package_name}_ohli24_item".format(package_name=P.package_name)
__tablename__ = "{package_name}_ohli24_item".format(
package_name=P.package_name
)
__table_args__ = {"mysql_collate": "utf8_general_ci"}
__bind_key__ = P.package_name
id = db.Column(db.Integer, primary_key=True)
@@ -1373,20 +1537,26 @@ class ModelOhli24Item(db.Model):
conditions = []
for tt in tmp:
if tt != "":
conditions.append(cls.filename.like("%" + tt.strip() + "%"))
conditions.append(
cls.filename.like("%" + tt.strip() + "%")
)
query = query.filter(or_(*conditions))
elif search.find(",") != -1:
tmp = search.split(",")
for tt in tmp:
if tt != "":
query = query.filter(cls.filename.like("%" + tt.strip() + "%"))
query = query.filter(
cls.filename.like("%" + tt.strip() + "%")
)
else:
query = query.filter(cls.filename.like("%" + search + "%"))
if option == "completed":
query = query.filter(cls.status == "completed")
query = (
query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id)
query.order_by(desc(cls.id))
if order == "desc"
else query.order_by(cls.id)
)
return query

View File

@@ -243,7 +243,9 @@
$("#analysis_btn").unbind("click").bind('click', function (e) {
e.preventDefault();
e.stopPropagation()
const button = document.getElementById('analysis_btn');
const code = document.getElementById("code").value
button.setAttribute("disabled", "disabled");
console.log(code)
$.ajax({
url: '/' + package_name + '/ajax/' + sub + '/analysis',
@@ -256,6 +258,7 @@
// {#console.log(ret.code)#}
console.log(ret.data)
make_program(ret.data)
button.removeAttribute("disabled");
} else {
$.notify('<strong>분석 실패</strong><br>' + ret.log, {type: 'warning'});
}

View File

@@ -24,6 +24,7 @@
{{ macros.setting_checkbox('ohli24_auto_make_season_folder', '시즌 폴더 생성', value=arg['ohli24_auto_make_season_folder'], desc=['On : Season 번호 폴더를 만듭니다.']) }}
</div>
{{ macros.setting_checkbox('ohli24_uncompleted_auto_enqueue', '자동으로 다시 받기', value=arg['ohli24_uncompleted_auto_enqueue'], desc=['On : 플러그인 로딩시 미완료인 항목은 자동으로 다시 받습니다.']) }}
{{ macros.setting_checkbox('ohli24_discord_notify', '디스 코드 알림 받기', value=arg['ohli24_discord_notify'], desc=['On : 새로운 글이 올라올때 디스코드 알림을 보냅니다.']) }}
{{ macros.m_tab_content_end() }}
{{ macros.m_tab_content_start('auto', false) }}