Compare commits


43 Commits

SHA1 Message Date
a8486726f6 2024.08.21 patch.03 2024-08-21 19:39:50 +09:00
391a0ee861 2024.08.21 patch.02 2024-08-21 19:24:29 +09:00
408be433f2 2024.08.21 patch.01 2024-08-21 19:13:43 +09:00
c87e29f085 2024.08.13 19:22 patch.01 2024-08-13 19:26:16 +09:00
b27cd39aa4 2024.08.13 19:22 2024-08-13 19:22:47 +09:00
205c17ae4e edit fix3 2024-02-16 19:00:01 +09:00
e101a02886 error fix2 2024-02-16 15:07:14 +09:00
04c0e34db5 error fix 2024-02-13 15:23:57 +09:00
f1d5f1db68 main -> 2023.10.8 fix "작화" 2023-10-08 22:29:02 +09:00
f0eda8ef87 main -> 2023.10.5 fix 2023-10-05 21:20:18 +09:00
d4fcc9a633 main -> 2023.09.20 ohli24 bug fix (.01. download issue fixed)a 2023-09-23 22:48:21 +09:00
9ca8dcc3da main -> 2023.09.20 ohli24 bug fix (.01. download issue fixed)a 2023-09-20 21:57:21 +09:00
301806a906 main -> 2023.09.20 ohli24 bug fix (.01. download issue fixed)a 2023-09-20 14:40:13 +09:00
eca29b6947 main -> 2023.09.20 ohli24 bug fix (.01. download issue fixed)a 2023-09-20 14:32:56 +09:00
d07cc820dc main -> 2023.09.20 ohli24 bug fix (.01. download issue fixed)a 2023-09-19 23:47:52 +09:00
710d70dbfd main -> 2023.09.20 ohli24 bug fix (.01. download issue fixed)a 2023-09-19 23:42:16 +09:00
6f2edeaf89 main -> 2023.09.20 ohli24 bug fix (.01. download issue fixed) 2023-09-19 23:37:50 +09:00
769d40e5bb main -> 2023.08.31 ohli24 bug fix (.01. download issue fixed) 2023-08-31 00:20:57 +09:00
c53f1f50c9 main -> 2023.08.07 ohli24 bug fix (.01. download issue fixed) 2023-08-12 21:22:28 +09:00
9cae04584d main -> 2023.08.07 ohli24 bug fix (.01. download issue fixed) 2023-08-07 19:16:59 +09:00
efcadde111 main -> 2023.08.07 ohli24 bug fix (.01. download issue fixed) 2023-08-07 19:08:46 +09:00
145e277895 main -> 2023.08.07 ohli24 bug fix (.01. download issue fixed) 2023-08-07 19:06:08 +09:00
1b76d36352 main -> 2023.07.01 ohli24 bug fix (.01. download issue fixed) 2023-07-01 00:13:09 +09:00
a7cf43e0cc main -> 2023.07.01 ohli24 bug fix (.01. download issue fixed) 2023-07-01 00:09:52 +09:00
dd8a68a267 main -> 2023.05.11 ohli24 bug fix (.01. todo: setting_save_after) 2023-05-11 19:41:39 +09:00
6bf816db10 main -> 2023.05.09 ohli24 bug fix (.01. referer url fix) 2023-05-09 20:56:57 +09:00
becfc7feef main -> 2023.05.09 ohli24 bug fix (.01. referer url fix) 2023-05-09 20:54:59 +09:00
25cddecfe9 main -> 2023.04.22 ohli24 bug fix (.01. img xpath fix) 2023-04-22 23:26:19 +09:00
292a3fd388 main -> 2023.03.07 ohli24 bug fix (.01. img xpath fix) 2023-03-07 21:11:49 +09:00
87461cce4a main -> 2023.03.07 ohli24 bug fix (.01. img xpath fix) 2023-03-07 20:35:22 +09:00
080ae6ab0c main -> 2023.03.07 ohli24 bug fix (.01. img xpath fix) 2023-03-07 20:26:38 +09:00
c8284f86b7 main -> 2023.03.03 ohli24 bug fix (.01. img xpath fix) 2023-03-03 18:54:10 +09:00
f4717c74e4 main -> 2023.03.01 ohli24 bug fix (.02. code cleanup) 2023-03-01 19:34:53 +09:00
c6940bbca5 main -> 2023.03.01 ohli24 bug fix (.01. code cleanup) 2023-03-01 18:05:58 +09:00
5506cc2e7f main -> 2023.03.01 ohli24 bug fix (.01. code cleanup) 2023-03-01 18:03:56 +09:00
10bd5e7412 main -> 2023.03.01 ohli24 bug fix (.01. code cleanup) 2023-03-01 18:01:39 +09:00
3162911f1e 2023.02.01 ohli24 bug fix (.04. code cleanup) 2023-02-01 19:35:19 +09:00
7f00ca6055 2023.02.01 ohli24 bug fix (.03. code cleanup) 2023-02-01 19:22:06 +09:00
367cb85657 2023.02.01 ohli24 bug fix (.02. etc) 2023-02-01 14:53:18 +09:00
9be3e03c2d 2023.02.01 linkkf bug fix (.01. etc) 2023-02-01 14:50:17 +09:00
2c67b0cacd 2022.01.31 linkkf bug fix (.07. misc) 2023-01-31 23:42:51 +09:00
6c198dcc76 2022.01.31 linkkf bug fix (.06. misc) 2023-01-31 23:09:12 +09:00
e37a3c652b 2022.01.31 linkkf bug fix (.05. misc) 2023-01-31 22:46:52 +09:00
10 changed files with 755 additions and 150 deletions

View File

@@ -142,8 +142,11 @@ class FfmpegQueue(object):
# os.makedirs(save_path) # os.makedirs(save_path)
# except: # except:
# logger.debug('program path make fail!!') # logger.debug('program path make fail!!')
# 파일 존재여부 체크 # 파일 존재여부 체크
filepath = entity.get_video_filepath() filepath = str(entity.get_video_filepath())
self.P.logger.debug(filepath)
self.P.logger.debug(entity.get_video_filepath())
if os.path.exists(filepath): if os.path.exists(filepath):
entity.ffmpeg_status_kor = "파일 있음" entity.ffmpeg_status_kor = "파일 있음"
entity.ffmpeg_percent = 100 entity.ffmpeg_percent = 100
@@ -151,6 +154,8 @@ class FfmpegQueue(object):
# plugin.socketio_list_refresh() # plugin.socketio_list_refresh()
continue continue
dirname = os.path.dirname(filepath) dirname = os.path.dirname(filepath)
self.P.logger.debug(type(dirname))
self.P.logger.debug(dirname)
if not os.path.exists(dirname): if not os.path.exists(dirname):
os.makedirs(dirname) os.makedirs(dirname)
f = ffmpeg.Ffmpeg( f = ffmpeg.Ffmpeg(

View File

@@ -17,8 +17,8 @@ def yommi_timeit(func):
end_time = time.perf_counter() end_time = time.perf_counter()
total_time = end_time - start_time total_time = end_time - start_time
# print(f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs") # print(f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs")
-logger.debug(
-    f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs"
-)
+logger.opt(colors=True).debug(
+    f"<red>{func.__name__}{args} {kwargs}</red> function took <green>{total_time:.4f}</green> secs"
+)
return result return result
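
The hunk above swaps the plain logger.debug call in the yommi_timeit decorator for loguru's colorized output. A minimal, self-contained sketch of the same timing-decorator idea, assuming loguru is installed; the function names below are illustrative, not taken from the plugin:

import functools
import time

from loguru import logger


def timed(func):
    """Log how long the wrapped function took, using loguru color markup."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start = time.perf_counter()
        result = func(*args, **kwargs)
        elapsed = time.perf_counter() - start
        # opt(colors=True) makes loguru parse the <red>/<green> markup tags
        logger.opt(colors=True).debug(
            f"<red>{func.__name__}</red> took <green>{elapsed:.4f}</green> secs"
        )
        return result
    return wrapper


@timed
def slow_add(a, b):
    time.sleep(0.2)
    return a + b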

View File

@@ -21,6 +21,9 @@ import urllib
# my # my
from .lib.utils import yommi_timeit from .lib.utils import yommi_timeit
os.system(f"pip install playwright==1.27.1")
packages = [ packages = [
"beautifulsoup4", "beautifulsoup4",
"requests-cache", "requests-cache",
@@ -574,8 +577,6 @@ class LogicAniLife(LogicModuleBase):
# page.reload() # page.reload()
# time.sleep(10) # time.sleep(10)
# cookies = context.cookies
# print(cookies)
# print(page.content()) # print(page.content())
# vod_url = page.evaluate( # vod_url = page.evaluate(
@@ -588,28 +589,11 @@ class LogicAniLife(LogicModuleBase):
# return _0x55265f(0x99) + alJson[_0x55265f(0x91)] # return _0x55265f(0x99) + alJson[_0x55265f(0x91)]
# }""" # }"""
# ) # )
-result_har_json = har.to_json()
-result_har_dict = har.to_dict()
-# logger.debug(result_har_dict)
-tmp_video_url = []
-for i, elem in enumerate(result_har_dict["log"]["entries"]):
-    # if "m3u8" in elem["request"]["url"]:
-    if "m3u8" in elem["request"]["url"]:
-        logger.debug(elem["request"]["url"])
-        tmp_video_url.append(elem["request"]["url"])
-logger.debug(tmp_video_url)
-vod_url = tmp_video_url[-1]
-for i, el in enumerate(tmp_video_url):
-    if el.endswith("m3u8"):
-        vod_url = el
-logger.debug(f"vod_url:: {vod_url}")
-logger.debug(f"run at {time.time() - start} sec")
-return vod_url
+# result_har_json = har.to_json()
+await context.close()
+await browser.close()
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error("Exception:%s", e)
result = subprocess.run( result = subprocess.run(
@@ -618,9 +602,32 @@ class LogicAniLife(LogicModuleBase):
print(result.stdout) print(result.stdout)
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
finally: finally:
await browser.close() await browser.close()
+result_har_dict = har.to_dict()
+# logger.debug(result_har_dict)
+tmp_video_url = []
+for i, elem in enumerate(result_har_dict["log"]["entries"]):
+    # if "m3u8" in elem["request"]["url"]:
+    if "m3u8" in elem["request"]["url"]:
+        logger.debug(elem["request"]["url"])
+        tmp_video_url.append(elem["request"]["url"])
+logger.debug(tmp_video_url)
+vod_url = tmp_video_url[-1]
+for i, el in enumerate(tmp_video_url):
+    if el.endswith("m3u8"):
+        vod_url = el
+logger.debug(f"vod_url:: {vod_url}")
+logger.debug(f"run at {time.time() - start} sec")
+return vod_url
@staticmethod @staticmethod
@yommi_timeit @yommi_timeit
def get_html_selenium( def get_html_selenium(
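
The two hunks above move the HAR post-processing, which collects every request URL containing "m3u8" and keeps the last playlist-looking one, out of the try block and into the finally block. For reference, a minimal sketch of the same capture idea using stock Playwright request events instead of the HAR recorder the plugin uses; the target URL and the networkidle wait are assumptions:

import asyncio
from typing import List, Optional

from playwright.async_api import async_playwright


async def find_m3u8(page_url: str) -> Optional[str]:
    """Open a page headlessly and return an m3u8 request URL seen while it loads."""
    seen: List[str] = []

    def on_request(request):
        # Collect every request whose URL mentions m3u8 (master or media playlist)
        if "m3u8" in request.url:
            seen.append(request.url)

    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        page = await browser.new_page()
        page.on("request", on_request)
        await page.goto(page_url, wait_until="networkidle")
        await browser.close()

    # Prefer an entry that actually ends with m3u8, mirroring the plugin's loop
    for url in seen:
        if url.endswith("m3u8"):
            return url
    return seen[-1] if seen else None


# Example (placeholder URL): vod_url = asyncio.run(find_m3u8("https://example.com/watch/123"))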
@@ -1026,12 +1033,14 @@ class LogicAniLife(LogicModuleBase):
return ret return ret
def setting_save_after(self): def setting_save_after(self):
-if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
-    "anilife_max_ffmpeg_process_count"
-):
-    self.queue.set_max_ffmpeg_count(
-        P.ModelSetting.get_int("anilife_max_ffmpeg_process_count")
-    )
+pass
+# Todo: 버그 고쳐야함
+# if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
+#     "anilife_max_ffmpeg_process_count"
+# ):
+#     self.queue.set_max_ffmpeg_count(
+#         P.ModelSetting.get_int("anilife_max_ffmpeg_process_count")
+#     )
def scheduler_function(self): def scheduler_function(self):
logger.debug(f"anilife scheduler_function:: =========================") logger.debug(f"anilife scheduler_function:: =========================")

View File

@@ -31,11 +31,16 @@ from framework.util import Util
from framework.common.util import headers from framework.common.util import headers
 from plugin import (
     LogicModuleBase,
-    FfmpegQueueEntity,
-    FfmpegQueue,
     default_route_socketio,
+    # FfmpegQueue,
+    # FfmpegQueueEntity,
 )
+# 철자가 틀린 부분이 있어서 분리함
+#
+from .lib.plugin import FfmpegQueue, FfmpegQueueEntity
+from .lib.utils import yommi_timeit
packages = ["beautifulsoup4", "requests-cache", "cloudscraper"] packages = ["beautifulsoup4", "requests-cache", "cloudscraper"]
for _package in packages: for _package in packages:
@@ -115,7 +120,7 @@ class LogicLinkkf(LogicModuleBase):
"linkkf_auto_start": "False", "linkkf_auto_start": "False",
"linkkf_interval": "* 5 * * *", "linkkf_interval": "* 5 * * *",
"linkkf_auto_mode_all": "False", "linkkf_auto_mode_all": "False",
"linkkf_auto_code_list": "all", "linkkf_auto_code_list": "",
"linkkf_current_code": "", "linkkf_current_code": "",
"linkkf_uncompleted_auto_enqueue": "False", "linkkf_uncompleted_auto_enqueue": "False",
"linkkf_image_url_prefix_series": "", "linkkf_image_url_prefix_series": "",
@@ -219,13 +224,25 @@ class LogicLinkkf(LogicModuleBase):
elif sub == "db_remove": elif sub == "db_remove":
return jsonify(ModelLinkkfItem.delete_by_id(req.form["id"])) return jsonify(ModelLinkkfItem.delete_by_id(req.form["id"]))
elif sub == "add_whitelist": elif sub == "add_whitelist":
-pass
+# params = request.get_data()
+# logger.debug(f"params: {params}")
+# data_code = request.args.get("data_code")
+params = request.get_json()
+logger.debug(f"params:: {params}")
+if params is not None:
+    code = params["data_code"]
+    logger.debug(f"params: {code}")
+    ret = LogicLinkkf.add_whitelist(code)
+else:
+    ret = LogicLinkkf.add_whitelist()
+return jsonify(ret)
except Exception as e: except Exception as e:
P.logger.error(f"Exception: {str(e)}") P.logger.error(f"Exception: {str(e)}")
P.logger.error(traceback.format_exc()) P.logger.error(traceback.format_exc())
@staticmethod @staticmethod
@yommi_timeit
def get_html(url: str, timeout: int = 10, cached=False): def get_html(url: str, timeout: int = 10, cached=False):
try: try:
@@ -295,7 +312,7 @@ class LogicLinkkf(LogicModuleBase):
def add_whitelist(*args): def add_whitelist(*args):
ret = {} ret = {}
logger.debug(f"args: {args}") # logger.debug(f"args: {args}")
try: try:
if len(args) == 0: if len(args) == 0:
@@ -345,12 +362,14 @@ class LogicLinkkf(LogicModuleBase):
return ret return ret
def setting_save_after(self): def setting_save_after(self):
-if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
-    "linkkf_max_ffmpeg_process_count"
-):
-    self.queue.set_max_ffmpeg_count(
-        P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
-    )
+# Todo:
+pass
+# if self.queue.get_max_ffmpeg_count() != P.ModelSetting.get_int(
+#     "linkkf_max_ffmpeg_process_count"
+# ):
+#     self.queue.set_max_ffmpeg_count(
+#         P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
+#     )
def get_video_url_from_url(url, url2): def get_video_url_from_url(url, url2):
video_url = None video_url = None
@@ -695,7 +714,7 @@ class LogicLinkkf(LogicModuleBase):
data = {"ret": "success", "page": page} data = {"ret": "success", "page": page}
response_data = LogicLinkkf.get_html(url, timeout=10) response_data = LogicLinkkf.get_html(url, timeout=10)
# P.logger.debug(response_data) # P.logger.debug(response_data)
P.logger.debug("debug.....................") # P.logger.debug("debug.....................")
tree = html.fromstring(response_data) tree = html.fromstring(response_data)
tmp_items = tree.xpath(items_xpath) tmp_items = tree.xpath(items_xpath)
@@ -743,7 +762,7 @@ class LogicLinkkf(LogicModuleBase):
url = "%s/%s" % (P.ModelSetting.get("linkkf_url"), code) url = "%s/%s" % (P.ModelSetting.get("linkkf_url"), code)
logger.info(url) logger.info(url)
logger.debug(LogicLinkkf.headers) # logger.debug(LogicLinkkf.headers)
html_content = LogicLinkkf.get_html(url, cached=False) html_content = LogicLinkkf.get_html(url, cached=False)
# html_content = LogicLinkkf.get_html_playwright(url) # html_content = LogicLinkkf.get_html_playwright(url)
# html_content = LogicLinkkf.get_html_cloudflare(url, cached=False) # html_content = LogicLinkkf.get_html_cloudflare(url, cached=False)
@@ -847,7 +866,7 @@ class LogicLinkkf(LogicModuleBase):
else: else:
tags = soup.select("ul > a") tags = soup.select("ul > a")
logger.debug(len(tags)) logger.debug(f"count: {len(tags)}")
# logger.info("tags", tags) # logger.info("tags", tags)
# re1 = re.compile(r'\/(?P<code>\d+)') # re1 = re.compile(r'\/(?P<code>\d+)')
@@ -863,14 +882,16 @@ class LogicLinkkf(LogicModuleBase):
idx = 1 idx = 1
for t in tags: for t in tags:
 entity = {
-    "_id": data["code"],
+    "code": data["code"],
     "program_code": data["code"],
     "program_title": data["title"],
+    "day": "",
     "save_folder": Util.change_text_for_use_filename(
         data["save_folder"]
     ),
     "title": t.text.strip(),
-    # "title": t.text_content().strip(),
-    # "title": data["title"],
+    "episode_no": t.text.strip()
 }
# entity['code'] = re1.search(t.attrib['href']).group('code') # entity['code'] = re1.search(t.attrib['href']).group('code')
@@ -887,9 +908,9 @@ class LogicLinkkf(LogicModuleBase):
# logger.debug(f"m_obj::> {m_obj}") # logger.debug(f"m_obj::> {m_obj}")
if m_obj is not None: if m_obj is not None:
episode_code = m_obj.group(1) episode_code = m_obj.group(1)
entity["code"] = data["code"] + episode_code.zfill(4) entity["_id"] = data["code"] + episode_code.zfill(4)
else: else:
entity["code"] = data["code"] entity["_id"] = data["code"]
aa = t["href"] aa = t["href"]
if "/player" in aa: if "/player" in aa:
@@ -917,6 +938,7 @@ class LogicLinkkf(LogicModuleBase):
data["episode"].append(entity) data["episode"].append(entity)
idx = idx + 1 idx = idx + 1
# logger.debug(f"{data}")
data["ret"] = True data["ret"] = True
# logger.info('data', data) # logger.info('data', data)
self.current_data = data self.current_data = data
@@ -962,7 +984,7 @@ class LogicLinkkf(LogicModuleBase):
ret = "%s.S%sE%s.720p-LK.mp4" % (maintitle, season, epi_no) ret = "%s.S%sE%s.720p-LK.mp4" % (maintitle, season, epi_no)
else: else:
logger.debug("NOT MATCH") logger.debug("NOT MATCH")
ret = "%s.720p-SA.mp4" % maintitle ret = "%s.720p-LK.mp4" % maintitle
return Util.change_text_for_use_filename(ret) return Util.change_text_for_use_filename(ret)
except Exception as e: except Exception as e:
@@ -970,21 +992,22 @@ class LogicLinkkf(LogicModuleBase):
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
def add(self, episode_info): def add(self, episode_info):
print("episode_info") # logger.debug("episode_info")
logger.debug(episode_info) # logger.debug(episode_info)
if self.is_exist(episode_info): if self.is_exist(episode_info):
return "queue_exist" return "queue_exist"
else: else:
db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["_id"]) db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["code"])
logger.debug("db_entity:::> %s", db_entity) # logger.debug("db_entity:::> %s", db_entity)
# logger.debug("db_entity.status ::: %s", db_entity.status) # logger.debug("db_entity.status ::: %s", db_entity.status)
if db_entity is None: if db_entity is None:
entity = LinkkfQueueEntity(P, self, episode_info) entity = LinkkfQueueEntity(P, self, episode_info)
logger.debug("entity:::> %s", entity.as_dict()) # logger.debug("entity:::> %s", entity.as_dict())
ModelLinkkfItem.append(entity.as_dict()) ModelLinkkfItem.append(entity.as_dict())
# # logger.debug("entity:: type >> %s", type(entity)) # # logger.debug("entity:: type >> %s", type(entity))
# #
@@ -1016,6 +1039,35 @@ class LogicLinkkf(LogicModuleBase):
return True return True
return False return False
def scheduler_function(self):
logger.debug(f"linkkf scheduler_function:: =========================")
content_code_list = P.ModelSetting.get_list("linkkf_auto_code_list", "|")
# logger.debug(content_code_list)
if "all" in content_code_list:
url = f'{P.ModelSetting.get("linkkf_url")}/dailyani'
ret_data = LogicLinkkf.get_auto_anime_info(self, url=url)
elif len(content_code_list) > 0:
for item in content_code_list:
url = P.ModelSetting.get("linkkf_url") + "/" + item
print("scheduling url: %s", url)
# exit()
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
content_info = self.get_series_info(item)
# logger.debug(content_info["episode"])
# exit()
for episode_info in content_info["episode"]:
add_ret = self.add(episode_info)
if add_ret.startswith("enqueue"):
self.socketio_callback("list_refresh", "")
# logger.debug(f"data: {data}")
# self.current_data = data
# db에서 다운로드 완료 유무 체크
# @staticmethod # @staticmethod
def plugin_load(self): def plugin_load(self):
try: try:
@@ -1082,28 +1134,33 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"]) db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None: if db_entity is not None:
db_entity.status = "completed" db_entity.status = "completed"
db_entity.complated_time = datetime.now() db_entity.completed_time = datetime.now()
db_entity.save() db_entity.save()
def donwload_completed(self): def donwload_completed(self):
db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"]) db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
if db_entity is not None: if db_entity is not None:
db_entity.status = "completed" db_entity.status = "completed"
db_entity.complated_time = datetime.now() db_entity.completed_time = datetime.now()
db_entity.save() db_entity.save()
# Get episode info from site # Get episode info from site
def make_episode_info(self): def make_episode_info(self):
logger.debug("call make_episode_info(): ")
url2s = [] url2s = []
url = None url = None
logger.debug(self.info) # logger.debug(self)
logger.debug(self.info["url"]) # print("")
# logger.debug(self.info)
# logger.debug(f'self.info:: {self.info["url"]}')
# exit()
try: try:
# logger.debug(self) # logger.debug(self)
# logger.debug(self.url) # logger.debug(self.url)
data = LogicLinkkf.get_html_episode_content(self.info["url"]) data = LogicLinkkf.get_html_episode_content(self.info["url"])
# logger.debug(f"data:: {data}")
# exit()
tree = html.fromstring(data) tree = html.fromstring(data)
xpath_select_query = '//*[@id="body"]/div/span/center/select/option' xpath_select_query = '//*[@id="body"]/div/span/center/select/option'
@@ -1153,7 +1210,7 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
continue continue
# logger.debug(f"url: {url}, url2: {url2}") # logger.debug(f"url: {url}, url2: {url2}")
ret = LogicLinkkf.get_video_url_from_url(url, url2) ret = LogicLinkkf.get_video_url_from_url(url, url2)
logger.debug(f"ret::::> {ret}") # logger.debug(f"ret::::> {ret}")
if ret is not None: if ret is not None:
video_url = ret video_url = ret
@@ -1164,12 +1221,93 @@ class LinkkfQueueEntity(FfmpegQueueEntity):
# logger.info(video_url) # logger.info(video_url)
# return [video_url, referer_url] # return [video_url, referer_url]
return video_url # return video_url
logger.debug(video_url)
logger.info("dx: urls2:: %s", url2s) logger.info("dx: urls2:: %s", url2s)
video_url = None self.url = video_url[0]
referer_url = None # dx base_url = "https://kfani.me"
self.srt_url = base_url + video_url[2]
match = re.compile(
r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)"
% ("", "")
).search(self.info["program_title"])
#
# epi_no 초기값
epi_no = 1
#
logger.debug(match)
if match:
self.content_title = match.group("title").strip()
# if "season" in match.groupdict() and match.group("season") is not None:
# self.season = int(match.group("season"))
#
# # epi_no = 1
# epi_no = int(match.group("epi_no"))
# ret = "%s.S%sE%s.%s-LK.mp4" % (
# self.content_title,
# "0%s" % self.season if self.season < 10 else self.season,
# "0%s" % epi_no if epi_no < 10 else epi_no,
# self.quality,
# )
else:
self.content_title = self.info["program_title"]
# P.logger.debug("NOT MATCH")
# ret = "%s.720p-LK.mp4" % self.info["program_title"]
# logger.info('self.content_title:: %s', self.content_title)
self.epi_queue = epi_no
# self.filename = Util.change_text_for_use_filename(ret)
self.filename = self.info["filename"]
logger.info(f"self.filename::> {self.filename}")
self.savepath = P.ModelSetting.get("linkkf_download_path")
logger.info(f"self.savepath::> {self.savepath}")
# TODO: 완결 처리
folder_name = None
if P.ModelSetting.get_bool("linkkf_auto_make_folder"):
if self.info["day"].find("완결") != -1:
folder_name = "%s %s" % (
P.ModelSetting.get("linkkf_finished_insert"),
self.content_title,
)
else:
folder_name = self.content_title
# logger.debug(f"folder_name:: {folder_name}")
# logger.debug(f"self.content_title:: {self.content_title}")
folder_name = Util.change_text_for_use_filename(folder_name.strip())
self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("linkkf_auto_make_season_folder"):
self.savepath = os.path.join(
self.savepath, "Season %s" % int(self.season)
)
self.filepath = os.path.join(self.savepath, self.filename)
if not os.path.exists(self.savepath):
os.makedirs(self.savepath)
from framework.common.util import write_file, convert_vtt_to_srt
srt_filepath = os.path.join(
self.savepath, self.filename.replace(".mp4", ".ko.srt")
)
if (
self.srt_url is not None
and not os.path.exists(srt_filepath)
and not ("thumbnails.vtt" in self.srt_url)
):
srt_data = requests.get(self.srt_url, headers=headers).text
write_file(srt_data, srt_filepath)
except Exception as e: except Exception as e:
logger.error(f"Exception: {str(e)}") logger.error(f"Exception: {str(e)}")
@@ -1190,6 +1328,7 @@ class ModelLinkkfItem(db.Model):
title = db.Column(db.String) title = db.Column(db.String)
episode_title = db.Column(db.String) episode_title = db.Column(db.String)
# linkkf_va = db.Column(db.String) # linkkf_va = db.Column(db.String)
linkkf_code = db.Column(db.String)
linkkf_vi = db.Column(db.String) linkkf_vi = db.Column(db.String)
linkkf_id = db.Column(db.String) linkkf_id = db.Column(db.String)
quality = db.Column(db.String) quality = db.Column(db.String)
@@ -1285,7 +1424,7 @@ class ModelLinkkfItem(db.Model):
@classmethod @classmethod
def append(cls, q): def append(cls, q):
logger.debug(q) # logger.debug(q)
item = ModelLinkkfItem() item = ModelLinkkfItem()
item.content_code = q["program_code"] item.content_code = q["program_code"]
item.season = q["season"] item.season = q["season"]

View File

@@ -7,12 +7,16 @@
# @Software: PyCharm # @Software: PyCharm
import os, sys, traceback, re, json, threading import os, sys, traceback, re, json, threading
import time
from datetime import datetime, date from datetime import datetime, date
import copy import copy
import hashlib import hashlib
import discord
# third-party # third-party
import requests import requests
from discord_webhook import DiscordWebhook, DiscordEmbed
from lxml import html from lxml import html
from urllib import parse from urllib import parse
import urllib import urllib
@@ -25,6 +29,8 @@ from flask import request, render_template, jsonify
from sqlalchemy import or_, and_, func, not_, desc from sqlalchemy import or_, and_, func, not_, desc
from pip._internal import main from pip._internal import main
from .lib.utils import yommi_timeit
pkgs = ["beautifulsoup4", "jsbeautifier", "aiohttp"] pkgs = ["beautifulsoup4", "jsbeautifier", "aiohttp"]
for pkg in pkgs: for pkg in pkgs:
try: try:
@@ -60,9 +66,11 @@ logger = P.logger
class LogicOhli24(LogicModuleBase): class LogicOhli24(LogicModuleBase):
db_default = { db_default = {
"ohli24_db_version": "1", "ohli24_db_version": "1.1",
"ohli24_url": "https://ohli24.org", "ohli24_url": "https://a21.ohli24.com",
"ohli24_download_path": os.path.join(path_data, P.package_name, "ohli24"), "ohli24_download_path": os.path.join(
path_data, P.package_name, "ohli24"
),
"ohli24_auto_make_folder": "True", "ohli24_auto_make_folder": "True",
"ohli24_auto_make_season_folder": "True", "ohli24_auto_make_season_folder": "True",
"ohli24_finished_insert": "[완결]", "ohli24_finished_insert": "[완결]",
@@ -100,9 +108,16 @@ class LogicOhli24(LogicModuleBase):
} }
def __init__(self, P): def __init__(self, P):
super(LogicOhli24, self).__init__(P, "setting", scheduler_desc="ohli24 자동 다운로드") super(LogicOhli24, self).__init__(
P, "setting", scheduler_desc="ohli24 자동 다운로드"
)
self.name = "ohli24" self.name = "ohli24"
self.queue = None self.queue = None
self.last_post_title = ""
self.discord_webhook_url = "https://discord.com/api/webhooks/1071430127860334663/viCiM5ssS-U1_ONWgdWa-64KgvPfU5jJ8WQAym-4vkiyASB0e8IcnlLnxG4F40nj10kZ"
self.discord_color = "242424"
self.discord_title = "새로운 애니"
self.DISCORD_CHANNEL_ID = "1071430054023798958"
default_route_socketio(P, self) default_route_socketio(P, self)
@staticmethod @staticmethod
@@ -205,7 +220,9 @@ class LogicOhli24(LogicModuleBase):
try: try:
if engine == "chrome": if engine == "chrome":
browser = await p.chromium.launch( browser = await p.chromium.launch(
channel="chrome", args=browser_args, headless=headless channel="chrome",
args=browser_args,
headless=headless,
) )
elif engine == "webkit": elif engine == "webkit":
browser = await p.webkit.launch( browser = await p.webkit.launch(
@@ -221,9 +238,9 @@ class LogicOhli24(LogicModuleBase):
# user_agent=ua, # user_agent=ua,
# ) # )
-LogicOhli24.headers[
-    "Referer"
-] = "https://anilife.live/detail/id/471"
+LogicOhli24.headers["Referer"] = (
+    "https://anilife.com/detail/id/471"
+)
# print(LogicAniLife.headers) # print(LogicAniLife.headers)
LogicOhli24.headers["Referer"] = LogicOhli24.episode_url LogicOhli24.headers["Referer"] = LogicOhli24.episode_url
@@ -233,7 +250,8 @@ class LogicOhli24(LogicModuleBase):
# logger.debug(f"LogicAniLife.headers::: {LogicOhli24.headers}") # logger.debug(f"LogicAniLife.headers::: {LogicOhli24.headers}")
context = await browser.new_context( context = await browser.new_context(
extra_http_headers=LogicOhli24.headers, ignore_https_errors=True extra_http_headers=LogicOhli24.headers,
ignore_https_errors=True,
) )
# await context.add_cookies(LogicOhli24.cookies) # await context.add_cookies(LogicOhli24.cookies)
@@ -327,7 +345,9 @@ class LogicOhli24(LogicModuleBase):
), ),
arg=arg, arg=arg,
) )
return render_template("sample.html", title="%s - %s" % (P.package_name, sub)) return render_template(
"sample.html", title="%s - %s" % (P.package_name, sub)
)
# @staticmethod # @staticmethod
def process_ajax(self, sub, req): def process_ajax(self, sub, req):
@@ -412,7 +432,8 @@ class LogicOhli24(LogicModuleBase):
count += 1 count += 1
notify = { notify = {
"type": "success", "type": "success",
"msg": "%s 개의 에피소드를 큐에 추가 하였습니다." % count, "msg": "%s 개의 에피소드를 큐에 추가 하였습니다."
% count,
} }
socketio.emit( socketio.emit(
"notify", notify, namespace="/framework", broadcast=True "notify", notify, namespace="/framework", broadcast=True
@@ -490,7 +511,7 @@ class LogicOhli24(LogicModuleBase):
def add_whitelist(*args): def add_whitelist(*args):
ret = {} ret = {}
logger.debug(f"args: {args}") # logger.debug(f"args: {args}")
try: try:
if len(args) == 0: if len(args) == 0:
@@ -498,15 +519,13 @@ class LogicOhli24(LogicModuleBase):
else: else:
code = str(args[0]) code = str(args[0])
print(code) # print(code)
whitelist_program = P.ModelSetting.get("ohli24_auto_code_list") whitelist_program = P.ModelSetting.get("ohli24_auto_code_list")
# whitelist_programs = [
# str(x.strip().replace(" ", ""))
# for x in whitelist_program.replace("\n", "|").split("|")
# ]
whitelist_programs = [ whitelist_programs = [
str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|") str(x.strip())
for x in whitelist_program.replace("\n", "|").split("|")
] ]
if code not in whitelist_programs: if code not in whitelist_programs:
@@ -533,7 +552,7 @@ class LogicOhli24(LogicModuleBase):
ret["ret"] = False ret["ret"] = False
ret["log"] = "이미 추가되어 있습니다." ret["log"] = "이미 추가되어 있습니다."
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error(f"Exception: {str(e)}")
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
ret["ret"] = False ret["ret"] = False
ret["log"] = str(e) ret["log"] = str(e)
@@ -551,15 +570,25 @@ class LogicOhli24(LogicModuleBase):
# Todo: 스케쥴링 함수 미구현 # Todo: 스케쥴링 함수 미구현
logger.debug(f"ohli24 scheduler_function::=========================") logger.debug(f"ohli24 scheduler_function::=========================")
-content_code_list = P.ModelSetting.get_list("ohli24_auto_code_list", "|")
+content_code_list = P.ModelSetting.get_list(
+    "ohli24_auto_code_list", "|"
+)
 logger.debug(f"content_code_list::: {content_code_list}")
 url_list = ["https://www.naver.com/", "https://www.daum.net/"]
-week = ["월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"]
+week = [
+    "월요일",
+    "화요일",
+    "수요일",
+    "목요일",
+    "금요일",
+    "토요일",
+    "일요일",
+]
 today = date.today()
-print(today)
-print()
-print(today.weekday())
+# print(today)
+# print()
+# print(today.weekday())
url = f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing&sca={week[today.weekday()]}' url = f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing&sca={week[today.weekday()]}'
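
The scheduler above picks today's tab on the "currently airing" board by indexing the Korean weekday list with date.today().weekday(). A minimal sketch of that URL assembly, with the base URL as a placeholder and the Korean query value percent-encoded explicitly:

from datetime import date
from urllib.parse import quote

WEEK = ["월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"]


def daily_board_url(base_url: str) -> str:
    """Build the airing-board URL for today's weekday tab."""
    # date.weekday(): Monday is 0 and Sunday is 6, matching the list order above
    day = WEEK[date.today().weekday()]
    return f"{base_url}/bbs/board.php?bo_table=ing&sca={quote(day)}"


# Example (placeholder host): daily_board_url("https://a21.ohli24.com")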
@@ -590,10 +619,14 @@ class LogicOhli24(LogicModuleBase):
elif len(content_code_list) > 0: elif len(content_code_list) > 0:
for item in content_code_list: for item in content_code_list:
url = P.ModelSetting.get("ohli24_url") + "/c/" + item url = P.ModelSetting.get("ohli24_url") + "/c/" + item
print("scheduling url: %s", url) logger.debug(f"scheduling url: {url}")
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url) # ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
print("debug===")
print(item)
content_info = self.get_series_info(item, "", "") content_info = self.get_series_info(item, "", "")
# logger.debug(content_info)
for episode_info in content_info["episode"]: for episode_info in content_info["episode"]:
add_ret = self.add(episode_info) add_ret = self.add(episode_info)
if add_ret.startswith("enqueue"): if add_ret.startswith("enqueue"):
@@ -618,6 +651,7 @@ class LogicOhli24(LogicModuleBase):
def get_series_info(self, code, wr_id, bo_table): def get_series_info(self, code, wr_id, bo_table):
code_type = "c" code_type = "c"
code = urllib.parse.quote(code)
try: try:
if ( if (
@@ -664,7 +698,7 @@ class LogicOhli24(LogicModuleBase):
else: else:
pass pass
logger.debug("url:::> %s", url) # logger.debug("url:::> %s", url)
# self.current_headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) # self.current_headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)
# AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36', # AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36',
@@ -689,6 +723,8 @@ class LogicOhli24(LogicModuleBase):
"_total_chapter", "_total_chapter",
"_show_time", "_show_time",
"_release_year", "_release_year",
"_drawing",
"_character_design"
] ]
description_dict = { description_dict = {
"원제": "_otit", "원제": "_otit",
@@ -709,8 +745,11 @@ class LogicOhli24(LogicModuleBase):
"개봉년도": "_release_year", "개봉년도": "_release_year",
"개봉일": "_opening_date", "개봉일": "_opening_date",
"런타임": "_run_time", "런타임": "_run_time",
"작화": "_drawing",
"캐릭터디자인": "_character_design"
} }
list_body_li = tree.xpath('//ul[@class="list-body"]/li') list_body_li = tree.xpath('//ul[@class="list-body"]/li')
# logger.debug(f"list_body_li:: {list_body_li}") # logger.debug(f"list_body_li:: {list_body_li}")
episodes = [] episodes = []
@@ -744,7 +783,8 @@ class LogicOhli24(LogicModuleBase):
} }
) )
logger.debug(P.ModelSetting.get("ohli24_order_desc")) # 정렬 여부 체크
# logger.debug(P.ModelSetting.get("ohli24_order_desc"))
# if P.ModelSetting.get("ohli24_order_desc") == "False": # if P.ModelSetting.get("ohli24_order_desc") == "False":
# print("Here....") # print("Here....")
# episodes.reverse() # episodes.reverse()
@@ -763,7 +803,7 @@ class LogicOhli24(LogicModuleBase):
# logger.info(f"des::>> {des}") # logger.info(f"des::>> {des}")
image = image.replace("..", P.ModelSetting.get("ohli24_url")) image = image.replace("..", P.ModelSetting.get("ohli24_url"))
# logger.info("images:: %s", image) # logger.info("images:: %s", image)
logger.info("title:: %s", title) # logger.info("title:: %s", title)
ser_description = tree.xpath( ser_description = tree.xpath(
'//div[@class="view-stocon"]/div[@class="c"]/text()' '//div[@class="view-stocon"]/div[@class="c"]/text()'
@@ -817,7 +857,9 @@ class LogicOhli24(LogicModuleBase):
+ page + page
) )
# cate == "complete": # cate == "complete":
logger.info("url:::> %s", url)
# logger.info("url:::> %s", url)
data = {} data = {}
response_data = LogicOhli24.get_html(url, timeout=10) response_data = LogicOhli24.get_html(url, timeout=10)
# response_data = asyncio.run( # response_data = asyncio.run(
@@ -839,12 +881,24 @@ class LogicOhli24(LogicModuleBase):
entity = {} entity = {}
entity["link"] = item.xpath(".//a/@href")[0] entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1] entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[ entity["title"] = item.xpath(
0 ".//div[@class='post-title']/text()"
].strip() )[0].strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ # logger.debug(item.xpath(".//div[@class='img-item']/img/@src")[0])
0 # logger.debug(item.xpath(".//div[@class='img-item']/img/@data-ezsrc")[0])
].replace("..", P.ModelSetting.get("ohli24_url")) # entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
# 0
# ].replace("..", P.ModelSetting.get("ohli24_url"))
if len(item.xpath(".//div[@class='img-item']/img/@src")) > 0:
entity["image_link"] = item.xpath(
".//div[@class='img-item']/img/@src"
)[0].replace("..", P.ModelSetting.get("ohli24_url"))
else:
entity["image_link"] = item.xpath(
".//div[@class='img-item']/img/@data-ezsrc"
)[0]
data["ret"] = "success" data["ret"] = "success"
data["anime_list"].append(entity) data["anime_list"].append(entity)
@@ -869,12 +923,12 @@ class LogicOhli24(LogicModuleBase):
entity = {} entity = {}
entity["link"] = item.xpath(".//a/@href")[0] entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1] entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[ entity["title"] = item.xpath(
0 ".//div[@class='post-title']/text()"
].strip() )[0].strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ entity["image_link"] = item.xpath(
0 ".//div[@class='img-item']/img/@src"
].replace("..", P.ModelSetting.get("ohli24_url")) )[0].replace("..", P.ModelSetting.get("ohli24_url"))
data["ret"] = "success" data["ret"] = "success"
data["anime_list"].append(entity) data["anime_list"].append(entity)
@@ -913,21 +967,80 @@ class LogicOhli24(LogicModuleBase):
entity["title"] = "".join( entity["title"] = "".join(
item.xpath(".//div[@class='post-title']/text()") item.xpath(".//div[@class='post-title']/text()")
).strip() ).strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[ entity["image_link"] = item.xpath(
0 ".//div[@class='img-item']/img/@src"
].replace("..", P.ModelSetting.get("ohli24_url")) )[0].replace("..", P.ModelSetting.get("ohli24_url"))
entity["code"] = item.xpath(".//div[@class='img-item']/img/@alt")[0] entity["code"] = item.xpath(
".//div[@class='img-item']/img/@alt"
)[0]
data["ret"] = "success" data["ret"] = "success"
data["anime_list"].append(entity) data["anime_list"].append(entity)
return data return data
except Exception as e: except Exception as e:
P.logger.error("Exception:%s", e) P.logger.error(f"Exception: {str(e)}")
P.logger.error(traceback.format_exc()) P.logger.error(traceback.format_exc())
return {"ret": "exception", "log": str(e)} return {"ret": "exception", "log": str(e)}
def check_for_new_post(self):
# Get the HTML content of the page
res = requests.get(
f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing'
)
soup = BeautifulSoup(res.content, "html.parser")
# Find the latest post on the page
latest_post = soup.find("div", class_="post-title").text
latest_post_image = (
soup.find("div", class_="img-item")
.find("img", class_="wr-img")
.get("src")
.replace("..", P.ModelSetting.get("ohli24_url"))
)
logger.debug(f"latest_post:: {latest_post}")
logger.debug(f"self.last_post_title:: {self.last_post_title}")
logger.debug(f"latest_post_image:: {latest_post_image}")
# Compare the latest post with the last recorded post
if latest_post != self.last_post_title:
# If there is a new post, update the last recorded post
self.last_post_title = latest_post
# Send a notification to Discord channel
# discord_client = discord.Client()
# discord_client.run(self.DISCORD_BOT_TOKEN)
#
# async def on_ready():
# channel = discord_client.get_channel(self.DISCORD_CHANNEL_ID)
# await channel.send(f"A new post has been added: {latest_post}")
#
# discord_client.close()
webhook = DiscordWebhook(url=self.discord_webhook_url)
embed = DiscordEmbed(
title=self.discord_title, color=self.discord_color
)
embed.set_timestamp()
path = self.last_post_title
embed.set_image(url=latest_post_image)
embed.add_embed_field(name="", value=path, inline=True)
embed.set_timestamp()
webhook.add_embed(embed)
response = webhook.execute()
return self.last_post_title
return self.last_post_title
def send_notify(self):
logger.debug("send_notify() routine")
while True:
self.last_post_title = self.check_for_new_post()
logger.debug(self.last_post_title)
time.sleep(600)
# @staticmethod # @staticmethod
def plugin_load(self): def plugin_load(self):
try: try:
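
check_for_new_post above scrapes the board, compares the newest post title with the last one recorded, and pushes an embed through the discord-webhook package. A minimal sketch of just that notification call, with the webhook URL, title text, and image URL as placeholders:

from typing import Optional

from discord_webhook import DiscordWebhook, DiscordEmbed


def notify_new_episode(webhook_url: str, title: str, image_url: Optional[str] = None):
    """Post a simple embed to a Discord webhook and return the HTTP response."""
    webhook = DiscordWebhook(url=webhook_url)
    embed = DiscordEmbed(title="새로운 애니", description=title, color="242424")
    if image_url:
        embed.set_image(url=image_url)
    embed.set_timestamp()
    webhook.add_embed(embed)
    return webhook.execute()


# notify_new_episode("https://discord.com/api/webhooks/<id>/<token>", "new episode title")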
@@ -938,6 +1051,10 @@ class LogicOhli24(LogicModuleBase):
self.current_data = None self.current_data = None
self.queue.queue_start() self.queue.queue_start()
logger.debug(P.ModelSetting.get_bool("ohli24_discord_notify"))
if P.ModelSetting.get_bool("ohli24_discord_notify"):
self.send_notify()
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error("Exception:%s", e)
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
@@ -958,12 +1075,14 @@ class LogicOhli24(LogicModuleBase):
return True return True
@staticmethod @staticmethod
@yommi_timeit
def get_html( def get_html(
url, headers=None, referer=None, stream=False, timeout=5, stealth=False url, headers=None, referer=None, stream=False, timeout=10, stealth=False
): ):
# global response_data # global response_data
data = "" data = ""
# response_date = "" # response_date = ""
logger.debug(f"url: {url}")
try: try:
print("cloudflare protection bypass ==================P") print("cloudflare protection bypass ==================P")
@@ -971,7 +1090,8 @@ class LogicOhli24(LogicModuleBase):
if headers is not None: if headers is not None:
LogicOhli24.headers = headers LogicOhli24.headers = headers
logger.debug(f"headers: {LogicOhli24.headers}") # logger.debug(f"headers: {LogicOhli24.headers}")
# response_data = asyncio.run( # response_data = asyncio.run(
# LogicOhli24.get_html_playwright( # LogicOhli24.get_html_playwright(
# url, # url,
@@ -990,16 +1110,26 @@ class LogicOhli24(LogicModuleBase):
if LogicOhli24.session is None: if LogicOhli24.session is None:
LogicOhli24.session = requests.session() LogicOhli24.session = requests.session()
LogicOhli24.session.verify = False
# logger.debug('get_html :%s', url) # logger.debug('get_html :%s', url)
# LogicOhli24.headers["Referer"] = "" if referer is None else referer # LogicOhli24.headers["Referer"] = "" if referer is None else referer
logger.debug(f"referer:: {referer}") # logger.debug(f"referer:: {referer}")
if referer: if referer:
LogicOhli24.headers["Referer"] = referer LogicOhli24.headers["Referer"] = referer
# logger.info(headers) # logger.info(headers)
logger.debug(f"LogicOhli24.headers:: {LogicOhli24.headers}") # logger.debug(f"LogicOhli24.headers:: {LogicOhli24.headers}")
proxies = {
"http": "http://192.168.0.2:3138",
"https": "http://192.168.0.2:3138",
}
 page_content = LogicOhli24.session.get(
-    url, headers=LogicOhli24.headers, timeout=timeout
+    url,
+    headers=LogicOhli24.headers,
+    timeout=timeout,
+    proxies=proxies,
 )
response_data = page_content.text response_data = page_content.text
# logger.debug(response_data) # logger.debug(response_data)
@@ -1081,7 +1211,8 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
# Get episode info from OHLI24 site # Get episode info from OHLI24 site
def make_episode_info(self): def make_episode_info(self):
try: try:
base_url = "https://ohli24.org" base_url = "https://a24.ohli24.com"
base_url = P.ModelSetting.get("ohli24_url")
iframe_url = "" iframe_url = ""
# https://ohli24.org/e/%EB%85%B9%EC%9D%84%20%EB%A8%B9%EB%8A%94%20%EB%B9%84%EC%8A%A4%EC%BD%94%206%ED%99%94 # https://ohli24.org/e/%EB%85%B9%EC%9D%84%20%EB%A8%B9%EB%8A%94%20%EB%B9%84%EC%8A%A4%EC%BD%94%206%ED%99%94
@@ -1104,24 +1235,43 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
) )
# logger.debug(text) # logger.debug(text)
soup1 = BeautifulSoup(text, "lxml") soup1 = BeautifulSoup(text, "lxml")
pattern = re.compile(r"url : \"\.\.(.*)\"") # pattern = re.compile(r"url : \"\.\.(.*)\"")
script = soup1.find("script", text=pattern) # script = soup1.find("script", text=pattern)
#
# if script:
# match = pattern.search(script.text)
# if match:
# iframe_url = match.group(1)
# logger.info("iframe_url::> %s", iframe_url)
pattern = r"<iframe src=\"(.*?)\" allowfullscreen>"
if script: match = re.search(pattern, text)
match = pattern.search(script.text) if match:
if match: iframe_src = match.group(1)
iframe_url = match.group(1) logger.debug(f"iframe_src:::> {iframe_src}")
logger.info("iframe_url::> %s", iframe_url)
iframe_url = soup1.find("iframe")["src"]
iframe_src = iframe_url # iframe_src = f'{P.ModelSetting.get("ohli24_url")}{iframe_url}'
iframe_html = LogicOhli24.get_html(
iframe_src, headers=headers, timeout=600
)
# print(iframe_html)
pattern = r"<iframe src=\"(.*?)\" allowfullscreen>"
match = re.search(pattern, iframe_html)
if match:
iframe_src = match.group(1)
print(iframe_src)
logger.debug(f"iframe_src:::> {iframe_src}") logger.debug(f"iframe_src:::> {iframe_src}")
# resp1 = requests.get(iframe_src, headers=headers, timeout=600).text # resp1 = requests.get(iframe_src, headers=headers, timeout=600).text
resp1 = LogicOhli24.get_html(iframe_src, headers=headers, timeout=600) resp1 = LogicOhli24.get_html(
logger.info("resp1::>> %s", resp1) iframe_src, headers=headers, timeout=600
)
# logger.info("resp1::>> %s", resp1)
soup3 = BeautifulSoup(resp1, "lxml") soup3 = BeautifulSoup(resp1, "lxml")
# packed_pattern = re.compile(r'\\{*(eval.+)*\\}', re.MULTILINE | re.DOTALL) # packed_pattern = re.compile(r'\\{*(eval.+)*\\}', re.MULTILINE | re.DOTALL)
s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL) s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL)
@@ -1142,7 +1292,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
logger.debug(type(packed_script)) logger.debug(type(packed_script))
unpack_script = jsbeautifier.beautify(str(packed_script)) unpack_script = jsbeautifier.beautify(str(packed_script))
p1 = re.compile(r"(\"tracks\".*\])\,\"captions\"", re.MULTILINE | re.DOTALL) p1 = re.compile(
r"(\"tracks\".*\])\,\"captions\"", re.MULTILINE | re.DOTALL
)
m2 = re.search( m2 = re.search(
r"(\"tracks\".*\]).*\"captions\"", r"(\"tracks\".*\]).*\"captions\"",
unpack_script, unpack_script,
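
The hunk above only reflows the regex that pulls the "tracks" array out of the player script once jsbeautifier has expanded the eval-packed payload. A minimal sketch of that unpack-and-extract step, assuming packed_script already holds the eval(function(p,a,c,k,e,d)...) text and that the extracted fragment is strict JSON:

import json
import re

import jsbeautifier


def extract_tracks(packed_script: str):
    """Unpack an eval-packed player script and return its "tracks" list, if any."""
    # jsbeautifier expands the p,a,c,k,e,d packer into readable JavaScript
    unpacked = jsbeautifier.beautify(packed_script)
    m = re.search(r'("tracks"\s*:\s*\[.*?\])', unpacked, re.MULTILINE | re.DOTALL)
    if not m:
        return []
    # Wrap the captured fragment in braces so it parses as a JSON object
    return json.loads("{" + m.group(1) + "}")["tracks"]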
@@ -1160,7 +1312,10 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
video_hash = iframe_src.split("/") video_hash = iframe_src.split("/")
video_hashcode = re.sub(r"index\.php\?data=", "", video_hash[-1]) video_hashcode = re.sub(r"index\.php\?data=", "", video_hash[-1])
self._vi = video_hashcode self._vi = video_hashcode
logger.debug(f"video_hash::> {video_hash}")
video_info_url = f"{video_hash[0]}//{video_hash[2]}/player/index.php?data={video_hashcode}&do=getVideo" video_info_url = f"{video_hash[0]}//{video_hash[2]}/player/index.php?data={video_hashcode}&do=getVideo"
# video_info_url = f"{video_hash[0]}//michealcdn.com/player/index.php?data={video_hashcode}&do=getVideo"
# print('hash:::', video_hash) # print('hash:::', video_hash)
logger.debug(f"video_info_url::: {video_info_url}") logger.debug(f"video_info_url::: {video_info_url}")
@@ -1169,10 +1324,11 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) " "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36" "Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36"
"Mozilla/5.0 (Macintosh; Intel " "Mozilla/5.0 (Macintosh; Intel "
"Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 " "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/116.0.0.0 Safari/537.36"
"Whale/3.12.129.46 Safari/537.36", "Whale/3.12.129.46 Safari/537.36",
"X-Requested-With": "XMLHttpRequest", "X-Requested-With": "XMLHttpRequest",
"Cookie": "PHPSESSID=hhhnrora8o9omv1tljq4efv216; 2a0d2363701f23f8a75028924a3af643=NDkuMTYzLjExMS4xMDk=; e1192aefb64683cc97abb83c71057733=aW5n", "Cookie": "PHPSESSID=b6hnl2crfvtg36sm6rjjkso4p0; 2a0d2363701f23f8a75028924a3af643=MTgwLjY2LjIyMi4xODk%3D; _ga=GA1.1.586565509.1695135593; __gads=ID=60e47defb3337e02-227f0fc9e3e3009a:T=1695135593:RT=1695135593:S=ALNI_MagY46XGCbx9E4Et2DRzfUHdTAKsg; __gpi=UID=00000c4bb3d077c8:T=1695135593:RT=1695135593:S=ALNI_MYvj_8OjdhtGPEGoXhPsQWq1qye8Q; _ga_MWWDFMDJR0=GS1.1.1695135593.1.1.1695135599.0.0.0",
} }
payload = { payload = {
@@ -1202,7 +1358,15 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
} }
self.url = stream_info[1].strip() self.url = stream_info[1].strip()
-match = re.compile(r'NAME="(?P<quality>.*?)"').search(stream_info[0])
+logger.info(self.url)
+if "anibeast.com" in self.url:
+    self.headers["Referer"] = iframe_src
+if "crazypatutu.com" in self.url:
+    self.headers["Referer"] = iframe_src
+match = re.compile(r'NAME="(?P<quality>.*?)"').search(
+    stream_info[0]
+)
self.quality = "720P" self.quality = "720P"
if match is not None: if match is not None:
self.quality = match.group("quality") self.quality = match.group("quality")
@@ -1218,7 +1382,10 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
if match: if match:
self.content_title = match.group("title").strip() self.content_title = match.group("title").strip()
if "season" in match.groupdict() and match.group("season") is not None: if (
"season" in match.groupdict()
and match.group("season") is not None
):
self.season = int(match.group("season")) self.season = int(match.group("season"))
# epi_no = 1 # epi_no = 1
@@ -1251,7 +1418,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
) )
else: else:
folder_name = self.content_title folder_name = self.content_title
folder_name = Util.change_text_for_use_filename(folder_name.strip()) folder_name = Util.change_text_for_use_filename(
folder_name.strip()
)
self.savepath = os.path.join(self.savepath, folder_name) self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"): if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"):
self.savepath = os.path.join( self.savepath = os.path.join(
@@ -1281,7 +1450,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
class ModelOhli24Item(db.Model): class ModelOhli24Item(db.Model):
__tablename__ = "{package_name}_ohli24_item".format(package_name=P.package_name) __tablename__ = "{package_name}_ohli24_item".format(
package_name=P.package_name
)
__table_args__ = {"mysql_collate": "utf8_general_ci"} __table_args__ = {"mysql_collate": "utf8_general_ci"}
__bind_key__ = P.package_name __bind_key__ = P.package_name
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
@@ -1366,20 +1537,26 @@ class ModelOhli24Item(db.Model):
conditions = [] conditions = []
for tt in tmp: for tt in tmp:
if tt != "": if tt != "":
conditions.append(cls.filename.like("%" + tt.strip() + "%")) conditions.append(
cls.filename.like("%" + tt.strip() + "%")
)
query = query.filter(or_(*conditions)) query = query.filter(or_(*conditions))
elif search.find(",") != -1: elif search.find(",") != -1:
tmp = search.split(",") tmp = search.split(",")
for tt in tmp: for tt in tmp:
if tt != "": if tt != "":
query = query.filter(cls.filename.like("%" + tt.strip() + "%")) query = query.filter(
cls.filename.like("%" + tt.strip() + "%")
)
else: else:
query = query.filter(cls.filename.like("%" + search + "%")) query = query.filter(cls.filename.like("%" + search + "%"))
if option == "completed": if option == "completed":
query = query.filter(cls.status == "completed") query = query.filter(cls.status == "completed")
query = ( query = (
query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id) query.order_by(desc(cls.id))
if order == "desc"
else query.order_by(cls.id)
) )
return query return query
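
The hunk above is a pure reflow of the search-filter builder in the item model: one branch ORs LIKE conditions over split terms, a comma-separated search chains (ANDs) them, and anything else becomes a single LIKE. A minimal sketch of that rule, assuming a '|' separator for the OR branch (the actual split sits just above the hunk and is not shown):

from sqlalchemy import or_


def apply_filename_search(query, column, search: str):
    """'a|b' -> OR of LIKEs, 'a,b' -> chained (AND) LIKEs, otherwise one LIKE."""
    if search.find("|") != -1:
        terms = [t.strip() for t in search.split("|") if t.strip() != ""]
        return query.filter(or_(*[column.like(f"%{t}%") for t in terms]))
    if search.find(",") != -1:
        for t in search.split(","):
            if t.strip() != "":
                query = query.filter(column.like(f"%{t.strip()}%"))
        return query
    return query.filter(column.like(f"%{search}%"))


# Example: query = apply_filename_search(query, ModelOhli24Item.filename, "title1|title2")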

static/js/sjva_global1.js (new file, 67 lines)
View File

@@ -0,0 +1,67 @@
function global_sub_request_search(page, move_top=true) {
var formData = get_formdata('#form_search')
formData += '&page=' + page;
$.ajax({
url: '/' + package_name + '/ajax/' + sub + '/web_list',
type: "POST",
cache: false,
data: formData,
dataType: "json",
success: function (data) {
current_data = data;
if (move_top)
window.scrollTo(0,0);
make_list(data.list)
make_page_html(data.paging)
}
});
}
function get_formdata(form_id) {
// on, off 일수도 있으니 모두 True, False로 통일하고
// 밑에서는 False인 경우 값이 추가되지 않으니.. 수동으로 넣어줌
var checkboxs = $(form_id + ' input[type=checkbox]');
//for (var i in checkboxs) {
for (var i =0 ; i < checkboxs.length; i++) {
if ( $(checkboxs[i]).is(':checked') ) {
$(checkboxs[i]).val('True');
} else {
$(checkboxs[i]).val('False');
}
}
var formData = $(form_id).serialize();
$.each($(form_id + ' input[type=checkbox]')
.filter(function(idx) {
return $(this).prop('checked') === false
}),
function(idx, el) {
var emptyVal = "False";
formData += '&' + $(el).attr('name') + '=' + emptyVal;
}
);
formData = formData.replace("&global_scheduler=True", "")
formData = formData.replace("&global_scheduler=False", "")
formData = formData.replace("global_scheduler=True&", "")
formData = formData.replace("global_scheduler=False&", "")
return formData;
}
function globalRequestSearch2(page, move_top = true) {
var formData = getFormdata("#form_search")
formData += "&page=" + page
console.log(formData)
$.ajax({
url: "/" + PACKAGE_NAME + "/ajax/" + MODULE_NAME + "/web_list2",
type: "POST",
cache: false,
data: formData,
dataType: "json",
success: function (data) {
current_data = data
if (move_top) window.scrollTo(0, 0)
make_list(data.list)
make_page_html(data.paging)
},
})
}

static/js/sjva_ui14.js (new file, 204 lines)
View File

@@ -0,0 +1,204 @@
function m_row_start(padding='10', align='center') {
var str = '<div class="row" style="padding-top: '+padding+'px; padding-bottom:'+padding+'px; align-items:'+align+';">';
return str;
}
function m_row_start_hover(padding='10', align='center') {
var str = '<div class="row my_hover" style="padding-top: '+padding+'px; padding-bottom:'+padding+'px; align-items:'+align+';">';
return str;
}
function m_row_start_top(padding='10') {
return m_row_start(padding, 'top');
}
function m_row_start_color(padding='10', align='center', color='') {
var str = '<div class="row" style="padding-top: '+padding+'px; padding-bottom:'+padding+'px; align-items:'+align+'; background-color:'+color+'">';
return str;
}
function m_row_start_color2(padding='10', align='center') {
var str = '<div class="row bg-dark text-white" style="padding-top: '+padding+'px; padding-bottom:'+padding+'px; align-items:'+align+';">';
return str;
}
function m_row_end() {
var str = '</div>';
return str;
}
//border
function m_col(w, h, align='left') {
var str = '<div class="col-sm-' + w + ' " style="text-align: '+align+'; word-break:break-all;">';
str += h
str += '</div>';
return str
}
function m_col2(w, h, align='left') {
var str = '<div class="col-sm-' + w + ' " style="padding:5px; margin:0px; text-align: '+align+'; word-break:break-all;">';
str += h
str += '</div>';
return str
}
function m_button_group(h) {
var str = '<div class="btn-group btn-group-sm flex-wrap mr-2" role="group">';
str += h
str += '</div>';
return str;
}
function m_button(id, text, data) {
var str = '<button id="'+id+'" name="'+id+'" class="btn btn-sm btn-outline-success" '
for ( var i in data) {
str += ' data-' + data[i].key + '="' + data[i].value+ '" '
}
str += '>' + text + '</button>';
return str;
}
function m_button2(id, text, data, outline_color) {
var str = '<button id="'+id+'" name="'+id+'" class="btn btn-sm btn-outline-'+outline_color+'" '
for ( var i in data) {
str += ' data-' + data[i].key + '="' + data[i].value+ '" '
}
str += '>' + text + '</button>';
return str;
}
function m_hr(margin='5') {
var str = '<hr style="width: 100%; margin:'+margin+'px;" />';
return str;
}
function m_hr_black() {
var str = '<hr style="width: 100%; color: black; height: 2px; background-color:black;" />';
return str;
}
// 체크박스는 자바로 하면 on/off 스크립트가 안먹힘.
function m_modal(data='EMPTY', title='JSON', json=true) {
document.getElementById("modal_title").innerHTML = title;
if (json) {
data = JSON.stringify(data, null, 2);
}
document.getElementById("modal_body").innerHTML = "<pre>"+ data + "</pre>";;
$("#large_modal").modal();
}
function m_tab_head(name, active) {
if (active) {
var str = '<a class="nav-item nav-link active" id="id_'+name+'" data-toggle="tab" href="#'+name+'" role="tab">'+name+'</a>';
} else {
var str = '<a class="nav-item nav-link" id="id_'+name+'" data-toggle="tab" href="#'+name+'" role="tab">'+name+'</a>';
}
return str;
}
function m_tab_content(name, content, active) {
if (active) {
var str = '<div class="tab-pane fade show active" id="'+name+'" role="tabpanel" >';
} else {
var str = '<div class="tab-pane fade show" id="'+name+'" role="tabpanel" >';
}
str += content;
str += '</div>'
return str;
}
function m_progress(id, width, label) {
var str = '';
str += '<div class="progress" style="height: 25px;">'
str += '<div id="'+id+'" class="progress-bar" style="background-color:yellow;width:'+width+'%"></div>';
str += '<div id="'+id+'_label" class="justify-content-center d-flex w-100 position-absolute" style="margin-top:2px">'+label+'</div>';
str += '</div>'
return str;
}
function m_progress2(id, width, label) {
var str = '';
str += '<div class="progress" style="height: 25px;">'
str += '<div id="'+id+'" class="progress-bar" style="background-color:yellow;width:'+width+'%"></div>';
str += '<div id="'+id+'_label" class="justify-content-center d-flex w-100 position-absolute" style="margin:0px; margin-top:2px">'+label+'</div>';
str += '</div>'
return str;
}
function make_page_html(data) {
str = ' \
<div class="d-inline-block"></div> \
<div class="row mb-3"> \
<div class="col-sm-12"> \
<div class="btn-toolbar" style="justify-content: center;" role="toolbar" aria-label="Toolbar with button groups" > \
<div class="btn-group btn-group-sm mr-2" role="group" aria-label="First group">'
if (data.prev_page) {
str += '<button id="page" data-page="' + (data.start_page-1) + '" type="button" class="btn btn-secondary">&laquo;</button>'
}
for (var i = data.start_page ; i <= data.last_page ; i++) {
str += '<button id="page" data-page="' + i +'" type="button" class="btn btn-secondary" ';
if (i == data.current_page) {
str += 'disabled';
}
str += '>'+i+'</button>';
}
if (data.next_page) {
str += '<button id="page" data-page="' + (data.last_page+1) + '" type="button" class="btn btn-secondary">&raquo;</button>'
}
str += '</div> \
</div> \
</div> \
</div> \
'
document.getElementById("page1").innerHTML = str;
document.getElementById("page2").innerHTML = str;
}
function use_collapse(div, reverse=false) {
var ret = $('#' + div).prop('checked');
if (reverse) {
if (ret) {
$('#' + div + '_div').collapse('hide')
} else {
$('#' + div + '_div').collapse('show')
}
} else {
if (ret) {
$('#' + div + '_div').collapse('show')
} else {
$('#' + div + '_div').collapse('hide')
}
}
}
// primary, secondary, success, danger, warning, info, light, dark, white
function j_button(id, text, data={}, color='primary', outline=true, small=false, _class='') {
let str = '<button id="'+id+'" name="'+id+'" class="btn btn-sm btn';
if (outline) {
str += '-outline';
}
str += '-' + color+'';
str += ' ' + _class;
if (small) {
str += ' py-0" style="font-size: 0.8em;"';
} else {
str += '" ';
}
for ( var key in data) {
str += ' data-' + key + '="' + data[key]+ '" '
}
str += '>' + text + '</button>';
return str;
}

View File

@@ -24,8 +24,8 @@
<div> <div>
<form id="program_list"> <form id="program_list">
{{ macros.setting_input_text_and_buttons('code', '작품 Code', {{ macros.setting_input_text_and_buttons('code', '작품 Code',
[['analysis_btn', '분석'], ['go_ohli24_btn', 'Go OHLI24']], desc='예) [['analysis_btn', '분석'], ['go_linkkf_btn', 'Go 링크 애니']], desc='예)
"https://ohli24.net/c/녹을 먹는 비스코" 나 "녹을 먹는 비스코"') }} "https://linkkf.app/코드" 나 "코"') }}
</form> </form>
<form id="program_auto_form"> <form id="program_auto_form">
<div id="episode_list"></div> <div id="episode_list"></div>
@@ -45,7 +45,7 @@
const package_name = "{{arg['package_name'] }}"; const package_name = "{{arg['package_name'] }}";
const sub = "{{arg['sub'] }}"; const sub = "{{arg['sub'] }}";
const ohli24_url = "{{arg['ohli24_url']}}"; const linkkf_url = "{{arg['linkkf_url']}}";
const params = new Proxy(new URLSearchParams(window.location.search), { const params = new Proxy(new URLSearchParams(window.location.search), {
@@ -132,7 +132,7 @@
str += tmp; str += tmp;
// program // program
// str += m_hr_black(); // str += m_hr_black();
str += "<div class='card p-lg-5 mt-md-3 p-md-3 border-light'>" str += "<div class='card p-lg-5 mt-md-3 p-md-3 mt-sm-3 p-sm-3 border-light'>"
str += m_row_start(0); str += m_row_start(0);
tmp = ""; tmp = "";
@@ -209,13 +209,13 @@
// {#document.getElementById("analysis_btn").click();#} // {#document.getElementById("analysis_btn").click();#}
} }
if ("{{arg['ohli24_current_code']}}" !== "") { if ("{{arg['linkkf_current_code']}}" !== "") {
if (params.code === null) { if (params.code === null) {
console.log('params.code === null') console.log('params.code === null')
document.getElementById("code").value = "{{arg['ohli24_current_code']}}"; document.getElementById("code").value = "{{arg['linkkf_current_code']}}";
} else if (params.code === '') { } else if (params.code === '') {
document.getElementById("code").value = "{{arg['ohli24_current_code']}}"; document.getElementById("code").value = "{{arg['linkkf_current_code']}}";
} else { } else {
console.log('params code exist') console.log('params code exist')
@@ -243,7 +243,9 @@
$("#analysis_btn").unbind("click").bind('click', function (e) { $("#analysis_btn").unbind("click").bind('click', function (e) {
e.preventDefault(); e.preventDefault();
e.stopPropagation() e.stopPropagation()
const button = document.getElementById('analysis_btn');
const code = document.getElementById("code").value const code = document.getElementById("code").value
button.setAttribute("disabled", "disabled");
console.log(code) console.log(code)
$.ajax({ $.ajax({
url: '/' + package_name + '/ajax/' + sub + '/analysis', url: '/' + package_name + '/ajax/' + sub + '/analysis',
@@ -256,6 +258,7 @@
// {#console.log(ret.code)#} // {#console.log(ret.code)#}
console.log(ret.data) console.log(ret.data)
make_program(ret.data) make_program(ret.data)
button.removeAttribute("disabled");
} else { } else {
$.notify('<strong>분석 실패</strong><br>' + ret.log, {type: 'warning'}); $.notify('<strong>분석 실패</strong><br>' + ret.log, {type: 'warning'});
} }
@@ -264,9 +267,9 @@
}); });
$("body").on('click', '#go_ohli24_btn', function (e) { $("body").on('click', '#go_linkkf_btn', function (e) {
e.preventDefault(); e.preventDefault();
window.open("{{arg['ohli24_url']}}", "_blank"); window.open("{{arg['linkkf_url']}}", "_blank");
}); });
$("body").on('click', '#all_check_on_btn', function (e) { $("body").on('click', '#all_check_on_btn', function (e) {

View File

@@ -7,7 +7,7 @@
<nav> <nav>
{{ macros.m_tab_head_start() }} {{ macros.m_tab_head_start() }}
{{ macros.m_tab_head2('normal', '일반', true) }} {{ macros.m_tab_head2('normal', '일반', true) }}
{{ macros.m_tab_head2('auto', '홈화면 자동', false) }} {{ macros.m_tab_head2('auto', '자동 설정', false) }}
{{ macros.m_tab_head2('action', '기타', false) }} {{ macros.m_tab_head2('action', '기타', false) }}
{{ macros.m_tab_head_end() }} {{ macros.m_tab_head_end() }}
</nav> </nav>
@@ -28,9 +28,9 @@
{{ macros.m_tab_content_start('auto', false) }} {{ macros.m_tab_content_start('auto', false) }}
{{ macros.setting_global_scheduler_sub_button(arg['scheduler'], arg['is_running']) }} {{ macros.setting_global_scheduler_sub_button(arg['scheduler'], arg['is_running']) }}
{{ macros.setting_input_text('linkkf_interval', '스케쥴링 실행 정보', value=arg['linkkf_interval'], col='3', desc=['Inverval(minute 단위)이나 Cron 설정']) }} {{ macros.setting_input_text('linkkf_interval', '스케쥴링 실행 정보', value=arg['linkkf_interval'], col='4', desc=['Inverval(minute 단위)이나 Cron 설정']) }}
{{ macros.setting_checkbox('linkkf_auto_start', '시작시 자동실행', value=arg['linkkf_auto_start'], desc='On : 시작시 자동으로 스케쥴러에 등록됩니다.') }} {{ macros.setting_checkbox('linkkf_auto_start', '시작시 자동실행', value=arg['linkkf_auto_start'], desc='On : 시작시 자동으로 스케쥴러에 등록됩니다.') }}
{{ macros.setting_input_textarea('linkkf_auto_code_list', '자동 다운로드할 작품 코드', desc=['all 입력시 모두 받기', '구분자 | 또는 엔터'], value=arg['linkkf_auto_code_list'], row='10') }} {{ macros.setting_input_textarea('linkkf_auto_code_list', '자동 다운로드할 작품 코드', desc=['구분자 | 또는 엔터'], value=arg['linkkf_auto_code_list'], row='10') }}
{{ macros.setting_checkbox('linkkf_auto_mode_all', '에피소드 모두 받기', value=arg['linkkf_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }} {{ macros.setting_checkbox('linkkf_auto_mode_all', '에피소드 모두 받기', value=arg['linkkf_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }}
{{ macros.m_tab_content_end() }} {{ macros.m_tab_content_end() }}

View File

@@ -24,6 +24,7 @@
{{ macros.setting_checkbox('ohli24_auto_make_season_folder', '시즌 폴더 생성', value=arg['ohli24_auto_make_season_folder'], desc=['On : Season 번호 폴더를 만듭니다.']) }} {{ macros.setting_checkbox('ohli24_auto_make_season_folder', '시즌 폴더 생성', value=arg['ohli24_auto_make_season_folder'], desc=['On : Season 번호 폴더를 만듭니다.']) }}
</div> </div>
{{ macros.setting_checkbox('ohli24_uncompleted_auto_enqueue', '자동으로 다시 받기', value=arg['ohli24_uncompleted_auto_enqueue'], desc=['On : 플러그인 로딩시 미완료인 항목은 자동으로 다시 받습니다.']) }} {{ macros.setting_checkbox('ohli24_uncompleted_auto_enqueue', '자동으로 다시 받기', value=arg['ohli24_uncompleted_auto_enqueue'], desc=['On : 플러그인 로딩시 미완료인 항목은 자동으로 다시 받습니다.']) }}
{{ macros.setting_checkbox('ohli24_discord_notify', '디스 코드 알림 받기', value=arg['ohli24_discord_notify'], desc=['On : 새로운 글이 올라올때 디스코드 알림을 보냅니다.']) }}
{{ macros.m_tab_content_end() }} {{ macros.m_tab_content_end() }}
{{ macros.m_tab_content_start('auto', false) }} {{ macros.m_tab_content_start('auto', false) }}