2022.01.30 버그 픽스
This commit is contained in:
25
lib/utils.py
Normal file
25
lib/utils.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import os
import time
from functools import wraps

# Ensure loguru is importable; install it on the fly if it is missing.
# NOTE(review): installing a package at import time is a heavy side effect
# kept for backward compatibility — declaring loguru as a real dependency
# of the plugin would be preferable.
try:
    from loguru import logger
except ImportError:
    # Was: bare `except:` + os.system("pip install loguru").  Catch only the
    # missing-module case, and invoke pip via the *current* interpreter
    # without a shell so the install goes into the right environment.
    import subprocess
    import sys

    subprocess.check_call([sys.executable, "-m", "pip", "install", "loguru"])
    from loguru import logger
|
||||
|
||||
|
||||
def yommi_timeit(func):
    """Decorator that logs the wall-clock duration of each call to *func*.

    The wrapped callable behaves exactly like the original and returns its
    result unchanged; the elapsed time is reported via loguru at DEBUG level.
    """

    @wraps(func)
    def timeit_wrapper(*args, **kwargs):
        started = time.perf_counter()
        result = func(*args, **kwargs)
        total_time = time.perf_counter() - started
        logger.debug(
            f"Function {func.__name__}{args} {kwargs} Took {total_time:.4f} secs"
        )
        return result

    return timeit_wrapper
|
||||
@@ -18,6 +18,9 @@ from lxml import html
|
||||
from urllib import parse
|
||||
import urllib
|
||||
|
||||
# my
|
||||
from .lib.utils import yommi_timeit
|
||||
|
||||
packages = [
|
||||
"beautifulsoup4",
|
||||
"requests-cache",
|
||||
@@ -590,7 +593,18 @@ class LogicAniLife(LogicModuleBase):
|
||||
await browser.close()
|
||||
|
||||
@staticmethod
|
||||
def get_html_selenium(url: str, referer: str) -> bytes:
|
||||
@yommi_timeit
|
||||
def get_html_selenium(
|
||||
url: str,
|
||||
referer: str,
|
||||
headless=True,
|
||||
linux=True,
|
||||
maximize=True,
|
||||
user_agent=False,
|
||||
lang_kr=False,
|
||||
secret_mode=False,
|
||||
download_path=None,
|
||||
) -> bytes:
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium import webdriver
|
||||
from selenium_stealth import stealth
|
||||
@@ -598,16 +612,44 @@ class LogicAniLife(LogicModuleBase):
|
||||
import time
|
||||
|
||||
options = webdriver.ChromeOptions()
|
||||
# options.add_experimental_option('excludeSwitches', ['enable-logging'])
|
||||
# 크롬드라이버 헤더 옵션추가 (리눅스에서 실행시 필수)
|
||||
options.add_argument("start-maximized")
|
||||
options.add_argument("--headless")
|
||||
|
||||
if headless:
|
||||
options.add_argument("--headless")
|
||||
options.add_argument("--no-sandbox")
|
||||
options.add_argument("window-size=1920x1080")
|
||||
options.add_argument("disable-gpu")
|
||||
# options.add_argument('--no-sandbox')
|
||||
options.add_argument("--disable-dev-shm-usage")
|
||||
options.add_experimental_option("excludeSwitches", ["enable-automation"])
|
||||
# 크롬 드라이버에 setuid를 하지 않음으로써 크롬의 충돌 막음
|
||||
options.add_argument("--disable-setuid-sandbox")
|
||||
|
||||
# disabling extensions
|
||||
options.add_argument("--disable-extensions")
|
||||
|
||||
if download_path:
|
||||
pass
|
||||
|
||||
if maximize:
|
||||
options.add_argument("start-maximized")
|
||||
|
||||
# 일단 좀 더 확인 필요
|
||||
options.add_experimental_option(
|
||||
"excludeSwitches", ["enable-automation", "enable-logging"]
|
||||
)
|
||||
options.add_experimental_option("useAutomationExtension", False)
|
||||
options.add_argument("--single-process")
|
||||
|
||||
if user_agent:
|
||||
options.add_argument(
|
||||
"user-agent=Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 "
|
||||
"Safari/537.36"
|
||||
)
|
||||
if lang_kr:
|
||||
options.add_argument("--lang=ko_KR")
|
||||
if secret_mode:
|
||||
options.add_argument("--incognito")
|
||||
|
||||
if LogicAniLife.os_platform == "Darwin":
|
||||
# 크롬드라이버 경로
|
||||
@@ -616,6 +658,7 @@ class LogicAniLife(LogicModuleBase):
|
||||
driver = webdriver.Chrome(
|
||||
ChromeDriverManager().install(), chrome_options=options
|
||||
)
|
||||
|
||||
else:
|
||||
# driver_bin_path = os.path.join(
|
||||
# os.path.dirname(__file__), "bin", f"{LogicAniLife.os_platform}"
|
||||
@@ -628,6 +671,8 @@ class LogicAniLife(LogicModuleBase):
|
||||
ChromeDriverManager().install(), chrome_options=options
|
||||
)
|
||||
|
||||
driver.implicitly_wait(5)
|
||||
|
||||
stealth(
|
||||
driver,
|
||||
languages=["ko-KR", "ko"],
|
||||
|
||||
729
logic_linkkf.py
729
logic_linkkf.py
@@ -73,25 +73,6 @@ name = "linkkf"
|
||||
|
||||
|
||||
class LogicLinkkf(LogicModuleBase):
|
||||
db_default = {
|
||||
"linkkf_db_version": "1",
|
||||
"linkkf_url": "https://linkkf.app",
|
||||
"linkkf_download_path": os.path.join(path_data, P.package_name, "linkkf"),
|
||||
"linkkf_auto_make_folder": "True",
|
||||
"linkkf_auto_make_season_folder": "True",
|
||||
"linkkf_finished_insert": "[완결]",
|
||||
"linkkf_max_ffmpeg_process_count": "1",
|
||||
"linkkf_order_desc": "True",
|
||||
"linkkf_auto_start": "False",
|
||||
"linkkf_interval": "* 5 * * *",
|
||||
"linkkf_auto_mode_all": "False",
|
||||
"linkkf_auto_code_list": "all",
|
||||
"linkkf_current_code": "",
|
||||
"linkkf_uncompleted_auto_enqueue": "False",
|
||||
"linkkf_image_url_prefix_series": "",
|
||||
"linkkf_image_url_prefix_episode": "",
|
||||
"linkkf_discord_notify": "True",
|
||||
}
|
||||
current_headers = None
|
||||
current_data = None
|
||||
referer = None
|
||||
@@ -116,9 +97,38 @@ class LogicLinkkf(LogicModuleBase):
|
||||
|
||||
def __init__(self, P):
    """Set up the linkkf module: base class init, defaults, socketio route.

    :param P: plugin instance shared across modules.
    """
    super(LogicLinkkf, self).__init__(P, "setting", scheduler_desc="linkkf 자동 다운로드")
    self.queue = None  # FfmpegQueue, created in plugin_load()
    # Fixed: the original assigned self.name = "linkkf" and then immediately
    # overwrote it with the module constant `name` (same value); keep only
    # the authoritative assignment.
    self.name = name
    # Per-instance setting defaults (overrides the class-level table).
    self.db_default = {
        "linkkf_db_version": "1",
        "linkkf_url": "https://linkkf.app",
        f"{self.name}_recent_code": "",
        "linkkf_download_path": os.path.join(path_data, P.package_name, "linkkf"),
        "linkkf_save_path": os.path.join(path_data, P.package_name, "linkkf"),
        "linkkf_auto_make_folder": "True",
        "linkkf_auto_make_season_folder": "True",
        "linkkf_finished_insert": "[완결]",
        "linkkf_max_ffmpeg_process_count": "2",
        f"{self.name}_max_download_count": "2",
        f"{self.name}_quality": "720p",
        "linkkf_order_desc": "False",
        "linkkf_auto_start": "False",
        "linkkf_interval": "* 5 * * *",
        "linkkf_auto_mode_all": "False",
        "linkkf_auto_code_list": "all",
        "linkkf_current_code": "",
        "linkkf_uncompleted_auto_enqueue": "False",
        "linkkf_image_url_prefix_series": "",
        "linkkf_image_url_prefix_episode": "",
        "linkkf_discord_notify": "True",
    }
    self.current_data = None  # last series page scraped, used by add_whitelist
    default_route_socketio(P, self)
|
||||
|
||||
@staticmethod
def db_init():
    """Database migration hook — intentionally a no-op for this module."""
    pass
|
||||
|
||||
def process_menu(self, sub, req):
|
||||
arg = P.ModelSetting.to_dict()
|
||||
arg["sub"] = self.name
|
||||
@@ -196,15 +206,18 @@ class LogicLinkkf(LogicModuleBase):
|
||||
ret["ret"] = self.add(info)
|
||||
return jsonify(ret)
|
||||
elif sub == "entity_list":
|
||||
pass
|
||||
return jsonify(self.queue.get_entity_list())
|
||||
elif sub == "queue_command":
|
||||
pass
|
||||
ret = self.queue.command(
|
||||
req.form["command"], int(req.form["entity_id"])
|
||||
)
|
||||
return jsonify(ret)
|
||||
elif sub == "add_queue_checked_list":
|
||||
pass
|
||||
elif sub == "web_list":
|
||||
pass
|
||||
return jsonify(ModelLinkkfItem.web_list(request))
|
||||
elif sub == "db_remove":
|
||||
pass
|
||||
return jsonify(ModelLinkkfItem.delete_by_id(req.form["id"]))
|
||||
elif sub == "add_whitelist":
|
||||
pass
|
||||
|
||||
@@ -278,6 +291,382 @@ class LogicLinkkf(LogicModuleBase):
|
||||
timeout=10,
|
||||
).content.decode("utf8", errors="replace")
|
||||
|
||||
@staticmethod
def add_whitelist(*args):
    """Append a series code to the "linkkf_auto_code_list" setting.

    Called either with one explicit code (``args[0]``) or with no arguments,
    in which case the code is taken from the currently displayed series
    (``LogicLinkkf.current_data``).

    Returns ``current_data`` on success when called without args (so the UI
    can refresh), otherwise a dict with ``ret``/``code`` or ``ret``/``log``.
    """
    ret = {}

    logger.debug(f"args: {args}")
    try:

        if len(args) == 0:
            # No explicit code: use the series currently shown in the UI.
            code = str(LogicLinkkf.current_data["code"])
        else:
            code = str(args[0])

        print(code)

        whitelist_program = P.ModelSetting.get("linkkf_auto_code_list")
        # whitelist_programs = [
        #     str(x.strip().replace(" ", ""))
        #     for x in whitelist_program.replace("\n", "|").split("|")
        # ]
        # The stored list is "|"- or newline-separated.
        whitelist_programs = [
            str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")
        ]

        if code not in whitelist_programs:
            whitelist_programs.append(code)
            whitelist_programs = filter(
                lambda x: x != "", whitelist_programs
            )  # remove blank code
            whitelist_program = "|".join(whitelist_programs)
            # Row-level lock so concurrent additions don't lose updates.
            entity = (
                db.session.query(P.ModelSetting)
                .filter_by(key="linkkf_auto_code_list")
                .with_for_update()
                .first()
            )
            entity.value = whitelist_program
            db.session.commit()
            ret["ret"] = True
            ret["code"] = code
            if len(args) == 0:
                return LogicLinkkf.current_data
            else:
                return ret
        else:
            ret["ret"] = False
            ret["log"] = "이미 추가되어 있습니다."
    except Exception as e:
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())
        ret["ret"] = False
        ret["log"] = str(e)
    return ret
|
||||
|
||||
def setting_save_after(self):
    """After settings are saved, apply a changed ffmpeg process limit to the queue."""
    configured = P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
    if self.queue.get_max_ffmpeg_count() == configured:
        return
    self.queue.set_max_ffmpeg_count(configured)
||||
|
||||
@staticmethod
def get_video_url_from_url(url, url2):
    """Resolve a playable stream URL from a player/iframe URL.

    The host embedded in *url2* selects the scraping strategy (ani1, kfani,
    kftv, k40chan, linkkf, kakao, mopipi, and several "#..." marker forms).

    :param url: episode page URL (logging, and referer for the mopipi host).
    :param url2: player/iframe URL to resolve.
    :return: ``[video_url, referer_url, vtt_url]`` — elements may be ``None``
             when resolution fails; exceptions are logged, never raised.
    """
    video_url = None
    referer_url = None
    vtt_url = None
    LogicLinkkf.referer = url2

    try:
        if "ani1" in url2:
            # ani1 host: take the first entry of the server list, then scrape
            # the m3u8 / <source src> URL from that server page.
            logger.debug("ani1 routine=========================")
            LogicLinkkf.referer = "https://linkkf.app"
            ani1_html = LogicLinkkf.get_html(url2)

            tree = html.fromstring(ani1_html)
            option_url = tree.xpath("//select[@id='server-list']/option[1]/@value")

            data = LogicLinkkf.get_html(option_url[0])
            regex2 = r'"([^\"]*m3u8)"|<source[^>]+src=\"([^"]+)'

            temp_url = re.findall(regex2, data)[0]
            video_url = ""
            ref = "https://ani1.app"  # NOTE(review): currently unused
            for i in temp_url:
                # Fixed: re.findall yields "" (not None) for the unmatched
                # alternation group, so the old `if i is None` check let the
                # empty group overwrite the real capture.
                if not i:
                    continue
                video_url = i

            match = re.compile(
                r"<track.+src=\"(?P<vtt_url>.*?.vtt)\"", re.MULTILINE
            ).search(data)

            vtt_url = match.group("vtt_url")

            referer_url = "https://kfani.me/"

        elif "kfani" in url2:
            # kfani host: scrape m3u8 / <source src> straight off the page.
            logger.debug("kfani routine=================================")
            LogicLinkkf.referer = url2
            data = LogicLinkkf.get_html(url2)
            regex2 = r'"([^\"]*m3u8)"|<source[^>]+src=\"([^"]+)'

            temp_url = re.findall(regex2, data)[0]
            video_url = ""
            ref = "https://kfani.me"  # NOTE(review): currently unused
            for i in temp_url:
                if not i:
                    continue
                video_url = i

            # Subtitles: try the <track src="..."> form first, then the
            # javascript `url: '...vtt'` form.  (contributed by @k45734)
            vtt_url = None
            try:
                _match1 = re.compile(
                    r"<track.+src=\"(?P<vtt_url>.*?.vtt)", re.MULTILINE
                ).search(data)
                vtt_url = _match1.group("vtt_url")
            except AttributeError:  # _match1 is None — no <track> tag found
                _match2 = re.compile(
                    r"url: \'(?P<vtt_url>.*?.vtt)", re.MULTILINE
                ).search(data)
                vtt_url = _match2.group("vtt_url")

            logger.info("vtt_url: %s", vtt_url)

            referer_url = url2

        elif "kftv" in url2:
            # kftv host: derive the md5 id from the URL, then fetch
            # https://yt.kftv.live/getLinkStreamMd5/<md5> (JSON stream list).
            if "=" in url2:
                md5 = urlparse.urlparse(url2).query.split("=")[1]
            elif "embedplay" in url2:
                md5 = url2.split("/")[-1]
            # NOTE(review): md5 is unbound if neither branch matches; the
            # resulting NameError is swallowed by the outer handler.
            url3 = "https://yt.kftv.live/getLinkStreamMd5/" + md5
            data3 = LogicLinkkf.get_html(url3)
            data3dict = json.loads(data3)
            video_url = data3dict[0]["file"]

        elif "k40chan" in url2:
            # k40chan host: same m3u8/<source> scrape as kfani. (by @k45734)
            logger.debug("k40chan routine=================================")
            LogicLinkkf.referer = url2
            data = LogicLinkkf.get_html(url2)

            regex2 = r'"([^\"]*m3u8)"|<source[^>]+src=\"([^"]+)'

            temp_url = re.findall(regex2, data)[0]
            video_url = ""
            for i in temp_url:
                if not i:
                    continue
                video_url = i

            match = re.compile(r"<track.+src\=\"(?P<vtt_url>.*?.vtt)").search(data)
            vtt_url = match.group("vtt_url")

            referer_url = url2

        elif "linkkf" in url2:
            # linkkf host: collect the candidate URL list (cat1 = [...]),
            # pick one at random and resolve it (possibly recursively).
            logger.debug("linkkf routine")  # fixed: was logger.deubg(...)
            referer_url = url2
            data2 = LogicLinkkf.get_html(url2)
            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
            if "kftv" in url3:
                return LogicLinkkf.get_video_url_from_url(url2, url3)
            elif url3.startswith("/"):
                url3 = urlparse.urljoin(url2, url3)
                print("url3 = ", url3)
                LogicLinkkf.referer = url2
                data3 = LogicLinkkf.get_html(url3)
                regex2 = r'"([^\"]*mp4|m3u8)"'
                video_url = re.findall(regex2, data3)[0]
                referer_url = url3

            else:
                logger.error("새로운 유형의 url 발생! %s %s %s" % (url, url2, url3))
        elif "kakao" in url2:
            # kakao videos are resolved through an external converter API.
            payload = {"inputUrl": url2}
            kakao_url = (
                "http://webtool.cusis.net/wp-pages/download-kakaotv-video/video.php"
            )
            data2 = requests.post(
                kakao_url,
                json=payload,
                headers={
                    "referer": "http://webtool.cusis.net/download-kakaotv-video/"
                },
            ).content
            time.sleep(3)  # throttle: rapid-fire requests get the IP banned
            url3 = json.loads(data2)
            video_url = url3
        elif "#V" in url2:  # "#V" marker pattern
            print("#v routine")

            data2 = LogicLinkkf.get_html(url2)

            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
            if "kftv" in url3:
                return LogicLinkkf.get_video_url_from_url(url2, url3)
            elif url3.startswith("/"):
                url3 = urlparse.urljoin(url2, url3)
                LogicLinkkf.referer = url2
                data3 = LogicLinkkf.get_html(url3)

                regex2 = r'"([^\"]*mp4)"'
                video_url = re.findall(regex2, data3)[0]
            else:
                logger.error("새로운 유형의 url 발생! %s %s %s" % (url, url2, url3))

        elif "#M2" in url2:
            LogicLinkkf.referer = url2
            data2 = LogicLinkkf.get_html(url2)

            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
            if "kftv" in url3:
                return LogicLinkkf.get_video_url_from_url(url2, url3)
            elif url3.startswith("/"):
                url3 = urlparse.urljoin(url2, url3)
                LogicLinkkf.referer = url2
                data3 = LogicLinkkf.get_html(url3)
                regex2 = r'"([^\"]*mp4)"'
                video_url = re.findall(regex2, data3)[0]
            else:
                logger.error("새로운 유형의 url 발생! %s %s %s" % (url, url2, url3))
        elif "😀#i" in url2:
            LogicLinkkf.referer = url2
            data2 = LogicLinkkf.get_html(url2)

            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
            # NOTE(review): url3 is selected but never resolved here, so
            # video_url stays None for this branch — confirm intent.

        elif "#k" in url2:
            data2 = LogicLinkkf.get_html(url2)

            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)

        elif "#k2" in url2:
            # NOTE(review): unreachable — any url containing "#k2" also
            # contains "#k" and is consumed by the branch above.
            data2 = LogicLinkkf.get_html(url2)

            regex = r"cat1 = [^\[]*([^\]]*)"
            cat = re.findall(regex, data2)[0]
            regex = r"\"([^\"]*)\""
            url3s = re.findall(regex, cat)
            url3 = random.choice(url3s)
        elif "mopipi" in url2:
            LogicLinkkf.referer = url
            data2 = LogicLinkkf.get_html(url2)
            match = re.compile(r"src\=\"(?P<video_url>http.*?\.mp4)").search(data2)
            video_url = match.group("video_url")

            match = re.compile(r"src\=\"(?P<vtt_url>http.*?.vtt)").search(data2)
            # Fixed: logged the non-existent "video_url" group of the vtt
            # match, which raised IndexError and aborted this branch.
            logger.info("match group: %s", match.group("vtt_url"))
            vtt_url = match.group("vtt_url")

        else:
            logger.error("새로운 유형의 url 발생! %s %s" % (url, url2))
    except Exception as e:
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())

    return [video_url, referer_url, vtt_url]
|
||||
|
||||
@staticmethod
def get_html_episode_content(url: str) -> str:
    """Fetch an episode page and return the embedded s2.ani1c12 player HTML.

    The page's ``var player_data=...`` script is located, its ``url`` field
    extracted, and the corresponding player iframe HTML is fetched.
    """
    if url.startswith("http"):
        # NOTE(review): this fetch is overwritten below — confirm whether the
        # remainder of the function was meant to live in the else branch.
        html_data = LogicLinkkf.get_html(url)
    else:
        # Relative path from the site — prepend the base host.
        url = f"https://linkkf.app{url}"

    logger.info("get_video_url(): url: %s" % url)
    data = LogicLinkkf.get_html(url)

    tree = html.fromstring(data)

    # NOTE(review): duplicated parse — this second call is redundant.
    tree = html.fromstring(data)

    # Locate the inline script that assigns player_data.
    pattern = re.compile("var player_data=(.*)")

    js_scripts = tree.xpath("//script")

    iframe_info = None
    index = 0

    for js_script in js_scripts:

        # print(f"{index}.. {js_script.text_content()}")
        if pattern.match(js_script.text_content()):
            # logger.debug("match::::")
            match_data = pattern.match(js_script.text_content())
            # The captured object literal uses an unquoted `path:` key;
            # patch it so json.loads accepts it.
            iframe_info = json.loads(
                match_data.groups()[0].replace("path:", '"path":')
            )
            # logger.debug(f"iframe_info:: {iframe_info}")

        index += 1

    ##################################################
    # iframe url:: https://s2.ani1c12.top/player/index.php?data='+player_data.url+'
    ####################################################

    # NOTE(review): iframe_info stays None when no script matched — the
    # subscript below would raise TypeError in that case.
    url = f'https://s2.ani1c12.top/player/index.php?data={iframe_info["url"]}'
    html_data = LogicLinkkf.get_html(url)

    return html_data
|
||||
|
||||
def get_anime_info(self, cate, page):
|
||||
try:
|
||||
if cate == "ing":
|
||||
@@ -580,7 +969,211 @@ class LogicLinkkf(LogicModuleBase):
|
||||
logger.error(f"Exception: {str(e)}")
|
||||
logger.error(traceback.format_exc())
|
||||
|
||||
pass
|
||||
def add(self, episode_info):
    """Enqueue one episode for download.

    Returns a status string: "queue_exist" (already in the in-memory queue),
    "enqueue_db_append" (new DB row created and queued), "enqueue_db_exist"
    (DB row exists but is not completed; re-queued), or "db_completed".
    """
    print("episode_info")
    logger.debug(episode_info)
    if self.is_exist(episode_info):
        return "queue_exist"
    else:

        db_entity = ModelLinkkfItem.get_by_linkkf_id(episode_info["_id"])

        logger.debug("db_entity:::> %s", db_entity)
        # logger.debug("db_entity.status ::: %s", db_entity.status)
        if db_entity is None:
            # First time we see this episode: persist it, then queue it.
            entity = LinkkfQueueEntity(P, self, episode_info)

            logger.debug("entity:::> %s", entity.as_dict())
            ModelLinkkfItem.append(entity.as_dict())
            # # logger.debug("entity:: type >> %s", type(entity))
            #

            self.queue.add_queue(entity)
            # self.download_queue.add_queue(entity)

            # P.logger.debug(F.config['path_data'])
            # P.logger.debug(self.headers)

            # filename = os.path.basename(entity.filepath)
            # ffmpeg = SupportFfmpeg(entity.url, entity.filename, callback_function=self.callback_function,
            #                        max_pf_count=0,
            #                        save_path=entity.savepath, timeout_minute=60, headers=self.headers)
            # ret = {'ret': 'success'}
            # ret['json'] = ffmpeg.start()
            return "enqueue_db_append"
        elif db_entity.status != "completed":
            # Known but unfinished: queue again without a new DB row.
            entity = LinkkfQueueEntity(P, self, episode_info)

            self.queue.add_queue(entity)
            return "enqueue_db_exist"
        else:
            return "db_completed"
|
||||
|
||||
def is_exist(self, info):
    """Return True when an entity with the same linkkf id is already queued."""
    target = info["_id"]
    return any(entity.info["_id"] == target for entity in self.queue.entity_list)
|
||||
|
||||
# @staticmethod
|
||||
def plugin_load(self):
    """Plugin startup hook: build the ffmpeg queue and start it."""
    try:
        logger.debug("%s plugin_load", P.package_name)
        # Worker count comes from the saved setting.
        self.queue = FfmpegQueue(
            P, P.ModelSetting.get_int("linkkf_max_ffmpeg_process_count")
        )
        self.current_data = None
        self.queue.queue_start()

    except Exception as e:
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())
|
||||
|
||||
@staticmethod
def plugin_unload():
    """Plugin shutdown hook: drop the recurring scheduler job."""
    try:
        logger.debug("%s plugin_unload", P.package_name)
        scheduler.remove_job("%s_recent" % P.package_name)
    except Exception as e:
        # Job may not exist (auto-start disabled) — log and continue.
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())
|
||||
|
||||
@staticmethod
def reset_db() -> bool:
    """Delete every ModelLinkkfItem row and commit; always returns True."""
    db.session.query(ModelLinkkfItem).delete()
    db.session.commit()
    return True
|
||||
|
||||
|
||||
class LinkkfQueueEntity(FfmpegQueueEntity):
    """Queue entry describing one linkkf episode download."""

    def __init__(self, P, module_logic, info):
        super(LinkkfQueueEntity, self).__init__(P, module_logic, info)
        self._vi = None
        self.url = None            # resolved stream URL
        self.epi_queue = None      # episode number within the queue
        self.filepath = None
        self.savepath = None
        self.quality = None
        self.filename = None
        self.vtt = None            # subtitle (.vtt) URL
        self.season = 1
        self.content_title = None
        self.srt_url = None
        self.headers = None
        # TODO(review): was marked "temporarily commented out" in Korean;
        # it currently runs unconditionally.
        self.make_episode_info()

    def refresh_status(self):
        """Push this entity's current state to the web UI via socketio."""
        self.module_logic.socketio_callback("status", self.as_dict())

    def info_dict(self, tmp):
        """Merge this entity's scrape info and metadata into *tmp* and return it."""
        # logger.debug('self.info::> %s', self.info)
        for key, value in self.info.items():
            tmp[key] = value
        tmp["vtt"] = self.vtt
        tmp["season"] = self.season
        tmp["content_title"] = self.content_title
        tmp["linkkf_info"] = self.info
        tmp["epi_queue"] = self.epi_queue
        return tmp

    def download_completed(self):
        """Mark the matching DB row completed (note: column name is 'complated_time')."""
        db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
        if db_entity is not None:
            db_entity.status = "completed"
            db_entity.complated_time = datetime.now()
            db_entity.save()

    def donwload_completed(self):
        """Misspelled alias of download_completed(), kept because external
        callers may still invoke the typo'd name."""
        db_entity = ModelLinkkfItem.get_by_linkkf_id(self.info["_id"])
        if db_entity is not None:
            db_entity.status = "completed"
            db_entity.complated_time = datetime.now()
            db_entity.save()

    # Get episode info from site
    def make_episode_info(self):
        """Scrape the episode page for candidate player URLs and resolve one.

        Returns the first successful result of get_video_url_from_url(), or
        None when nothing resolved; all errors are logged and swallowed.
        """

        url2s = []
        url = None
        logger.debug(self.info)
        logger.debug(self.info["url"])

        try:
            data = LogicLinkkf.get_html_episode_content(self.info["url"])
            tree = html.fromstring(data)

            xpath_select_query = '//*[@id="body"]/div/span/center/select/option'

            if len(tree.xpath(xpath_select_query)) > 0:
                # Old-style page: read the server switcher, skipping hosts
                # known not to work. (by k45734)
                print("ok")
                xpath_select_query = '//select[@class="switcher"]/option'
                for tag in tree.xpath(xpath_select_query):
                    url2s2 = tag.attrib["value"]
                    if "k40chan" in url2s2:
                        pass
                    elif "ani1c12" in url2s2:
                        pass
                    else:
                        url2s.append(url2s2)
            else:
                print(":: else ::")

                # New-style page: pull player_data.url out of the inline
                # script and fetch the s2.ani1c12 player's server list.
                tt = re.search(r"var player_data=(.*?)<", data, re.S)
                json_string = tt.group(1)
                tt2 = re.search(r'"url":"(.*?)"', json_string, re.S)
                json_string2 = tt2.group(1)
                ttt = "https://s2.ani1c12.top/player/index.php?data=" + json_string2
                response = LogicLinkkf.get_html(ttt)
                tree = html.fromstring(response)
                xpath_select_query = '//select[@id="server-list"]/option'
                for tag in tree.xpath(xpath_select_query):
                    url2s2 = tag.attrib["value"]
                    # if 'k40chan' in url2s2:
                    #     pass
                    # elif 'k39aha' in url2s2:
                    if "ds" in url2s2:
                        pass
                    else:
                        url2s.append(url2s2)

            logger.info("dx: urls2:: %s", url2s)

            video_url = None
            referer_url = None  # dx

            for url2 in url2s:
                try:
                    if video_url is not None:
                        continue
                    ret = LogicLinkkf.get_video_url_from_url(url, url2)
                    logger.debug(f"ret::::> {ret}")

                    # NOTE(review): ret is a [video, referer, vtt] list and is
                    # never None, so the first candidate always wins — confirm
                    # whether a per-element check was intended.
                    if ret is not None:
                        video_url = ret
                        referer_url = url2
                except Exception as e:
                    logger.error("Exception:%s", e)
                    logger.error(traceback.format_exc())

            # Fixed: the original had a duplicated, unreachable copy of the
            # logging/reset statements after this return; removed.
            return video_url

        except Exception as e:
            logger.error(f"Exception: {str(e)}")
            logger.error(traceback.format_exc())
|
||||
|
||||
|
||||
class ModelLinkkfItem(db.Model):
|
||||
@@ -596,7 +1189,7 @@ class ModelLinkkfItem(db.Model):
|
||||
episode_no = db.Column(db.Integer)
|
||||
title = db.Column(db.String)
|
||||
episode_title = db.Column(db.String)
|
||||
linkkf_va = db.Column(db.String)
|
||||
# linkkf_va = db.Column(db.String)
|
||||
linkkf_vi = db.Column(db.String)
|
||||
linkkf_id = db.Column(db.String)
|
||||
quality = db.Column(db.String)
|
||||
@@ -609,8 +1202,8 @@ class ModelLinkkfItem(db.Model):
|
||||
status = db.Column(db.String)
|
||||
linkkf_info = db.Column(db.JSON)
|
||||
|
||||
def __int__(self):
|
||||
self.created_time == datetime.now()
|
||||
def __init__(self):
|
||||
self.created_time = datetime.now()
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self.as_dict())
|
||||
@@ -632,3 +1225,83 @@ class ModelLinkkfItem(db.Model):
|
||||
@classmethod
def get_by_id(cls, idx):
    """Return the row with primary key *idx*, or None."""
    return db.session.query(cls).filter_by(id=idx).first()
|
||||
|
||||
@classmethod
def get_by_linkkf_id(cls, linkkf_id):
    """Return the row matching the site-side episode id, or None."""
    return db.session.query(cls).filter_by(linkkf_id=linkkf_id).first()
|
||||
|
||||
@classmethod
def delete_by_id(cls, idx):
    """Delete the row with primary key *idx* and commit; always returns True."""
    db.session.query(cls).filter_by(id=idx).delete()
    db.session.commit()
    return True
|
||||
|
||||
@classmethod
def web_list(cls, req):
    """Build the paged listing consumed by the web UI.

    Reads page/search_word/option/order from the request form and returns
    ``{"list": [row dicts], "paging": {...}}``.
    """
    ret = {}
    page = int(req.form["page"]) if "page" in req.form else 1
    page_size = 30
    job_id = ""  # NOTE(review): unused — candidate for removal
    search = req.form["search_word"] if "search_word" in req.form else ""
    option = req.form["option"] if "option" in req.form else "all"
    order = req.form["order"] if "order" in req.form else "desc"
    query = cls.make_query(search=search, order=order, option=option)
    count = query.count()
    # Apply pagination after counting the full result set.
    query = query.limit(page_size).offset((page - 1) * page_size)
    lists = query.all()
    ret["list"] = [item.as_dict() for item in lists]
    ret["paging"] = Util.get_paging_info(count, page, page_size)
    return ret
|
||||
|
||||
@classmethod
def make_query(cls, search="", order="desc", option="all"):
    """Build the filtered/ordered query behind web_list().

    "|"-separated search terms are OR-ed, ","-separated terms are AND-ed,
    otherwise a single LIKE filter on filename is applied.
    """
    query = db.session.query(cls)
    if search is not None and search != "":
        if search.find("|") != -1:
            # OR semantics across "|"-separated terms.
            tmp = search.split("|")
            conditions = []
            for tt in tmp:
                if tt != "":
                    conditions.append(cls.filename.like("%" + tt.strip() + "%"))
            query = query.filter(or_(*conditions))
        elif search.find(",") != -1:
            # AND semantics across ","-separated terms.
            tmp = search.split(",")
            for tt in tmp:
                if tt != "":
                    query = query.filter(cls.filename.like("%" + tt.strip() + "%"))
        else:
            query = query.filter(cls.filename.like("%" + search + "%"))
    if option == "completed":
        query = query.filter(cls.status == "completed")

    query = (
        query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id)
    )
    return query
|
||||
|
||||
@classmethod
def get_list_uncompleted(cls):
    """Return every row whose status is not "completed"."""
    return db.session.query(cls).filter(cls.status != "completed").all()
|
||||
|
||||
@classmethod
def append(cls, q):
    """Create and persist a new item row from a queue-entity dict *q*.

    *q* is the dict produced by LinkkfQueueEntity.as_dict()/info_dict().
    """
    logger.debug(q)
    item = ModelLinkkfItem()
    item.content_code = q["program_code"]
    item.season = q["season"]
    item.episode_no = q["epi_queue"]
    item.title = q["content_title"]
    item.episode_title = q["title"]
    # item.linkkf_va = q["va"]
    item.linkkf_code = q["code"]
    item.linkkf_id = q["_id"]
    item.quality = q["quality"]
    item.filepath = q["filepath"]
    item.filename = q["filename"]
    item.savepath = q["savepath"]
    item.video_url = q["url"]
    item.vtt_url = q["vtt"]
    # NOTE(review): raises IndexError when q["image"] is empty — confirm
    # callers always supply at least one image.
    item.thumbnail = q["image"][0]
    item.status = "wait"
    item.linkkf_info = q["linkkf_info"]
    item.save()
|
||||
|
||||
@@ -74,8 +74,8 @@ class LogicOhli24(LogicModuleBase):
|
||||
"ohli24_auto_code_list": "all",
|
||||
"ohli24_current_code": "",
|
||||
"ohli24_uncompleted_auto_enqueue": "False",
|
||||
"ohli24_image_url_prefix_series": "https://www.jetcloud.cc/series/",
|
||||
"ohli24_image_url_prefix_episode": "https://www.jetcloud-list.cc/thumbnail/",
|
||||
"ohli24_image_url_prefix_series": "",
|
||||
"ohli24_image_url_prefix_episode": "",
|
||||
"ohli24_discord_notify": "True",
|
||||
}
|
||||
current_headers = None
|
||||
@@ -387,6 +387,7 @@ class LogicOhli24(LogicModuleBase):
|
||||
}
|
||||
)
|
||||
elif sub == "add_queue":
|
||||
logger.debug(f"linkkf add_queue routine ===============")
|
||||
ret = {}
|
||||
info = json.loads(request.form["data"])
|
||||
logger.info(f"info:: {info}")
|
||||
@@ -443,7 +444,7 @@ class LogicOhli24(LogicModuleBase):
|
||||
logger.error("Exception:%s", e)
|
||||
logger.error(traceback.format_exc())
|
||||
except Exception as e:
|
||||
P.logger.error("Exception:%s", e)
|
||||
P.logger.error(f"Exception: {str(e)}")
|
||||
P.logger.error(traceback.format_exc())
|
||||
|
||||
def process_api(self, sub, req):
|
||||
|
||||
@@ -1,174 +1,131 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
|
||||
<table id="result_table" class="table table-sm tableRowHover">
|
||||
<thead class="thead-dark">
|
||||
<tr>
|
||||
<th style="width:5%; text-align:center;">IDX</th>
|
||||
<th style="width:8%; text-align:center;">Plugin</th>
|
||||
<th style="width:10%; text-align:center;">시작시간</th>
|
||||
<th style="width:20%; text-align:center;">파일명</th>
|
||||
<th style="width:8%; text-align:center;">상태</th>
|
||||
<th style="width:15%; text-align:center;">진행률</th>
|
||||
<th style="width:5%; text-align:center;">길이</th>
|
||||
<th style="width:5%; text-align:center;">PF</th>
|
||||
<th style="width:8%; text-align:center;">배속</th>
|
||||
<th style="width:8%; text-align:center;">진행시간</th>
|
||||
<th style="width:8%; text-align:center;">Action</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="list"></tbody>
|
||||
</table>
|
||||
<div>
|
||||
{{ macros.m_button_group([['reset_btn', '초기화'], ['delete_completed_btn', '완료 목록 삭제'], ['go_ffmpeg_btn', 'Go FFMPEG']])}}
|
||||
{{ macros.m_row_start('0') }}
|
||||
{{ macros.m_row_end() }}
|
||||
{{ macros.m_hr_head_top() }}
|
||||
{{ macros.m_row_start('0') }}
|
||||
{{ macros.m_col(1, macros.m_strong('Idx')) }}
|
||||
{{ macros.m_col(2, macros.m_strong('CreatedTime')) }}
|
||||
{{ macros.m_col(4, macros.m_strong('Filename')) }}
|
||||
{{ macros.m_col(3, macros.m_strong('Status')) }}
|
||||
{{ macros.m_col(2, macros.m_strong('Action')) }}
|
||||
{{ macros.m_row_end() }}
|
||||
{{ macros.m_hr_head_bottom() }}
|
||||
<div id="download_list_div"></div>
|
||||
</div> <!--전체-->
|
||||
|
||||
<script type="text/javascript">
|
||||
var package_name = "{{arg['package_name'] }}";
|
||||
var sub = "{{arg['sub'] }}";
|
||||
var current_data = null;
|
||||
socket = io.connect(window.location.protocol + "//" + document.domain + ":" + location.port + "/" + package_name + '/' + sub);
|
||||
|
||||
$(document).ready(function(){
|
||||
var socket = io.connect(window.location.href);
|
||||
});
|
||||
|
||||
socket.on('on_start', function(data){
|
||||
document.getElementById("log").innerHTML += data.data;
|
||||
document.getElementById("log").scrollTop = document.getElementById("log").scrollHeight;
|
||||
document.getElementById("log").style.visibility = 'visible';
|
||||
$('#loading').hide();
|
||||
});
|
||||
socket.on('start', function(data){
|
||||
on_start();
|
||||
});
|
||||
socket.on('list_refresh', function(data){
|
||||
on_start()
|
||||
});
|
||||
|
||||
socket.on('add', function(data){
|
||||
str = make_item(data);
|
||||
if (current_data == null || current_data.length == 0) {
|
||||
current_data = Array();
|
||||
$("#list").html(str);
|
||||
} else {
|
||||
$("#list").html($("#list").html() + str);
|
||||
}
|
||||
current_data.push(data);
|
||||
});
|
||||
|
||||
socket.on('status_change', function(data) {
|
||||
button_html(data);
|
||||
});
|
||||
|
||||
socket.on('status', function(data){
|
||||
status_html(data);
|
||||
});
|
||||
|
||||
socket.on('last', function(data){
|
||||
status_html(data);
|
||||
button_html(data);
|
||||
});
|
||||
|
||||
globalSendCommand('list', null, null, null, function(data) {
|
||||
current_data = data;
|
||||
$("#list").html('');
|
||||
console.log(data)
|
||||
if (data.length == 0) {
|
||||
str = "<tr><td colspan='10'><h4>작업이 없습니다.</h4><td><tr>";
|
||||
} else {
|
||||
str = ''
|
||||
for(i in data) {
|
||||
str += make_item(data[i]);
|
||||
}
|
||||
}
|
||||
$("#list").html(str);
|
||||
});
|
||||
socket.on('status', function(data){
|
||||
console.log(data);
|
||||
on_status(data)
|
||||
});
|
||||
|
||||
|
||||
$("body").on('click', '#stop_btn', function(e){
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
globalSendCommand('stop', $(this).data('idx'), null, null, function(ret){
|
||||
refresh_item(ret.data);
|
||||
function on_start() {
|
||||
$.ajax({
|
||||
url: '/' + package_name + '/ajax/' + sub + '/entity_list',
|
||||
type: "POST",
|
||||
cache: false,
|
||||
data: {},
|
||||
dataType: "json",
|
||||
success: function (data) {
|
||||
make_download_list(data)
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
// Re-render both rows (summary + collapsible detail) for one entity.
function refresh_item(data) {
    var summaryRow = $('#tr1_' + data.idx);
    var detailRow = $('#collapse_' + data.idx);
    summaryRow.html(make_item1(data));
    detailRow.html(make_item2(data));
}
|
||||
|
||||
// Build the two <tr> elements for one queue entry — the clickable summary
// row and its collapsible detail row — and return them as one HTML string.
// Fix: declare `str` locally; the original leaked it as an implicit global
// (would throw in strict mode and could clobber other handlers' `str`).
function make_item(data) {
    var str = '<tr id="tr1_'+data.idx+'" style="cursor: pointer;" data-toggle="collapse" data-target="#collapse_'+ data.idx + '" aria-expanded="true" >';
    str += make_item1(data);
    str += '</tr>';
    str += '<tr class="collapse tableRowHoverOff" style="cursor: pointer;" id="collapse_' + data.idx + '">';
    str += make_item2(data);
    str += '</tr>';
    return str;
}
|
||||
|
||||
// Build the <td> cells of the summary row for one queue entry.
// Fix: declare `str` locally (the original leaked an implicit global).
function make_item1(data) {
    var str = '';
    str += '<td scope="col" style="width:5%; text-align:center;">'+ data.idx + '</td>';
    str += '<td scope="col" style="width:8%; text-align:center;">'+ data.callback_id + '</td>';
    str += '<td scope="col" style="width:10%; text-align:center;">'+ data.start_time + '</td>';
    str += '<td scope="col" style="width:20%; text-align:center;">'+ data.filename + '</td>';
    str += '<td id="status_'+data.idx+'" scope="col" style="width:8%; text-align:center;">'+ data.status_kor + '</td>';
    // The progress bar stays hidden until a non-zero percent arrives.
    var visi = 'hidden';
    if (parseInt(data.percent) > 0) {
        visi = 'visible';
    }
    str += '<td scope="col" style="width:20%; text-align:center;"><div class="progress"><div id="progress_'+data.idx+'" class="progress-bar" style="visibility: '+visi+'; width:'+data.percent+'%">'+data.percent +'%</div></div></td>';
    str += '<td scope="col" style="width:5%; text-align:center;">'+ data.duration_str + '</td>';
    str += '<td id="current_pf_count_'+data.idx+'" scope="col" style="width:5%; text-align:center;">'+ data.current_pf_count + '</td>';
    str += '<td id="current_speed_'+data.idx+'" scope="col" style="width:8%; text-align:center;">'+ data.current_speed + '</td>';
    str += '<td id="download_time_'+data.idx+'" scope="col" style="width:8%; text-align:center;">'+ data.download_time + '</td>';
    str += '<td id="button_'+data.idx+'" scope="col" style="width:8%; text-align:center;">';
    // Only an in-flight download can be stopped.
    if (data.status_str == 'DOWNLOADING') {
        str += j_button('stop_btn', '중지', {'idx':data.idx}, 'danger', false, false);
    }
    str += '</td>';
    return str;
}
|
||||
|
||||
// Build the single wide cell of the collapsible detail row; the inner
// div is re-targeted later by id when status updates arrive.
// Fix: declare `str` locally (the original leaked an implicit global).
function make_item2(data) {
    var str = '';
    str += '<td colspan="11">';
    str += '<div id="detail_'+data.idx+'">';
    str += get_detail(data);
    str += '</div>';
    str += '</td>';
    return str;
}
|
||||
|
||||
|
||||
// Assemble the key/value rows shown in the detail panel; size and speed
// are only meaningful once the download has completed.
function get_detail(data) {
    var rows = [
        j_row_info('URL', data.url),
        j_row_info('임시경로', data.temp_fullpath),
        j_row_info('저장경로', data.save_fullpath),
        j_row_info('진행률(current/total)', data.percent+ '% (' + data.current_duration + ' / ' + data.duration + ')'),
        j_row_info('현재 비트레이트', data.current_bitrate),
        j_row_info('종료시간', data.end_time),
        j_row_info('허용 Packet Fail 수', data.max_pf_count),
        j_row_info('파일 Exist', data.exist)
    ];
    if (data.status_str == 'COMPLETED') {
        rows.push(j_row_info('파일 크기', data.filesize_str));
        rows.push(j_row_info('다운 속도', data.download_speed));
    }
    return rows.join('');
}
|
||||
|
||||
function button_html(data) {
|
||||
function on_status(data) {
|
||||
//console.log(data)
|
||||
str = '';
|
||||
if (data.status_str == 'DOWNLOADING') {
|
||||
str = j_button('stop_btn', '중지', {'idx':data.idx}, 'danger', false, false);
|
||||
tmp = document.getElementById("progress_"+data.entity_id)
|
||||
if (tmp != null) {
|
||||
document.getElementById("progress_"+data.entity_id).style.width = data.ffmpeg_percent+ '%';
|
||||
document.getElementById("progress_"+data.entity_id+"_label").innerHTML = data.ffmpeg_status_kor + "(" + data.ffmpeg_percent + "%)" + ' ' + ((data.ffmpeg_arg != null)?data.ffmpeg_arg.data.current_speed:'')
|
||||
}
|
||||
$("#button_" + data.idx).html(str);
|
||||
}
|
||||
|
||||
function status_html(data) {
|
||||
var progress = document.getElementById("progress_" + data.idx);
|
||||
progress.style.width = data.percent+ '%';
|
||||
progress.innerHTML = data.percent+ '%';
|
||||
progress.style.visibility = 'visible';
|
||||
document.getElementById("status_" + data.idx).innerHTML = data.status_kor;
|
||||
document.getElementById("current_pf_count_" + data.idx).innerHTML = data.current_pf_count;
|
||||
document.getElementById("current_speed_" + data.idx).innerHTML = data.current_speed;
|
||||
document.getElementById("download_time_" + data.idx).innerHTML = data.download_time;
|
||||
document.getElementById("detail_" + data.idx).innerHTML = get_detail(data);
|
||||
// Render the full download queue into #download_list_div using the
// framework's m_* layout helpers.
// Fix: declare `str`, `label`, `tmp` and the loop index locally — the
// original leaked all of them as implicit globals.
function make_download_list(data) {
    var str = '';
    for (var i in data) {
        str += m_row_start();
        str += m_col(1, data[i].entity_id);
        str += m_col(2, data[i].created_time);
        str += m_col(4, (data[i].filename != null) ? data[i].filename : '');

        // Append the percentage to the status label only when progress exists.
        var label = data[i].ffmpeg_status_kor;
        if (data[i].ffmpeg_percent != 0) {
            label += '(' + data[i].ffmpeg_percent + '%)';
        }
        var tmp = m_progress('progress_'+data[i].entity_id, data[i].ffmpeg_percent, label);
        str += m_col(3, tmp);
        tmp = m_button('program_cancel_btn', '취소', [{'key':'id', 'value':data[i].entity_id}]);
        tmp = m_button_group(tmp);
        str += m_col(2, tmp);
        str += m_row_end();
        // Horizontal rule between entries, but not after the last one.
        if (i != data.length -1) str += m_hr(0);
    }
    document.getElementById("download_list_div").innerHTML = str;
}
|
||||
|
||||
</script>
|
||||
{% endblock %}
|
||||
// Per-row cancel button: ask the server to cancel one queue entry.
// Fix: declare handler locals with `var` (originals leaked implicit
// globals `entity_id` / `send_data`); drop the unused `$(this).data('id')`
// reads in the reset/delete handlers, which always send entity_id -1.
$("body").on('click', '#program_cancel_btn', function(e){
    e.preventDefault();
    var entity_id = $(this).data('id');
    var send_data = {'command':'cancel', 'entity_id':entity_id};
    queue_command(send_data);
});

// Reset the whole queue (entity_id -1 means "all entries").
$("body").on('click', '#reset_btn', function(e){
    e.preventDefault();
    queue_command({'command':'reset', 'entity_id':-1});
});

// Remove completed entries from the queue.
$("body").on('click', '#delete_completed_btn', function(e){
    e.preventDefault();
    queue_command({'command':'delete_completed', 'entity_id':-1});
});
|
||||
|
||||
// POST a queue command to the plugin's ajax endpoint; show a warning
// notification when the server asks for one, then refresh the list.
function queue_command(data) {
    var endpoint = '/' + package_name + '/ajax/' + sub + '/queue_command';
    $.ajax({
        url: endpoint,
        type: "POST",
        cache: false,
        data: data,
        dataType: "json",
        success: function (ret) {
            if (ret.ret == 'notify') {
                $.notify('<strong>'+ ret.log +'</strong>', {type: 'warning'});
            }
            on_start();
        }
    });
}
|
||||
|
||||
// Navigate to the FFMPEG plugin page.
$("body").on('click', '#go_ffmpeg_btn', function(e){
    e.preventDefault();
    $(location).attr('href', '/ffmpeg');
});
|
||||
|
||||
</script>
|
||||
{% endblock %}
|
||||
Reference in New Issue
Block a user