2022.11.05 (01. Partial fixes to the link anime request page and a partial implementation, saved as work in progress)

2022-11-06 15:54:46 +09:00
parent e976773824
commit b0fa6c4bda
21 changed files with 3526 additions and 1912 deletions

Binary file not shown.


@@ -144,6 +144,7 @@ class FfmpegQueue(object):
# check whether the file exists
print(entity.info)
filepath = entity.get_video_filepath()
P.logger.debug(f'filepath:: {filepath}')
if os.path.exists(filepath):
entity.ffmpeg_status_kor = '파일 있음'
entity.ffmpeg_percent = 100
@@ -162,7 +163,8 @@ class FfmpegQueue(object):
P.logger.debug(P)
# P.logger.debug(P.system_setting.get("port"))
- ffmpeg = SupportFfmpeg(video_url, os.path.basename(filepath), callback_function=self.callback_function,
+ ffmpeg = SupportFfmpeg(video_url, os.path.basename(str(filepath)),
+ callback_function=self.callback_function,
max_pf_count=0, save_path=ToolUtil.make_path(dirname), timeout_minute=60,
)
#
@@ -359,6 +361,7 @@ class FfmpegQueue(object):
def get_entity_list(self):
ret = []
P.logger.debug(self)
for x in self.entity_list:
tmp = x.as_dict()
ret.append(tmp)

lib/crawler.py (new file, +617 lines)

@@ -0,0 +1,617 @@
import asyncio
import os
import platform
import traceback
import cloudscraper
import requests
from loguru import logger
from anime_downloader.lib.util import yommi_timeit
class Crawler:
def __init__(self):
self.session = None
self.headers = {
# 'authority': 'anilife.live',
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8',
'accept-language': 'ko-KR,ko;q=0.8',
'cache-control': 'no-cache',
'cookie': 'SL_G_WPT_TO=ko; SL_GWPT_Show_Hide_tmp=1; SL_wptGlobTipTmp=1; DSR=WQYVukjkxKVYEbpgM0pgMs+awM/br6JyMtbfB4OGMC0XEA+UxUxR1RUgOi1mNMoQB16xIEuqk64iex+/ahi72A==; DCSS=FEC4550B310816E1CA91CBE4A0069C43E04F108; SPSI=faccf9a99dee9625af1c93607c2be678; SPSE=j3smljSGgZcayyKDFoQKk5/tnnUnFHa9FzCrL6GOkRwsET506JX0hAvzye3rEobnKfHiir8mAw8z7/KG11QQXw==; anilife_csrf=f30c66ba689880e9710a85b1945ad798; UTGv2=h4a5ce301324340f0b03d9e61e42bc6c0416; spcsrf=77a4e9c38c8e7392b7a36818071a5e3e; sp_lit=acrE8Wfvo4cd6GxQyGoytg==; PRLST=RT; adOtr=fZaBf9c9aed',
# 'pragma': 'no-cache',
'referer': 'https://anilife.live/g/l?id=afb8c5e4-1720-4f3d-a6b1-27e7473dc6fb',
# 'sec-fetch-dest': 'document',
# 'sec-fetch-mode': 'navigate',
# 'sec-fetch-site': 'same-origin',
# 'sec-fetch-user': '?1',
# 'sec-gpc': '1',
# 'upgrade-insecure-requests': '1',
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36',
}
self.origin_url = ''
self.episode_url = None
self.OS_PLATFORM = platform.system()
def get_html_requests(
self,
url: str, referer: str = None, stream: bool = False, timeout: int = 5
) -> str:
data = ""
try:
print("get_html_requests ==================")
# cj = browser_cookie3.chrome(domain_name="anilife.live")
referer = "https://anilife.live/"
if self.session is None:
self.session = requests.session()
# logger.debug('get_html :%s', url)
self.headers["Referer"] = "" if referer is None else referer
self.headers[
"Cookie"
] = "_ga=GA1.1.578607927.1660813724; __gads=ID=10abb8b98b6828ae-2281c943a9d500fd:T=1660813741:RT=1660813741:S=ALNI_MYU_iB2lBgSrEQUBwhKpNsToaqQ8A; sbtsck=javuwDzcOJqUyweM1OQeNGzHbjoHp7Cgw44XnPdM738c3E=; SPSI=e48379959d54a6a62cc7abdcafdb2761; SPSE=h5HfMGLJzLqzNafMD3YaOvHSC9xfh77CcWdKvexp/z5N5OsTkIiYSCudQhFffEfk/0pcOTVf0DpeV0RoNopzig==; anilife_csrf=b93b9f25a12a51cf185805ec4de7cf9d; UTGv2=h46b326af644f4ac5d0eb1502881136b3750; __gpi=UID=000008ba227e99e0:T=1660813741:RT=1660912282:S=ALNI_MaJHIVJIGpQ5nTE9lvypKQxJnn10A; DSR=SXPX8ELcRgh6N/9rNgjpQoNfaX2DRceeKYR0/ul7qTI9gApWQpZxr8jgymf/r0HsUT551vtOv2CMWpIn0Hd26A==; DCSS=89508000A76BBD939F6DDACE5BD9EB902D2212A; DGCC=Wdm; adOtr=7L4Xe58995d; spcsrf=6554fa003bf6a46dd9b7417acfacc20a; _ga_56VYJJ7FTM=GS1.1.1660912281.10.1.1660912576.0.0.0; PRLST=EO"
self.headers["Referer"] = referer
page_content = self.session.get(
url, headers=self.headers, timeout=timeout, allow_redirects=True
)
data = page_content.text
except Exception as e:
logger.error(f"Exception: {e}")
logger.error(traceback.format_exc())
return data
async def get_html_playwright(
self,
url: str,
headless: bool = False,
referer: str = None,
engine: str = "chrome",
stealth: bool = False,
):
try:
from playwright.async_api import async_playwright
# from playwright.sync_api import sync_playwright
import time
print("** playwright ==========================================")
cookie = None
browser_args = [
"--window-size=1300,570",
"--window-position=000,000",
"--disable-dev-shm-usage",
"--no-sandbox",
"--disable-web-security",
"--disable-features=site-per-process",
"--disable-setuid-sandbox",
"--disable-accelerated-2d-canvas",
"--no-first-run",
"--no-zygote",
# '--single-process',
"--disable-gpu",
"--use-gl=egl",
"--disable-blink-features=AutomationControlled",
"--disable-background-networking",
"--enable-features=NetworkService,NetworkServiceInProcess",
"--disable-background-timer-throttling",
"--disable-backgrounding-occluded-windows",
"--disable-breakpad",
"--disable-client-side-phishing-detection",
"--disable-component-extensions-with-background-pages",
"--disable-default-apps",
"--disable-extensions",
"--disable-features=Translate",
"--disable-hang-monitor",
"--disable-ipc-flooding-protection",
"--disable-popup-blocking",
"--disable-prompt-on-repost",
"--disable-renderer-backgrounding",
"--disable-sync",
"--force-color-profile=srgb",
"--metrics-recording-only",
"--enable-automation",
"--password-store=basic",
"--use-mock-keychain",
"--hide-scrollbars",
"--mute-audio",
]
browser_args = []
browser = None
# scraper = cloudscraper.create_scraper(
# browser={"browser": "chrome", "platform": "windows", "desktop": True},
# debug=False,
# # sess=LogicAniLife.session,
# delay=10,
# )
#
# cookie_value, user_agent = scraper.get_cookie_string(url)
#
# logger.debug(f"cookie_value:: {cookie_value}")
start = time.time()
ua = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/69.0.3497.100 Safari/537.36"
)
# from playwright_stealth import stealth_sync
# def set_cookie(req):
# nonlocal cookie
# if "cookie" in req.headers:
# cookie = req.headers["cookie"]
# headless = True
logger.info(engine)
async with async_playwright() as p:
browser = None
try:
if engine == "chrome":
browser = await p.chromium.launch(
channel="chrome", args=browser_args, headless=headless
)
print(engine)
# browser = await p.chromium.connect('http://192.168.0.2:14444')
elif engine == "webkit":
browser = await p.webkit.launch(
headless=headless,
args=browser_args,
)
else:
print('here')
browser = await p.firefox.launch(
headless=headless,
args=browser_args,
)
# context = browser.new_context(
# user_agent=ua,
# )
# LogicAniLife.headers[
# "Referer"
# ] = "https://anilife.live/detail/id/471"
# print(LogicAniLife.headers)
self.headers["Referer"] = self.episode_url
if referer is not None:
self.headers["Referer"] = referer
logger.debug(f"self.headers {self.headers}")
context = await browser.new_context(
extra_http_headers=self.headers
)
# await context.add_cookies(self.cookies)
# self.headers["Cookie"] = cookie_value
# context.set_extra_http_headers(self.headers)
print('here1')
page = await context.new_page()
# page.set_extra_http_headers(self.headers)
# if stealth:
# await stealth_async(page)
# page.on("request", set_cookie)
# stealth_sync(page)
print(self.headers["Referer"])
# page.on("request", set_cookie)
print(f'Referer:: {self.headers["Referer"]}')
# await page.set_extra_http_headers(self.headers)
await page.goto(
url, wait_until="load", referer=self.headers["Referer"]
)
# page.wait_for_timeout(10000)
await asyncio.sleep(1)
# await page.reload()
# time.sleep(10)
# cookies = context.cookies
# print(cookies)
print(f"page.url:: {page.url}")
self.origin_url = page.url
print(await page.content())
print(f"run at {time.time() - start} sec")
return await page.content()
except Exception as e:
logger.error(f"Exception: {e}")
logger.error(traceback.format_exc())
finally:
# browser may still be None if the launch above failed
if browser is not None:
await browser.close()
except Exception as e:
logger.error(f"Exception: {e}")
logger.error(traceback.format_exc())
finally:
# browser.close()
pass
def get_html_playwright_sync(
self,
url: str,
headless: bool = False,
referer: str = None,
engine: str = "chrome",
stealth: bool = False,
) -> str:
try:
from playwright.sync_api import sync_playwright
import time
print("playwright ==========================================")
cookie = None
browser_args = [
"--window-size=1300,570",
"--window-position=000,000",
"--disable-dev-shm-usage",
"--no-sandbox",
"--disable-web-security",
"--disable-features=site-per-process",
"--disable-setuid-sandbox",
"--disable-accelerated-2d-canvas",
"--no-first-run",
"--no-zygote",
# '--single-process',
"--disable-gpu",
"--use-gl=egl",
"--disable-blink-features=AutomationControlled",
"--disable-background-networking",
"--enable-features=NetworkService,NetworkServiceInProcess",
"--disable-background-timer-throttling",
"--disable-backgrounding-occluded-windows",
"--disable-breakpad",
"--disable-client-side-phishing-detection",
"--disable-component-extensions-with-background-pages",
"--disable-default-apps",
"--disable-extensions",
"--disable-features=Translate",
"--disable-hang-monitor",
"--disable-ipc-flooding-protection",
"--disable-popup-blocking",
"--disable-prompt-on-repost",
"--disable-renderer-backgrounding",
"--disable-sync",
"--force-color-profile=srgb",
"--metrics-recording-only",
"--enable-automation",
"--password-store=basic",
"--use-mock-keychain",
"--hide-scrollbars",
"--mute-audio",
]
# scraper = cloudscraper.create_scraper(
# browser={"browser": "chrome", "platform": "windows", "desktop": True},
# debug=False,
# # sess=LogicAniLife.session,
# delay=10,
# )
#
# cookie_value, user_agent = scraper.get_cookie_string(url)
#
# logger.debug(f"cookie_value:: {cookie_value}")
start = time.time()
ua = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/69.0.3497.100 Safari/537.36"
)
# from playwright_stealth import stealth_sync
def set_cookie(req):
nonlocal cookie
if "cookie" in req.headers:
cookie = req.headers["cookie"]
# headless = True
with sync_playwright() as p:
try:
if engine == "chrome":
# browser = await p.chromium.launch(
# channel="chrome", args=browser_args, headless=headless
# )
print(engine)
# browser = p.chromium.connect_over_cdp('http://yommi.duckdns.org:14444')
browser = p.chromium.launch(
channel="chrome", args=browser_args, headless=headless
)
elif engine == "webkit":
browser = p.webkit.launch(
headless=headless,
args=browser_args,
)
else:
browser = p.firefox.launch(
headless=headless,
args=browser_args,
)
# context = browser.new_context(
# user_agent=ua,
# )
self.headers[
"Referer"
] = "https://anilife.live/detail/id/471"
# print(self.headers)
self.headers["Referer"] = self.episode_url
if referer is not None:
self.headers["Referer"] = referer
logger.debug(f"self.headers::: {self.headers}")
context = browser.new_context(
extra_http_headers=self.headers
)
# await context.add_cookies(self.cookies)
# self.headers["Cookie"] = cookie_value
# context.set_extra_http_headers(self.headers)
page = context.new_page()
# page.set_extra_http_headers(self.headers)
if stealth:
# stealth_async(page)
pass
# page.on("request", set_cookie)
# stealth_sync(page)
print(self.headers["Referer"])
page.on("request", set_cookie)
print(f'Referer:: {self.headers["Referer"]}')
# await page.set_extra_http_headers(self.headers)
page.goto(
url, wait_until="load", referer=self.headers["Referer"]
)
# page.wait_for_timeout(10000)
time.sleep(1)
# await page.reload()
# cookies = context.cookies
# print(cookies)
print(f"page.url:: {page.url}")
self.origin_url = page.url
print(page.content().encode("utf8"))
print(f"run at {time.time() - start} sec")
return page.content()
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
finally:
browser.close()
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
@yommi_timeit
def get_html_selenium(self, url: str, referer: str, is_stealth: bool = False,
is_headless: bool = False) -> bytes:
from selenium.webdriver.common.by import By
from selenium import webdriver
from selenium_stealth import stealth
from webdriver_manager.chrome import ChromeDriverManager
import time
print("get_html_selenium() ==========================================")
options = webdriver.ChromeOptions()
# extra Chrome driver options (required when running on Linux)
options.add_argument("start-maximized")
if is_headless:
options.add_argument("--headless")
options.add_argument("--no-sandbox")
options.add_argument("window-size=1920x1080")
options.add_argument("disable-gpu")
# options.add_argument('--no-sandbox')
options.add_argument("--disable-dev-shm-usage")
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option("detach", True)
options.add_experimental_option("useAutomationExtension", False)
logger.debug(self.OS_PLATFORM)
driver_bin_path = os.path.join(
os.path.dirname(__file__), "bin", self.OS_PLATFORM
)
# Chrome driver path
driver_path = f"{driver_bin_path}/chromedriver"
if self.OS_PLATFORM == "Darwin":
# driver = webdriver.Chrome(executable_path=driver_path, chrome_options=options)
print("here:::::::::")
driver = webdriver.Chrome(
ChromeDriverManager().install(), chrome_options=options
)
elif self.OS_PLATFORM == "Linux":
driver = webdriver.Chrome(
ChromeDriverManager().install(), chrome_options=options
)
# driver = webdriver.Chrome(executable_path=driver_path, chrome_options=options)
# driver = webdriver.Remote(command_executor='http://192.168.0.2:14444', options=options)
else:
driver = webdriver.Chrome(
executable_path=driver_path, chrome_options=options
)
# is_stealth = True
if is_stealth:
stealth(
driver,
languages=["ko-KR", "ko"],
vendor="Google Inc.",
platform="Win32",
webgl_vendor="Intel Inc.",
renderer="Intel Iris OpenGL Engine",
fix_hairline=True,
)
driver.get(url)
# time.sleep(1)
#
# driver.refresh()
logger.debug(f"current_url:: {driver.current_url}")
# logger.debug(f"current_cookie:: {driver.get_cookies()}")
cookies_list = driver.get_cookies()
cookies_dict = {}
for cookie in cookies_list:
cookies_dict[cookie["name"]] = cookie["value"]
# logger.debug(cookies_dict)
self.cookies = cookies_list
# self.headers["Cookie"] = driver.get_cookies()
self.episode_url = driver.current_url
time.sleep(1)
elem = driver.find_element(By.XPATH, "//*")
source_code = elem.get_attribute("outerHTML")
logger.debug(source_code)
driver.close()
driver.quit()
return source_code.encode("utf-8")
# Create a request interceptor
@staticmethod
def interceptor(request):
del request.headers["Referer"] # Delete the header first
request.headers[
"Referer"
] = "https://anilife.live/g/l?id=0a36917f-39cc-43ea-b0c6-0c86d27c2408"
@staticmethod
def get_html_seleniumwire(url, referer, wired=False):
from selenium import webdriver
from selenium.webdriver.common.by import By
from seleniumwire import webdriver as wired_webdriver
from selenium_stealth import stealth
import time
options = webdriver.ChromeOptions()
# extra Chrome driver options (required when running on Linux)
options.add_argument("start-maximized")
options.add_argument("--headless")
options.add_argument("--no-sandbox")
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option("useAutomationExtension", False)
# Chrome driver path
driver_path = "./bin/Darwin/chromedriver"
if wired:
driver = wired_webdriver.Chrome(
executable_path=driver_path, chrome_options=options
)
else:
driver = webdriver.Chrome(
executable_path=driver_path, chrome_options=options
)
# stealth ======================================
# stealth(
# driver,
# languages=["en-US", "en"],
# vendor="Google Inc.",
# platform="Win32",
# webgl_vendor="Intel Inc.",
# renderer="Intel Iris OpenGL Engine",
# fix_hairline=True,
# )
if wired:
driver.request_interceptor = self.interceptor
driver.get(url)
driver.refresh()
time.sleep(1)
elem = driver.find_element(By.XPATH, "//*")
source_code = elem.get_attribute("outerHTML")
return source_code.encode("utf-8")
def get_html_cloudflare(self, url, cached=False):
# scraper = cloudscraper.create_scraper(
# # disableCloudflareV1=True,
# # captcha={"provider": "return_response"},
# delay=10,
# browser="chrome",
# )
# scraper = cfscrape.create_scraper(
# browser={"browser": "chrome", "platform": "android", "desktop": False}
# )
# scraper = cloudscraper.create_scraper(
# browser={"browser": "chrome", "platform": "windows", "mobile": False},
# debug=True,
# )
# LogicAniLife.headers["referer"] = LogicAniLife.referer
self.headers["Referer"] = "https://anilife.live/"
self.headers[
"Cookie"
] = "_ga=GA1.1.578607927.1660813724; __gads=ID=10abb8b98b6828ae-2281c943a9d500fd:T=1660813741:RT=1660813741:S=ALNI_MYU_iB2lBgSrEQUBwhKpNsToaqQ8A; sbtsck=javuwDzcOJqUyweM1OQeNGzHbjoHp7Cgw44XnPdM738c3E=; SPSI=e48379959d54a6a62cc7abdcafdb2761; SPSE=h5HfMGLJzLqzNafMD3YaOvHSC9xfh77CcWdKvexp/z5N5OsTkIiYSCudQhFffEfk/0pcOTVf0DpeV0RoNopzig==; anilife_csrf=b93b9f25a12a51cf185805ec4de7cf9d; UTGv2=h46b326af644f4ac5d0eb1502881136b3750; __gpi=UID=000008ba227e99e0:T=1660813741:RT=1660912282:S=ALNI_MaJHIVJIGpQ5nTE9lvypKQxJnn10A; DSR=SXPX8ELcRgh6N/9rNgjpQoNfaX2DRceeKYR0/ul7qTI9gApWQpZxr8jgymf/r0HsUT551vtOv2CMWpIn0Hd26A==; DCSS=89508000A76BBD939F6DDACE5BD9EB902D2212A; DGCC=Wdm; adOtr=7L4Xe58995d; spcsrf=6554fa003bf6a46dd9b7417acfacc20a; _ga_56VYJJ7FTM=GS1.1.1660912281.10.1.1660912576.0.0.0; PRLST=EO"
# logger.debug(f"headers:: {LogicAniLife.headers}")
if self.session is None:
self.session = requests.Session()
self.session.headers = self.headers
# LogicAniLife.session = requests.Session()
sess = cloudscraper.create_scraper(
browser={"browser": "firefox", "platform": "windows", "desktop": True},
debug=False,
sess=self.session,
delay=10,
)
# print(scraper.get(url, headers=LogicAniLife.headers).content)
# print(scraper.get(url).content)
# return scraper.get(url, headers=LogicAniLife.headers).content
# print(LogicAniLife.headers)
return sess.get(
url, headers=self.session.headers, timeout=10, allow_redirects=True
).content.decode("utf8", errors="replace")
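For reference, a minimal usage sketch of the new Crawler class. The target URLs are illustrative, the import path assumes the plugin package is importable as anime_downloader (as in the imports above), and the Playwright path requires the playwright browsers to be installed:

import asyncio
from anime_downloader.lib.crawler import Crawler

crawler = Crawler()
# blocking requests path (Cloudflare cookies are set from the canned header values)
page_html = crawler.get_html_requests("https://anilife.live/detail/id/471")
# Playwright path is a coroutine, so it is driven with asyncio.run here;
# webkit/firefox are used when the chrome channel is unavailable
page_html = asyncio.run(
    crawler.get_html_playwright(
        "https://anilife.live/detail/id/471",
        headless=True,
        referer="https://anilife.live/",
        engine="webkit",
    )
)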

lib/misc.py (new file, +145 lines)

@@ -0,0 +1,145 @@
import asyncio
import threading
import traceback
from asyncio import Task
from typing import Awaitable, TypeVar
T = TypeVar("T")  # generic result type used by asyncio_run2
from loguru import logger
def start_cor(loop, url):
fut = asyncio.run_coroutine_threadsafe(hello_async(url), loop)
print(fut.result())
def _start_background_loop(loop):
asyncio.set_event_loop(loop)
loop.run_forever()
_LOOP = asyncio.new_event_loop()
_LOOP_THREAD = threading.Thread(
target=_start_background_loop, args=(_LOOP,), daemon=True
)
_LOOP_THREAD.start()
# =================================================================#
def asyncio_run(future, as_task=True):
"""
A better implementation of `asyncio.run`.
:param future: A future or task or call of an async method.
:param as_task: Forces the future to be scheduled as task (needed for e.g. aiohttp).
"""
try:
loop = asyncio.get_running_loop()
except RuntimeError: # no event loop running:
loop = asyncio.new_event_loop()
return loop.run_until_complete(_to_task(future, as_task, loop))
else:
import nest_asyncio
nest_asyncio.apply(loop)
return asyncio.run(_to_task(future, as_task, loop))
def asyncio_run2(coro: Awaitable[T], timeout=60) -> T:
"""
Runs the coroutine in an event loop running on a background thread,
and blocks the current thread until it returns a result.
This plays well with gevent, since it can yield on the Future result call.
:param coro: A coroutine, typically an async method
:param timeout: How many seconds we should wait for a result before raising an error
"""
try:
return asyncio.run_coroutine_threadsafe(coro, _LOOP).result(timeout=timeout)
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
def _to_task(future, as_task, loop):
if not as_task or isinstance(future, Task):
return future
return loop.create_task(future)
def hello_sync(url: str):
from playwright.sync_api import sync_playwright
with sync_playwright() as p:
# browser = p.chromium.launch()
browser = p.chromium.connect_over_cdp('http://192.168.0.2:14444')
page = browser.new_page()
page.goto(url)
print(page.title())
browser.close()
async def _test(url):
await asyncio.sleep(2)
print('_test')
return 'ok'
async def compute(x, y):
print("Compute %s + %s ..." % (x, y))
await asyncio.sleep(1.0)
return x + y
async def print_sum(x, y):
result = await compute(x, y)
print("%s + %s = %s" % (x, y, result))
async def _thread(url, loop):
if loop is not None:
# future = asyncio.run_coroutine_threadsafe(hello_async(url), loop)
# future = asyncio.run_coroutine_threadsafe(_test(url), loop)
# print(f"Future --")
# print(" 2 ")
# print(" Result ", future.result())
# print(" 3 ")
# loop.run_until_complete(print_sum(1, 2))
loop.run_until_complete(hello_async(url, loop))
print("")
async def hello_async(url: str, loop=None):
from playwright.async_api import async_playwright
async with async_playwright() as p:
print("here")
browser = await p.chromium.launch(headless=True)
page = await browser.new_page()
await page.goto(url)
print(await page.title())  # title() is a coroutine in the async API
await browser.close()
async def hello(url: str):
from playwright.async_api import async_playwright
# from playwright_stealth import stealth_sync, stealth_async
print("hi")
try:
from gevent.event import AsyncResult
print(AsyncResult())
await asyncio.sleep(2)
print("hi")
# pw = await async_playwright().start()
# print(pw)
# browser = await pw.chromium.launch(headless=True)
# print("Browser Launched-----------------")
# page = await browser.new_page()
# print("Browser new Page created ")
# await page.goto(url)
# LogicAniLife.response_data = await page.content()
# return await page.content()
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
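A minimal sketch of how these helpers are meant to be driven from synchronous plugin code: asyncio_run2() submits the coroutine to the background loop started above (_LOOP) and blocks until the result arrives, which keeps Playwright's async API usable under gevent. The URL is illustrative:

# run hello_async on the background event loop and wait (up to 120 s) for it to finish
asyncio_run2(hello_async("https://anilife.live"), timeout=120)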


@@ -4,9 +4,12 @@
import os
import re
import json
import time
import traceback
import platform
import subprocess
from functools import wraps
# third-party
from sqlalchemy.ext.declarative import DeclarativeMeta
# sjva common
@@ -38,7 +41,21 @@ def read_file(filename):
logger.error(traceback.format_exc())
def yommi_timeit(func):
@wraps(func)
def timeit_wrapper(*args, **kwargs):
start_time = time.perf_counter()
result = func(*args, **kwargs)
end_time = time.perf_counter()
total_time = end_time - start_time
print(f'Function {func.__name__}{args} {kwargs} Took {total_time:.4f} seconds')
return result
return timeit_wrapper
class Util(object):
@staticmethod
def change_text_for_use_filename(text):
# text = text.replace('/', '')
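A minimal sketch of the new yommi_timeit decorator in use (the decorated function below is hypothetical):

@yommi_timeit
def parse_episode_page(code):
    # placeholder work; any synchronous function can be wrapped
    return code.upper()

parse_episode_page("afb8")
# prints something like: Function parse_episode_page('afb8',) {} Took 0.0000 seconds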


@@ -1,6 +1,6 @@
import os
import sys
- import threading
+ # import threading
import traceback
import json
from datetime import datetime
@@ -9,10 +9,13 @@ import re
import asyncio
import platform
import PIL.Image
import lxml.etree
# third-party
import requests
from gevent import threading
from lxml import html
from urllib import parse
import urllib
@@ -46,21 +49,23 @@ from framework import F
from plugin import (
PluginModuleBase
)
- from flaskfarm.lib.plugin._ffmpeg_queue import FfmpegQueueEntity, FfmpegQueue
+ from .lib._ffmpeg_queue import FfmpegQueueEntity, FfmpegQueue
from .lib.crawler import Crawler
# from tool_base import d
# package
# from .plugin import P
from .lib.util import Util, yommi_timeit
from typing import Awaitable, TypeVar
T = TypeVar("T")
from .setup import *
logger = P.logger
# =================================================================#
# package
class LogicAniLife(PluginModuleBase):
db_default = {
"anilife_db_version": "1",
@@ -87,8 +92,8 @@ class LogicAniLife(PluginModuleBase):
origin_url = None
episode_url = None
cookies = None
OS_PLATFORM = None
- os_platform = platform.system()
+ response_data = None
session = requests.Session()
headers = {
@@ -109,221 +114,31 @@ class LogicAniLife(PluginModuleBase):
super(LogicAniLife, self).__init__(P, "setting", scheduler_desc="애니라이프 자동 다운로드")
self.name = "anilife"
self.queue = None
self.OS_PLATFORM = platform.system()
default_route_socketio_module(self, attach='/search')
- @staticmethod
+ # @staticmethod
- def get_html(url: str, referer: str = None, stream: bool = False, timeout: int = 5):
+ def get_html(self, url: str, referer: str = None, stream: bool = False, is_stealth: bool = False, timeout: int = 5):
data = ""
try:
print("cloudflare protection bypass ==================")
print(self)
# return LogicAniLife.get_html_cloudflare(url)
- return LogicAniLife.get_html_selenium(url, referer)
+ # return self.get_html_selenium(url=url, referer=referer, is_stealth=is_stealth)
- # return LogicAniLife.get_html_playwright(url)
+ # url: str,
# headless: bool = False,
# referer: str = None,
# engine: str = "chrome",
# stealth: bool = False,
# return asyncio.run(LogicAniLife.get_html_playwright(url, engine="chrome", headless=True))
return asyncio.run(LogicAniLife.get_html_playwright(url, engine="chromium", headless=True))
# return LogicAniLife.get_html_playwright_sync(url, engine="chrome", headless=True)
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
return data
@staticmethod
def get_html_requests(
url: str, referer: str = None, stream: str = False, timeout: int = 5
) -> str:
data = ""
try:
print("get_html_requests ==================")
# cj = browser_cookie3.chrome(domain_name="anilife.live")
referer = "https://anilife.live/"
if LogicAniLife.session is None:
LogicAniLife.session = requests.session()
# logger.debug('get_html :%s', url)
LogicAniLife.headers["Referer"] = "" if referer is None else referer
LogicAniLife.headers[
"Cookie"
] = "_ga=GA1.1.578607927.1660813724; __gads=ID=10abb8b98b6828ae-2281c943a9d500fd:T=1660813741:RT=1660813741:S=ALNI_MYU_iB2lBgSrEQUBwhKpNsToaqQ8A; sbtsck=javuwDzcOJqUyweM1OQeNGzHbjoHp7Cgw44XnPdM738c3E=; SPSI=e48379959d54a6a62cc7abdcafdb2761; SPSE=h5HfMGLJzLqzNafMD3YaOvHSC9xfh77CcWdKvexp/z5N5OsTkIiYSCudQhFffEfk/0pcOTVf0DpeV0RoNopzig==; anilife_csrf=b93b9f25a12a51cf185805ec4de7cf9d; UTGv2=h46b326af644f4ac5d0eb1502881136b3750; __gpi=UID=000008ba227e99e0:T=1660813741:RT=1660912282:S=ALNI_MaJHIVJIGpQ5nTE9lvypKQxJnn10A; DSR=SXPX8ELcRgh6N/9rNgjpQoNfaX2DRceeKYR0/ul7qTI9gApWQpZxr8jgymf/r0HsUT551vtOv2CMWpIn0Hd26A==; DCSS=89508000A76BBD939F6DDACE5BD9EB902D2212A; DGCC=Wdm; adOtr=7L4Xe58995d; spcsrf=6554fa003bf6a46dd9b7417acfacc20a; _ga_56VYJJ7FTM=GS1.1.1660912281.10.1.1660912576.0.0.0; PRLST=EO"
LogicAniLife.headers["Referer"] = referer
page_content = LogicAniLife.session.get(
url, headers=headers, timeout=timeout, allow_redirects=True
)
data = page_content.text
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
return data
@staticmethod
async def get_html_playwright(
url: str,
headless: bool = False,
referer: str = None,
engine: str = "chrome",
stealth: bool = False,
) -> str:
try:
from playwright.sync_api import sync_playwright
from playwright.async_api import async_playwright
from playwright_stealth import stealth_sync, stealth_async
import time
cookie = None
browser_args = [
"--window-size=1300,570",
"--window-position=000,000",
"--disable-dev-shm-usage",
"--no-sandbox",
"--disable-web-security",
"--disable-features=site-per-process",
"--disable-setuid-sandbox",
"--disable-accelerated-2d-canvas",
"--no-first-run",
"--no-zygote",
# '--single-process',
"--disable-gpu",
"--use-gl=egl",
"--disable-blink-features=AutomationControlled",
"--disable-background-networking",
"--enable-features=NetworkService,NetworkServiceInProcess",
"--disable-background-timer-throttling",
"--disable-backgrounding-occluded-windows",
"--disable-breakpad",
"--disable-client-side-phishing-detection",
"--disable-component-extensions-with-background-pages",
"--disable-default-apps",
"--disable-extensions",
"--disable-features=Translate",
"--disable-hang-monitor",
"--disable-ipc-flooding-protection",
"--disable-popup-blocking",
"--disable-prompt-on-repost",
"--disable-renderer-backgrounding",
"--disable-sync",
"--force-color-profile=srgb",
"--metrics-recording-only",
"--enable-automation",
"--password-store=basic",
"--use-mock-keychain",
"--hide-scrollbars",
"--mute-audio",
]
# scraper = cloudscraper.create_scraper(
# browser={"browser": "chrome", "platform": "windows", "desktop": True},
# debug=False,
# # sess=LogicAniLife.session,
# delay=10,
# )
#
# cookie_value, user_agent = scraper.get_cookie_string(url)
#
# logger.debug(f"cookie_value:: {cookie_value}")
start = time.time()
ua = (
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/69.0.3497.100 Safari/537.36"
)
# from playwright_stealth import stealth_sync
def set_cookie(req):
nonlocal cookie
if "cookie" in req.headers:
cookie = req.headers["cookie"]
async with async_playwright() as p:
try:
if engine == "chrome":
browser = await p.chromium.launch(
channel="chrome", args=browser_args, headless=headless
)
elif engine == "webkit":
browser = await p.webkit.launch(
headless=headless,
args=browser_args,
)
else:
browser = await p.firefox.launch(
headless=headless,
args=browser_args,
)
# context = browser.new_context(
# user_agent=ua,
# )
LogicAniLife.headers[
"Referer"
] = "https://anilife.live/detail/id/471"
# print(LogicAniLife.headers)
LogicAniLife.headers["Referer"] = LogicAniLife.episode_url
if referer is not None:
LogicAniLife.headers["Referer"] = referer
logger.debug(f"LogicAniLife.headers::: {LogicAniLife.headers}")
context = await browser.new_context(
extra_http_headers=LogicAniLife.headers
)
await context.add_cookies(LogicAniLife.cookies)
# LogicAniLife.headers["Cookie"] = cookie_value
# context.set_extra_http_headers(LogicAniLife.headers)
page = await context.new_page()
# page.set_extra_http_headers(LogicAniLife.headers)
if stealth:
await stealth_async(page)
# page.on("request", set_cookie)
# stealth_sync(page)
print(LogicAniLife.headers["Referer"])
page.on("request", set_cookie)
print(f'Referer:: {LogicAniLife.headers["Referer"]}')
# await page.set_extra_http_headers(LogicAniLife.headers)
await page.goto(
url, wait_until="load", referer=LogicAniLife.headers["Referer"]
)
# page.wait_for_timeout(10000)
await asyncio.sleep(2.9)
# await page.reload()
# time.sleep(10)
# cookies = context.cookies
# print(cookies)
print(f"page.url:: {page.url}")
LogicAniLife.origin_url = page.url
# print(page.content())
print(f"run at {time.time() - start} sec")
return await page.content()
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
finally:
await browser.close()
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
finally:
# browser.close()
pass
@staticmethod
async def get_vod_url_v1(
@@ -582,171 +397,25 @@ class LogicAniLife(PluginModuleBase):
await browser.close()
@staticmethod
+ def get_vod_url_v2(url: str, headless: bool = False) -> str:
+ try:
+ import json
+ post_data = {
+ "url": url,
+ "headless": headless,
+ "engine": "webkit",
+ "stealth": True,
+ }
+ payload = json.dumps(post_data)
+ logger.debug(payload)
+ response_data = requests.post(url="http://localhost:7070/get_vod_url", data=payload)
+ logger.debug(response_data.text)
+ return response_data.text
+ except Exception as e:
+ logger.error("Exception:%s", e)
+ logger.error(traceback.format_exc())
def get_html_selenium(url: str, referer: str) -> bytes:
from selenium.webdriver.common.by import By
from selenium import webdriver
from selenium_stealth import stealth
from webdriver_manager.chrome import ChromeDriverManager
import time
options = webdriver.ChromeOptions()
# extra Chrome driver options (required when running on Linux)
options.add_argument("start-maximized")
options.add_argument("--headless")
options.add_argument("--no-sandbox")
options.add_argument("window-size=1920x1080")
options.add_argument("disable-gpu")
# options.add_argument('--no-sandbox')
options.add_argument("--disable-dev-shm-usage")
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option("useAutomationExtension", False)
if LogicAniLife.os_platform == "Darwin":
# Chrome driver path
driver_path = "./bin/Darwin/chromedriver"
# driver = webdriver.Chrome(executable_path=driver_path, chrome_options=options)
driver = webdriver.Chrome(
ChromeDriverManager().install(), chrome_options=options
)
else:
driver_bin_path = os.path.join(
os.path.dirname(__file__), "bin", f"{LogicAniLife.os_platform}"
)
driver_path = f"{driver_bin_path}/chromedriver"
driver = webdriver.Chrome(
executable_path=driver_path, chrome_options=options
)
stealth(
driver,
languages=["ko-KR", "ko"],
vendor="Google Inc.",
platform="Win32",
webgl_vendor="Intel Inc.",
renderer="Intel Iris OpenGL Engine",
fix_hairline=True,
)
driver.get(url)
driver.refresh()
logger.debug(f"current_url:: {driver.current_url}")
# logger.debug(f"current_cookie:: {driver.get_cookies()}")
cookies_list = driver.get_cookies()
cookies_dict = {}
for cookie in cookies_list:
cookies_dict[cookie["name"]] = cookie["value"]
# logger.debug(cookies_dict)
LogicAniLife.cookies = cookies_list
# LogicAniLife.headers["Cookie"] = driver.get_cookies()
LogicAniLife.episode_url = driver.current_url
time.sleep(1)
elem = driver.find_element(By.XPATH, "//*")
source_code = elem.get_attribute("outerHTML")
driver.close()
return source_code.encode("utf-8")
# Create a request interceptor
@staticmethod
def interceptor(request):
del request.headers["Referer"] # Delete the header first
request.headers[
"Referer"
] = "https://anilife.live/g/l?id=0a36917f-39cc-43ea-b0c6-0c86d27c2408"
@staticmethod
def get_html_seleniumwire(url, referer, wired=False):
from selenium import webdriver
from selenium.webdriver.common.by import By
from seleniumwire import webdriver as wired_webdriver
from selenium_stealth import stealth
import time
options = webdriver.ChromeOptions()
# 크롬드라이버 헤더 옵션추가 (리눅스에서 실행시 필수)
options.add_argument("start-maximized")
options.add_argument("--headless")
options.add_argument("--no-sandbox")
options.add_experimental_option("excludeSwitches", ["enable-automation"])
options.add_experimental_option("useAutomationExtension", False)
# 크롬드라이버 경로
driver_path = "./bin/Darwin/chromedriver"
if wired:
driver = wired_webdriver.Chrome(
executable_path=driver_path, chrome_options=options
)
else:
driver = webdriver.Chrome(
executable_path=driver_path, chrome_options=options
)
# stealth ======================================
# stealth(
# driver,
# languages=["en-US", "en"],
# vendor="Google Inc.",
# platform="Win32",
# webgl_vendor="Intel Inc.",
# renderer="Intel Iris OpenGL Engine",
# fix_hairline=True,
# )
if wired:
driver.request_interceptor = LogicAniLife.interceptor
driver.get(url)
driver.refresh()
time.sleep(1)
elem = driver.find_element(By.XPATH, "//*")
source_code = elem.get_attribute("outerHTML")
return source_code.encode("utf-8")
@staticmethod
def get_html_cloudflare(url, cached=False):
# scraper = cloudscraper.create_scraper(
# # disableCloudflareV1=True,
# # captcha={"provider": "return_response"},
# delay=10,
# browser="chrome",
# )
# scraper = cfscrape.create_scraper(
# browser={"browser": "chrome", "platform": "android", "desktop": False}
# )
# scraper = cloudscraper.create_scraper(
# browser={"browser": "chrome", "platform": "windows", "mobile": False},
# debug=True,
# )
# LogicAniLife.headers["referer"] = LogicAniLife.referer
LogicAniLife.headers["Referer"] = "https://anilife.live/"
LogicAniLife.headers[
"Cookie"
] = "_ga=GA1.1.578607927.1660813724; __gads=ID=10abb8b98b6828ae-2281c943a9d500fd:T=1660813741:RT=1660813741:S=ALNI_MYU_iB2lBgSrEQUBwhKpNsToaqQ8A; sbtsck=javuwDzcOJqUyweM1OQeNGzHbjoHp7Cgw44XnPdM738c3E=; SPSI=e48379959d54a6a62cc7abdcafdb2761; SPSE=h5HfMGLJzLqzNafMD3YaOvHSC9xfh77CcWdKvexp/z5N5OsTkIiYSCudQhFffEfk/0pcOTVf0DpeV0RoNopzig==; anilife_csrf=b93b9f25a12a51cf185805ec4de7cf9d; UTGv2=h46b326af644f4ac5d0eb1502881136b3750; __gpi=UID=000008ba227e99e0:T=1660813741:RT=1660912282:S=ALNI_MaJHIVJIGpQ5nTE9lvypKQxJnn10A; DSR=SXPX8ELcRgh6N/9rNgjpQoNfaX2DRceeKYR0/ul7qTI9gApWQpZxr8jgymf/r0HsUT551vtOv2CMWpIn0Hd26A==; DCSS=89508000A76BBD939F6DDACE5BD9EB902D2212A; DGCC=Wdm; adOtr=7L4Xe58995d; spcsrf=6554fa003bf6a46dd9b7417acfacc20a; _ga_56VYJJ7FTM=GS1.1.1660912281.10.1.1660912576.0.0.0; PRLST=EO"
# logger.debug(f"headers:: {LogicAniLife.headers}")
if LogicAniLife.session is None:
LogicAniLife.session = requests.Session()
LogicAniLife.session.headers = LogicAniLife.headers
# LogicAniLife.session = requests.Session()
sess = cloudscraper.create_scraper(
browser={"browser": "firefox", "platform": "windows", "desktop": True},
debug=False,
sess=LogicAniLife.session,
delay=10,
)
# print(scraper.get(url, headers=LogicAniLife.headers).content)
# print(scraper.get(url).content)
# return scraper.get(url, headers=LogicAniLife.headers).content
# print(LogicAniLife.headers)
return sess.get(
url, headers=LogicAniLife.session.headers, timeout=10, allow_redirects=True
).content.decode("utf8", errors="replace")
@staticmethod
def db_init():
@@ -755,7 +424,7 @@ class LogicAniLife(PluginModuleBase):
def process_menu(self, sub, req):
arg = P.ModelSetting.to_dict()
arg["sub"] = self.name
- if sub in ["setting", "queue", "list", "category", "request"]:
+ if sub in ["setting", "queue", "list", "search", "request"]:
if sub == "request" and req.args.get("content_code") is not None:
arg["anilife_current_code"] = req.args.get("content_code")
if sub == "setting":
@@ -828,7 +497,7 @@ class LogicAniLife(PluginModuleBase):
}
)
elif sub == "add_queue":
- logger.debug(f"add_queue routine ===============")
+ logger.debug(f"anilife add_queue routine ===============")
ret = {}
info = json.loads(request.form["data"])
logger.info(f"info:: {info}")
@@ -947,6 +616,14 @@ class LogicAniLife(PluginModuleBase):
self.current_data = None
self.queue.queue_start()
# fastapi running script
# import os
# cur_abspath = os.path.dirname(os.path.abspath(__file__))
# logger.debug(cur_abspath)
# os.popen(f"python {os.path.join(cur_abspath, 'yommi_api')}/main.py")
# asyncio_run2(hello("https://anilife.live"))
def reset_db(self):
db.session.query(ModelAniLifeItem).delete()
db.session.commit()
@@ -961,8 +638,26 @@ class LogicAniLife(PluginModuleBase):
url = P.ModelSetting.get("anilife_url") + "/g/l?id=" + code
logger.debug("url::: > %s", url)
- response_data = LogicAniLife.get_html(url, timeout=10)
+ # response_data = LogicAniLife.get_html(self, url=url, timeout=10)
- tree = html.fromstring(response_data)
import json
post_data = {
"url": url,
"headless": True,
"engine": "webkit"
}
payload = json.dumps(post_data)
logger.debug(payload)
response_data = None
response_data = requests.post(url="http://localhost:7070/get_html_playwright", data=payload)
# logger.debug(response_data.json()["html"])
soup_text = BeautifulSoup(response_data.json()["html"], 'lxml')
tree = html.fromstring(response_data.json()["html"])
# tree = html.fromstring(response_data)
# logger.debug(response_data)
main_title = tree.xpath('//div[@class="infox"]/h1/text()')[0]
image = tree.xpath('//div[@class="thumb"]/img/@src')[0]
@@ -1097,7 +792,8 @@ class LogicAniLife(PluginModuleBase):
else:
print("Request was not redirected")
- def get_anime_info(self, cate, page):
+ @staticmethod
+ def get_anime_info(cate, page):
logger.debug(f"get_anime_info() routine")
logger.debug(f"cate:: {cate}")
wrapper_xpath = '//div[@class="bsx"]'
@@ -1123,19 +819,45 @@ class LogicAniLife(PluginModuleBase):
# cate == "complete":
logger.info("url:::> %s", url)
data = {}
- response_data = LogicAniLife.get_html(url, timeout=10)
- # logger.debug(response_data)
+ import json
post_data = {
"url": url,
"headless": True,
"engine": "chromium"
}
payload = json.dumps(post_data)
logger.debug(payload)
response_data = requests.post(url="http://localhost:7070/get_html_playwright", data=payload)
LogicAniLife.episode_url = response_data.json()["url"]
logger.info(response_data.json()["url"])
logger.debug(LogicAniLife.episode_url)
# logger.debug(response_data.json())
# logger.debug(f"wrapper_xath:: {wrapper_xpath}")
- tree = html.fromstring(response_data)
+ # logger.debug(LogicAniLife.response_data)
# print(type(response_data))
# logger.debug(response_data.json()["html"])
soup_text = BeautifulSoup(response_data.json()["html"], 'lxml')
# print(len(soup_text.select("div.bsx")))
tree = html.fromstring(response_data.json()["html"])
# tree = lxml.etree.HTML(str(soup_text))
# logger.debug(tree)
# print(wrapper_xpath)
tmp_items = tree.xpath(wrapper_xpath)
# tmp_items = tree.xpath('//div[@class="bsx"]')
logger.debug(tmp_items)
data["anime_count"] = len(tmp_items)
data["anime_list"] = []
for item in tmp_items:
entity = {}
entity["link"] = item.xpath(".//a/@href")[0]
- # logger.debug(entity["link"])
+ logger.debug(entity["link"])
p = re.compile(r"^[http?s://]+[a-zA-Z0-9-]+/[a-zA-Z0-9-_.?=]+$")
# print(p.match(entity["link"]) != None)
@@ -1237,19 +959,19 @@ class AniLifeQueueEntity(FfmpegQueueEntity):
# add download
base_url = "https://anilife.live"
iframe_url = ""
LogicAniLife.episode_url = self.info["ep_url"]
logger.debug(LogicAniLife.episode_url)
url = self.info["va"]
# LogicAniLife.episode_url = url
logger.debug(f"url:: {url}")
ourls = parse.urlparse(url)
- self.headers = {
- "Referer": f"{ourls.scheme}://{ourls.netloc}",
- "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36",
- }
+ self.headers = {"Referer": LogicAniLife.episode_url,
+ "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, "
+ "like Gecko) Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36"}
headers["Referer"] = "https://anilife.live/detail/id/471"
headers["Referer"] = LogicAniLife.episode_url
logger.debug("make_episode_info()::url==> %s", url)
logger.info(f"self.info:::> {self.info}")
@@ -1266,23 +988,51 @@ class AniLifeQueueEntity(FfmpegQueueEntity):
referer_url = LogicAniLife.episode_url
logger.debug(f"LogicAniLife.episode_url:: {LogicAniLife.episode_url}")
- text = asyncio.run(
- LogicAniLife.get_html_playwright(
- url,
- headless=True,
- referer=referer_url,
- engine="chrome",
- stealth=True,
- )
- )
+ # asyncio.run under gevent
+ # text = asyncio.run(
+ # LogicAniLife.get_html_playwright(
+ # url,
+ # headless=False,
+ # referer=referer_url,
+ # engine="chrome",
+ # stealth=True,
+ # )
+ # )
# task1 = asyncio.create_task(LogicAniLife.get_html_playwright(
# url,
# headless=True,
# referer=referer_url,
# engine="chrome",
# stealth=True
# ))
# loop = asyncio.new_event_loop()
import json
post_data = {
"url": url,
"headless": True,
"engine": "chromium",
"referer": referer_url,
"stealth": "False"
}
payload = json.dumps(post_data)
logger.debug(payload)
response_data = requests.post(url="http://localhost:7070/get_html_playwright", data=payload)
# logger.debug(response_data.json()["html"])
# soup_text = BeautifulSoup(response_data.json()["html"], 'lxml')
#
# tree = html.fromstring(response_data.json()["html"])
text = response_data.json()["html"]
# vod_1080p_url = text
# logger.debug(text)
soup = BeautifulSoup(text, "lxml")
all_scripts = soup.find_all("script")
- # print(all_scripts)
+ print(f"all_scripts:: {all_scripts}")
regex = r"(?P<jawcloud_url>http?s:\/\/.*=jawcloud)"
match = re.compile(regex).search(text)
@@ -1352,9 +1102,11 @@ class AniLifeQueueEntity(FfmpegQueueEntity):
if not os.path.exists(self.savepath):
os.makedirs(self.savepath)
- vod_1080p_url = asyncio.run(
- LogicAniLife.get_vod_url(jawcloud_url, headless=True)
- )
+ # vod_1080p_url = asyncio.run(
+ # LogicAniLife.get_vod_url(jawcloud_url, headless=True)
+ # )
vod_1080p_url = LogicAniLife.get_vod_url_v2(jawcloud_url, headless=True)
print(f"vod_1080p_url:: {vod_1080p_url}")
self.url = vod_1080p_url
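The change above follows the same pattern as the series and episode-list code in this commit: page rendering is delegated to a local helper service instead of running Playwright inside the plugin process. A minimal sketch of that call, assuming the helper at http://localhost:7070 returns JSON with "html" and "url" keys as used in this diff (the target URL is illustrative):

import json
import requests

payload = json.dumps({
    "url": "https://anilife.live/detail/id/471",  # illustrative target page
    "headless": True,
    "engine": "webkit",
})
resp = requests.post("http://localhost:7070/get_html_playwright", data=payload)
rendered_html = resp.json()["html"]  # fully rendered page source
final_url = resp.json()["url"]       # URL after any redirects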


@@ -5,6 +5,7 @@
# @Site :
# @File : logic_linkkf
# @Software: PyCharm
import json
import os
import re
import sys
@@ -21,6 +22,7 @@ from flask import jsonify, render_template, request
from framework import db, path_data, scheduler
from lxml import html
from plugin import PluginModuleBase
from requests_cache import CachedSession
from anime_downloader.lib.util import Util
# package
@@ -60,6 +62,7 @@ class LogicLinkkf(PluginModuleBase):
current_headers = None
current_data = None
referer = None
cache_path = os.path.dirname(__file__)
session = requests.Session()
headers = {
@@ -123,6 +126,16 @@ class LogicLinkkf(PluginModuleBase):
return jsonify(
{"ret": "success", "cate": cate, "page": page, "data": data}
)
elif sub == "screen_movie_list":
try:
logger.debug("request:::> %s", request.form["page"])
page = request.form["page"]
data = self.get_screen_movie_info(page)
dummy_data = {"ret": "success", "data": data}
return jsonify(data)
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
elif sub == "complete_list":
pass
elif sub == "search":
@@ -145,7 +158,11 @@ class LogicLinkkf(PluginModuleBase):
}
)
elif sub == "add_queue":
- pass
+ logger.debug(f"anilife add_queue routine ===============")
+ ret = {}
+ info = json.loads(request.form["data"])
+ logger.info(f"info:: {info}")
+ ret["ret"] = self.add(info)
elif sub == "entity_list":
pass
elif sub == "queue_command":
@@ -428,6 +445,62 @@ class LogicLinkkf(PluginModuleBase):
data["ret"] = "error" data["ret"] = "error"
return data return data
def get_screen_movie_info(self, page):
try:
url = f"{P.ModelSetting.get('linkkf_url')}/ani/page/{page}"
html_content = self.get_html_requests(url, cached=True)
# html_content = LogicLinkkfYommi.get_html_cloudflare(url, cached=False)
download_path = P.ModelSetting.get("linkkf_download_path")
tree = html.fromstring(html_content)
tmp_items = tree.xpath('//div[@class="myui-vodlist__box"]')
title_xpath = './/a[@class="text-fff"]//text()'
# logger.info('tmp_items:::', tmp_items)
data = dict()
data = {"ret": "success", "page": page}
data["episode_count"] = len(tmp_items)
data["episode"] = []
for item in tmp_items:
entity = dict()
entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = re.search(r"[0-9]+", entity["link"]).group()
entity["title"] = item.xpath(title_xpath)[0].strip()
if len(item.xpath("./a/@style")) > 0:
print(
re.search(
r"url\(((http|https|ftp|ftps)\:\/\/[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,3}(\/\S*)?)\)",
item.xpath("./a/@style")[0],
).group()
)
if item.xpath(".//a/@data-original"):
entity["image_link"] = item.xpath(".//a/@data-original")[0]
else:
entity["image_link"] = ""
# entity["image_link"] = item.xpath("./a/@data-original")[0]
entity["chapter"] = (
item.xpath("./a/span//text()")[0]
if len(item.xpath("./a/span//text()")) > 0
else ""
)
# logger.info('entity:::', entity['title'])
data["episode"].append(entity)
# json_file_path = os.path.join(download_path, "airing_list.json")
# logger.debug("json_file_path:: %s", json_file_path)
#
# with open(json_file_path, "w") as outfile:
# json.dump(data, outfile)
return data
except Exception as e:
logger.error("Exception:%s", e)
logger.error(traceback.format_exc())
@staticmethod
def get_html(url: str, referer: str = None, cached: bool = False, stream: bool = False, timeout: int = 5):
data = ""
@@ -456,6 +529,34 @@ class LogicLinkkf(PluginModuleBase):
logger.error(traceback.format_exc())
return data
def get_html_requests(self, url, cached=False):
if LogicLinkkf.session is None:
if cached:
logger.debug("cached===========++++++++++++")
LogicLinkkf.session = CachedSession(
os.path.join(self.cache_path, "linkkf_cache"),
backend="sqlite",
expire_after=300,
cache_control=True,
)
# print(f"{cache_path}")
# print(f"cache_path:: {LogicLinkkfYommi.session.cache}")
else:
LogicLinkkf.session = requests.Session()
LogicLinkkf.referer = "https://linkkf.app"
LogicLinkkf.headers["Referer"] = LogicLinkkf.referer
# logger.debug(
# f"get_html()::LogicLinkkfYommi.referer = {LogicLinkkfYommi.referer}"
# )
page = LogicLinkkf.session.get(url, headers=LogicLinkkf.headers)
# logger.info(f"page: {page}")
return page.content.decode("utf8", errors="replace")
@staticmethod
def get_filename(maintitle, season, title):
try:
@@ -490,7 +591,6 @@ class LogicLinkkf(PluginModuleBase):
logger.error(traceback.format_exc())
class ModelLinkkfItem(db.Model):
__tablename__ = "{package_name}_linkkf_item".format(package_name=P.package_name)
__table_args__ = {"mysql_collate": "utf8_general_ci"}


@@ -6,27 +6,26 @@
# @File : logic_ohli24
# @Software: PyCharm
- import os, sys, traceback, re, json, threading
+ import asyncio
from datetime import datetime, date
import copy
import hashlib
import importlib
import json
import os
import re
import subprocess
import sys
import threading
import traceback
import urllib
from datetime import datetime, date
from urllib import parse
import PIL.Image
# third-party
import requests
from lxml import html
from urllib import parse
import urllib
import asyncio
import importlib
# import aiohttp
# third-party
from flask import request, render_template, jsonify
- from sqlalchemy import or_, and_, func, not_, desc
+ from lxml import html
- from pip._internal import main
+ from sqlalchemy import or_, desc
pkgs = ["bs4", "jsbeautifier", "aiohttp"]
for pkg in pkgs:
@@ -39,6 +38,7 @@ for pkg in pkgs:
subprocess.check_call([sys.executable, '-m', 'pip', 'install', pkg])
importlib.import_module(pkg)
# third party package
import aiohttp
from bs4 import BeautifulSoup
@@ -56,16 +56,12 @@ from .lib._ffmpeg_queue import FfmpegQueueEntity, FfmpegQueue
from support.expand.ffmpeg import SupportFfmpeg
from .lib.util import Util
# from tool_base import d
# package
# from .plugin import P
from .setup import *
logger = P.logger
print('*=' * 50)
#########################################################
class LogicOhli24(PluginModuleBase):
@@ -92,14 +88,7 @@ class LogicOhli24(PluginModuleBase):
current_data = None
session = requests.Session()
# headers = {
# "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
# "authority": "ndoodle.xyz",
# "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
# "Accept-Language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7",
# # "Referer": "https://ndoodle.xyz/video/8a66cd1b3045b820efd42dbf18eb28e1",
# "Referer": "https://ndoodle.xyz/video/8a66cd1b3045b820efd42dbf18eb28e1",
# }
headers = {
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.5249.114 Whale/3.17.145.12 Safari/537.36',
'authority': 'ndoodle.xyz',
@@ -140,7 +129,7 @@ class LogicOhli24(PluginModuleBase):
if sub in ["setting", "queue", "list", "category", "request", "search"]: if sub in ["setting", "queue", "list", "category", "request", "search"]:
if sub == "request" and req.args.get("content_code") is not None: if sub == "request" and req.args.get("content_code") is not None:
arg["ohli24_current_code"] = req.args.get("content_code") arg["ohli24_current_code"] = req.args.get("content_code")
if sub == "setting": elif sub == "setting":
job_id = "%s_%s" % (self.P.package_name, self.name) job_id = "%s_%s" % (self.P.package_name, self.name)
arg["scheduler"] = str(scheduler.is_include(job_id)) arg["scheduler"] = str(scheduler.is_include(job_id))
arg["is_running"] = str(scheduler.is_running(job_id)) arg["is_running"] = str(scheduler.is_running(job_id))
@@ -154,52 +143,42 @@ class LogicOhli24(PluginModuleBase):
# @staticmethod
def process_ajax(self, sub, req):
try:
if sub == "analysis":
# code = req.form['code']
code = request.form["code"]
try:
data = []
cate = request.form.get("type", None)
page = request.form.get("page", None)
if sub == "analysis":
code = request.form["code"]
# cate = request.form["type"]
wr_id = request.form.get("wr_id", None)
bo_table = request.form.get("bo_table", None)
data = []
# print(code)
# logger.info("code::: %s", code)
P.ModelSetting.set("ohli24_current_code", code)
data = self.get_series_info(code, wr_id, bo_table)
self.current_data = data
return jsonify({"ret": "success", "data": data, "code": code})
elif sub == "anime_list":
data = []
cate = request.form["type"]
page = request.form["page"]
data = self.get_anime_info(cate, page) data = self.get_anime_info(cate, page)
# self.current_data = data
return jsonify( return jsonify(
{"ret": "success", "cate": cate, "page": page, "data": data} {"ret": "success", "cate": cate, "page": page, "data": data}
) )
elif sub == "complete_list": elif sub == "complete_list":
data = []
cate = request.form["type"]
logger.debug("cate:: %s", cate) logger.debug("cate:: %s", cate)
page = request.form["page"] page = request.form["page"]
data = self.get_anime_info(cate, page) data = self.get_anime_info(cate, page)
# self.current_data = data
return jsonify( return jsonify(
{"ret": "success", "cate": cate, "page": page, "data": data} {"ret": "success", "cate": cate, "page": page, "data": data}
) )
elif sub == "search": elif sub == "search":
data = []
# cate = request.form["type"]
# page = request.form["page"]
cate = request.form["type"]
query = request.form["query"] query = request.form["query"]
page = request.form["page"] page = request.form["page"]
data = self.get_search_result(query, page, cate) data = self.get_search_result(query, page, cate)
# self.current_data = data
return jsonify( return jsonify(
{ {
"ret": "success", "ret": "success",
@@ -263,10 +242,10 @@ class LogicOhli24(PluginModuleBase):
ret = LogicOhli24.add_whitelist() ret = LogicOhli24.add_whitelist()
return jsonify(ret) return jsonify(ret)
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error(f"Exception: {e}")
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
except Exception as e: except Exception as e:
P.logger.error("Exception:%s", e) P.logger.error(f"Exception: {e}")
P.logger.error(traceback.format_exc()) P.logger.error(traceback.format_exc())
@staticmethod @staticmethod
@@ -447,11 +426,7 @@ class LogicOhli24(PluginModuleBase):
else: else:
pass pass
logger.debug("url:::> %s", url) logger.debug('url:::> %s', url)
# self.current_headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)
# AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36',
# 'Referer': url }
response_data = LogicOhli24.get_html(url, timeout=10) response_data = LogicOhli24.get_html(url, timeout=10)
tree = html.fromstring(response_data) tree = html.fromstring(response_data)
@@ -527,7 +502,7 @@ class LogicOhli24(PluginModuleBase):
} }
) )
logger.info("des_items length:: %s", len(des_items)) # logger.info("des_items length:: %s", len(des_items))
for idx, item in enumerate(des_items): for idx, item in enumerate(des_items):
# key = des_key[idx] # key = des_key[idx]
span = item.xpath(".//span//text()") span = item.xpath(".//span//text()")
@@ -569,6 +544,7 @@ class LogicOhli24(PluginModuleBase):
return {"ret": "exception", "log": str(e)} return {"ret": "exception", "log": str(e)}
def get_anime_info(self, cate, page): def get_anime_info(self, cate, page):
print(cate, page)
try: try:
if cate == "ing": if cate == "ing":
url = ( url = (
@@ -982,14 +958,15 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
) )
packed_script = soup3.find("script", text=s_pattern) packed_script = soup3.find("script", text=s_pattern)
# packed_script = soup3.find('script') # packed_script = soup3.find('script')
# logger.info('packed_script>>> %s', packed_script.text) logger.info('packed_script>>> %s', packed_script.text)
unpack_script = None unpack_script = None
if packed_script is not None: if packed_script is not None:
# logger.debug('zzzzzzzzzzzz') # logger.debug('zzzzzzzzzzzz')
match = packed_pattern.search(packed_script.text) # match = packed_pattern.search(packed_script.text)
# match = re.search(packed_pattern, packed_script.text) # match = re.search(packed_pattern, packed_script.text)
# logger.debug("match::: %s", match.group()) # logger.debug("match::: %s", match.group())
unpack_script = jsbeautifier.beautify(match.group(3)) # unpack_script = jsbeautifier.beautify(match.group(3))
unpack_script = jsbeautifier.beautify(packed_script.text)
# logger.info('match groups:: %s', match.groups()) # logger.info('match groups:: %s', match.groups())
# logger.info('match group3:: %s', match.group(3)) # logger.info('match group3:: %s', match.group(3))

View File

@@ -120,6 +120,7 @@ try:
from .mod_ohli24 import LogicOhli24 from .mod_ohli24 import LogicOhli24
from .mod_anilife import LogicAniLife from .mod_anilife import LogicAniLife
from .mod_linkkf import LogicLinkkf from .mod_linkkf import LogicLinkkf
else: else:
from support import SupportSC from support import SupportSC

12
static/css/bootstrap.min.css vendored Normal file

File diff suppressed because one or more lines are too long

View File

@@ -32,12 +32,13 @@
</form> </form>
</div> </div>
<!--전체--> <!--전체-->
<script src="{{ url_for('.static', filename='js/sjva_ui14.js') }}"></script>
<script type="text/javascript"> <script type="text/javascript">
const package_name = "{{arg['package_name'] }}"; const package_name = "{{arg['package_name'] }}";
const sub = "{{arg['sub'] }}"; const sub = "{{arg['sub'] }}";
const anilife_url = "{{arg['anilife_url']}}"; const anilife_url = "{{arg['anilife_url']}}";
let current_data = null; {#let current_data = null;#}
const params = new Proxy(new URLSearchParams(window.location.search), { const params = new Proxy(new URLSearchParams(window.location.search), {
get: (searchParams, prop) => searchParams.get(prop), get: (searchParams, prop) => searchParams.get(prop),

View File

@@ -0,0 +1,850 @@
{% extends "base.html" %} {% block content %}
<div id="preloader">
<div class='demo'>
<!-- <div class="loader-inner">-->
<div class='circle'>
<div class='inner'></div>
</div>
<div class='circle'>
<div class='inner'></div>
</div>
<div class='circle'>
<div class='inner'></div>
</div>
<div class='circle'>
<div class='inner'></div>
</div>
<div class='circle'>
<div class='inner'></div>
</div>
<!-- </div>-->
</div>
</div>
<div id="yommi_wrapper">
<div class="input-group mb-2">
<input
id="input_search"
type="search"
class="form-control rounded"
placeholder="Search"
aria-label="Search"
aria-describedby="search-addon"
/>
<button id="btn_search" type="button" class="btn btn-primary">
search
</button>
</div>
<div>
<div
id="anime_category"
class="btn-group"
role="group"
aria-label="Basic example"
>
<button id="ing" type="button" class="btn btn-success">방영중</button>
<button id="theater" type="button" class="btn btn-primary">극장판</button>
<button id="complete_anilist" type="button" class="btn btn-dark">
완결
</button>
<button id="top20" type="button" class="btn btn-grey">
Top20
</button>
</div>
<form id="airing_list_form">
<div id="airing_list"></div>
</form>
<form id="screen_movie_list_form">
<div id="screen_movie_list" class="container"></div>
</form>
<div class="text-center">
<div id="spinner" class="spinner-border" role="status">
<span class="sr-only">Loading...</span>
</div>
</div>
<form id="program_auto_form">
<div id="episode_list"></div>
</form>
</div>
</div>
<!--전체-->
<script src="{{ url_for('.static', filename='js/sjva_ui14.js') }}"></script>
<script
type="text/javascript"
src="https://cdn.jsdelivr.net/npm/lozad/dist/lozad.min.js"
></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.lazyload/1.9.1/jquery.lazyload.min.js"
integrity="sha512-jNDtFf7qgU0eH/+Z42FG4fw3w7DM/9zbgNPe3wfJlCylVDTT3IgKW5r92Vy9IHa6U50vyMz5gRByIu4YIXFtaQ=="
crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<script type="text/javascript">
const package_name = "{{arg['package_name'] }}";
const sub = "{{arg['sub'] }}";
const anilife_url = "{{arg['anilife_url']}}";
//let current_data = null;
let page = 1;
let next_page = 1
let current_cate = ''
let current_query = ''
const observer = lozad('.lozad', {
rootMargin: '10px 0px', // syntax similar to that of CSS Margin
threshold: 0.1, // ratio of element convergence
enableAutoReload: true // it will reload the new image when validating attributes changes
});
observer.observe();
const loader = document.getElementById("preloader");
const dismissLoadingScreen = async function () {
console.log("Before the delay")
// await delay(2.5);
loader.style.display = "none";
};
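// Fetch one page of the anime list for the given category and render it; on success, advances next_page for infinite scroll.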
const get_anime_list = (type, page) => {
console.log(`type: ${type}, page: ${page}`)
let url = ''
let data = {"page": page, "type": type}
switch (type) {
case 'ing':
url = '/' + package_name + '/ajax/' + sub + '/anime_list'
current_cate = 'ing'
break;
case 'movie':
url = '/' + package_name + '/ajax/' + sub + '/screen_movie_list'
current_cate = 'movie'
break;
case 'theater':
url = '/' + package_name + '/ajax/' + sub + '/anime_list'
current_cate = 'theater'
break;
case 'fin':
url = '/' + package_name + '/ajax/' + sub + '/complete_list'
current_cate = 'fin'
break
case 'top20':
url = '/' + package_name + '/ajax/' + sub + '/anime_list'
current_cate = 'top20'
break
default:
break;
}
$.ajax({
url: url,
type: "POST",
data: data,
cache: false,
dataType: "json",
success: (ret) => {
current_screen_movie_data = ret
console.log('ret::>', ret)
if (current_screen_movie_data !== '') {
if (type === "ing") {
make_airing_list(ret.data, page)
observer.observe();
} else if (type === "fin") {
make_screen_movie_list(ret.data, page)
observer.observe();
} else if (type === "theater") {
make_screen_movie_list(ret.data, page)
observer.observe();
} else {
make_screen_movie_list(ret.data, page)
}
div_visible = true
console.log(div_visible)
}
dismissLoadingScreen()
next_page = page + 1
}
})
}
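// Render the airing-list cards into #screen_movie_list; appends when page > 1, otherwise replaces the container, then re-arms lazy loading.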
function make_airing_list(data, page) {
let str = ''
let tmp = ''
str += '<div>';
str += '<button type="button" class="btn btn-info">Page <span class="badge bg-warning">' + page + '</span></button>';
str += '</div>';
str += '<div id="inner_screen_movie" class="row infinite-scroll">';
for (let i in data.anime_list) {
tmp = '<div class="col-6 col-sm-4 col-md-3">';
tmp += '<div class="card">';
// tmp += '<img class="lozad" data-src="' + data.anime_list[i].image_link + '" />';
tmp += '<img class="lazyload" src="../static/img_loader_x200.svg" data-original="' + data.anime_list[i].image_link + '" style="cursor: pointer" onclick="location.href=\'./request?code=' + data.anime_list[i].code + '\'"/>';
tmp += '<div class="card-body">'
// {#tmp += '<button id="code_button" data-code="' + data.episode[i].code + '" type="button" class="btn btn-primary code-button bootstrap-tooltip" data-toggle="button" data-tooltip="true" aria-pressed="true" autocomplete="off" data-placement="top">' +#}
// {# '<span data-tooltip-text="'+data.episode[i].title+'">' + data.episode[i].code + '</span></button></div>';#}
tmp += '<h5 class="card-title">' + data.anime_list[i].title + '</h5>';
tmp += '<p class="card-text">' + data.anime_list[i].code + '</p>';
tmp += '<a href="./request?code=' + data.anime_list[i].code + '" class="btn btn-primary cut-text">' + data.anime_list[i].title + '</a>';
tmp += '</div>';
tmp += '</div>';
tmp += '</div>';
str += tmp
}
str += '</div>';
str += m_hr_black();
if (page > 1) {
const temp = document.createElement('div')
temp.innerHTML = str;
while (temp.firstChild) {
document.getElementById("screen_movie_list").appendChild(temp.firstChild);
}
page++
} else {
document.getElementById("screen_movie_list").innerHTML = str;
}
$("img.lazyload").lazyload({
threshold: 10,
effect: "fadeIn",
});
}
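// Render search results as cards; when wr_id is present, the request link also carries wr_id and the bo_table parsed from the item link.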
function make_search_result_list(data, page) {
let str = ''
let tmp = ''
console.log(data.anime_list, page)
str += '<div>';
str += '<button type="button" class="btn btn-info">Page <span class="badge bg-warning">' + page + '</span></button>';
str += '</div>';
str += '<div id="inner_screen_movie" class="row infinite-scroll">';
for (let i in data.anime_list) {
let request_url
if (data.anime_list[i].wr_id !== '') {
const re = /bo_table=([^&]+)/
const bo_table = data.anime_list[i].link.match(re)
// console.log(bo_table)
request_url = './request?code=' + data.anime_list[i].code + '&amp;wr_id=' + data.anime_list[i].wr_id + '&amp;bo_table=' + bo_table[1]
} else {
request_url = './request?code=' + data.anime_list[i].code
}
tmp = '<div class="col-sm-4">';
tmp += '<div class="card">';
tmp += '<img class="card-img-top" src="' + data.anime_list[i].image_link + '" />';
tmp += '<div class="card-body">'
// {#tmp += '<button id="code_button" data-code="' + data.episode[i].code + '" type="button" class="btn btn-primary code-button bootstrap-tooltip" data-toggle="button" data-tooltip="true" aria-pressed="true" autocomplete="off" data-placement="top">' +#}
// {# '<span data-tooltip-text="'+data.episode[i].title+'">' + data.episode[i].code + '</span></button></div>';#}
tmp += '<h5 class="card-title">' + data.anime_list[i].title + '</h5>';
tmp += '<p class="card-text">' + data.anime_list[i].code + '</p>';
tmp += '<a href="' + request_url + '" class="btn btn-primary cut-text">' + data.anime_list[i].title + '</a>';
tmp += '</div>';
tmp += '</div>';
tmp += '</div>';
str += tmp
}
str += '</div>';
str += m_hr_black();
if (page > 1) {
const temp = document.createElement('div')
temp.innerHTML = str;
while (temp.firstChild) {
document.getElementById("screen_movie_list").appendChild(temp.firstChild);
}
page++
} else {
document.getElementById("screen_movie_list").innerHTML = str;
}
}
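// Render a generic card grid (theater/completed lists); same append-or-replace behavior as the airing list.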
function make_screen_movie_list(data, page) {
let str = ''
let tmp = ''
console.log(data.anime_list, page)
str += '<div>';
str += '<button type="button" class="btn btn-info">Page <span class="badge bg-warning">' + page + '</span></button>';
str += '</div>';
str += '<div id="inner_screen_movie" class="row infinite-scroll">';
for (let i in data.anime_list) {
tmp = '<div class="col-sm-4">';
tmp += '<div class="card">';
tmp += '<img class="card-img-top" src="' + data.anime_list[i].image_link + '" />';
tmp += '<div class="card-body">'
tmp += '<h5 class="card-title">' + data.anime_list[i].title + '</h5>';
tmp += '<p class="card-text">' + data.anime_list[i].code + '</p>';
tmp += '<a href="./request?code=' + data.anime_list[i].code + '" class="btn btn-primary cut-text">' + data.anime_list[i].title + '</a>';
tmp += '</div>';
tmp += '</div>';
tmp += '</div>';
str += tmp
}
str += '</div>';
str += m_hr_black();
if (page > 1) {
const temp = document.createElement('div')
temp.innerHTML = str;
while (temp.firstChild) {
document.getElementById("screen_movie_list").appendChild(temp.firstChild);
}
page++
} else {
document.getElementById("screen_movie_list").innerHTML = str;
}
$("img.lazyload").lazyload({
threshold: 10,
effect: "fadeIn",
});
}
$(document).ready(function () {
// if ( "{{arg['anilife_current_code']}}" !== "" ) {
// document.getElementById("code").value = "{{arg['anilife_current_code']}}";
// // If the value is not empty, keep triggering the analysis button
// document.getElementById("analysis_btn").click();
// }
$("#input_search").keydown(function (key) {
if (key.keyCode === 13) {
// alert("Enter key pressed.");
$("#btn_search").trigger("click");
}
})
get_anime_list("ing", 1)
const observer = lozad('.lozad', {
rootMargin: '10px 0px', // syntax similar to that of CSS Margin
threshold: 0.1, // ratio of element convergence
enableAutoReload: true // it will reload the new image when validating attributes changes
});
observer.observe();
});
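// Search button: posts the query to the search endpoint and renders the first page of results.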
$("body").on("click", "#btn_search", function (e) {
e.preventDefault();
let query = $("#input_search").val();
console.log(query);
current_cate = "search"
current_query = query
if ($("#input_search").val() === "") {
console.log("search keyword nothing");
return false;
}
$.ajax({
url: "/" + package_name + "/ajax/" + sub + "/search",
type: "POST",
cache: false,
data: {query: query, type: current_cate, page: page},
// dataType: "json",
contentType: "application/x-www-form-urlencoded; charset=UTF-8",
success: function (ret) {
if (ret.ret) {
console.log('ret:::', ret)
make_search_result_list(ret.data, 1);
next_page = page + 1
} else {
$.notify("<strong>분석 실패</strong><br>" + ret.log, {
type: "warning",
});
}
},
});
});
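// Category buttons: show the spinner and reload page 1 of the selected list.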
$('#anime_category #ing').on("click", function () {
// {#console.log(this.id)#}
let spinner = document.getElementById('spinner');
spinner.style.visibility = 'visible';
get_anime_list("ing", 1)
})
$('#anime_category #complete_anilist').on("click", function () {
// {#console.log(this.id)#}
let spinner = document.getElementById('spinner');
spinner.style.visibility = 'visible';
get_anime_list("fin", 1)
})
$('#anime_category #theater').on("click", function () {
// {#console.log(this.id)#}
let spinner = document.getElementById('spinner');
spinner.style.visibility = 'visible';
get_anime_list("theater", 1)
})
$('#anime_category #top20').on("click", function () {
// {#console.log(this.id)#}
let spinner = document.getElementById('spinner');
spinner.style.visibility = 'visible';
get_anime_list("top20", 1)
})
// Called when the analysis button is clicked
$("body").on('click', '#analysis_btn', function (e) {
e.preventDefault();
const code = document.getElementById("code").value
console.log(code)
$.ajax({
url: '/' + package_name + '/ajax/' + sub + '/analysis',
type: "POST",
cache: false,
data: {code: code},
dataType: "json",
success: function (ret) {
if (ret.ret === 'success' && ret.data != null) {
// console.log(ret.code)
console.log(ret.data)
make_program(ret.data)
} else {
$.notify('<strong>분석 실패</strong><br>' + ret.log, {type: 'warning'});
}
}
});
});
$("body").on('click', '#go_anilife_btn', function (e) {
e.preventDefault();
window.open("{{arg['anilife_url']}}", "_blank");
});
$("body").on('click', '#all_check_on_btn', function (e) {
e.preventDefault();
$('input[id^="checkbox_"]').bootstrapToggle('on')
});
$("body").on('click', '#all_check_off_btn', function (e) {
e.preventDefault();
$('input[id^="checkbox_"]').bootstrapToggle('off')
});
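// Add the selected episode to the download queue and show the result as a notification.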
$("body").on('click', '#add_queue_btn', function (e) {
e.preventDefault();
data = current_data.episode[$(this).data('idx')];
console.log('data:::>', data)
$.ajax({
url: '/' + package_name + '/ajax/' + sub + '/add_queue',
type: "POST",
cache: false,
data: {data: JSON.stringify(data)},
dataType: "json",
success: function (data) {
if (data.ret == 'enqueue_db_append' || data.ret == 'enqueue_db_exist') {
$.notify('<strong>다운로드 작업을 추가 하였습니다.</strong>', {type: 'success'});
} else if (data.ret == 'queue_exist') {
$.notify('<strong>이미 큐에 있습니다. 삭제 후 추가하세요.</strong>', {type: 'warning'});
} else if (data.ret == 'db_completed') {
$.notify('<strong>DB에 완료 기록이 있습니다.</strong>', {type: 'warning'});
} else {
$.notify('<strong>추가 실패</strong><br>' + ret.log, {type: 'warning'});
}
}
});
});
// const observer = lozad();
// const el = document.querySelector('img');
// const observer = lozad(el); // passing a `NodeList` (e.g. `document.querySelectorAll()`) is also valid
// observer.observe();
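// Infinite-scroll loader: posts the next page for the current category and appends the returned cards.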
const loadNextAnimes = (cate, page) => {
spinner.style.display = "block";
let data = {type: cate, page: String(page)};
let url = ''
switch (cate) {
case 'ing':
url = '/' + package_name + '/ajax/' + sub + '/anime_list'
current_cate = 'ing'
break;
case 'movie':
url = '/' + package_name + '/ajax/' + sub + '/screen_movie_list'
current_cate = 'movie'
break;
case 'theater':
url = '/' + package_name + '/ajax/' + sub + '/anime_list'
current_cate = 'theater'
break;
case 'fin':
url = '/' + package_name + '/ajax/' + sub + '/complete_list'
current_cate = 'fin'
break
case 'search':
url = "/" + package_name + "/ajax/" + sub + "/search"
current_cate = 'search'
data.query = current_query
break;
default:
break;
}
fetch(url, {
method: "POST",
cache: "no-cache",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
body: new URLSearchParams(data),
})
.then((res) => res.json())
.then((response) => {
// console.log("Success:", JSON.stringify(response));
// {#imagesContainer.appendChild()#}
console.log("return page:::> ", String(response.page));
// {#page = response.page#}
if (current_cate === 'search') {
make_search_result_list(response.data, response.page);
} else {
make_screen_movie_list(response.data, response.page);
}
page++;
next_page++;
})
.catch((error) => console.error("Error:", error));
};
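// Fires on scroll; when the bottom of the page is reached, loads the next page for the current category.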
const onScroll = (e) => {
console.dir(e.target.scrollingElement.scrollHeight);
const {scrollTop, scrollHeight, clientHeight} = e.target.scrollingElement;
if (Math.round(scrollHeight - scrollTop) <= clientHeight) {
document.getElementById("spinner").style.display = "block";
console.log("loading");
console.log("now page::> ", page);
console.log("next_page::> ", String(next_page));
loadNextAnimes(current_cate, next_page);
}
};
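// Debounce helper: delays func until `delay` ms have passed without another call.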
const debounce = (func, delay) => {
let timeoutId = null;
return (...args) => {
clearTimeout(timeoutId);
timeoutId = setTimeout(func.bind(null, ...args), delay);
};
};
document.addEventListener("scroll", debounce(onScroll, 300));
</script>
<style>
button.code-button {
min-width: 82px !important;
}
.tooltip {
position: relative;
display: block;
}
@media (min-width: 576px) {
.container {
max-width: 100%;
}
.card-columns {
column-count: 2;
column-gap: 1.25rem;
}
.card-columns .card {
display: inline-block;
}
}
@media (min-width: 768px) {
.card-columns {
column-count: 3;
}
}
/* Large devices (desktops, 992px and up) */
@media (min-width: 992px) {
.card-columns {
column-count: 3;
}
}
/* Extra large devices (large desktops, 1200px and up) */
@media (min-width: 1200px) {
.card-columns {
column-count: 5;
}
#yommi_wrapper {
max-width: 80%;
margin: 0 auto;
}
[data-tooltip-text]:hover {
position: relative;
}
[data-tooltip-text]:after {
-webkit-transition: bottom 0.3s ease-in-out, opacity 0.3s ease-in-out;
-moz-transition: bottom 0.3s ease-in-out, opacity 0.3s ease-in-out;
transition: bottom 0.3s ease-in-out, opacity 0.3s ease-in-out;
background-color: rgba(0, 0, 0, 0.8);
-webkit-box-shadow: 0px 0px 3px 1px rgba(50, 50, 50, 0.4);
-moz-box-shadow: 0px 0px 3px 1px rgba(50, 50, 50, 0.4);
box-shadow: 0px 0px 3px 1px rgba(50, 50, 50, 0.4);
-webkit-border-radius: 5px;
-moz-border-radius: 5px;
border-radius: 5px;
color: #ffffff;
font-size: 12px;
margin-bottom: 10px;
padding: 7px 12px;
position: absolute;
width: auto;
min-width: 50px;
max-width: 300px;
word-wrap: break-word;
z-index: 9999;
opacity: 0;
left: -9999px;
top: 90%;
content: attr(data-tooltip-text);
}
[data-tooltip-text]:hover:after {
top: 230%;
left: 0;
opacity: 1;
}
[data-tooltip-text]:hover {
position: relative;
}
[data-tooltip-text]:after {
-webkit-transition: bottom 0.3s ease-in-out, opacity 0.3s ease-in-out;
-moz-transition: bottom 0.3s ease-in-out, opacity 0.3s ease-in-out;
transition: bottom 0.3s ease-in-out, opacity 0.3s ease-in-out;
background-color: rgba(0, 0, 0, 0.8);
-webkit-box-shadow: 0px 0px 3px 1px rgba(50, 50, 50, 0.4);
-moz-box-shadow: 0px 0px 3px 1px rgba(50, 50, 50, 0.4);
box-shadow: 0px 0px 3px 1px rgba(50, 50, 50, 0.4);
-webkit-border-radius: 5px;
-moz-border-radius: 5px;
border-radius: 5px;
color: #ffffff;
font-size: 12px;
margin-bottom: 10px;
padding: 7px 12px;
position: absolute;
width: auto;
min-width: 50px;
max-width: 300px;
word-wrap: break-word;
z-index: 9999;
opacity: 0;
left: -9999px;
top: -210% !important;
content: attr(data-tooltip-text);
}
[data-tooltip-text]:hover:after {
top: 130%;
left: 0;
opacity: 1;
}
#airing_list {
display: none;
}
.cut-text {
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
width: 100%;
}
#screen_movie_list {
margin-top: 10px;
}
.card-body {
padding: 0 !important;
}
.card-title {
padding: 1rem !important;
}
button#add_whitelist {
float: right;
}
button.btn-favorite {
background-color: #e0ff42;
}
body {
font-family: NanumSquareNeo, system-ui, -apple-system, Segoe UI, Roboto, Helvetica Neue, Noto Sans, Liberation Sans, Arial, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol, Noto Color Emoji;
}
body {
background-image: linear-gradient(90deg, #233f48, #6c6fa2, #768dae);
}
.demo {
width: 100px;
height: 102px;
border-radius: 100%;
position: absolute;
top: 45%;
left: calc(50% - 50px);
}
.circle {
width: 100%;
height: 100%;
position: absolute;
}
.circle .inner {
width: 100%;
height: 100%;
border-radius: 100%;
border: 5px solid rgba(0, 255, 170, 0.7);
border-right: none;
border-top: none;
background-clip: padding-box;
box-shadow: inset 0px 0px 10px rgba(0, 255, 170, 0.15);
}
@-webkit-keyframes spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}
@keyframes spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}
.circle:nth-of-type(0) {
transform: rotate(0deg);
}
.circle:nth-of-type(0) .inner {
-webkit-animation: spin 2s infinite linear;
animation: spin 2s infinite linear;
}
.circle:nth-of-type(1) {
transform: rotate(70deg);
}
.circle:nth-of-type(1) .inner {
-webkit-animation: spin 2s infinite linear;
animation: spin 2s infinite linear;
}
.circle:nth-of-type(2) {
transform: rotate(140deg);
}
.circle:nth-of-type(2) .inner {
-webkit-animation: spin 2s infinite linear;
animation: spin 2s infinite linear;
}
.demo {
-webkit-animation: spin 5s infinite linear;
animation: spin 5s infinite linear;
background: rgba(0, 0, 0, 0.2);
background: radial-gradient(#222, #000);
bottom: 0;
left: 0;
overflow: hidden;
/*position: fixed;*/
right: 0;
/*top: 0;*/
z-index: 99999;
opacity: 0.5;
margin: 0 auto;
transform: translate(-50%, -50%);
position: absolute;
top: 50%;
}
.loader-inner {
bottom: 0;
height: 60px;
left: 0;
margin: auto;
position: absolute;
right: 0;
top: 0;
width: 100px;
}
#preloader {
/*background-color: green;*/
/*color: white;*/
/*height: 100vh;*/
/*width: 100%;*/
/*position: fixed;*/
/*z-index: 100;*/
background: rgba(0, 0, 0, 0.2);
background: radial-gradient(#222, #000);
bottom: 0;
left: 0;
overflow: hidden;
position: fixed;
right: 0;
top: 0;
z-index: 99999;
opacity: 0.5;
}
</style>
{% endblock %}

View File

@@ -34,10 +34,10 @@
{{ macros.setting_checkbox('anilife_auto_mode_all', '에피소드 모두 받기', value=arg['anilife_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }} {{ macros.setting_checkbox('anilife_auto_mode_all', '에피소드 모두 받기', value=arg['anilife_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }}
{{ macros.m_tab_content_end() }} {{ macros.m_tab_content_end() }}
{{ macros.m_tab_content_start('action', false) }} {# {{ macros.m_tab_content_start('action', false) }}#}
{{ macros.setting_button([['global_one_execute_sub_btn', '1회 실행']], left='1회 실행' ) }} {# {{ macros.setting_button([['global_one_execute_sub_btn', '1회 실행']], left='1회 실행' ) }}#}
{{ macros.setting_button([['global_reset_db_sub_btn', 'DB 초기화']], left='DB정리' ) }} {# {{ macros.setting_button([['global_reset_db_sub_btn', 'DB 초기화']], left='DB정리' ) }}#}
{{ macros.m_tab_content_end() }} {# {{ macros.m_tab_content_end() }}#}
</div><!--tab-content--> </div><!--tab-content-->
</form> </form>

View File

@@ -93,6 +93,8 @@
function make_program(data) { function make_program(data) {
current_data = data; current_data = data;
{#$("body").css({"background":"url("+data.poster_url+")"})#}
// console.log('current_data:: ', data) // console.log('current_data:: ', data)
str = ""; str = "";
tmp = '<div class="form-inline w-100">'; tmp = '<div class="form-inline w-100">';

View File

@@ -34,18 +34,17 @@
</button> </button>
</div> </div>
<div>
<div <div
id="anime_category" id="anime_category"
class="btn-group" class="btn-group"
role="group" role="group"
aria-label="Basic example" aria-label="Linkkf Button"
> >
<button id="ing" type="button" class="btn btn-success">방영중</button> <button id="ing" type="button" class="btn btn-success">방영중</button>
<button id="theater" type="button" class="btn btn-primary">극장판</button> <button id="movie" type="button" class="btn btn-primary">극장판</button>
<button id="complete_anilist" type="button" class="btn btn-dark"> <button id="complete_anilist" type="button" class="btn btn-dark">완결</button>
완결 <button id="top_view" type="button" class="btn btn-yellow">Top</button>
</button>
</div> </div>
<form id="airing_list_form"> <form id="airing_list_form">
<div id="airing_list"></div> <div id="airing_list"></div>
@@ -53,15 +52,15 @@
<form id="screen_movie_list_form"> <form id="screen_movie_list_form">
<div id="screen_movie_list" class="container"></div> <div id="screen_movie_list" class="container"></div>
</form> </form>
<div class="text-center"> {# <div class="text-center">#}
<div id="spinner" class="spinner-border" role="status"> {# <div id="spinner" class="spinner-border" role="status">#}
<span class="sr-only">Loading...</span> {# <span class="sr-only">Loading...</span>#}
</div> {# </div>#}
</div> {# </div>#}
<form id="program_auto_form"> <form id="program_auto_form">
<div id="episode_list"></div> <div id="episode_list"></div>
</form> </form>
</div>
</div> </div>
<!--전체--> <!--전체-->
@@ -106,8 +105,34 @@
observer.observe(); observer.observe();
const get_anime_screen_movie = (page) => {
let data = {page: page};
$.ajax({
url: '/' + package_name + '/ajax/' + sub + '/screen_movie_list',
type: "POST",
data: data,
cache: false,
dataType: "json",
success: (ret) => {
current_screen_movie_data = ret;
total_page = ret.total_page;
// console.log("ret::>", ret);
if (current_screen_movie_data !== "") {
make_screen_movie_list(ret, page);
$("img.lazyload").lazyload({
threshold: 100,
effect: "fadeIn",
});
div_visible = true;
}
next_page = page + 1;
},
});
};
const get_anime_list = (type, page) => { const get_anime_list = (type, page) => {
console.log(`type: ${type}, page: ${page}`) //console.log(`type: ${type}, page: ${page}`);
let url = '' let url = ''
let data = {"page": page, "type": type} let data = {"page": page, "type": type}
@@ -124,7 +149,7 @@
url = '/' + package_name + '/ajax/' + sub + '/anime_list' url = '/' + package_name + '/ajax/' + sub + '/anime_list'
current_cate = 'complete' current_cate = 'complete'
break; break;
case 'complete': case 'top_view':
url = '/' + package_name + '/ajax/' + sub + '/complete_list' url = '/' + package_name + '/ajax/' + sub + '/complete_list'
current_cate = 'complete' current_cate = 'complete'
break break
@@ -161,7 +186,7 @@
} }
next_page = page + 1 next_page = page + 1
} }
}) });
} }
function make_airing_list(data, page) { function make_airing_list(data, page) {
@@ -433,26 +458,45 @@
}); });
}); });
$('#anime_category #ing').on("click", function () {
// {#console.log(this.id)#}
let spinner = document.getElementById('spinner');
spinner.style.visibility = 'visible';
get_anime_list("ing", 1)
})
$('#anime_category #complete_anilist').on("click", function () { $("#anime_category").on("click", function (e) {
// {#console.log(this.id)#} // console.log($(this))
let spinner = document.getElementById('spinner'); // console.log(e)
spinner.style.visibility = 'visible';
get_anime_list("fin", 1)
})
$('#anime_category #theater').on("click", function () { switch (e.target.id) {
// {#console.log(this.id)#} case "ing":
let spinner = document.getElementById('spinner'); console.log("ing.....")
spinner.style.visibility = 'visible';
get_anime_list("theater", 1) {#spinner_loading.style.display = "block";#}
}) current_cate = "ing";
get_anime_list(1, "ing");
break;
case "movie":
console.log("movie")
current_cate = "movie";
get_anime_screen_movie(1);
break;
case "complete_anilist":
console.log("complete")
current_cate = "complete";
get_complete_anilist(1);
break;
case "top_view":
console.log("top_view")
current_cate = "top_view";
get_anime_list(1, "top_view");
break;
default:
console.log("default")
spinner_loading.style.display = "block";
current_cate = "ing";
get_anime_list(1, "ing");
break;
}
});
/*
// 분석 버튼 클릭시 호출 // 분석 버튼 클릭시 호출
$("body").on('click', '#analysis_btn', function (e) { $("body").on('click', '#analysis_btn', function (e) {
@@ -476,6 +520,7 @@
} }
}); });
}); });
*/
$("body").on('click', '#go_anilife_btn', function (e) { $("body").on('click', '#go_anilife_btn', function (e) {
@@ -546,8 +591,11 @@
// const el = document.querySelector('img'); // const el = document.querySelector('img');
// const observer = lozad(el); // passing a `NodeList` (e.g. `document.querySelectorAll()`) is also valid // const observer = lozad(el); // passing a `NodeList` (e.g. `document.querySelectorAll()`) is also valid
// observer.observe(); // observer.observe();
const loadNextAnimes = (cate, page) => { console.log('scroll 세로크기:', document.body.scrollHeight)
spinner.style.display = "block";
const loadNextAnimes = (cate, page, ch) => {
// spinner.style.display = "block";
loader.style.display = "block";
let data = {type: cate, page: String(page)}; let data = {type: cate, page: String(page)};
let url = '' let url = ''
switch (cate) { switch (cate) {
@@ -590,12 +638,23 @@
// {#imagesContainer.appendChild()#} // {#imagesContainer.appendChild()#}
console.log("return page:::> ", String(response.page)); console.log("return page:::> ", String(response.page));
// {#page = response.page#} // {#page = response.page#}
loader.style.display = "block";
if (current_cate === 'search') { if (current_cate === 'search') {
make_search_result_list(response.data, response.page); make_search_result_list(response.data, response.page);
} else { } else {
make_screen_movie_list(response.data, response.page); make_screen_movie_list(response.data, response.page);
} }
console.log(document.body.scrollHeight)
console.log(ch)
window.scrollBy({
top: ch + 35,
left: 0,
behavior: 'smooth'
});
loader.style.display = "none";
page++; page++;
next_page++; next_page++;
}) })
@@ -606,12 +665,20 @@
const onScroll = (e) => { const onScroll = (e) => {
console.dir(e.target.scrollingElement.scrollHeight); console.dir(e.target.scrollingElement.scrollHeight);
const {scrollTop, scrollHeight, clientHeight} = e.target.scrollingElement; const {scrollTop, scrollHeight, clientHeight} = e.target.scrollingElement;
if (Math.round(scrollHeight - scrollTop) <= clientHeight) { if (Math.round(scrollHeight - scrollTop) <= clientHeight + 170) {
document.getElementById("spinner").style.display = "block"; {#document.getElementById("spinner").style.display = "block";#}
console.log("loading"); console.log("loading");
console.log("now page::> ", page); console.log("now page::> ", page);
console.log("next_page::> ", String(next_page)); console.log("next_page::> ", String(next_page));
loadNextAnimes(current_cate, next_page); loadNextAnimes(current_cate, next_page, clientHeight);
/*window.scrollBy({
top: e.target.scrollingElement.scrollHeight + 200,
left: 0,
behavior: 'smooth'
});
*/
} }
}; };
@@ -1049,7 +1116,42 @@
} }
} }
.card-body {
padding: 0 !important;
}
.new-anime {
border-color: darksalmon;
border-width: 4px;
border-style: dashed;
}
.card-title {
padding: 1rem !important;
}
button#add_whitelist {
float: right;
}
button.btn-favorite {
background-color: #e0ff42;
}
body {
font-family: NanumSquareNeo, system-ui, -apple-system, Segoe UI, Roboto, Helvetica Neue, Noto Sans, Liberation Sans, Arial, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol, Noto Color Emoji;
}
body {
background-image: linear-gradient(90deg, #233f48, #6c6fa2, #768dae);
</style> </style>
<link
href="{{ url_for('.static', filename='css/bootstrap.min.css') }}"
type="text/css"
rel="stylesheet"
/>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.7.1/font/bootstrap-icons.css"> <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.7.1/font/bootstrap-icons.css">
{% endblock %} {% endblock %}

View File

@@ -34,10 +34,10 @@
{{ macros.setting_checkbox('linkkf_auto_mode_all', '에피소드 모두 받기', value=arg['linkkf_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }} {{ macros.setting_checkbox('linkkf_auto_mode_all', '에피소드 모두 받기', value=arg['linkkf_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }}
{{ macros.m_tab_content_end() }} {{ macros.m_tab_content_end() }}
{{ macros.m_tab_content_start('action', false) }} {# {{ macros.m_tab_content_start('action', false) }}#}
{{ macros.setting_button([['global_one_execute_sub_btn', '1회 실행']], left='1회 실행' ) }} {# {{ macros.setting_button([['global_one_execute_sub_btn', '1회 실행']], left='1회 실행' ) }}#}
{{ macros.setting_button([['global_reset_db_sub_btn', 'DB 초기화']], left='DB정리' ) }} {# {{ macros.setting_button([['global_reset_db_sub_btn', 'DB 초기화']], left='DB정리' ) }}#}
{{ macros.m_tab_content_end() }} {# {{ macros.m_tab_content_end() }}#}
</div><!--tab-content--> </div><!--tab-content-->
</form> </form>

View File

@@ -53,11 +53,11 @@
<form id="screen_movie_list_form"> <form id="screen_movie_list_form">
<div id="screen_movie_list" class="container"></div> <div id="screen_movie_list" class="container"></div>
</form> </form>
<div class="text-center"> <!-- <div class="text-center">
<div id="spinner" class="spinner-border" role="status"> <div id="spinner" class="spinner-border" role="status">
<span class="sr-only">Loading...</span> <span class="sr-only">Loading...</span>
</div> </div>
</div> </div> -->
<form id="program_auto_form"> <form id="program_auto_form">
<div id="episode_list"></div> <div id="episode_list"></div>
</form> </form>

View File

@@ -1,5 +1,5 @@
{% extends "base.html" %} {% block content %} {% extends "base.html" %} {% block content %}
<div id="preloader"> <div id="preloader">
<div class='demo'> <div class='demo'>
<!-- <div class="loader-inner">--> <!-- <div class="loader-inner">-->
<div class='circle'> <div class='circle'>
@@ -19,8 +19,8 @@
</div> </div>
<!-- </div>--> <!-- </div>-->
</div> </div>
</div> </div>
<div> <div>
<form id="program_list"> <form id="program_list">
{{ macros.setting_input_text_and_buttons('code', '작품 Code', {{ macros.setting_input_text_and_buttons('code', '작품 Code',
[['analysis_btn', '분석'], ['go_ohli24_btn', 'Go OHLI24']], desc='예) [['analysis_btn', '분석'], ['go_ohli24_btn', 'Go OHLI24']], desc='예)
@@ -29,11 +29,11 @@
<form id="program_auto_form"> <form id="program_auto_form">
<div id="episode_list"></div> <div id="episode_list"></div>
</form> </form>
</div> </div>
<!--전체--> <!--전체-->
<script src="{{ url_for('.static', filename='js/sjva_ui14.js') }}"></script> <script src="{{ url_for('.static', filename='js/sjva_ui14.js') }}"></script>
<script type="text/javascript"> <script type="text/javascript">
const package_name = "{{arg['package_name'] }}"; const package_name = "{{arg['package_name'] }}";
const sub = "{{arg['sub'] }}"; const sub = "{{arg['sub'] }}";
const ohli24_url = "{{arg['ohli24_url']}}"; const ohli24_url = "{{arg['ohli24_url']}}";
@@ -108,7 +108,8 @@
*/ */
str += tmp str += tmp
// program // program
str += m_hr_black(); {#str += m_hr_black();#}
str += "<div class='card p-lg-5 mt-md-3 p-md-3 border-light'>"
str += m_row_start(0); str += m_row_start(0);
tmp = '' tmp = ''
if (data.image != null) if (data.image != null)
@@ -129,7 +130,9 @@
str += m_col(9, tmp) str += m_col(9, tmp)
str += m_row_end(); str += m_row_end();
str += m_hr_black(); str += "</div>"
{#str += m_hr_black();#}
for (i in data.episode) { for (i in data.episode) {
str += m_row_start(); str += m_row_start();
tmp = ''; tmp = '';
@@ -146,7 +149,7 @@
tmp += '</div>' tmp += '</div>'
str += m_col(9, tmp) str += m_col(9, tmp)
str += m_row_end(); str += m_row_end();
if (i != data.length - 1) str += m_hr(0); {#if (i != data.length - 1) str += m_hr(0);#}
} }
document.getElementById("episode_list").innerHTML = str; document.getElementById("episode_list").innerHTML = str;
$('input[id^="checkbox_"]').bootstrapToggle() $('input[id^="checkbox_"]').bootstrapToggle()
@@ -191,6 +194,9 @@
$(document).ready(function () { $(document).ready(function () {
console.log('wr_id::', params.wr_id) console.log('wr_id::', params.wr_id)
if (document.getElementById("code").value !== "") {
{#document.getElementById("analysis_btn").click()#}
}
}); });
@@ -285,8 +291,8 @@
} }
}); });
}); });
</script> </script>
<style> <style>
button.code-button { button.code-button {
min-width: 82px !important; min-width: 82px !important;
} }
@@ -384,6 +390,19 @@
opacity: 1; opacity: 1;
} }
.card {
border: none;
box-shadow: inset 1px 1px hsl(0deg 0% 100% / 20%), inset -1px -1px hsl(0deg 0% 100% / 10%), 1px 3px 24px -1px rgb(0 0 0 / 15%);
background-color: transparent;
background-image: linear-gradient(125deg, hsla(0, 0%, 100%, .3), hsla(0, 0%, 100%, .2) 70%);
backdrop-filter: blur(5px);
}
.card.border-light {
--bs-border-opacity: 1;
border-color: rgba(var(--bs-light-rgb), var(--bs-border-opacity)) !important;
}
#airing_list { #airing_list {
display: none; display: none;
} }
@@ -544,5 +563,5 @@
z-index: 99999; z-index: 99999;
opacity: 0.5; opacity: 0.5;
} }
</style> </style>
{% endblock %} {% endblock %}

View File

@@ -1,6 +1,6 @@
{% extends "base.html" %} {% block content %} {% extends "base.html" %} {% block content %}
<!--<div id="preloader"></div>--> <!--<div id="preloader"></div>-->
<div id="preloader" class="loader"> <div id="preloader" class="loader">
<div class="loader-inner"> <div class="loader-inner">
<div class="loader-line-wrap"> <div class="loader-line-wrap">
<div class="loader-line"></div> <div class="loader-line"></div>
@@ -18,8 +18,8 @@
<div class="loader-line"></div> <div class="loader-line"></div>
</div> </div>
</div> </div>
</div> </div>
<div id="yommi_wrapper"> <div id="yommi_wrapper">
<div class="input-group mb-2"> <div class="input-group mb-2">
<input <input
id="input_search" id="input_search"
@@ -62,18 +62,18 @@
<div id="episode_list"></div> <div id="episode_list"></div>
</form> </form>
</div> </div>
</div> </div>
<!--전체--> <!--전체-->
<script <script
type="text/javascript" type="text/javascript"
src="https://cdn.jsdelivr.net/npm/lozad/dist/lozad.min.js" src="https://cdn.jsdelivr.net/npm/lozad/dist/lozad.min.js"
></script> ></script>
<script src="{{ url_for('.static', filename='js/sjva_ui14.js') }}"></script> <script src="{{ url_for('.static', filename='js/sjva_ui14.js') }}"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.lazyload/1.9.1/jquery.lazyload.min.js" <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.lazyload/1.9.1/jquery.lazyload.min.js"
integrity="sha512-jNDtFf7qgU0eH/+Z42FG4fw3w7DM/9zbgNPe3wfJlCylVDTT3IgKW5r92Vy9IHa6U50vyMz5gRByIu4YIXFtaQ==" integrity="sha512-jNDtFf7qgU0eH/+Z42FG4fw3w7DM/9zbgNPe3wfJlCylVDTT3IgKW5r92Vy9IHa6U50vyMz5gRByIu4YIXFtaQ=="
crossorigin="anonymous" referrerpolicy="no-referrer"></script> crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<script type="text/javascript"> <script type="text/javascript">
const package_name = "{{arg['package_name'] }}"; const package_name = "{{arg['package_name'] }}";
const sub = "{{arg['sub'] }}"; const sub = "{{arg['sub'] }}";
const anilife_url = "{{arg['anilife_url']}}"; const anilife_url = "{{arg['anilife_url']}}";
@@ -393,22 +393,22 @@
$('#anime_category #ing').on("click", function () { $('#anime_category #ing').on("click", function () {
// {#console.log(this.id)#} // {#console.log(this.id)#}
let spinner = document.getElementById('spinner'); // let spinner = document.getElementById('spinner');
spinner.style.visibility = 'visible'; // spinner.style.visibility = 'visible';
get_anime_list("ing", 1) get_anime_list("ing", 1)
}) })
$('#anime_category #complete_anilist').on("click", function () { $('#anime_category #complete_anilist').on("click", function () {
// {#console.log(this.id)#} // {#console.log(this.id)#}
let spinner = document.getElementById('spinner'); // let spinner = document.getElementById('spinner');
spinner.style.visibility = 'visible'; // spinner.style.visibility = 'visible';
get_anime_list("fin", 1) get_anime_list("fin", 1)
}) })
$('#anime_category #theater').on("click", function () { $('#anime_category #theater').on("click", function () {
// {#console.log(this.id)#} // {#console.log(this.id)#}
let spinner = document.getElementById('spinner'); // let spinner = document.getElementById('spinner');
spinner.style.visibility = 'visible'; // spinner.style.visibility = 'visible';
get_anime_list("theater", 1) get_anime_list("theater", 1)
}) })
@@ -505,7 +505,7 @@
// const observer = lozad(el); // passing a `NodeList` (e.g. `document.querySelectorAll()`) is also valid // const observer = lozad(el); // passing a `NodeList` (e.g. `document.querySelectorAll()`) is also valid
// observer.observe(); // observer.observe();
const loadNextAnimes = (cate, page) => { const loadNextAnimes = (cate, page) => {
spinner.style.display = "block"; {#spinner.style.display = "block";#}
let data = {type: cate, page: String(page)}; let data = {type: cate, page: String(page)};
let url = '' let url = ''
switch (cate) { switch (cate) {
@@ -565,7 +565,7 @@
console.dir(e.target.scrollingElement.scrollHeight); console.dir(e.target.scrollingElement.scrollHeight);
const {scrollTop, scrollHeight, clientHeight} = e.target.scrollingElement; const {scrollTop, scrollHeight, clientHeight} = e.target.scrollingElement;
if (Math.round(scrollHeight - scrollTop) <= clientHeight) { if (Math.round(scrollHeight - scrollTop) <= clientHeight) {
document.getElementById("spinner").style.display = "block"; {#document.getElementById("spinner").style.display = "block";#}
console.log("loading"); console.log("loading");
console.log("now page::> ", page); console.log("now page::> ", page);
console.log("next_page::> ", String(next_page)); console.log("next_page::> ", String(next_page));
@@ -582,8 +582,8 @@
}; };
document.addEventListener("scroll", debounce(onScroll, 300)); document.addEventListener("scroll", debounce(onScroll, 300));
</script> </script>
<style> <style>
button.code-button { button.code-button {
min-width: 82px !important; min-width: 82px !important;
} }
@@ -919,6 +919,6 @@
} }
</style> </style>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.7.1/font/bootstrap-icons.css"> <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.7.1/font/bootstrap-icons.css">
{% endblock %} {% endblock %}

View File

@@ -34,10 +34,10 @@
{{ macros.setting_checkbox('ohli24_auto_mode_all', '에피소드 모두 받기', value=arg['ohli24_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }} {{ macros.setting_checkbox('ohli24_auto_mode_all', '에피소드 모두 받기', value=arg['ohli24_auto_mode_all'], desc=['On : 이전 에피소드를 모두 받습니다.', 'Off : 최신 에피소드만 받습니다.']) }}
{{ macros.m_tab_content_end() }} {{ macros.m_tab_content_end() }}
{{ macros.m_tab_content_start('action', false) }} {# {{ macros.m_tab_content_start('action', false) }}#}
{{ macros.setting_button([['global_one_execute_sub_btn', '1회 실행']], left='1회 실행' ) }} {# {{ macros.setting_button([['global_one_execute_sub_btn', '1회 실행']], left='1회 실행' ) }}#}
{{ macros.setting_button([['global_reset_db_sub_btn', 'DB 초기화']], left='DB정리' ) }} {# {{ macros.setting_button([['global_reset_db_sub_btn', 'DB 초기화']], left='DB정리' ) }}#}
{{ macros.m_tab_content_end() }} {# {{ macros.m_tab_content_end() }}#}
</div><!--tab-content--> </div><!--tab-content-->
</form> </form>

16
yommi_api/api.sh Normal file
View File

@@ -0,0 +1,16 @@
#!/bin/bash
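# Bootstraps the local FastAPI helper: installs the dependencies, then runs uvicorn on $PORT from $WORK_DIR.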
LINE="***********************************************"
PORT="7070"
#WORK_DIR="/mnt/WD/Users/Work/python/ff_dev_plugins/anime_downloader/yommi_api"
WORK_DIR="/Volumes/WD/Users/Work/python/ff_dev_plugins/anime_downloader/yommi_api"
echo "$LINE"
echo "* fast api running..."
echo "$LINE"
pip install fastapi "uvicorn[standard]" playwright  # quoted so the shell does not glob-expand [standard]
# shellcheck disable=SC2164
cd "$WORK_DIR"
uvicorn main:app --reload --port="$PORT"
#echo "* listening $PORT..."
#echo "$LINE"