2022-02-08 23:17:30 +09:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
# @Time : 2022/02/08 3:44 PM
|
|
|
|
|
# @Author : yommi
|
2022-04-06 23:45:15 +09:00
|
|
|
# @Site :
|
2022-02-08 23:17:30 +09:00
|
|
|
# @File : logic_ohli24
|
|
|
|
|
# @Software: PyCharm
|
|
|
|
|
|
|
|
|
|
import os, sys, traceback, re, json, threading
|
2023-03-01 18:01:39 +09:00
|
|
|
import time
|
2022-06-07 20:00:29 +09:00
|
|
|
from datetime import datetime, date
|
2022-02-08 23:17:30 +09:00
|
|
|
import copy
|
2022-03-27 18:26:18 +09:00
|
|
|
import hashlib
|
2022-04-06 23:45:15 +09:00
|
|
|
|
2023-03-01 18:01:39 +09:00
|
|
|
import discord
|
|
|
|
|
|
2022-02-08 23:17:30 +09:00
|
|
|
# third-party
|
|
|
|
|
import requests
|
2023-03-01 18:01:39 +09:00
|
|
|
from discord_webhook import DiscordWebhook, DiscordEmbed
|
2022-02-10 00:34:14 +09:00
|
|
|
from lxml import html
|
2022-03-27 18:26:18 +09:00
|
|
|
from urllib import parse
|
2022-04-09 19:37:36 +09:00
|
|
|
import urllib
|
2022-04-17 13:45:37 +09:00
|
|
|
import asyncio
|
|
|
|
|
|
|
|
|
|
# import aiohttp
|
2022-04-06 23:45:15 +09:00
|
|
|
|
2022-02-08 23:17:30 +09:00
|
|
|
# third-party
|
|
|
|
|
from flask import request, render_template, jsonify
|
|
|
|
|
from sqlalchemy import or_, and_, func, not_, desc
|
2022-03-27 18:26:18 +09:00
|
|
|
from pip._internal import main
|
|
|
|
|
|
2023-02-01 19:22:06 +09:00
|
|
|
from .lib.utils import yommi_timeit
|
|
|
|
|
|
2022-04-17 13:45:37 +09:00
|
|
|
# Runtime dependency bootstrap.
#
# Bug fixed: the original loop did `import pkg`, which tries to import a
# module literally named "pkg" — that always raises ImportError, so pip
# was invoked on every startup for every package.  We now probe for the
# actual importable module name (pip distribution names and module names
# differ, e.g. "beautifulsoup4" installs as "bs4").
pkgs = ["beautifulsoup4", "jsbeautifier", "aiohttp"]

# pip distribution name -> importable module name
_PKG_TO_MODULE = {
    "beautifulsoup4": "bs4",
    "jsbeautifier": "jsbeautifier",
    "aiohttp": "aiohttp",
}

import importlib

for pkg in pkgs:
    try:
        importlib.import_module(_PKG_TO_MODULE.get(pkg, pkg))
    except ImportError:
        # `main` is pip's entry point (from pip._internal, imported above).
        main(["install", pkg])

import aiohttp

from bs4 import BeautifulSoup

import jsbeautifier
|
2022-02-08 23:17:30 +09:00
|
|
|
|
|
|
|
|
# sjva 공용
|
|
|
|
|
from framework import db, scheduler, path_data, socketio
|
|
|
|
|
from framework.util import Util
|
|
|
|
|
from framework.common.util import headers
|
2022-04-06 23:45:15 +09:00
|
|
|
from plugin import (
|
|
|
|
|
LogicModuleBase,
|
|
|
|
|
FfmpegQueueEntity,
|
|
|
|
|
FfmpegQueue,
|
|
|
|
|
default_route_socketio,
|
|
|
|
|
)
|
2022-02-08 23:17:30 +09:00
|
|
|
from tool_base import d
|
2022-04-06 23:45:15 +09:00
|
|
|
|
2022-02-08 23:17:30 +09:00
|
|
|
# 패키지
|
|
|
|
|
from .plugin import P
|
|
|
|
|
|
|
|
|
|
logger = P.logger
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#########################################################
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class LogicOhli24(LogicModuleBase):
|
|
|
|
|
# Default values for this module's settings.  All values are strings because
# the framework's ModelSetting store is string-typed ("True"/"False" are
# parsed with get_bool, numbers with get_int).
db_default = {
    "ohli24_db_version": "1.1",
    # Base URL of the ohli24 site (changes occasionally; user-configurable).
    "ohli24_url": "https://ohli24.live",
    # Where downloaded episodes are written.
    "ohli24_download_path": os.path.join(path_data, P.package_name, "ohli24"),
    "ohli24_auto_make_folder": "True",
    "ohli24_auto_make_season_folder": "True",
    # Tag inserted into folder names for finished series ("완결" = completed).
    "ohli24_finished_insert": "[완결]",
    "ohli24_max_ffmpeg_process_count": "1",
    # Episode list sort direction (see get_series_info).
    "ohli24_order_desc": "True",
    "ohli24_auto_start": "False",
    # Cron expression for the auto-download scheduler.
    "ohli24_interval": "* 5 * * *",
    "ohli24_auto_mode_all": "False",
    # "|"-separated whitelist of content codes; "all" means everything.
    "ohli24_auto_code_list": "all",
    "ohli24_current_code": "",
    "ohli24_uncompleted_auto_enqueue": "False",
    "ohli24_image_url_prefix_series": "",
    "ohli24_image_url_prefix_episode": "",
    "ohli24_discord_notify": "True",
}

# Class-level scraping state shared across requests.
current_headers = None
current_data = None      # cached result of the last get_series_info call
referer = None
origin_url = None
episode_url = None       # last visited episode page; used as Referer
cookies = None

# Reused HTTP session and default request headers for site scraping.
session = requests.Session()

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
    "Chrome/71.0.3578.98 Safari/537.36",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
    "Accept-Language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7",
    # "Referer" and "Cookie" are filled in dynamically where needed.
}

useragent = {
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, "
    "like Gecko) Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36"
}
|
2022-02-08 23:17:30 +09:00
|
|
|
|
|
|
|
|
def __init__(self, P):
    """Register the ohli24 module with the plugin framework and set up
    Discord-notification constants."""
    # "setting" is the default sub-page; scheduler_desc labels the cron job.
    super(LogicOhli24, self).__init__(P, "setting", scheduler_desc="ohli24 자동 다운로드")
    self.name = "ohli24"  # module name used in routes and setting keys
    self.queue = None  # FfmpegQueue; created later by the framework lifecycle
    self.last_post_title = ""  # last notified title, to avoid duplicate notices
    # NOTE(review): hard-coded Discord webhook URL is an embedded credential;
    # it should live in a setting and the webhook should be rotated.
    self.discord_webhook_url = "https://discord.com/api/webhooks/1071430127860334663/viCiM5ssS-U1_ONWgdWa-64KgvPfU5jJ8WQAym-4vkiyASB0e8IcnlLnxG4F40nj10kZ"
    self.discord_color = "242424"  # embed color (hex string)
    self.discord_title = "새로운 애니"
    self.DISCORD_CHANNEL_ID = "1071430054023798958"
    # Wire the default socket.io event handlers for this module.
    default_route_socketio(P, self)
|
|
|
|
|
|
2023-01-13 20:14:11 +09:00
|
|
|
@staticmethod
async def get_html_playwright(
    url: str,
    headless: bool = False,
    referer: str = "",
    engine: str = "chrome",
    stealth: bool = False,
):
    """Fetch *url* with a real browser via Playwright and return the rendered
    page HTML.

    Args:
        url: page to load.
        headless: run the browser without a window.
        referer: explicit Referer header; when empty, the last visited
            episode URL (``LogicOhli24.episode_url``) is used.
        engine: "chrome", "webkit", or anything else for firefox.
        stealth: apply playwright-stealth evasions to the page.

    Returns:
        The page content as a string, or ``None`` if any step failed
        (errors are logged, never raised).
    """
    try:
        from playwright.async_api import async_playwright
        from playwright_stealth import stealth_async

        import time

        browser_args = [
            "--window-size=1300,570",
            "--window-position=0,0",
            "--no-sandbox",
            "--disable-gpu",
            "--mute-audio",
        ]

        start = time.time()

        async with async_playwright() as p:
            # Bind browser up front so the finally block cannot raise
            # NameError when launch itself fails (original bug: an
            # unconditional `await browser.close()` masked launch errors).
            browser = None
            try:
                if engine == "chrome":
                    browser = await p.chromium.launch(
                        channel="chrome", args=browser_args, headless=headless
                    )
                elif engine == "webkit":
                    browser = await p.webkit.launch(
                        headless=headless,
                        args=browser_args,
                    )
                else:
                    browser = await p.firefox.launch(
                        headless=headless,
                        args=browser_args,
                    )

                # Referer priority: explicit argument wins, otherwise fall
                # back to the last episode page.  The original compared the
                # default "" with `is not None`, which is always true and
                # therefore always clobbered the episode Referer.
                LogicOhli24.headers["Referer"] = LogicOhli24.episode_url
                if referer:
                    LogicOhli24.headers["Referer"] = referer

                context = await browser.new_context(
                    extra_http_headers=LogicOhli24.headers, ignore_https_errors=True
                )
                page = await context.new_page()

                if stealth:
                    await stealth_async(page)

                print(f'Referer:: {LogicOhli24.headers["Referer"]}')

                # wait_until options: domcontentloaded / load / networkidle.
                await page.goto(
                    url,
                    wait_until="load",
                )
                # Give client-side scripts a moment to settle.
                await asyncio.sleep(1)

                print(f"page.url:: {page.url}")
                # Remember where we ended up (redirects included).
                LogicOhli24.origin_url = page.url

                temp_content = await page.content()

                print(f"run at {time.time() - start} sec")

                return temp_content
            except Exception as e:
                logger.error("Exception:%s", e)
                logger.error(traceback.format_exc())
            finally:
                if browser is not None:
                    await browser.close()

    except Exception as e:
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())
|
|
|
|
|
|
2022-02-08 23:17:30 +09:00
|
|
|
@staticmethod
|
|
|
|
|
def db_init():
|
|
|
|
|
pass
|
|
|
|
|
# try:
|
|
|
|
|
# for key, value in P.Logic.db_default.items():
|
|
|
|
|
# if db.session.query(ModelSetting).filter_by(key=key).count() == 0:
|
|
|
|
|
# db.session.add(ModelSetting(key, value))
|
|
|
|
|
# db.session.commit()
|
|
|
|
|
# except Exception as e:
|
|
|
|
|
# logger.error('Exception:%s', e)
|
|
|
|
|
# logger.error(traceback.format_exc())
|
|
|
|
|
|
|
|
|
|
def process_menu(self, sub, req):
    """Render the HTML page for sub-menu *sub* of this module."""
    arg = P.ModelSetting.to_dict()
    arg["sub"] = self.name

    known_subs = ["setting", "queue", "list", "category", "request"]
    if sub not in known_subs:
        # Unknown sub-page: fall back to the generic sample template.
        return render_template("sample.html", title="%s - %s" % (P.package_name, sub))

    if sub == "request":
        content_code = req.args.get("content_code")
        if content_code is not None:
            # Pre-select the requested series on the request page.
            arg["ohli24_current_code"] = content_code
    elif sub == "setting":
        # Expose scheduler state to the settings template.
        job_id = "%s_%s" % (self.P.package_name, self.name)
        arg["scheduler"] = str(scheduler.is_include(job_id))
        arg["is_running"] = str(scheduler.is_running(job_id))

    template_name = "{package_name}_{module_name}_{sub}.html".format(
        package_name=P.package_name, module_name=self.name, sub=sub
    )
    return render_template(template_name, arg=arg)
|
2022-02-08 23:17:30 +09:00
|
|
|
|
|
|
|
|
# @staticmethod
|
|
|
|
|
def process_ajax(self, sub, req):
    """AJAX dispatcher for the module's web UI.

    *sub* selects the action; parameters are read from the request form.
    Returns a flask JSON response, or None when *sub* is unrecognized or
    an error was swallowed by the outer handler.

    NOTE(review): most branches read the global flask ``request`` rather
    than the passed-in ``req`` — presumably they are the same object here;
    confirm before unifying.
    """
    try:
        if sub == "analysis":
            # Analyze one series page and cache the parsed result.
            code = request.form["code"]
            wr_id = request.form.get("wr_id", None)
            bo_table = request.form.get("bo_table", None)
            data = []
            # Remember the last analyzed code so the UI can restore it.
            P.ModelSetting.set("ohli24_current_code", code)
            data = self.get_series_info(code, wr_id, bo_table)
            self.current_data = data
            return jsonify({"ret": "success", "data": data, "code": code})
        elif sub == "anime_list":
            # Paginated listing for a board category.
            data = []
            cate = request.form["type"]
            page = request.form["page"]
            data = self.get_anime_info(cate, page)
            # self.current_data = data
            return jsonify(
                {"ret": "success", "cate": cate, "page": page, "data": data}
            )
        elif sub == "complete_list":
            # Same as anime_list; kept as a separate action by the UI.
            data = []
            cate = request.form["type"]
            logger.debug("cate:: %s", cate)
            page = request.form["page"]
            data = self.get_anime_info(cate, page)
            # self.current_data = data
            return jsonify(
                {"ret": "success", "cate": cate, "page": page, "data": data}
            )
        elif sub == "search":
            data = []
            cate = request.form["type"]
            query = request.form["query"]
            page = request.form["page"]
            data = self.get_search_result(query, page, cate)
            # self.current_data = data
            return jsonify(
                {
                    "ret": "success",
                    "cate": cate,
                    "page": page,
                    "query": query,
                    "data": data,
                }
            )
        elif sub == "add_queue":
            # Enqueue a single episode for download.
            logger.debug(f"linkkf add_queue routine ===============")
            ret = {}
            info = json.loads(request.form["data"])
            logger.info(f"info:: {info}")
            ret["ret"] = self.add(info)
            return jsonify(ret)
        elif sub == "entity_list":
            return jsonify(self.queue.get_entity_list())
        elif sub == "queue_command":
            ret = self.queue.command(
                req.form["command"], int(req.form["entity_id"])
            )
            return jsonify(ret)
        elif sub == "add_queue_checked_list":
            # Enqueue a batch of episodes on a background thread so the
            # request returns immediately; progress is pushed via socketio.
            data = json.loads(request.form["data"])

            def func():
                count = 0
                for tmp in data:
                    add_ret = self.add(tmp)
                    if add_ret.startswith("enqueue"):
                        self.socketio_callback("list_refresh", "")
                        count += 1
                notify = {
                    "type": "success",
                    "msg": "%s 개의 에피소드를 큐에 추가 하였습니다." % count,
                }
                socketio.emit(
                    "notify", notify, namespace="/framework", broadcast=True
                )

            thread = threading.Thread(target=func, args=())
            thread.daemon = True
            thread.start()
            return jsonify("")
        elif sub == "web_list":
            return jsonify(ModelOhli24Item.web_list(request))
        elif sub == "db_remove":
            return jsonify(ModelOhli24Item.delete_by_id(req.form["id"]))
        elif sub == "add_whitelist":
            # Add a content code to the auto-download whitelist; with no
            # JSON body, whitelist the currently analyzed series instead.
            try:
                params = request.get_json()
                logger.debug(f"params:: {params}")
                if params is not None:
                    code = params["data_code"]
                    logger.debug(f"params: {code}")
                    ret = LogicOhli24.add_whitelist(code)
                else:
                    ret = LogicOhli24.add_whitelist()
                return jsonify(ret)
            except Exception as e:
                logger.error("Exception:%s", e)
                logger.error(traceback.format_exc())
    except Exception as e:
        P.logger.error(f"Exception: {str(e)}")
        P.logger.error(traceback.format_exc())
|
|
|
|
|
|
2022-11-25 19:20:52 +09:00
|
|
|
def process_api(self, sub, req):
    """External API dispatcher (non-UI callers).

    Currently only supports ``anime_list``.  For any other *sub* this
    falls through and implicitly returns None.
    """
    logger.debug("here!")
    ret = {}
    try:
        if sub == "anime_list":
            logger.debug(f"anime_list =*==")
            logger.debug(req)
            data = []
            cate = req.form["type"]
            page = req.form["page"]
            data = self.get_anime_info(cate, page)
            # self.current_data = data
            return jsonify(
                {"ret": "success", "cate": cate, "page": page, "data": data}
            )
    except Exception as exception:
        logger.error("Exception:%s", exception)
        logger.error(traceback.format_exc())
        # Report the failure to the caller instead of raising.
        ret["ret"] = "exception"
        ret["data"] = str(exception)
        return jsonify(ret)
|
|
|
|
|
|
|
|
|
|
#########################################################
|
|
|
|
|
# API
|
|
|
|
|
#########################################################
|
|
|
|
|
# @blueprint.route("/api/<sub>", methods=["GET", "POST"])
|
|
|
|
|
# @check_api
|
|
|
|
|
# def api(self, sub):
|
|
|
|
|
# if sub == "ohli24":
|
|
|
|
|
# try:
|
|
|
|
|
# logger.debug("api ohli24")
|
|
|
|
|
# data = {"aaaa"}
|
|
|
|
|
# except Exception as e:
|
|
|
|
|
# logger.error("Exception:%s", e)
|
|
|
|
|
# logger.error(traceback.format_exc())
|
|
|
|
|
# return data
|
|
|
|
|
|
2022-08-22 16:33:02 +09:00
|
|
|
@staticmethod
def add_whitelist(*args):
    """Add a content code to the "ohli24_auto_code_list" setting.

    With no argument, whitelists the code of the currently cached series
    (``LogicOhli24.current_data``) and returns that cached data; with one
    argument, whitelists that code and returns a small status dict
    (``ret``/``code`` on success, ``ret``/``log`` on failure or duplicate).
    """
    ret = {}
    try:
        if len(args) == 0:
            # No explicit code: use the series currently shown in the UI.
            code = str(LogicOhli24.current_data["code"])
        else:
            code = str(args[0])

        whitelist_program = P.ModelSetting.get("ohli24_auto_code_list")

        # The setting stores codes separated by "|" and/or newlines;
        # normalize to a flat list of stripped entries.
        whitelist_programs = [
            str(x.strip()) for x in whitelist_program.replace("\n", "|").split("|")
        ]

        if code not in whitelist_programs:
            whitelist_programs.append(code)
            whitelist_programs = filter(
                lambda x: x != "", whitelist_programs
            )  # remove blank code
            whitelist_program = "|".join(whitelist_programs)
            # Row-lock the setting to avoid losing concurrent updates.
            entity = (
                db.session.query(P.ModelSetting)
                .filter_by(key="ohli24_auto_code_list")
                .with_for_update()
                .first()
            )
            entity.value = whitelist_program
            db.session.commit()
            ret["ret"] = True
            ret["code"] = code
            if len(args) == 0:
                return LogicOhli24.current_data
            else:
                return ret
        else:
            ret["ret"] = False
            ret["log"] = "이미 추가되어 있습니다."
    except Exception as e:
        logger.error("Exception:%s", e)
        logger.error(traceback.format_exc())
        ret["ret"] = False
        ret["log"] = str(e)
    return ret
|
|
|
|
|
|
2022-03-27 18:26:18 +09:00
|
|
|
def setting_save_after(self):
    """Hook called after settings are saved: sync the ffmpeg worker count."""
    configured_count = P.ModelSetting.get_int("ohli24_max_ffmpeg_process_count")
    if self.queue.get_max_ffmpeg_count() != configured_count:
        self.queue.set_max_ffmpeg_count(configured_count)
|
2022-03-27 18:26:18 +09:00
|
|
|
|
2022-04-17 13:45:37 +09:00
|
|
|
def scheduler_function(self):
    """Periodic auto-download job.

    Reads the whitelist setting: "all" scans today's airing board
    (the "all" branch is partially disabled — see the commented call);
    otherwise each whitelisted code's episodes are enqueued.
    """
    # TODO: scheduling function is not fully implemented yet.
    logger.debug(f"ohli24 scheduler_function::=========================")

    content_code_list = P.ModelSetting.get_list("ohli24_auto_code_list", "|")
    logger.debug(f"content_code_list::: {content_code_list}")
    url_list = ["https://www.naver.com/", "https://www.daum.net/"]

    # Weekday names (Mon..Sun) as used by the site's "sca" query parameter.
    week = ["월요일", "화요일", "수요일", "목요일", "금요일", "토요일", "일요일"]
    today = date.today()
    print(today)
    print()
    print(today.weekday())

    # Board page listing series airing today.
    url = f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing&sca={week[today.weekday()]}'

    if "all" in content_code_list:
        ret_data = LogicOhli24.get_auto_anime_info(self, url=url)

        logger.debug(f"today_info:: {ret_data}")

        for item in ret_data["anime_list"]:
            wr_id = None
            bo_table = None
            data = []
            # Temporarily disabled:
            # data = self.get_series_info(item["code"], wr_id, bo_table)

        # result = asyncio.run(LogicOhli24.main(url_list))
        # logger.debug(f"result:: {result}")

    elif len(content_code_list) > 0:
        # Enqueue every episode of every whitelisted series; already
        # downloaded items are filtered out inside self.add().
        for item in content_code_list:
            url = P.ModelSetting.get("ohli24_url") + "/c/" + item
            print("scheduling url: %s", url)
            print("debug===")
            print(item)
            content_info = self.get_series_info(item, "", "")

            for episode_info in content_info["episode"]:
                add_ret = self.add(episode_info)
                if add_ret.startswith("enqueue"):
                    self.socketio_callback("list_refresh", "")
            # Completion state is checked against the DB inside add().
|
|
|
|
|
|
2022-04-17 13:45:37 +09:00
|
|
|
@staticmethod
async def get_data(url) -> str:
    """Asynchronously GET *url* and return the response body as text."""
    async with aiohttp.ClientSession() as http:
        async with http.get(url) as resp:
            return await resp.text()
|
|
|
|
|
|
|
|
|
|
@staticmethod
async def main(url_list: list):
    """Fetch every URL in *url_list* concurrently.

    Returns the response bodies in the same order as *url_list*.
    """
    fetches = [LogicOhli24.get_data(target) for target in url_list]
    return await asyncio.gather(*fetches)
|
|
|
|
|
|
2022-04-10 14:24:49 +09:00
|
|
|
def get_series_info(self, code, wr_id, bo_table):
    """Scrape a series page and return its metadata and episode list.

    *code* may be a bare content code or a full URL containing "/c/"
    (series) or "/e/" (episode).  When *wr_id* is a non-empty string the
    board URL form is used with *bo_table* instead.  Results are cached
    in ``self.current_data`` and returned from cache on repeat calls.

    Returns a dict with title/image/description/episodes, or
    ``{"ret": "exception", "log": ...}`` on failure.
    """
    code_type = "c"
    try:
        # Serve from cache when the same series was just analyzed.
        if (
            self.current_data is not None
            and "code" in self.current_data
            and self.current_data["code"] == code
        ):
            return self.current_data

        # Accept full URLs and extract the code and its type.
        if code.startswith("http"):
            if "/c/" in code:
                code = code.split("c/")[1]
                code_type = "c"
            elif "/e/" in code:
                code = code.split("e/")[1]
                code_type = "e"

        logger.info(f"code:::: {code}")

        if code_type == "c":
            url = P.ModelSetting.get("ohli24_url") + "/c/" + code
        elif code_type == "e":
            url = P.ModelSetting.get("ohli24_url") + "/e/" + code
        else:
            url = P.ModelSetting.get("ohli24_url") + "/e/" + code

        # A board post id overrides the code-based URL entirely.
        if wr_id is not None:
            if len(wr_id) > 0:
                url = (
                    P.ModelSetting.get("ohli24_url")
                    + "/bbs/board.php?bo_table="
                    + bo_table
                    + "&wr_id="
                    + wr_id
                )
            else:
                pass

        response_data = LogicOhli24.get_html(url, timeout=10)
        tree = html.fromstring(response_data)
        title = tree.xpath('//div[@class="view-title"]/h1/text()')[0]
        image = tree.xpath('//div[@class="image"]/div/img/@src')[0]
        # Site serves relative "../..." image paths; make them absolute.
        image = image.replace("..", P.ModelSetting.get("ohli24_url"))
        des_items = tree.xpath('//div[@class="list"]/p')
        des = {}
        # NOTE: des_key is unused; description_dict below maps the
        # on-page Korean labels to these internal keys instead.
        des_key = [
            "_otit",
            "_dir",
            "_pub",
            "_tag",
            "_classifi",
            "_country",
            "_grade",
            "_total_chapter",
            "_show_time",
            "_release_year",
        ]
        # On-page description label -> internal key.
        description_dict = {
            "원제": "_otit",
            "원작": "_org",
            "감독": "_dir",
            "각본": "_scr",
            "캐릭터 디자인": "_character_design",
            "음악": "_sound",
            "제작사": "_pub",
            "장르": "_tag",
            "분류": "_classifi",
            "제작국가": "_country",
            "방영일": "_date",
            "등급": "_grade",
            "총화수": "_total_chapter",
            "상영시간": "_show_time",
            "상영일": "_release_date",
            "개봉년도": "_release_year",
            "개봉일": "_opening_date",
            "런타임": "_run_time",
        }

        list_body_li = tree.xpath('//ul[@class="list-body"]/li')
        episodes = []
        vi = None
        for li in list_body_li:
            title = li.xpath(".//a/text()")[0].strip()
            thumbnail = image
            link = (
                P.ModelSetting.get("ohli24_url")
                + li.xpath('.//a[@class="item-subject"]/@href')[0]
            )
            _date = li.xpath('.//div[@class="wr-date"]/text()')[0]
            # Stable per-episode id derived from the episode title.
            m = hashlib.md5(title.encode("utf-8"))
            _vi = m.hexdigest()
            episodes.append(
                {
                    "title": title,
                    "link": link,
                    "thumbnail": image,
                    "date": _date,
                    "day": _date,
                    "_id": title,
                    "va": link,
                    "_vi": _vi,
                    "content_code": code,
                }
            )

        # Parse the label/value description rows into `des`.
        for idx, item in enumerate(des_items):
            span = item.xpath(".//span//text()")
            key = description_dict[span[0]]
            try:
                des[key] = item.xpath(".//span/text()")[1]
            except IndexError:
                # Label present but value missing on the page.
                des[key] = ""

        image = image.replace("..", P.ModelSetting.get("ohli24_url"))

        ser_description = tree.xpath(
            '//div[@class="view-stocon"]/div[@class="c"]/text()'
        )

        # NOTE(review): `title` at this point is the LAST episode title from
        # the loop above, not the series title parsed earlier, and "date" is
        # a hard-coded placeholder — confirm intended before relying on them.
        data = {
            "title": title,
            "image": image,
            "date": "2022.01.11 00:30 (화)",
            "ser_description": ser_description,
            "des": des,
            "episode": episodes,
        }

        # Apply the configured episode sort order.
        if not P.ModelSetting.get_bool("ohli24_order_desc"):
            data["episode"] = list(reversed(data["episode"]))
            data["list_order"] = "desc"

        return data

    except Exception as e:
        P.logger.error("Exception:%s", e)
        P.logger.error(traceback.format_exc())
        return {"ret": "exception", "log": str(e)}
|
|
|
|
|
|
|
|
|
|
def get_anime_info(self, cate, page):
|
|
|
|
|
try:
|
|
|
|
|
if cate == "ing":
|
|
|
|
|
url = (
|
|
|
|
|
P.ModelSetting.get("ohli24_url")
|
|
|
|
|
+ "/bbs/board.php?bo_table="
|
|
|
|
|
+ cate
|
|
|
|
|
+ "&page="
|
|
|
|
|
+ page
|
|
|
|
|
)
|
|
|
|
|
elif cate == "movie":
|
|
|
|
|
url = (
|
|
|
|
|
P.ModelSetting.get("ohli24_url")
|
|
|
|
|
+ "/bbs/board.php?bo_table="
|
|
|
|
|
+ cate
|
|
|
|
|
+ "&page="
|
|
|
|
|
+ page
|
|
|
|
|
)
|
|
|
|
|
else:
|
|
|
|
|
url = (
|
|
|
|
|
P.ModelSetting.get("ohli24_url")
|
|
|
|
|
+ "/bbs/board.php?bo_table="
|
|
|
|
|
+ cate
|
|
|
|
|
+ "&page="
|
|
|
|
|
+ page
|
|
|
|
|
)
|
|
|
|
|
# cate == "complete":
|
2023-02-01 19:22:06 +09:00
|
|
|
|
|
|
|
|
# logger.info("url:::> %s", url)
|
|
|
|
|
|
2022-04-06 23:45:15 +09:00
|
|
|
data = {}
|
|
|
|
|
response_data = LogicOhli24.get_html(url, timeout=10)
|
2023-01-13 20:14:11 +09:00
|
|
|
# response_data = asyncio.run(
|
|
|
|
|
# LogicOhli24.get_html_playwright(
|
|
|
|
|
# url,
|
|
|
|
|
# headless=False,
|
|
|
|
|
# # referer=referer_url,
|
|
|
|
|
# engine="chrome",
|
|
|
|
|
# # stealth=True,
|
|
|
|
|
# )
|
|
|
|
|
# )
|
|
|
|
|
# print(response_data)
|
2022-04-06 23:45:15 +09:00
|
|
|
tree = html.fromstring(response_data)
|
2023-01-15 16:46:48 +09:00
|
|
|
tmp_items = tree.xpath('//div[@class="list-row"]')
|
2022-04-06 23:45:15 +09:00
|
|
|
data["anime_count"] = len(tmp_items)
|
|
|
|
|
data["anime_list"] = []
|
|
|
|
|
|
|
|
|
|
for item in tmp_items:
|
|
|
|
|
entity = {}
|
|
|
|
|
entity["link"] = item.xpath(".//a/@href")[0]
|
|
|
|
|
entity["code"] = entity["link"].split("/")[-1]
|
2022-04-10 14:24:49 +09:00
|
|
|
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[
|
|
|
|
|
0
|
|
|
|
|
].strip()
|
2023-03-03 18:54:10 +09:00
|
|
|
# logger.debug(item.xpath(".//div[@class='img-item']/img/@src")[0])
|
|
|
|
|
# logger.debug(item.xpath(".//div[@class='img-item']/img/@data-ezsrc")[0])
|
|
|
|
|
# entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
|
|
|
|
|
# 0
|
|
|
|
|
# ].replace("..", P.ModelSetting.get("ohli24_url"))
|
|
|
|
|
|
2023-03-07 20:26:38 +09:00
|
|
|
if len(item.xpath(".//div[@class='img-item']/img/@src")) > 0:
|
|
|
|
|
entity["image_link"] = item.xpath(
|
|
|
|
|
".//div[@class='img-item']/img/@src"
|
2023-03-07 21:11:49 +09:00
|
|
|
)[0].replace("..", P.ModelSetting.get("ohli24_url"))
|
2023-03-07 20:26:38 +09:00
|
|
|
else:
|
|
|
|
|
entity["image_link"] = item.xpath(
|
|
|
|
|
".//div[@class='img-item']/img/@data-ezsrc"
|
|
|
|
|
)[0]
|
|
|
|
|
|
2022-04-06 23:45:15 +09:00
|
|
|
data["ret"] = "success"
|
|
|
|
|
data["anime_list"].append(entity)
|
|
|
|
|
|
|
|
|
|
return data
|
|
|
|
|
except Exception as e:
|
|
|
|
|
P.logger.error("Exception:%s", e)
|
2022-04-09 19:37:36 +09:00
|
|
|
P.logger.error(traceback.format_exc())
|
|
|
|
|
return {"ret": "exception", "log": str(e)}
|
|
|
|
|
|
2022-06-26 21:46:16 +09:00
|
|
|
def get_auto_anime_info(self, url: str = ""):
|
|
|
|
|
try:
|
|
|
|
|
|
|
|
|
|
logger.info("url:::> %s", url)
|
|
|
|
|
data = {}
|
|
|
|
|
response_data = LogicOhli24.get_html(url, timeout=10)
|
|
|
|
|
tree = html.fromstring(response_data)
|
|
|
|
|
tmp_items = tree.xpath('//div[@class="list-row"]')
|
|
|
|
|
data["anime_count"] = len(tmp_items)
|
|
|
|
|
data["anime_list"] = []
|
|
|
|
|
|
|
|
|
|
for item in tmp_items:
|
|
|
|
|
entity = {}
|
|
|
|
|
entity["link"] = item.xpath(".//a/@href")[0]
|
|
|
|
|
entity["code"] = entity["link"].split("/")[-1]
|
|
|
|
|
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[
|
|
|
|
|
0
|
|
|
|
|
].strip()
|
|
|
|
|
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
|
|
|
|
|
0
|
|
|
|
|
].replace("..", P.ModelSetting.get("ohli24_url"))
|
|
|
|
|
data["ret"] = "success"
|
|
|
|
|
data["anime_list"].append(entity)
|
|
|
|
|
|
|
|
|
|
return data
|
|
|
|
|
except Exception as e:
|
|
|
|
|
P.logger.error("Exception:%s", e)
|
|
|
|
|
P.logger.error(traceback.format_exc())
|
|
|
|
|
return {"ret": "exception", "log": str(e)}
|
|
|
|
|
|
2022-04-09 19:37:36 +09:00
|
|
|
# @staticmethod
|
2022-04-17 18:00:43 +09:00
|
|
|
def get_search_result(self, query, page, cate):
|
2022-04-09 19:37:36 +09:00
|
|
|
try:
|
|
|
|
|
_query = urllib.parse.quote(query)
|
|
|
|
|
url = (
|
|
|
|
|
P.ModelSetting.get("ohli24_url")
|
|
|
|
|
+ "/bbs/search.php?srows=24&gr_id=&sfl=wr_subject&stx="
|
|
|
|
|
+ _query
|
2022-04-17 18:00:43 +09:00
|
|
|
+ "&page="
|
|
|
|
|
+ page
|
2022-04-09 19:37:36 +09:00
|
|
|
)
|
|
|
|
|
|
2022-04-17 14:07:00 +09:00
|
|
|
logger.info("get_search_result()::url> %s", url)
|
2022-04-09 19:37:36 +09:00
|
|
|
data = {}
|
|
|
|
|
response_data = LogicOhli24.get_html(url, timeout=10)
|
|
|
|
|
tree = html.fromstring(response_data)
|
|
|
|
|
tmp_items = tree.xpath('//div[@class="list-row"]')
|
|
|
|
|
data["anime_count"] = len(tmp_items)
|
|
|
|
|
data["anime_list"] = []
|
|
|
|
|
|
|
|
|
|
for item in tmp_items:
|
|
|
|
|
entity = {}
|
|
|
|
|
entity["link"] = item.xpath(".//a/@href")[0]
|
|
|
|
|
# entity["code"] = entity["link"].split("/")[-1]
|
|
|
|
|
entity["wr_id"] = entity["link"].split("=")[-1]
|
|
|
|
|
# logger.debug(item.xpath(".//div[@class='post-title']/text()").join())
|
|
|
|
|
entity["title"] = "".join(
|
|
|
|
|
item.xpath(".//div[@class='post-title']/text()")
|
|
|
|
|
).strip()
|
|
|
|
|
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
|
|
|
|
|
0
|
|
|
|
|
].replace("..", P.ModelSetting.get("ohli24_url"))
|
|
|
|
|
|
|
|
|
|
entity["code"] = item.xpath(".//div[@class='img-item']/img/@alt")[0]
|
|
|
|
|
|
|
|
|
|
data["ret"] = "success"
|
|
|
|
|
data["anime_list"].append(entity)
|
|
|
|
|
|
|
|
|
|
return data
|
|
|
|
|
except Exception as e:
|
|
|
|
|
P.logger.error("Exception:%s", e)
|
2022-04-06 23:45:15 +09:00
|
|
|
P.logger.error(traceback.format_exc())
|
|
|
|
|
return {"ret": "exception", "log": str(e)}
|
2022-02-10 00:34:14 +09:00
|
|
|
|
2023-03-01 18:01:39 +09:00
|
|
|
def check_for_new_post(self):
|
|
|
|
|
# Get the HTML content of the page
|
2023-08-07 19:16:59 +09:00
|
|
|
res = requests.get("https://ohli24.live/bbs/board.php?bo_table=ing")
|
2023-03-01 18:01:39 +09:00
|
|
|
soup = BeautifulSoup(res.content, "html.parser")
|
|
|
|
|
|
|
|
|
|
# Find the latest post on the page
|
|
|
|
|
latest_post = soup.find("div", class_="post-title").text
|
|
|
|
|
latest_post_image = (
|
|
|
|
|
soup.find("div", class_="img-item")
|
|
|
|
|
.find("img", class_="wr-img")
|
|
|
|
|
.get("src")
|
2023-08-07 19:16:59 +09:00
|
|
|
.replace("..", "https://ohli24.live")
|
2023-03-01 18:01:39 +09:00
|
|
|
)
|
|
|
|
|
|
|
|
|
|
logger.debug(f"latest_post:: {latest_post}")
|
|
|
|
|
logger.debug(f"self.last_post_title:: {self.last_post_title}")
|
|
|
|
|
logger.debug(f"latest_post_image:: {latest_post_image}")
|
|
|
|
|
|
|
|
|
|
# Compare the latest post with the last recorded post
|
|
|
|
|
if latest_post != self.last_post_title:
|
|
|
|
|
# If there is a new post, update the last recorded post
|
|
|
|
|
self.last_post_title = latest_post
|
|
|
|
|
|
|
|
|
|
# Send a notification to Discord channel
|
|
|
|
|
# discord_client = discord.Client()
|
|
|
|
|
# discord_client.run(self.DISCORD_BOT_TOKEN)
|
|
|
|
|
#
|
|
|
|
|
# async def on_ready():
|
|
|
|
|
# channel = discord_client.get_channel(self.DISCORD_CHANNEL_ID)
|
|
|
|
|
# await channel.send(f"A new post has been added: {latest_post}")
|
|
|
|
|
#
|
|
|
|
|
# discord_client.close()
|
|
|
|
|
|
|
|
|
|
webhook = DiscordWebhook(url=self.discord_webhook_url)
|
|
|
|
|
embed = DiscordEmbed(title=self.discord_title, color=self.discord_color)
|
|
|
|
|
embed.set_timestamp()
|
|
|
|
|
path = self.last_post_title
|
|
|
|
|
embed.set_image(url=latest_post_image)
|
|
|
|
|
embed.add_embed_field(name="", value=path, inline=True)
|
|
|
|
|
embed.set_timestamp()
|
|
|
|
|
webhook.add_embed(embed)
|
|
|
|
|
response = webhook.execute()
|
|
|
|
|
|
|
|
|
|
return self.last_post_title
|
|
|
|
|
return self.last_post_title
|
|
|
|
|
|
|
|
|
|
    def send_notify(self):
        """Poll for new posts forever, re-checking every 10 minutes.

        NOTE(review): this blocks the calling thread indefinitely — it must be
        run on a background thread, never inline during plugin start-up.
        """
        logger.debug("send_notify() routine")
        while True:
            # check_for_new_post() returns the latest seen post title and
            # fires a Discord webhook when it changed.
            self.last_post_title = self.check_for_new_post()
            logger.debug(self.last_post_title)
            time.sleep(600)  # 10-minute poll interval
|
2022-03-27 18:26:18 +09:00
|
|
|
# @staticmethod
|
|
|
|
|
def plugin_load(self):
|
2022-02-08 23:17:30 +09:00
|
|
|
try:
|
2022-04-06 23:45:15 +09:00
|
|
|
logger.debug("%s plugin_load", P.package_name)
|
|
|
|
|
self.queue = FfmpegQueue(
|
|
|
|
|
P, P.ModelSetting.get_int("ohli24_max_ffmpeg_process_count")
|
|
|
|
|
)
|
2022-03-27 18:26:18 +09:00
|
|
|
self.current_data = None
|
|
|
|
|
self.queue.queue_start()
|
2022-02-08 23:17:30 +09:00
|
|
|
|
2023-03-01 18:01:39 +09:00
|
|
|
logger.debug(P.ModelSetting.get_bool("ohli24_discord_notify"))
|
|
|
|
|
if P.ModelSetting.get_bool("ohli24_discord_notify"):
|
|
|
|
|
self.send_notify()
|
|
|
|
|
|
2022-02-08 23:17:30 +09:00
|
|
|
except Exception as e:
|
2022-04-06 23:45:15 +09:00
|
|
|
logger.error("Exception:%s", e)
|
2022-02-08 23:17:30 +09:00
|
|
|
logger.error(traceback.format_exc())
|
|
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
|
def plugin_unload():
|
|
|
|
|
try:
|
2022-04-06 23:45:15 +09:00
|
|
|
logger.debug("%s plugin_unload", P.package_name)
|
|
|
|
|
scheduler.remove_job("%s_recent" % P.package_name)
|
2022-02-08 23:17:30 +09:00
|
|
|
except Exception as e:
|
2022-04-06 23:45:15 +09:00
|
|
|
logger.error("Exception:%s", e)
|
2022-02-08 23:17:30 +09:00
|
|
|
logger.error(traceback.format_exc())
|
|
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
|
def reset_db() -> bool:
|
|
|
|
|
db.session.query(ModelOhli24Item).delete()
|
|
|
|
|
db.session.commit()
|
|
|
|
|
return True
|
|
|
|
|
|
2022-02-10 00:34:14 +09:00
|
|
|
@staticmethod
|
2023-02-01 19:22:06 +09:00
|
|
|
@yommi_timeit
|
2023-01-13 20:14:11 +09:00
|
|
|
def get_html(
|
2023-03-07 20:35:22 +09:00
|
|
|
url, headers=None, referer=None, stream=False, timeout=10, stealth=False
|
2023-01-13 20:14:11 +09:00
|
|
|
):
|
2023-01-15 17:10:34 +09:00
|
|
|
# global response_data
|
2022-04-06 23:45:15 +09:00
|
|
|
data = ""
|
2023-01-15 17:10:34 +09:00
|
|
|
# response_date = ""
|
2022-02-10 00:34:14 +09:00
|
|
|
try:
|
2022-03-27 18:26:18 +09:00
|
|
|
|
2023-01-13 20:14:11 +09:00
|
|
|
print("cloudflare protection bypass ==================P")
|
2023-01-15 17:10:34 +09:00
|
|
|
response_date = ""
|
2023-01-13 20:14:11 +09:00
|
|
|
if headers is not None:
|
|
|
|
|
LogicOhli24.headers = headers
|
|
|
|
|
|
2023-02-01 14:53:18 +09:00
|
|
|
# logger.debug(f"headers: {LogicOhli24.headers}")
|
|
|
|
|
|
2023-01-15 17:02:14 +09:00
|
|
|
# response_data = asyncio.run(
|
|
|
|
|
# LogicOhli24.get_html_playwright(
|
|
|
|
|
# url,
|
|
|
|
|
# headless=True,
|
|
|
|
|
# # referer=referer_url,
|
|
|
|
|
# engine="chrome",
|
|
|
|
|
# # stealth=True,
|
|
|
|
|
# )
|
|
|
|
|
# )
|
|
|
|
|
# # print(response_data)
|
|
|
|
|
#
|
|
|
|
|
# logger.debug(len(response_data))
|
2023-01-13 20:14:11 +09:00
|
|
|
|
2023-01-15 17:02:14 +09:00
|
|
|
# return response_data
|
2023-01-13 20:14:11 +09:00
|
|
|
|
2023-01-15 17:02:14 +09:00
|
|
|
if LogicOhli24.session is None:
|
|
|
|
|
LogicOhli24.session = requests.session()
|
2023-01-13 20:14:11 +09:00
|
|
|
|
2023-01-15 17:02:14 +09:00
|
|
|
# logger.debug('get_html :%s', url)
|
2023-01-15 17:43:12 +09:00
|
|
|
# LogicOhli24.headers["Referer"] = "" if referer is None else referer
|
2023-02-01 19:22:06 +09:00
|
|
|
# logger.debug(f"referer:: {referer}")
|
2023-01-15 17:43:12 +09:00
|
|
|
if referer:
|
|
|
|
|
LogicOhli24.headers["Referer"] = referer
|
2023-01-15 17:02:14 +09:00
|
|
|
|
2023-01-15 17:43:12 +09:00
|
|
|
# logger.info(headers)
|
2023-02-01 19:22:06 +09:00
|
|
|
# logger.debug(f"LogicOhli24.headers:: {LogicOhli24.headers}")
|
2023-01-15 17:02:14 +09:00
|
|
|
page_content = LogicOhli24.session.get(
|
2023-01-15 17:43:12 +09:00
|
|
|
url, headers=LogicOhli24.headers, timeout=timeout
|
2023-01-15 17:02:14 +09:00
|
|
|
)
|
|
|
|
|
response_data = page_content.text
|
2023-01-15 17:43:12 +09:00
|
|
|
# logger.debug(response_data)
|
2023-01-15 17:05:52 +09:00
|
|
|
return response_data
|
2022-02-10 00:34:14 +09:00
|
|
|
except Exception as e:
|
2022-04-06 23:45:15 +09:00
|
|
|
logger.error("Exception:%s", e)
|
2022-02-10 00:34:14 +09:00
|
|
|
logger.error(traceback.format_exc())
|
2023-01-15 17:10:34 +09:00
|
|
|
# return response_data
|
2022-02-10 00:34:14 +09:00
|
|
|
|
2022-03-27 18:26:18 +09:00
|
|
|
#########################################################
|
|
|
|
|
def add(self, episode_info):
|
|
|
|
|
if self.is_exist(episode_info):
|
2022-04-06 23:45:15 +09:00
|
|
|
return "queue_exist"
|
2022-03-27 18:26:18 +09:00
|
|
|
else:
|
2022-04-06 23:45:15 +09:00
|
|
|
db_entity = ModelOhli24Item.get_by_ohli24_id(episode_info["_id"])
|
2022-08-20 23:01:21 +09:00
|
|
|
# logger.debug("db_entity:::> %s", db_entity)
|
2022-03-27 18:26:18 +09:00
|
|
|
if db_entity is None:
|
|
|
|
|
entity = Ohli24QueueEntity(P, self, episode_info)
|
2022-08-20 23:01:21 +09:00
|
|
|
# logger.debug("entity:::> %s", entity.as_dict())
|
2022-03-27 18:26:18 +09:00
|
|
|
ModelOhli24Item.append(entity.as_dict())
|
2022-08-20 23:01:21 +09:00
|
|
|
# logger.debug("entity:: type >> %s", type(entity))
|
2022-03-27 18:26:18 +09:00
|
|
|
|
|
|
|
|
self.queue.add_queue(entity)
|
2022-04-06 23:45:15 +09:00
|
|
|
return "enqueue_db_append"
|
|
|
|
|
elif db_entity.status != "completed":
|
2022-03-27 18:26:18 +09:00
|
|
|
entity = Ohli24QueueEntity(P, self, episode_info)
|
|
|
|
|
|
|
|
|
|
self.queue.add_queue(entity)
|
2022-04-06 23:45:15 +09:00
|
|
|
return "enqueue_db_exist"
|
2022-03-27 18:26:18 +09:00
|
|
|
else:
|
2022-04-06 23:45:15 +09:00
|
|
|
return "db_completed"
|
2022-03-27 18:26:18 +09:00
|
|
|
|
|
|
|
|
def is_exist(self, info):
|
|
|
|
|
for e in self.queue.entity_list:
|
2022-04-06 23:45:15 +09:00
|
|
|
if e.info["_id"] == info["_id"]:
|
2022-03-27 18:26:18 +09:00
|
|
|
return True
|
|
|
|
|
return False
|
|
|
|
|
|
2022-02-08 23:17:30 +09:00
|
|
|
|
|
|
|
|
class Ohli24QueueEntity(FfmpegQueueEntity):
|
|
|
|
|
    def __init__(self, P, module_logic, info):
        """Queue entry for a single OHLI24 episode download.

        Immediately resolves the stream URL, filename, and save path via
        make_episode_info() (performs network requests).
        """
        super(Ohli24QueueEntity, self).__init__(P, module_logic, info)
        self._vi = None           # video hash code parsed from the player URL
        self.url = None           # final HLS stream URL
        self.epi_queue = None     # episode number parsed from the title
        self.filepath = None      # full path of the output file
        self.savepath = None      # destination directory
        self.quality = None       # e.g. "720P"
        self.filename = None      # sanitized output filename
        self.vtt = None           # subtitle URL (filled elsewhere)
        self.season = 1           # default season when none is in the title
        self.content_title = None # series title without episode suffix
        self.srt_url = None       # subtitle track URL found in the player page
        self.headers = None       # headers required to download the stream
        # TODO (translated from Korean note): temporarily left enabled —
        # consider whether this call should be deferred.
        self.make_episode_info()
|
|
|
|
    def refresh_status(self):
        """Push this entity's current state to web clients over socketio."""
        self.module_logic.socketio_callback("status", self.as_dict())
|
|
|
|
def info_dict(self, tmp):
|
|
|
|
|
# logger.debug('self.info::> %s', self.info)
|
|
|
|
|
for key, value in self.info.items():
|
|
|
|
|
tmp[key] = value
|
2022-04-06 23:45:15 +09:00
|
|
|
tmp["vtt"] = self.vtt
|
|
|
|
|
tmp["season"] = self.season
|
|
|
|
|
tmp["content_title"] = self.content_title
|
|
|
|
|
tmp["ohli24_info"] = self.info
|
|
|
|
|
tmp["epi_queue"] = self.epi_queue
|
2022-03-27 18:26:18 +09:00
|
|
|
return tmp
|
|
|
|
|
|
|
|
|
|
def donwload_completed(self):
|
2022-04-06 23:45:15 +09:00
|
|
|
db_entity = ModelOhli24Item.get_by_ohli24_id(self.info["_id"])
|
2022-03-27 18:26:18 +09:00
|
|
|
if db_entity is not None:
|
2022-04-06 23:45:15 +09:00
|
|
|
db_entity.status = "completed"
|
2022-03-27 18:26:18 +09:00
|
|
|
db_entity.complated_time = datetime.now()
|
|
|
|
|
db_entity.save()
|
|
|
|
|
|
|
|
|
|
# Get episode info from OHLI24 site
|
2022-06-26 21:46:16 +09:00
|
|
|
    def make_episode_info(self):
        """Resolve everything needed to download this episode.

        Scrapes the episode page -> player iframe -> packed JS to find the
        HLS stream URL, subtitle URL, quality, and builds the output
        filename/save path. All results are stored on self; errors are
        logged and swallowed (attributes stay at their __init__ defaults).
        """
        try:
            base_url = "https://ohli24.live"
            iframe_url = ""

            # Episode page URL, e.g.
            # https://ohli24.org/e/%EB%85%B9%EC%9D%84...6%ED%99%94
            url = self.info["va"]

            ourls = parse.urlparse(url)

            headers = {
                "Referer": f"{ourls.scheme}://{ourls.netloc}",
                "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36",
            }
            logger.debug(headers)
            logger.debug("make_episode_info()::url==> %s", url)
            logger.info(f"self.info:::> {self.info}")

            # Fetch the episode page and pull the relative player URL out of
            # an inline script (url : "..<path>").
            text = LogicOhli24.get_html(
                url, headers=headers, referer=f"{ourls.scheme}://{ourls.netloc}"
            )
            soup1 = BeautifulSoup(text, "lxml")
            pattern = re.compile(r"url : \"\.\.(.*)\"")
            script = soup1.find("script", text=pattern)

            if script:
                match = pattern.search(script.text)
                if match:
                    iframe_url = match.group(1)
                    logger.info("iframe_url::> %s", iframe_url)

            iframe_src = f"https://ohli24.live{iframe_url}"

            iframe_html = LogicOhli24.get_html(iframe_src, headers=headers, timeout=600)

            # The player page may embed a second-level iframe; follow it.
            pattern = r"<iframe src=\"(.*?)\" allowfullscreen>"

            match = re.search(pattern, iframe_html)
            if match:
                iframe_src = match.group(1)
                print(iframe_src)

            logger.debug(f"iframe_src:::> {iframe_src}")

            resp1 = LogicOhli24.get_html(iframe_src, headers=headers, timeout=600)
            soup3 = BeautifulSoup(resp1, "lxml")
            # Locate the packed (eval'd) player script.
            s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL)
            packed_pattern = re.compile(
                r"if?.([^{}]+)\{.*(eval.+)\}.+else?.{.(eval.+)\}", re.DOTALL
            )
            packed_script = soup3.find("script", text=s_pattern)
            unpack_script = None
            if packed_script is not None:
                match = packed_pattern.search(packed_script.text)
                logger.debug(type(packed_script))
                # Beautify to expand the packed JS into readable source.
                unpack_script = jsbeautifier.beautify(str(packed_script))

            # Extract the "tracks" JSON fragment (subtitle list) from the
            # unpacked player config.
            # NOTE(review): when no packed script was found, unpack_script is
            # still None here and re.search raises TypeError, which is caught
            # by the outer except — confirm this is the intended fallback.
            p1 = re.compile(r"(\"tracks\".*\])\,\"captions\"", re.MULTILINE | re.DOTALL)
            m2 = re.search(
                r"(\"tracks\".*\]).*\"captions\"",
                unpack_script,
                flags=re.MULTILINE | re.DOTALL,
            )
            dict_string = "{" + m2.group(1) + "}"

            tracks = json.loads(dict_string)
            self.srt_url = tracks["tracks"][0]["file"]

            logger.debug(f'srt_url::: {tracks["tracks"][0]["file"]}')

            # The last URL path segment is "index.php?data=<hash>"; strip the
            # prefix to keep only the hash.
            video_hash = iframe_src.split("/")
            video_hashcode = re.sub(r"index\.php\?data=", "", video_hash[-1])
            self._vi = video_hashcode
            video_info_url = f"{video_hash[0]}//{video_hash[2]}/player/index.php?data={video_hashcode}&do=getVideo"
            logger.debug(f"video_info_url::: {video_info_url}")

            headers = {
                "referer": f"{iframe_src}",
                "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36"
                "Mozilla/5.0 (Macintosh; Intel "
                "Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 "
                "Whale/3.12.129.46 Safari/537.36",
                "X-Requested-With": "XMLHttpRequest",
                "Cookie": "PHPSESSID=hhhnrora8o9omv1tljq4efv216; 2a0d2363701f23f8a75028924a3af643=NDkuMTYzLjExMS4xMDk=; e1192aefb64683cc97abb83c71057733=aW5n",
            }

            payload = {
                "hash": video_hash[-1],
            }
            # Ask the player endpoint for the actual video source.
            resp2 = requests.post(
                video_info_url, headers=headers, data=payload, timeout=20
            ).json()

            logger.debug("resp2::> %s", resp2)

            hls_url = resp2["videoSource"]
            logger.debug(f"video_url::> {hls_url}")

            # Fetch the HLS master playlist; its last two lines are the
            # variant header (with NAME="<quality>") and the stream URL.
            resp3 = requests.get(hls_url, headers=headers).text

            stream_info = resp3.split("\n")[-2:]
            logger.debug(f"stream_info:: {stream_info}")
            self.headers = {
                "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/71.0.3554.0 Safari/537.36Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3554.0 Safari/537.36",
                "Referer": "https://ndoodle.xyz/video/03a3655fff3e9bdea48de9f49e938e32",
            }

            self.url = stream_info[1].strip()
            logger.info(self.url)
            # Some CDNs validate the Referer against the player iframe.
            if "anibeast.com" in self.url:
                self.headers["Referer"] = iframe_src
            if "crazypatutu.com" in self.url:
                self.headers["Referer"] = iframe_src

            match = re.compile(r'NAME="(?P<quality>.*?)"').search(stream_info[0])
            self.quality = "720P"
            if match is not None:
                self.quality = match.group("quality")
                logger.info(self.quality)

            # Parse "<title> [<season>기] <episode>화" from the episode title.
            match = re.compile(
                r"(?P<title>.*?)\s*((?P<season>\d+)%s)?\s*((?P<epi_no>\d+)%s)"
                % ("기", "화")
            ).search(self.info["title"])

            # epi_no default value
            epi_no = 1

            if match:
                self.content_title = match.group("title").strip()
                if "season" in match.groupdict() and match.group("season") is not None:
                    self.season = int(match.group("season"))

                epi_no = int(match.group("epi_no"))
                # Zero-pad season/episode below 10: <title>.S01E01.<quality>-OHNI24.mp4
                ret = "%s.S%sE%s.%s-OHNI24.mp4" % (
                    self.content_title,
                    "0%s" % self.season if self.season < 10 else self.season,
                    "0%s" % epi_no if epi_no < 10 else epi_no,
                    self.quality,
                )
            else:
                self.content_title = self.info["title"]
                P.logger.debug("NOT MATCH")
                ret = "%s.720p-OHNI24.mp4" % self.info["title"]

            self.epi_queue = epi_no
            self.filename = Util.change_text_for_use_filename(ret)
            logger.info(f"self.filename::> {self.filename}")
            self.savepath = P.ModelSetting.get("ohli24_download_path")
            logger.info(f"self.savepath::> {self.savepath}")

            # TODO: handle finished ("완결") series

            if P.ModelSetting.get_bool("ohli24_auto_make_folder"):
                # Finished series get a configurable prefix in the folder name.
                if self.info["day"].find("완결") != -1:
                    folder_name = "%s %s" % (
                        P.ModelSetting.get("ohli24_finished_insert"),
                        self.content_title,
                    )
                else:
                    folder_name = self.content_title
                folder_name = Util.change_text_for_use_filename(folder_name.strip())
                self.savepath = os.path.join(self.savepath, folder_name)
                if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"):
                    self.savepath = os.path.join(
                        self.savepath, "Season %s" % int(self.season)
                    )
            self.filepath = os.path.join(self.savepath, self.filename)
            if not os.path.exists(self.savepath):
                os.makedirs(self.savepath)

            from framework.common.util import write_file, convert_vtt_to_srt

            srt_filepath = os.path.join(
                self.savepath, self.filename.replace(".mp4", ".ko.srt")
            )

            # Download the subtitle file once, skipping thumbnail tracks.
            if (
                self.srt_url is not None
                and not os.path.exists(srt_filepath)
                and not ("thumbnails.vtt" in self.srt_url)
            ):
                srt_data = requests.get(self.srt_url, headers=headers).text
                write_file(srt_data, srt_filepath)

        except Exception as e:
            P.logger.error("Exception:%s", e)
            P.logger.error(traceback.format_exc())
|
2022-02-08 23:17:30 +09:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class ModelOhli24Item(db.Model):
    """DB row tracking one queued/downloaded OHLI24 episode."""

    __tablename__ = "{package_name}_ohli24_item".format(package_name=P.package_name)
    __table_args__ = {"mysql_collate": "utf8_general_ci"}
    __bind_key__ = P.package_name
    id = db.Column(db.Integer, primary_key=True)
    created_time = db.Column(db.DateTime)    # set in __init__ on row creation
    completed_time = db.Column(db.DateTime)  # set when the download finishes
    reserved = db.Column(db.JSON)
    content_code = db.Column(db.String)      # series code from the site listing
    season = db.Column(db.Integer)
    episode_no = db.Column(db.Integer)
    title = db.Column(db.String)             # series title
    episode_title = db.Column(db.String)     # full episode title text
    ohli24_va = db.Column(db.String)         # episode page URL (info["va"])
    ohli24_vi = db.Column(db.String)         # player video hash code
    ohli24_id = db.Column(db.String)         # site-unique episode id (info["_id"])
    quality = db.Column(db.String)           # e.g. "720P"
    filepath = db.Column(db.String)          # full output file path
    filename = db.Column(db.String)
    savepath = db.Column(db.String)          # destination directory
    video_url = db.Column(db.String)         # resolved HLS stream URL
    vtt_url = db.Column(db.String)           # subtitle track URL
    thumbnail = db.Column(db.String)
    status = db.Column(db.String)            # "wait" -> "completed"
    ohli24_info = db.Column(db.JSON)         # raw episode info dict
|
|
|
|
    def __init__(self):
        """Stamp the creation time on a new row."""
        self.created_time = datetime.now()
|
|
|
|
|
    def __repr__(self):
        """Debug representation: the serialized dict form of the row."""
        return repr(self.as_dict())
|
|
|
|
|
def as_dict(self):
|
|
|
|
|
ret = {x.name: getattr(self, x.name) for x in self.__table__.columns}
|
2022-04-06 23:45:15 +09:00
|
|
|
ret["created_time"] = self.created_time.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
|
|
ret["completed_time"] = (
|
|
|
|
|
self.completed_time.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
|
|
if self.completed_time is not None
|
|
|
|
|
else None
|
|
|
|
|
)
|
2022-02-08 23:17:30 +09:00
|
|
|
return ret
|
|
|
|
|
|
|
|
|
|
    def save(self):
        """Add this row to the session and commit immediately."""
        db.session.add(self)
        db.session.commit()
|
|
|
|
|
    @classmethod
    def get_by_id(cls, idx):
        """Return the row with primary key *idx*, or None."""
        return db.session.query(cls).filter_by(id=idx).first()
|
2022-02-08 23:17:30 +09:00
|
|
|
|
|
|
|
|
    @classmethod
    def get_by_ohli24_id(cls, ohli24_id):
        """Return the row for the given site episode id, or None."""
        return db.session.query(cls).filter_by(ohli24_id=ohli24_id).first()
|
2022-02-08 23:17:30 +09:00
|
|
|
|
|
|
|
|
    @classmethod
    def delete_by_id(cls, idx):
        """Delete the row with primary key *idx* and commit.

        :return: always True.
        """
        db.session.query(cls).filter_by(id=idx).delete()
        db.session.commit()
        return True
|
|
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
|
def web_list(cls, req):
|
|
|
|
|
ret = {}
|
2022-04-06 23:45:15 +09:00
|
|
|
page = int(req.form["page"]) if "page" in req.form else 1
|
2022-02-08 23:17:30 +09:00
|
|
|
page_size = 30
|
2022-04-06 23:45:15 +09:00
|
|
|
job_id = ""
|
|
|
|
|
search = req.form["search_word"] if "search_word" in req.form else ""
|
|
|
|
|
option = req.form["option"] if "option" in req.form else "all"
|
|
|
|
|
order = req.form["order"] if "order" in req.form else "desc"
|
2022-02-08 23:17:30 +09:00
|
|
|
query = cls.make_query(search=search, order=order, option=option)
|
|
|
|
|
count = query.count()
|
|
|
|
|
query = query.limit(page_size).offset((page - 1) * page_size)
|
|
|
|
|
lists = query.all()
|
2022-04-06 23:45:15 +09:00
|
|
|
ret["list"] = [item.as_dict() for item in lists]
|
|
|
|
|
ret["paging"] = Util.get_paging_info(count, page, page_size)
|
2022-02-08 23:17:30 +09:00
|
|
|
return ret
|
|
|
|
|
|
|
|
|
|
@classmethod
|
2022-04-06 23:45:15 +09:00
|
|
|
def make_query(cls, search="", order="desc", option="all"):
|
2022-02-08 23:17:30 +09:00
|
|
|
query = db.session.query(cls)
|
2022-04-06 23:45:15 +09:00
|
|
|
if search is not None and search != "":
|
|
|
|
|
if search.find("|") != -1:
|
|
|
|
|
tmp = search.split("|")
|
2022-02-08 23:17:30 +09:00
|
|
|
conditions = []
|
|
|
|
|
for tt in tmp:
|
2022-04-06 23:45:15 +09:00
|
|
|
if tt != "":
|
|
|
|
|
conditions.append(cls.filename.like("%" + tt.strip() + "%"))
|
2022-02-08 23:17:30 +09:00
|
|
|
query = query.filter(or_(*conditions))
|
2022-04-06 23:45:15 +09:00
|
|
|
elif search.find(",") != -1:
|
|
|
|
|
tmp = search.split(",")
|
2022-02-08 23:17:30 +09:00
|
|
|
for tt in tmp:
|
2022-04-06 23:45:15 +09:00
|
|
|
if tt != "":
|
|
|
|
|
query = query.filter(cls.filename.like("%" + tt.strip() + "%"))
|
2022-02-08 23:17:30 +09:00
|
|
|
else:
|
2022-04-06 23:45:15 +09:00
|
|
|
query = query.filter(cls.filename.like("%" + search + "%"))
|
|
|
|
|
if option == "completed":
|
|
|
|
|
query = query.filter(cls.status == "completed")
|
2022-02-08 23:17:30 +09:00
|
|
|
|
2022-04-06 23:45:15 +09:00
|
|
|
query = (
|
|
|
|
|
query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id)
|
|
|
|
|
)
|
2022-02-08 23:17:30 +09:00
|
|
|
return query
|
|
|
|
|
|
|
|
|
|
    @classmethod
    def get_list_uncompleted(cls):
        """Return every row whose status is not "completed"."""
        return db.session.query(cls).filter(cls.status != "completed").all()
|
2022-02-08 23:17:30 +09:00
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
|
def append(cls, q):
|
|
|
|
|
item = ModelOhli24Item()
|
2022-04-06 23:45:15 +09:00
|
|
|
item.content_code = q["content_code"]
|
|
|
|
|
item.season = q["season"]
|
|
|
|
|
item.episode_no = q["epi_queue"]
|
|
|
|
|
item.title = q["content_title"]
|
|
|
|
|
item.episode_title = q["title"]
|
|
|
|
|
item.ohli24_va = q["va"]
|
|
|
|
|
item.ohli24_vi = q["_vi"]
|
|
|
|
|
item.ohli24_id = q["_id"]
|
|
|
|
|
item.quality = q["quality"]
|
|
|
|
|
item.filepath = q["filepath"]
|
|
|
|
|
item.filename = q["filename"]
|
|
|
|
|
item.savepath = q["savepath"]
|
|
|
|
|
item.video_url = q["url"]
|
|
|
|
|
item.vtt_url = q["vtt"]
|
|
|
|
|
item.thumbnail = q["thumbnail"]
|
|
|
|
|
item.status = "wait"
|
|
|
|
|
item.ohli24_info = q["ohli24_info"]
|
2022-02-08 23:17:30 +09:00
|
|
|
item.save()
|