bug fixed 2022.06.26

2022-06-26 21:46:16 +09:00
parent d9baf0704c
commit 209f69d0a7


@@ -256,12 +256,29 @@ class LogicOhli24(LogicModuleBase):
print(url)
if "all" in content_code_list:
-pass
+ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
logger.debug(f"today_info:: {ret_data}")
for item in ret_data["anime_list"]:
# wr_id = request.form.get("wr_id", None)
# bo_table = request.form.get("bo_table", None)
wr_id = None
bo_table = None
data = []
# print(code)
# logger.info("code::: %s", code)
logger.debug(item)
# temporarily disabled
# data = self.get_series_info(item["code"], wr_id, bo_table)
logger.debug(data)
# result = asyncio.run(LogicOhli24.main(url_list))
# logger.debug(f"result:: {result}")
@staticmethod
-async def get_data(url):
+async def get_data(url) -> str:
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
content = await response.text()
@@ -269,7 +286,7 @@ class LogicOhli24(LogicModuleBase):
return content
@staticmethod
-async def main(url_list):
+async def main(url_list: list):
input_coroutines = [LogicOhli24.get_data(url_) for url_ in url_list]
res = await asyncio.gather(*input_coroutines)
return res
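# Illustrative usage (not part of this commit; URLs are hypothetical):
#   url_list = ["https://ohli24.net/c/list?page=1", "https://ohli24.net/c/list?page=2"]
#   pages = asyncio.run(LogicOhli24.main(url_list))  # one HTML string per URL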
@@ -494,6 +511,36 @@ class LogicOhli24(LogicModuleBase):
P.logger.error(traceback.format_exc())
return {"ret": "exception", "log": str(e)}
def get_auto_anime_info(self, url: str = ""):
try:
logger.info("url:::> %s", url)
data = {}
response_data = LogicOhli24.get_html(url, timeout=10)
tree = html.fromstring(response_data)
tmp_items = tree.xpath('//div[@class="list-row"]')
data["anime_count"] = len(tmp_items)
data["anime_list"] = []
for item in tmp_items:
entity = {}
entity["link"] = item.xpath(".//a/@href")[0]
entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[
0
].strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[
0
].replace("..", P.ModelSetting.get("ohli24_url"))
data["ret"] = "success"
data["anime_list"].append(entity)
return data
except Exception as e:
P.logger.error("Exception:%s", e)
P.logger.error(traceback.format_exc())
return {"ret": "exception", "log": str(e)}
# @staticmethod
def get_search_result(self, query, page, cate):
try:
@@ -656,7 +703,7 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
db_entity.save()
# Get episode info from OHLI24 site
-def make_episode_info(self):
+def make_episode_info_old(self):
try:
# url = 'https://ohli24.net/e/' + self.info['va']
base_url = "https://ohli24.net"
@@ -851,7 +898,212 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
except Exception as e:
P.logger.error("Exception:%s", e)
P.logger.error(traceback.format_exc())
pass
def make_episode_info(self):
try:
# url = 'https://ohli24.net/e/' + self.info['va']
base_url = "https://ohli24.net"
iframe_url = ""
# https://ohli24.net/e/%EB%85%B9%EC%9D%84%20%EB%A8%B9%EB%8A%94%20%EB%B9%84%EC%8A%A4%EC%BD%94%206%ED%99%94
url = self.info["va"]
ourls = parse.urlparse(url)
headers = {
"referer": f"{ourls.scheme}://{ourls.netloc}",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36",
}
logger.debug("make_episode_info()::url==> %s", url)
logger.info(f"self.info:::> {self.info}")
text = requests.get(url, headers=headers).text
# logger.debug(text)
soup1 = BeautifulSoup(text, "lxml")
pattern = re.compile(r"url : \"\.\.(.*)\"")
script = soup1.find("script", text=pattern)
if script:
match = pattern.search(script.text)
if match:
iframe_url = match.group(1)
logger.info("iframe_url::> %s", iframe_url)
logger.debug(soup1.find("iframe"))
iframe_url = soup1.find("iframe")["src"]
logger.info("iframe_url::> %s", iframe_url)
print(base_url)
print(iframe_url)
# exit()
# resp = requests.get(iframe_url, headers=headers, timeout=20).text
# soup2 = BeautifulSoup(resp, "lxml")
# iframe_src = soup2.find("iframe")["src"]
iframe_src = iframe_url
# print(resp1)
logger.debug(f"iframe_src:::> {iframe_src}")
resp1 = requests.get(iframe_src, headers=headers, timeout=600).text
# logger.info('resp1::>> %s', resp1)
soup3 = BeautifulSoup(resp1, "lxml")
# packed_pattern = re.compile(r'\\{*(eval.+)*\\}', re.MULTILINE | re.DOTALL)
s_pattern = re.compile(r"(eval.+)", re.MULTILINE | re.DOTALL)
packed_pattern = re.compile(
r"if?.([^{}]+)\{.*(eval.+)\}.+else?.{.(eval.+)\}", re.DOTALL
)
packed_script = soup3.find("script", text=s_pattern)
# packed_script = soup3.find('script')
# logger.info('packed_script>>> %s', packed_script.text)
unpack_script = None
if packed_script is not None:
# logger.debug('zzzzzzzzzzzz')
match = packed_pattern.search(packed_script.text)
# match = re.search(packed_pattern, packed_script.text)
# logger.debug("match::: %s", match.group())
unpack_script = jsbeautifier.beautify(match.group(3))
# logger.info('match groups:: %s', match.groups())
# logger.info('match group3:: %s', match.group(3))
# print('packed_script==>', packed_script)
# logger.debug(unpack_script)
p1 = re.compile(r"(\"tracks\".*\])\,\"captions\"", re.MULTILINE | re.DOTALL)
m2 = re.search(
r"(\"tracks\".*\]).*\"captions\"",
unpack_script,
flags=re.MULTILINE | re.DOTALL,
)
# print(m2.group(1))
dict_string = "{" + m2.group(1) + "}"
logger.info(f"dict_string::> {dict_string}")
tracks = json.loads(dict_string)
self.srt_url = tracks["tracks"][0]["file"]
logger.debug(f'srt_url::: {tracks["tracks"][0]["file"]}')
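# dict_string is assumed to look like (illustrative):
#   {"tracks":[{"file":"https://.../subtitle.vtt","label":"Korean"}]}
# so tracks["tracks"][0]["file"] is the subtitle URL saved later on.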
video_hash = iframe_src.split("/")
video_hashcode = re.sub(r"index\.php\?data=", "", video_hash[-1])
self._vi = video_hashcode
video_info_url = f"{video_hash[0]}//{video_hash[2]}/player/index.php?data={video_hashcode}&do=getVideo"
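# e.g. a hypothetical iframe_src "https://host/player/index.php?data=abc123"
# splits so that video_hash[0] == "https:", video_hash[2] == "host" and
# video_hashcode == "abc123", yielding
# "https://host/player/index.php?data=abc123&do=getVideo".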
# print('hash:::', video_hash)
logger.debug(f"video_info_url::: {video_info_url}")
headers = {
"referer": f"{iframe_src}",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/96.0.4664.110 Whale/3.12.129.46 Safari/537.36",
"X-Requested-With": "XMLHttpRequest",
}
# print(headers)
payload = {
"hash": video_hash[-1],
}
resp2 = requests.post(
video_info_url, headers=headers, data=payload, timeout=20
).json()
logger.debug("resp2::> %s", resp2)
hls_url = resp2["videoSource"]
logger.debug(f"video_url::> {hls_url}")
resp3 = requests.get(hls_url, headers=headers).text
# logger.debug(resp3)
# stream_url = hls_url.split('\n')[-1].strip()
stream_info = resp3.split("\n")[-2:]
# logger.debug('stream_url:: %s', stream_url)
logger.debug(f"stream_info:: {stream_info}")
self.headers = {
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/71.0.3554.0 Safari/537.36",
"Referer": "https://ndoodle.xyz/video/03a3655fff3e9bdea48de9f49e938e32",
}
self.url = stream_info[1].strip()
match = re.compile(r'NAME="(?P<quality>.*?)"').search(stream_info[0])
self.quality = "720P"
if match is not None:
self.quality = match.group("quality")
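# stream_info holds the last two playlist lines: an attribute line searched
# for NAME="..." and the variant stream URI, e.g. (illustrative)
#   ['#EXT-X-STREAM-INF:BANDWIDTH=...,NAME="720P"', 'https://.../720p.m3u8']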
logger.info(self.quality)
match = re.compile(
r"(?P<title>.*?)\s*((?P<season>\d+)기)?\s*((?P<epi_no>\d+)화)"
).search(self.info["title"])
# initial value for epi_no
epi_no = 1
if match:
self.content_title = match.group("title").strip()
if "season" in match.groupdict() and match.group("season") is not None:
self.season = int(match.group("season"))
# epi_no = 1
epi_no = int(match.group("epi_no"))
ret = "%s.S%sE%s.%s-OHNI24.mp4" % (
self.content_title,
"0%s" % self.season if self.season < 10 else self.season,
"0%s" % epi_no if epi_no < 10 else epi_no,
self.quality,
)
else:
self.content_title = self.info["title"]
P.logger.debug("NOT MATCH")
ret = "%s.720p-OHNI24.mp4" % self.info["title"]
# logger.info('self.content_title:: %s', self.content_title)
self.epi_queue = epi_no
self.filename = Util.change_text_for_use_filename(ret)
logger.info(f"self.filename::> {self.filename}")
self.savepath = P.ModelSetting.get("ohli24_download_path")
logger.info(f"self.savepath::> {self.savepath}")
# TODO: handle completed ("완결") series
if P.ModelSetting.get_bool("ohli24_auto_make_folder"):
if self.info["day"].find("완결") != -1:
folder_name = "%s %s" % (
P.ModelSetting.get("ohli24_finished_insert"),
self.content_title,
)
else:
folder_name = self.content_title
folder_name = Util.change_text_for_use_filename(folder_name.strip())
self.savepath = os.path.join(self.savepath, folder_name)
if P.ModelSetting.get_bool("ohli24_auto_make_season_folder"):
self.savepath = os.path.join(
self.savepath, "Season %s" % int(self.season)
)
self.filepath = os.path.join(self.savepath, self.filename)
if not os.path.exists(self.savepath):
os.makedirs(self.savepath)
from framework.common.util import write_file, convert_vtt_to_srt
srt_filepath = os.path.join(
self.savepath, self.filename.replace(".mp4", ".ko.srt")
)
if self.srt_url is not None and not os.path.exists(srt_filepath):
# vtt_data = requests.get(self.vtt, headers=headers).text
# srt_data = convert_vtt_to_srt(vtt_data)
srt_data = requests.get(self.srt_url, headers=headers).text
write_file(srt_data, srt_filepath)
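# Note: the subtitle is written as downloaded; convert_vtt_to_srt is imported
# above but not applied here.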
except Exception as e:
P.logger.error("Exception:%s", e)
P.logger.error(traceback.format_exc())
class ModelOhli24Item(db.Model):