v0.6.14: Ubuntu Docker performance optimization for Ohli24

This commit is contained in:
2026-01-07 17:07:46 +09:00
parent 49aea1bb54
commit 8759d1e1c8
5 changed files with 221 additions and 104 deletions

View File

@@ -81,6 +81,16 @@
## 📝 변경 이력 (Changelog)
### v0.6.14 (2026-01-07)
- **Ohli24 Docker 성능 고속화**:
- Zendriver Daemon에 리눅스/도커 전용 최적화 플래그 추가 (`--no-zygote`, `--disable-dev-shm-usage`, `--disable-features=IsolateOrigins,site-per-process` 등)
- 정밀 성능 메트릭 도입 (`/tmp/zendriver_daemon.log`에서 Init/Nav/Block/Poll 단계별 시간 측정 가능)
- 목록 페이지 페칭 시 Zendriver Daemon(Layer 3A)을 최우선 순위로 격상 (기존 17초 → 1초 내외 단축 기대)
- `LogicOhli24.get_base_url()` 및 각 모듈에서 URL 끝 슬래시 제거(`rstrip`) 처리를 강화하여 불필요한 리다이렉트 방지
- **Zendriver Daemon 안정성**:
- 리눅스 환경의 `/dev/shm` 여유 공간 체크 로직 추가
- 변수 참조 오류(`NameError`, `elapsed` -> `total_elapsed`) 수정 및 에러 핸들링 보강
### v0.6.13 (2026-01-07)
- **초기화 순서 오류 수정**: `P.logger` 접근 전 `P` 인스턴스 생성이 완료되도록 `curl_cffi` 자동 설치 루틴 위치 조정 (`NameError: name 'P' is not defined` 해결)

View File

@@ -1,5 +1,5 @@
title: "애니 다운로더"
version: "0.6.13"
version: "0.6.14"
package_name: "anime_downloader"
developer: "projectdx"
description: "anime downloader"

View File

@@ -9,6 +9,7 @@ import sys
import json
import os
import time
import traceback
from typing import Dict, Any, Optional
# 봇사우루스 디버깅 일시정지 방지 및 자동 종료 설정
@@ -16,19 +17,22 @@ os.environ["BOTASAURUS_ENV"] = "production"
def fetch_html(url: str, headers: Optional[Dict[str, str]] = None, proxy: Optional[str] = None) -> Dict[str, Any]:
result: Dict[str, Any] = {"success": False, "html": "", "elapsed": 0}
start_time: float = time.time()
max_retries = 2
try:
from botasaurus.request import request as b_request
# raise_exception=True는 에러 시 exception을 발생시키게 함
# close_on_crash=True는 에러 발생 시 대기하지 않고 즉시 종료 (배포 환경용)
@b_request(proxy=proxy, raise_exception=True, close_on_crash=True)
# use_stealth=True 추가하여 탐지 회피 강화
@b_request(
proxy=proxy,
raise_exception=True,
close_on_crash=True
)
def fetch_url(request: Any, data: Dict[str, Any]) -> str:
target_url = data.get('url')
headers = data.get('headers') or {}
# 기본적인 헤더 보강 (Ohli24 대응 - Cloudflare 우회 시도)
# 기본적인 헤더 보강 (Ohli24 대응 - Cloudflare/TLS Fingerprinting 대응)
default_headers = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
@@ -50,37 +54,69 @@ def fetch_html(url: str, headers: Optional[Dict[str, str]] = None, proxy: Option
if k not in headers and k.lower() not in [hk.lower() for hk in headers]:
headers[k] = v
return request.get(target_url, headers=headers, timeout=30)
return request.get(target_url, headers=headers, timeout=20)
# 봇사우루스는 실패 시 자동 재시도 등을 하기도 함.
# 여기서는 단발성 요청이므로 직접 호출.
b_resp: str = fetch_url({'url': url, 'headers': headers})
elapsed: float = time.time() - start_time
if b_resp and len(b_resp) > 10:
result.update({
"success": True,
"html": b_resp,
"elapsed": round(elapsed, 2)
})
else:
result["error"] = f"Short response: {len(b_resp) if b_resp else 0} bytes"
result["elapsed"] = round(elapsed, 2)
for attempt in range(max_retries + 1):
start_time = time.time()
try:
b_resp: str = fetch_url({'url': url, 'headers': headers})
elapsed = time.time() - start_time
# 리스트 페이지는 보통 수백KB 이상 (최소 500바이트 체크)
if b_resp and len(b_resp) > 500:
result.update({
"success": True,
"html": b_resp,
"elapsed": round(elapsed, 2),
"attempt": attempt + 1
})
return result
else:
reason = f"Short response ({len(b_resp) if b_resp else 0} bytes)"
if attempt < max_retries:
time.sleep(1)
continue
result["error"] = reason
result["elapsed"] = round(time.time() - start_time, 2)
except Exception as inner_e:
if attempt < max_retries:
time.sleep(1)
continue
result["error"] = str(inner_e)
result["elapsed"] = round(time.time() - start_time, 2)
except Exception as e:
result["error"] = str(e)
result["elapsed"] = round(time.time() - start_time, 2)
result["error"] = f"Botasaurus init/import error: {str(e)}"
result["elapsed"] = 0
return result
if __name__ == "__main__":
if len(sys.argv) < 2:
print(json.dumps({"success": False, "error": "Usage: python botasaurus_ohli24.py <url> [headers_json] [proxy]"}))
sys.exit(1)
# 모든 stdout을 stderr로 리다이렉트 (라이브러리 로그가 stdout을 오염시키는 것 방지)
original_stdout = sys.stdout
sys.stdout = sys.stderr
target_url: str = sys.argv[1]
headers_arg: Optional[Dict[str, str]] = json.loads(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2] else None
proxy_arg: Optional[str] = sys.argv[3] if len(sys.argv) > 3 and sys.argv[3] else None
res: Dict[str, Any] = fetch_html(target_url, headers_arg, proxy_arg)
print(json.dumps(res, ensure_ascii=False))
try:
if len(sys.argv) < 2:
# 에러 메시지는 출력해야 하므로 다시 복구 후 출력
sys.stdout = original_stdout
print(json.dumps({"success": False, "error": "Usage: script.py <url> [headers] [proxy]"}))
sys.exit(1)
target_url: str = sys.argv[1]
headers_arg: Optional[Dict[str, str]] = json.loads(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2] else None
proxy_arg: Optional[str] = sys.argv[3] if len(sys.argv) > 3 and sys.argv[3] else None
res: Dict[str, Any] = fetch_html(target_url, headers_arg, proxy_arg)
# 최종 결과 출력 전에만 stdout 복구
sys.stdout = original_stdout
print(json.dumps(res, ensure_ascii=False))
except Exception as fatal_e:
# 에러 발생 시에도 JSON 형태로 출력하도록 보장
sys.stdout = original_stdout
print(json.dumps({
"success": False,
"error": f"Fatal execution error: {str(fatal_e)}",
"traceback": traceback.format_exc()
}, ensure_ascii=False))

View File

@@ -174,6 +174,16 @@ async def ensure_browser() -> Any:
log_debug("[ZendriverDaemon] No browser candidates found!")
return None
# 리눅스/도커 성능 분석용 로그
import platform
if platform.system() == "Linux":
try:
shm_size = os.statvfs('/dev/shm')
free_shm = (shm_size.f_bavail * shm_size.f_frsize) / (1024 * 1024)
log_debug(f"[ZendriverDaemon] Linux detected. /dev/shm free: {free_shm:.1f} MB")
except Exception as shm_e:
log_debug(f"[ZendriverDaemon] Failed to check /dev/shm: {shm_e}")
# 사용자 데이터 디렉토리 설정 (Mac/Root 권한 이슈 대응)
import tempfile
uid = os.getuid() if hasattr(os, 'getuid') else 'win'
@@ -204,14 +214,24 @@ async def ensure_browser() -> Any:
"--safebrowsing-disable-auto-update",
"--remote-allow-origins=*",
"--blink-settings=imagesEnabled=false",
"--disable-blink-features=AutomationControlled",
# 추가적인 도커 최적화 플래그
"--disable-features=IsolateOrigins,site-per-process",
"--no-zygote",
"--disable-extensions",
"--wasm-tier-up=false",
]
# 추가적인 리소스 블로킹 설정
# Note: zendriver supports direct CDP commands
for exec_path in candidates:
user_data_dir = os.path.join(tempfile.gettempdir(), f"zd_daemon_{uid}_{os.path.basename(exec_path).replace(' ', '_')}")
os.makedirs(user_data_dir, exist_ok=True)
try:
log_debug(f"[ZendriverDaemon] Trying browser at: {exec_path}")
start_time_init = time.time()
browser = await zd.start(
headless=True,
browser_executable_path=exec_path,
@@ -219,7 +239,7 @@ async def ensure_browser() -> Any:
user_data_dir=user_data_dir,
browser_args=browser_args
)
log_debug(f"[ZendriverDaemon] Browser started successfully with: {exec_path}")
log_debug(f"[ZendriverDaemon] Browser started successfully in {time.time() - start_time_init:.2f}s using: {exec_path}")
return browser
except Exception as e:
log_debug(f"[ZendriverDaemon] Failed to start {exec_path}: {e}")
@@ -242,25 +262,39 @@ async def fetch_with_browser(url: str, timeout: int = 30) -> Dict[str, Any]:
start_time: float = time.time()
try:
init_start = time.time()
await ensure_browser()
init_elapsed = time.time() - init_start
if browser is None:
result["error"] = "Browser not available"
return result
# zendriver의 browser.get(url)은 이미 열린 탭이 있으면 거기서 열려고 시도함.
# 하지만 모든 탭이 닫히면 StopIteration이 발생할 수 있음.
log_debug(f"[ZendriverDaemon] Fetching URL: {url}")
log_debug(f"[ZendriverDaemon] Fetching URL: {url} (Init: {init_elapsed:.2f}s)")
# StopIteration 방지를 위해 페이지 이동 시도
try:
nav_start = time.time()
# browser.get(url)은 새 탭을 열거나 기존 탭을 사용함
page: Any = await browser.get(url)
nav_elapsed = time.time() - nav_start
# 리소스 블로킹 (CDP 활용) - CSS, 폰트, 이미지 등 차단으로 속도 향상
block_start = time.time()
try:
await page.send(zd.cdp.network.set_blocked_urls(urls=[
"*.jpg", "*.jpeg", "*.png", "*.gif", "*.svg", "*.webp", "*.ico",
"*.css", "*.woff", "*.woff2", "*.ttf", "*.eot",
"*ads*", "*google-analytics*", "*googletagmanager*", "*doubleclick*"
]))
await page.send(zd.cdp.network.enable())
except Exception as e:
log_debug(f"[ZendriverDaemon] Resource blocking enable failed: {e}")
block_elapsed = time.time() - block_start
# 페이지 로드 대기 - 지능형 폴링 (최대 10초)
# 1. 리스트 페이지는 바로 반환, 2. 에피소드 페이지는 플레이어 로딩 대기
max_wait = 10
poll_interval = 0.2 # 1.0s -> 0.2s로 단축하여 반응속도 향상
poll_interval = 0.1 # 0.2s -> 0.1s로 더 빠르게 체크
waited = 0
html_content = ""
@@ -279,18 +313,25 @@ async def fetch_with_browser(url: str, timeout: int = 30) -> Dict[str, Any]:
log_debug(f"[ZendriverDaemon] Player detected in {waited:.1f}s")
break
elapsed: float = time.time() - start_time
poll_elapsed = time.time() - poll_start
total_elapsed = time.time() - start_time
if html_content and len(html_content) > 100:
result.update({
"success": True,
"html": html_content,
"elapsed": round(elapsed, 2)
"elapsed": round(total_elapsed, 2),
"metrics": {
"init": round(init_elapsed, 2),
"nav": round(nav_elapsed, 2),
"block": round(block_elapsed, 2),
"poll": round(poll_elapsed, 2)
}
})
log_debug(f"[ZendriverDaemon] Fetch success in {elapsed:.2f}s (Length: {len(html_content)})")
log_debug(f"[ZendriverDaemon] Success in {total_elapsed:.2f}s (Nav: {nav_elapsed:.2f}s, Poll: {poll_elapsed:.2f}s)")
else:
result["error"] = f"Short response: {len(html_content) if html_content else 0} bytes"
result["elapsed"] = round(elapsed, 2)
result["elapsed"] = round(total_elapsed, 2)
log_debug(f"[ZendriverDaemon] Fetch failure: Short response ({len(html_content) if html_content else 0} bytes)")
# 여기서 page.close()를 하지 않음! (탭을 하나라도 남겨두어야 StopIteration 방지 가능)

View File

@@ -443,6 +443,10 @@ class LogicOhli24(AnimeModuleBase):
self.web_list_model = ModelOhli24Item
default_route_socketio_module(self, attach="/queue")
@staticmethod
def get_base_url():
    """Return the configured Ohli24 site URL with any trailing slash stripped.

    Centralizes the `rstrip('/')` normalization so URL concatenations
    elsewhere (e.g. base + "/bbs/...") cannot produce "//" and trigger
    unnecessary redirects.
    """
    return P.ModelSetting.get("ohli24_url").rstrip('/')
def cleanup_stale_temps(self) -> None:
"""서버 시작 시 잔여 tmp 폴더 정리"""
try:
@@ -1272,7 +1276,7 @@ class LogicOhli24(AnimeModuleBase):
# print()
# print(today.weekday())
url = f'{P.ModelSetting.get("ohli24_url")}/bbs/board.php?bo_table=ing&sca={week[today.weekday()]}'
url = f'{LogicOhli24.get_base_url()}/bbs/board.php?bo_table=ing&sca={week[today.weekday()]}'
# print(url)
@@ -1300,7 +1304,7 @@ class LogicOhli24(AnimeModuleBase):
elif len(content_code_list) > 0:
for item in content_code_list:
url = P.ModelSetting.get("ohli24_url") + "/c/" + item
url = LogicOhli24.get_base_url() + "/c/" + item
logger.debug(f"scheduling url: {url}")
# ret_data = LogicOhli24.get_auto_anime_info(self, url=url)
content_info = self.get_series_info(item, "", "")
@@ -1418,9 +1422,9 @@ class LogicOhli24(AnimeModuleBase):
if image:
if image.startswith(".."):
image = image.replace("..", P.ModelSetting.get("ohli24_url"))
image = image.replace("..", LogicOhli24.get_base_url())
elif not image.startswith("http"):
image = P.ModelSetting.get("ohli24_url") + image
image = LogicOhli24.get_base_url() + image
logger.info(f"image:: {image}")
@@ -1473,7 +1477,7 @@ class LogicOhli24(AnimeModuleBase):
href = a_elem.get("href", "")
if not href.startswith("http"):
href = P.ModelSetting.get("ohli24_url").rstrip("/") + href
href = LogicOhli24.get_base_url() + href
# 부모에서 날짜 찾기
parent = a_elem.getparent()
@@ -1645,7 +1649,9 @@ class LogicOhli24(AnimeModuleBase):
"""카테고리별 애니메이션 목록 조회."""
logger.debug(f"get_anime_info: cate={cate}, page={page}, sca={sca}")
try:
url = P.ModelSetting.get("ohli24_url") + "/bbs/board.php?bo_table=" + cate + "&page=" + page
# URL 끝 슬래시 제거 로직 추가
base_url = P.ModelSetting.get("ohli24_url").rstrip('/')
url = base_url + "/bbs/board.php?bo_table=" + cate + "&page=" + page
if sca:
url += "&sca=" + sca
logger.info("url:::> %s", url)
@@ -1669,7 +1675,7 @@ class LogicOhli24(AnimeModuleBase):
if len(item.xpath(".//div[@class='img-item']/img/@src")) > 0:
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[0].replace(
"..", P.ModelSetting.get("ohli24_url")
"..", LogicOhli24.get_base_url()
)
else:
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@data-ezsrc")[0]
@@ -1700,7 +1706,7 @@ class LogicOhli24(AnimeModuleBase):
entity["code"] = entity["link"].split("/")[-1]
entity["title"] = item.xpath(".//div[@class='post-title']/text()")[0].strip()
entity["image_link"] = item.xpath(".//div[@class='img-item']/img/@src")[0].replace(
"..", P.ModelSetting.get("ohli24_url")
"..", LogicOhli24.get_base_url()
)
data["ret"] = "success"
data["anime_list"].append(entity)
@@ -1717,7 +1723,7 @@ class LogicOhli24(AnimeModuleBase):
try:
_query = urllib.parse.quote(query)
url = (
P.ModelSetting.get("ohli24_url")
LogicOhli24.get_base_url()
+ "/bbs/search.php?srows=24&gr_id=&sfl=wr_subject&stx="
+ _query
+ "&page="
@@ -1747,7 +1753,7 @@ class LogicOhli24(AnimeModuleBase):
for attr in img_attributes:
matches = item.xpath(attr)
if matches and matches[0].strip():
original_img = matches[0].replace("..", P.ModelSetting.get("ohli24_url"))
original_img = matches[0].replace("..", LogicOhli24.get_base_url())
break
if not original_img:
@@ -1781,7 +1787,7 @@ class LogicOhli24(AnimeModuleBase):
# Fetch image with referer
headers = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
"Referer": P.ModelSetting.get("ohli24_url") + "/",
"Referer": LogicOhli24.get_base_url() + "/",
}
# Use stream=True to handle binary data efficiently
@@ -1947,18 +1953,53 @@ class LogicOhli24(AnimeModuleBase):
headers["Referer"] = "https://ani.ohli24.com"
# === [Layer 1: Botasaurus @request (빠름 - HTTP Request)] ===
# Ohli24에서 Connection Reset 이슈로 인해 현재는 주석 처리 (Zendriver 최적화 집중)
"""
# === [Layer 3A: Zendriver Daemon (Primary - Persistent Browser)] ===
# 리눅스/도커 차단 환경 대응: 가장 확실하고 빠른 젠드라이버 데몬을 최우선으로 시도
if not response_data or len(response_data) < 10:
if LogicOhli24.ensure_essential_dependencies():
if LogicOhli24.is_zendriver_daemon_running():
logger.debug(f"[Layer3A] Trying Zendriver Daemon: {url}")
daemon_result = LogicOhli24.fetch_via_daemon(url, 30)
if daemon_result.get("success") and daemon_result.get("html"):
elapsed = time.time() - total_start
logger.info(f"[Layer3A] Success in {elapsed:.2f}s (HTML: {len(daemon_result['html'])})")
LogicOhli24.daemon_fail_count = 0
return daemon_result["html"]
else:
logger.warning(f"[Layer3A] Daemon failed: {daemon_result.get('error', 'Unknown')}")
LogicOhli24.daemon_fail_count += 1
# === [Layer 1: curl-cffi (Fallback 1)] ===
if not response_data or len(response_data) < 10:
try:
from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeoutError
logger.debug(f"[Layer1] Trying curl_cffi: {url}")
with ThreadPoolExecutor(max_workers=1) as executor:
future = executor.submit(fetch_url_with_cffi, url, headers, 15, data, method)
response_data = future.result(timeout=20)
if response_data and len(response_data) > 500:
logger.info(f"[Layer1] curl_cffi success, HTML len: {len(response_data)}")
return response_data
else:
response_data = ""
except Exception as e:
logger.warning(f"[Layer1] curl_cffi failed: {e}")
response_data = ""
# === [Layer 2: Botasaurus @request (Mac Subprocess / Stealth)] ===
if not response_data or len(response_data) < 10:
# 리스트/검색 페이지에서 Botasaurus 활용 (Zendriver보다 빠름)
is_list_page = any(x in url for x in ["bo_table=", "/anime/", "search"])
if is_list_page and LogicOhli24.ensure_essential_dependencies():
import platform
is_mac = platform.system() == "Darwin"
try:
if is_mac:
# Mac에서는 gevent-Trio 충돌로 인해 서브프로세스로 실행
logger.debug(f"[Layer1] Trying Botasaurus subprocess (Mac workaround): {url}")
logger.debug(f"[Layer2] Trying Botasaurus subprocess (Mac): {url}")
import subprocess
script_path = os.path.join(os.path.dirname(__file__), "lib", "botasaurus_ohli24.py")
@@ -1967,21 +2008,27 @@ class LogicOhli24(AnimeModuleBase):
cmd,
capture_output=True,
text=True,
timeout=timeout + 30
timeout=timeout + 15
)
if result.returncode == 0 and result.stdout.strip():
b_result = json.loads(result.stdout.strip())
if b_result.get("success") and b_result.get("html"):
logger.info(f"[Layer1] Botasaurus(sub) success, HTML len: {len(b_result['html'])}")
return b_result["html"]
else:
logger.warning(f"[Layer1] Botasaurus(sub) failed: {b_result.get('error')}")
try:
b_result = json.loads(result.stdout.strip())
if b_result.get("success") and b_result.get("html"):
logger.info(f"[Layer2] Botasaurus(sub) success, HTML len: {len(b_result['html'])} (Attempt: {b_result.get('attempt', 1)})")
return b_result["html"]
else:
logger.warning(f"[Layer2] Botasaurus(sub) logic failed: {b_result.get('error')}")
if b_result.get("traceback"):
logger.debug(f"Botasaurus Traceback: {b_result.get('traceback')}")
except json.JSONDecodeError:
logger.error(f"[Layer2] Botasaurus JSON Decode Error. Output: {result.stdout[:200]}")
logger.debug(f"Botasaurus Stderr: {result.stderr}")
else:
logger.warning(f"[Layer1] Botasaurus subprocess error: {result.stderr}")
logger.warning(f"[Layer2] Botasaurus subprocess error (RC: {result.returncode}): {result.stderr}")
else:
# Linux 등에서는 (monkey-patching 문제가 없다면) 직접 실행 시도
logger.debug(f"[Layer1] Trying Botasaurus @request (Direct): {url}")
# Linux 등에서는 직접 실행 시도
logger.debug(f"[Layer2] Trying Botasaurus @request (Direct): {url}")
from botasaurus.request import request as b_request
@b_request(headers=headers, use_stealth=True, proxy=LogicOhli24.get_proxy())
@@ -1989,17 +2036,15 @@ class LogicOhli24(AnimeModuleBase):
return request.get(data)
b_resp = fetch_url(url)
if b_resp and len(b_resp) > 10:
logger.info(f"[Layer1] Botasaurus success, HTML len: {len(b_resp)}")
if b_resp and len(b_resp) > 500:
logger.info(f"[Layer2] Botasaurus success, HTML len: {len(b_resp)}")
return b_resp
else:
logger.warning(f"[Layer1] Botasaurus short response: {len(b_resp) if b_resp else 0}")
logger.warning(f"[Layer2] Botasaurus short response: {len(b_resp) if b_resp else 0}")
except Exception as e:
logger.warning(f"[Layer1] Botasaurus failed: {e}")
"""
logger.warning(f"[Layer2] Botasaurus failed: {e}")
# === [TEST MODE] Layer 1 (기존 것들) 일시 비활성화 - Layer 3, 4만 테스트 ===
response_data = "" # 바로 Layer 3로 이동
response_data = ""
# max_retries = 3
# for attempt in range(max_retries):
@@ -2049,33 +2094,7 @@ class LogicOhli24(AnimeModuleBase):
# logger.warning(f"[Layer2] Cloudscraper failed: {e}")
# --- Layer 3A: Zendriver Daemon (빠름 - 브라우저 상시 대기) ---
if not response_data or len(response_data) < 10:
if LogicOhli24.is_zendriver_daemon_running():
# 30초 타임아웃 적용
logger.debug(f"[Layer3A] Trying Zendriver Daemon: {url} (Timeout: 30s)")
daemon_result = LogicOhli24.fetch_via_daemon(url, 30)
if daemon_result.get("success") and daemon_result.get("html"):
elapsed = time.time() - total_start
logger.info(f"[Ohli24] Fetch success via Layer3A: {url} in {elapsed:.2f}s (HTML: {len(daemon_result['html'])})")
# 성공 시 연속 실패 카운트 초기화
LogicOhli24.daemon_fail_count = 0
return daemon_result["html"]
else:
error_msg = daemon_result.get('error', 'Unknown')
logger.warning(f"[Layer3A] Daemon failed: {error_msg}")
# 실패 카운트 증가 및 10회 누적 시 재시작
LogicOhli24.daemon_fail_count += 1
if LogicOhli24.daemon_fail_count >= 10:
logger.error(f"[Layer3A] Daemon failed {LogicOhli24.daemon_fail_count} times consecutively. Restarting daemon...")
try:
import subprocess
subprocess.run(['pkill', '-f', 'zendriver_daemon'], check=False)
LogicOhli24.daemon_fail_count = 0
except Exception as e:
logger.error(f"Failed to kill daemon: {e}")
# (Layer 3A was moved to the top)
# --- Layer 3B: Zendriver Subprocess Fallback (데몬 실패 시) ---
if not response_data or len(response_data) < 10:
@@ -2181,6 +2200,7 @@ class LogicOhli24(AnimeModuleBase):
# 캐시 비활성화 시 바로 fetch
if cache_minutes <= 0:
logger.debug(f"[Cache SKIP] Cache disabled (minutes: {cache_minutes})")
return LogicOhli24.get_html(url, **kwargs)
# 캐시 디렉토리 생성
@@ -2201,8 +2221,14 @@ class LogicOhli24(AnimeModuleBase):
if cached_html and len(cached_html) > 100:
logger.debug(f"[Cache HIT] {url[:60]}... (age: {cache_age:.0f}s)")
return cached_html
else:
logger.debug(f"[Cache MISS] Cached content is empty or too short for {url[:60]}...")
except Exception as e:
logger.warning(f"[Cache READ ERROR] {e}")
else:
logger.debug(f"[Cache EXPIRED] {url[:60]}... (age: {cache_age:.0f}s, expiry: {cache_minutes * 60}s)")
else:
logger.debug(f"[Cache MISS] No cache file found for {url[:60]}")
# 신규 fetch
html = LogicOhli24.get_html(url, **kwargs)
@@ -2821,13 +2847,17 @@ class Ohli24QueueEntity(AnimeQueueEntity):
# [Lazy Extraction] prepare_extra() replaces make_episode_info()
def prepare_extra(self):
try:
base_url = P.ModelSetting.get("ohli24_url")
base_url = LogicOhli24.get_base_url()
# 에피소드 페이지 URL (예: https://ani.ohli24.com/e/원펀맨 3기 1화)
url = self.info["va"]
if "//e/" in url:
url = url.replace("//e/", "/e/")
# URL Sanitization for va
if base_url in url and f"{base_url}//" in url:
url = url.replace(f"{base_url}//", f"{base_url}/")
ourls = parse.urlparse(url)
headers = {