diff --git a/lib/cdndania_downloader.py b/lib/cdndania_downloader.py
index dca1e5b..0f8e9e0 100644
--- a/lib/cdndania_downloader.py
+++ b/lib/cdndania_downloader.py
@@ -20,14 +20,16 @@ logger = logging.getLogger(__name__)
class CdndaniaDownloader:
"""cdndania.com 전용 다운로더 (세션 기반 보안 우회)"""
- def __init__(self, iframe_src, output_path, referer_url=None, callback=None, proxy=None, threads=16):
+ def __init__(self, iframe_src, output_path, referer_url=None, callback=None, proxy=None, threads=16, on_download_finished=None):
self.iframe_src = iframe_src # cdndania.com 플레이어 iframe URL
self.output_path = output_path
self.referer_url = referer_url or "https://ani.ohli24.com/"
self.callback = callback
self.proxy = proxy
self.threads = threads
+ self.on_download_finished = on_download_finished
self.cancelled = False
+ self.released = False # 조기 반환 여부
# 진행 상황 추적
self.start_time = None
@@ -92,6 +94,13 @@ class CdndaniaDownloader:
content = f.read().strip()
if content:
progress = json.loads(content)
+ # 조기 반환 체크 (merging 상태이면 네트워크 완료로 간주)
+ status = progress.get('status', 'downloading')
+ if status == 'merging' and not self.released:
+ if self.on_download_finished:
+ self.on_download_finished()
+ self.released = True
+
if self.callback and progress.get('percent', 0) > 0:
self.callback(
percent=progress.get('percent', 0),
@@ -164,17 +173,21 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
)
log = logging.getLogger(__name__)
- def update_progress(percent, current, total, speed, elapsed):
+ def update_progress(percent, current, total, speed, elapsed, status=None):
"""진행 상황을 파일에 저장"""
try:
+ data = {
+ 'percent': percent,
+ 'current': current,
+ 'total': total,
+ 'speed': speed,
+ 'elapsed': elapsed
+ }
+ if status:
+ data['status'] = status
+
with open(progress_path, 'w') as f:
- json.dump({
- 'percent': percent,
- 'current': current,
- 'total': total,
- 'speed': speed,
- 'elapsed': elapsed
- }, f)
+ json.dump(data, f)
except:
pass
@@ -350,7 +363,8 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
total_segments = len(segments)
log.info(f"Temp directory: {temp_dir}")
- log.info(f"Starting parallel download with {threads} threads for {total_segments} segments...")
+ # 다운로드 worker
+ log.info(f"Starting optimized download: Binary Merge Mode (Threads: {threads})")
# 세그먼트 다운로드 함수
def download_segment(index, url):
@@ -422,6 +436,9 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
log.info("All segments downloaded successfully.")
+ # 조기 반환 신호 (merging 상태 기록)
+ update_progress(100, total_segments, total_segments, "", "", status="merging")
+
# 7. ffmpeg로 합치기
log.info("Concatenating segments with ffmpeg...")
concat_file = os.path.join(temp_dir, "concat.txt")
diff --git a/lib/ffmpeg_queue_v1.py b/lib/ffmpeg_queue_v1.py
index 165a0ed..28e83d3 100644
--- a/lib/ffmpeg_queue_v1.py
+++ b/lib/ffmpeg_queue_v1.py
@@ -265,8 +265,8 @@ class FfmpegQueue(object):
# [주의] cdndania는 yt-dlp로 받으면 14B 가짜 파일(보안 차단)이 받아지므로
# aria2c 선택 여부와 무관하게 전용 다운로더(CdndaniaDownloader)를 써야 함.
# 대신 CdndaniaDownloader 내부에 멀티스레드(16)를 구현하여 속도를 해결함.
- if 'cdndania.com' in video_url:
- logger.info(f"Detected cdndania.com URL - using Optimized CdndaniaDownloader (curl_cffi + {download_threads} threads)")
+ if getattr(entity, 'need_special_downloader', False) or 'cdndania.com' in video_url or 'michealcdn.com' in video_url:
+ logger.info(f"Detected special CDN requirement (flag={getattr(entity, 'need_special_downloader', False)}) - using Optimized CdndaniaDownloader")
download_method = "cdndania"
logger.info(f"Download method: {download_method}")
@@ -298,6 +298,14 @@ class FfmpegQueue(object):
if not _iframe_src:
# 폴백: headers의 Referer에서 가져오기
_iframe_src = getattr(entity_ref, 'headers', {}).get('Referer', video_url)
+ # 슬롯 조기 반환을 위한 콜백
+ slot_released = [False]
+ def release_slot():
+ if not slot_released[0]:
+ downloader_self.current_ffmpeg_count -= 1
+ slot_released[0] = True
+ logger.info(f"Download slot released early (Network finished), current_ffmpeg_count: {downloader_self.current_ffmpeg_count}/{downloader_self.max_ffmpeg_count}")
+
logger.info(f"CdndaniaDownloader iframe_src: {_iframe_src}")
downloader = CdndaniaDownloader(
iframe_src=_iframe_src,
@@ -305,10 +313,13 @@ class FfmpegQueue(object):
referer_url="https://ani.ohli24.com/",
callback=progress_callback,
proxy=_proxy,
- threads=download_threads
+ threads=download_threads,
+ on_download_finished=release_slot # 조기 반환 콜백 전달
)
elif method == "ytdlp" or method == "aria2c":
# yt-dlp 사용 (aria2c 옵션 포함)
+ # yt-dlp는 내부적으로 병합 과정을 포함하므로 조기 반환이 어려울 수 있음 (추후 지원 고려)
+ slot_released = [False]
from .ytdlp_downloader import YtdlpDownloader
logger.info(f"Using yt-dlp downloader (method={method})...")
# 엔티티에서 쿠키 파일 가져오기 (있는 경우)
@@ -323,8 +334,8 @@ class FfmpegQueue(object):
use_aria2c=(method == "aria2c"),
threads=download_threads
)
-
else:
+ slot_released = [False]
# 기본: HLS 다운로더 사용
from .hls_downloader import HlsDownloader
logger.info("Using custom HLS downloader for m3u8 URL...")
@@ -344,14 +355,16 @@ class FfmpegQueue(object):
downloader.cancel()
entity_ref.ffmpeg_status_kor = "취소됨"
entity_ref.refresh_status()
- downloader_self.current_ffmpeg_count -= 1
+ if not slot_released[0]:
+ downloader_self.current_ffmpeg_count -= 1
return
success, message = downloader.download()
- # 다운로드 완료 후 카운트 감소
- downloader_self.current_ffmpeg_count -= 1
- logger.info(f"Download finished, current_ffmpeg_count: {downloader_self.current_ffmpeg_count}/{downloader_self.max_ffmpeg_count}")
+ # 다운로드 완료 후 카운트 감소 (이미 반환되었으면 스킵)
+ if not slot_released[0]:
+ downloader_self.current_ffmpeg_count -= 1
+ logger.info(f"Download finished (Slot released normally), current_ffmpeg_count: {downloader_self.current_ffmpeg_count}/{downloader_self.max_ffmpeg_count}")
if success:
entity_ref.ffmpeg_status = 7 # COMPLETED
diff --git a/mod_ohli24.py b/mod_ohli24.py
index e9f16de..635806e 100644
--- a/mod_ohli24.py
+++ b/mod_ohli24.py
@@ -66,11 +66,23 @@ class LogicOhli24(PluginModuleBase):
origin_url = None
episode_url = None
cookies = None
- proxy = "http://192.168.0.2:3138"
- proxies = {
- "http": proxy,
- "https": proxy,
- }
+
+ # proxy = "http://192.168.0.2:3138"
+ # proxies = {
+ # "http": proxy,
+ # "https": proxy,
+ # }
+
+ @classmethod
+ def get_proxy(cls):
+ return P.ModelSetting.get("ohli24_proxy_url")
+
+ @classmethod
+ def get_proxies(cls):
+ proxy = cls.get_proxy()
+ if proxy:
+ return {"http": proxy, "https": proxy}
+ return None
session = requests.Session()
@@ -98,6 +110,7 @@ class LogicOhli24(PluginModuleBase):
self.db_default = {
"ohli24_db_version": "1",
+ "ohli24_proxy_url": "",
"ohli24_url": "https://ani.ohli24.com",
"ohli24_download_path": os.path.join(path_data, P.package_name, "ohli24"),
"ohli24_auto_make_folder": "True",
@@ -252,8 +265,7 @@ class LogicOhli24(PluginModuleBase):
P.logger.debug("web_list3")
ret = ModelOhli24Item.web_list(req)
print(ret)
- # return jsonify("test")
- # return jsonify(ModelOhli24Item.web_list(req))
+ return jsonify(ret)
elif sub == "web_list2":
@@ -845,7 +857,7 @@ class LogicOhli24(PluginModuleBase):
@staticmethod
def get_html(url, headers=None, referer=None, stream=False, timeout=60, stealth=False, data=None, method='GET'):
- """별도 스레드에서 cloudscraper 실행하여 gevent SSL 충돌 및 Cloudflare 우회"""
+ """별도 스레드에서 curl_cffi 실행하여 gevent SSL 충돌 및 Cloudflare 우회"""
from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeoutError
import time
from urllib import parse
@@ -861,32 +873,34 @@ class LogicOhli24(PluginModuleBase):
except:
pass
- def fetch_url_with_cloudscraper(url, headers, timeout, data, method):
- """별도 스레드에서 cloudscraper로 실행"""
- import cloudscraper
- scraper = cloudscraper.create_scraper(
- browser={'browser': 'chrome', 'platform': 'darwin', 'mobile': False},
- delay=10
- )
- # 프록시 설정 (필요시 사용)
- proxies = LogicOhli24.proxies
- if method.upper() == 'POST':
- response = scraper.post(url, headers=headers, data=data, timeout=timeout, proxies=proxies)
- else:
- response = scraper.get(url, headers=headers, timeout=timeout, proxies=proxies)
- return response.text
+ def fetch_url_with_cffi(url, headers, timeout, data, method):
+ """별도 스레드에서 curl_cffi로 실행"""
+ from curl_cffi import requests
+
+ # 프록시 설정
+ proxies = LogicOhli24.get_proxies()
+
+ with requests.Session(impersonate="chrome120") as session:
+ # 헤더 설정
+ if headers:
+ session.headers.update(headers)
+
+ if method.upper() == 'POST':
+ response = session.post(url, data=data, timeout=timeout, proxies=proxies)
+ else:
+ response = session.get(url, timeout=timeout, proxies=proxies)
+ return response.text
response_data = ""
if headers is None:
headers = {
- "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
- "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
- "accept-language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7",
+ "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
+ "Accept-Language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7",
}
if referer:
- # Referer 인코딩
if '://' in referer:
try:
scheme, netloc, path, params, query, fragment = parse.urlparse(referer)
@@ -895,18 +909,18 @@ class LogicOhli24(PluginModuleBase):
referer = parse.urlunparse((scheme, netloc, path, params, query, fragment))
except:
pass
- headers["referer"] = referer
- elif "referer" not in headers:
- headers["referer"] = "https://ani.ohli24.com"
+ headers["Referer"] = referer
+ elif "Referer" not in headers and "referer" not in headers:
+ headers["Referer"] = "https://ani.ohli24.com"
max_retries = 3
for attempt in range(max_retries):
try:
- logger.debug(f"get_html (cloudscraper in thread) {method} attempt {attempt + 1}: {url}")
+ logger.debug(f"get_html (curl_cffi in thread) {method} attempt {attempt + 1}: {url}")
- # ThreadPoolExecutor로 별도 스레드에서 cloudscraper 실행
+ # ThreadPoolExecutor로 별도 스레드에서 실행
with ThreadPoolExecutor(max_workers=1) as executor:
- future = executor.submit(fetch_url_with_cloudscraper, url, headers, timeout, data, method)
+ future = executor.submit(fetch_url_with_cffi, url, headers, timeout, data, method)
response_data = future.result(timeout=timeout + 10)
if response_data and (len(response_data) > 10 or method.upper() == 'POST'):
@@ -983,8 +997,10 @@ class LogicOhli24(PluginModuleBase):
return False
def callback_function(self, **args):
- logger.debug("callback_function============")
- logger.debug(args)
+ logger.debug(f"callback_function invoked with args: {args}")
+ if 'status' in args:
+ logger.debug(f"Status: {args['status']}")
+
refresh_type = None
if args["type"] == "status_change":
if args["status"] == SupportFfmpeg.Status.DOWNLOADING:
@@ -1003,11 +1019,15 @@ class LogicOhli24(PluginModuleBase):
# socketio.emit("notify", data, namespace='/framework', broadcast=True)
refresh_type = "add"
elif args["type"] == "last":
+ entity = self.queue.get_entity_by_entity_id(args['data']['callback_id'])
+
if args["status"] == SupportFfmpeg.Status.WRONG_URL:
+ if entity: entity.download_failed("WRONG_URL")
data = {"type": "warning", "msg": "잘못된 URL입니다"}
socketio.emit("notify", data, namespace="/framework", broadcast=True)
refresh_type = "add"
elif args["status"] == SupportFfmpeg.Status.WRONG_DIRECTORY:
+ if entity: entity.download_failed("WRONG_DIRECTORY")
data = {
"type": "warning",
"msg": "잘못된 디렉토리입니다.<br>" + args["data"]["save_fullpath"],
@@ -1015,6 +1035,7 @@ class LogicOhli24(PluginModuleBase):
socketio.emit("notify", data, namespace="/framework", broadcast=True)
refresh_type = "add"
elif args["status"] == SupportFfmpeg.Status.ERROR or args["status"] == SupportFfmpeg.Status.EXCEPTION:
+ if entity: entity.download_failed("ERROR/EXCEPTION")
data = {
"type": "warning",
"msg": "다운로드 시작 실패.<br>" + args["data"]["save_fullpath"],
@@ -1022,6 +1043,7 @@ class LogicOhli24(PluginModuleBase):
socketio.emit("notify", data, namespace="/framework", broadcast=True)
refresh_type = "add"
elif args["status"] == SupportFfmpeg.Status.USER_STOP:
+ if entity: entity.download_failed("USER_STOP")
data = {
"type": "warning",
"msg": "다운로드가 중지 되었습니다.<br>" + args["data"]["save_fullpath"],
@@ -1041,6 +1063,7 @@ class LogicOhli24(PluginModuleBase):
refresh_type = "last"
elif args["status"] == SupportFfmpeg.Status.TIME_OVER:
+ if entity: entity.download_failed("TIME_OVER")
data = {
"type": "warning",
"msg": "시간초과로 중단 되었습니다.<br>" + args["data"]["save_fullpath"],
@@ -1049,6 +1072,7 @@ class LogicOhli24(PluginModuleBase):
socketio.emit("notify", data, namespace="/framework", broadcast=True)
refresh_type = "last"
elif args["status"] == SupportFfmpeg.Status.PF_STOP:
+ if entity: entity.download_failed("PF_STOP")
data = {
"type": "warning",
"msg": "PF초과로 중단 되었습니다.<br>" + args["data"]["save_fullpath"],
@@ -1057,6 +1081,7 @@ class LogicOhli24(PluginModuleBase):
socketio.emit("notify", data, namespace="/framework", broadcast=True)
refresh_type = "last"
elif args["status"] == SupportFfmpeg.Status.FORCE_STOP:
+ if entity: entity.download_failed("FORCE_STOP")
data = {
"type": "warning",
"msg": "강제 중단 되었습니다.<br>" + args["data"]["save_fullpath"],
@@ -1065,6 +1090,7 @@ class LogicOhli24(PluginModuleBase):
socketio.emit("notify", data, namespace="/framework", broadcast=True)
refresh_type = "last"
elif args["status"] == SupportFfmpeg.Status.HTTP_FORBIDDEN:
+ if entity: entity.download_failed("HTTP_FORBIDDEN")
data = {
"type": "warning",
"msg": "403에러로 중단 되었습니다.<br>" + args["data"]["save_fullpath"],
@@ -1073,6 +1099,8 @@ class LogicOhli24(PluginModuleBase):
socketio.emit("notify", data, namespace="/framework", broadcast=True)
refresh_type = "last"
elif args["status"] == SupportFfmpeg.Status.ALREADY_DOWNLOADING:
+ # Already downloading usually means logic error or race condition, maybe not fail DB?
+ # Keeping as is for now unless requested.
data = {
"type": "warning",
"msg": "임시파일폴더에 파일이 있습니다.<br>" + args["data"]["temp_fullpath"],
@@ -1103,11 +1131,17 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
self.srt_url = None
self.headers = None
self.cookies_file = None # yt-dlp용 CDN 세션 쿠키 파일 경로
+ self.need_special_downloader = False # CDN 보안 우회 다운로더 필요 여부
# Todo::: 임시 주석 처리
self.make_episode_info()
def refresh_status(self):
+ # ffmpeg_queue_v1.py에서 실패 처리(-1)된 경우 DB 업데이트 트리거
+ if getattr(self, 'ffmpeg_status', 0) == -1:
+ reason = getattr(self, 'ffmpeg_status_kor', 'Unknown Error')
+ self.download_failed(reason)
+
self.module_logic.socketio_callback("status", self.as_dict())
# 추가: /queue 네임스페이스로도 명시적으로 전송
try:
@@ -1133,7 +1167,14 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
db_entity = ModelOhli24Item.get_by_ohli24_id(self.info["_id"])
if db_entity is not None:
db_entity.status = "completed"
- db_entity.complated_time = datetime.now()
+ db_entity.completed_time = datetime.now()
+ db_entity.save()
+
+ def download_failed(self, reason):
+ logger.debug(f"download failed.......!! reason: {reason}")
+ db_entity = ModelOhli24Item.get_by_ohli24_id(self.info["_id"])
+ if db_entity is not None:
+ db_entity.status = "failed"
db_entity.save()
# Get episode info from OHLI24 site
@@ -1154,74 +1195,10 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
}
logger.debug(f"make_episode_info()::url==> {url}")
logger.info(f"self.info:::> {self.info}")
-
- # Step 1: 에피소드 페이지에서 cdndania.com iframe 찾기
- text = LogicOhli24.get_html(url, headers=headers, referer=f"{ourls.scheme}://{ourls.netloc}")
-
- # 디버깅: HTML에 cdndania 있는지 확인
- if "cdndania" in text:
- logger.info("cdndania found in HTML")
- else:
- logger.warning("cdndania NOT found in HTML - page may be dynamically loaded")
- logger.debug(f"HTML snippet: {text[:1000]}")
-
- soup = BeautifulSoup(text, "lxml")
-
- # mcpalyer 클래스 내의 iframe 찾기
- player_div = soup.find("div", class_="mcpalyer")
- logger.debug(f"player_div (mcpalyer): {player_div is not None}")
-
- if not player_div:
- player_div = soup.find("div", class_="embed-responsive")
- logger.debug(f"player_div (embed-responsive): {player_div is not None}")
-
- iframe = None
- if player_div:
- iframe = player_div.find("iframe")
- logger.debug(f"iframe in player_div: {iframe is not None}")
- if not iframe:
- iframe = soup.find("iframe", src=re.compile(r"cdndania\.com"))
- logger.debug(f"iframe with cdndania src: {iframe is not None}")
- if not iframe:
- # 모든 iframe 찾기
- all_iframes = soup.find_all("iframe")
- logger.debug(f"Total iframes found: {len(all_iframes)}")
- for i, f in enumerate(all_iframes):
- logger.debug(f"iframe {i}: src={f.get('src', 'no src')}")
- if all_iframes:
- iframe = all_iframes[0]
-
- if not iframe or not iframe.get("src"):
- logger.error("No iframe found on episode page")
- return
-
- iframe_src = iframe.get("src")
- logger.info(f"Found cdndania iframe: {iframe_src}")
-
- # Step 2: cdndania.com 페이지에서 m3u8 URL 추출
- video_url, vtt_url, cookies_file = self.extract_video_from_cdndania(iframe_src, url)
-
- if not video_url:
- logger.error("Failed to extract video URL from cdndania")
- return
-
- self.url = video_url
- self.srt_url = vtt_url
- self.cookies_file = cookies_file # yt-dlp용 세션 쿠키 파일
- self.iframe_src = iframe_src # CdndaniaDownloader용 원본 iframe URL
- logger.info(f"Video URL: {self.url}")
- if self.srt_url:
- logger.info(f"Subtitle URL: {self.srt_url}")
- if self.cookies_file:
- logger.info(f"Cookies file: {self.cookies_file}")
-
- # 헤더 설정
- self.headers = {
- "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
- "Referer": iframe_src,
- }
-
+ # ------------------------------------------------------------------
+ # [METADATA PARSING] - Extract title, season, epi info first!
+ # ------------------------------------------------------------------
# 파일명 생성
match = re.compile(r"(?P