feat: Implement parallel segment downloading, enhance UI/UX with smooth transitions and navigation styling, and add new log and manual templates.

This commit is contained in:
2025-12-30 00:50:13 +09:00
parent a0ecedd9a9
commit 51c91c8e52
25 changed files with 2081 additions and 87 deletions

View File

@@ -20,12 +20,13 @@ logger = logging.getLogger(__name__)
class CdndaniaDownloader:
"""cdndania.com 전용 다운로더 (세션 기반 보안 우회)"""
def __init__(self, iframe_src, output_path, referer_url=None, callback=None, proxy=None):
def __init__(self, iframe_src, output_path, referer_url=None, callback=None, proxy=None, threads=16):
self.iframe_src = iframe_src # cdndania.com 플레이어 iframe URL
self.output_path = output_path
self.referer_url = referer_url or "https://ani.ohli24.com/"
self.callback = callback
self.proxy = proxy
self.threads = threads
self.cancelled = False
# 진행 상황 추적
@@ -52,7 +53,8 @@ class CdndaniaDownloader:
self.output_path,
self.referer_url or "",
self.proxy or "",
progress_path
progress_path,
str(self.threads)
]
logger.info(f"Starting download subprocess: {self.iframe_src}")
@@ -144,7 +146,7 @@ class CdndaniaDownloader:
self.process.terminate()
def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path):
def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path, threads=16):
"""실제 다운로드 작업 (subprocess에서 실행)"""
import sys
import os
@@ -329,72 +331,104 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path)
log.info(f"Found {len(segments)} segments")
# 6. 세그먼트 다운로드
# 6. 세그먼트 다운로드 (병렬 처리)
start_time = time.time()
last_speed_time = start_time
total_bytes = 0
last_bytes = 0
current_speed = 0
with tempfile.TemporaryDirectory() as temp_dir:
segment_files = []
# 진행 상황 공유 변수 (Thread-safe하게 관리 필요)
completed_segments = 0
lock = threading.Lock()
# 출력 디렉토리 미리 생성 (임시 폴더 생성을 위해)
output_dir = os.path.dirname(output_path)
if output_dir and not os.path.exists(output_dir):
os.makedirs(output_dir)
with tempfile.TemporaryDirectory(dir=output_dir) as temp_dir:
segment_files = [None] * len(segments) # 순서 보장을 위해 미리 할당
total_segments = len(segments)
log.info(f"Temp directory: {temp_dir}")
log.info(f"Starting parallel download with {threads} threads for {total_segments} segments...")
for i, segment_url in enumerate(segments):
segment_path = os.path.join(temp_dir, f"segment_{i:05d}.ts")
# 매 20개마다 또는 첫 5개 로그
if i < 5 or i % 20 == 0:
log.info(f"Downloading segment {i+1}/{total_segments}")
# 세그먼트 다운로드 함수
def download_segment(index, url):
nonlocal completed_segments, total_bytes
try:
seg_resp = session.get(segment_url, headers=m3u8_headers,
proxies=proxies, timeout=120)
if seg_resp.status_code != 200:
time.sleep(0.5)
seg_resp = session.get(segment_url, headers=m3u8_headers,
proxies=proxies, timeout=120)
segment_data = seg_resp.content
if len(segment_data) < 100:
print(f"CDN security block: segment {i} returned {len(segment_data)}B", file=sys.stderr)
sys.exit(1)
with open(segment_path, 'wb') as f:
f.write(segment_data)
segment_files.append(f"segment_{i:05d}.ts")
total_bytes += len(segment_data)
# 속도 계산
current_time = time.time()
if current_time - last_speed_time >= 1.0:
bytes_diff = total_bytes - last_bytes
time_diff = current_time - last_speed_time
current_speed = bytes_diff / time_diff if time_diff > 0 else 0
last_speed_time = current_time
last_bytes = total_bytes
# 진행률 업데이트
percent = int(((i + 1) / total_segments) * 100)
elapsed = format_time(current_time - start_time)
update_progress(percent, i + 1, total_segments, format_speed(current_speed), elapsed)
# 재시도 로직
for retry in range(3):
try:
seg_resp = session.get(url, headers=m3u8_headers, proxies=proxies, timeout=30)
if seg_resp.status_code == 200:
content = seg_resp.content
if len(content) < 100:
if retry == 2:
raise Exception(f"Segment data too small ({len(content)}B)")
time.sleep(1)
continue
# 파일 저장
filename = f"segment_{index:05d}.ts"
filepath = os.path.join(temp_dir, filename)
with open(filepath, 'wb') as f:
f.write(content)
# 결과 기록
with lock:
segment_files[index] = filename
total_bytes += len(content)
completed_segments += 1
# 진행률 업데이트 (너무 자주는 말고 10개마다)
if completed_segments % 10 == 0 or completed_segments == total_segments:
pct = int((completed_segments / total_segments) * 100)
elapsed = time.time() - start_time
speed = total_bytes / elapsed if elapsed > 0 else 0
log.info(f"Progress: {pct}% ({completed_segments}/{total_segments}) Speed: {format_speed(speed)}")
update_progress(pct, completed_segments, total_segments, format_speed(speed), format_time(elapsed))
return True
except Exception as e:
if retry == 2:
log.error(f"Seg {index} failed after retries: {e}")
raise e
time.sleep(0.5)
except Exception as e:
log.error(f"Segment {i} download error: {e}")
print(f"Segment {i} download failed: {e}", file=sys.stderr)
sys.exit(1)
return False
# 스레드 풀 실행
from concurrent.futures import ThreadPoolExecutor
# 설정된 스레드 수로 병렬 다운로드
with ThreadPoolExecutor(max_workers=threads) as executor:
futures = []
for i, seg_url in enumerate(segments):
futures.append(executor.submit(download_segment, i, seg_url))
# 모든 작업 완료 대기
for future in futures:
try:
future.result()
except Exception as e:
log.error(f"Thread error: {e}")
print(f"Download thread failed: {e}", file=sys.stderr)
sys.exit(1)
# 다운로드 완료 확인
if completed_segments != total_segments:
print(f"Incomplete download: {completed_segments}/{total_segments}", file=sys.stderr)
sys.exit(1)
log.info("All segments downloaded successfully.")
# 7. ffmpeg로 합치기
log.info("Concatenating segments with ffmpeg...")
concat_file = os.path.join(temp_dir, "concat.txt")
with open(concat_file, 'w') as f:
for seg_file in segment_files:
f.write(f"file '{seg_file}'\n")
if seg_file:
f.write(f"file '{seg_file}'\n")
# 출력 디렉토리 생성
output_dir = os.path.dirname(output_path)
@@ -447,8 +481,9 @@ if __name__ == "__main__":
referer = sys.argv[3] if sys.argv[3] else None
proxy = sys.argv[4] if sys.argv[4] else None
progress_path = sys.argv[5]
threads = int(sys.argv[6]) if len(sys.argv) > 6 else 16
_download_worker(iframe_url, output_path, referer, proxy, progress_path)
_download_worker(iframe_url, output_path, referer, proxy, progress_path, threads)
elif len(sys.argv) >= 3:
# CLI 테스트 모드
logging.basicConfig(level=logging.DEBUG)

View File

@@ -255,12 +255,18 @@ class FfmpegQueue(object):
# m3u8 URL인 경우 다운로드 방법 설정에 따라 분기
if video_url.endswith('.m3u8') or 'master.txt' in video_url or 'gcdn.app' in video_url:
# 다운로드 방법 설정 확인
# 다운로드 방법 및 스레드 설정 확인
download_method = P.ModelSetting.get(f"{self.name}_download_method")
download_threads = P.ModelSetting.get_int(f"{self.name}_download_threads")
if not download_threads:
download_threads = 16
# cdndania.com 감지 시 CdndaniaDownloader 사용 (curl_cffi로 세션 기반 보안 우회)
# [주의] cdndania는 yt-dlp로 받으면 14B 가짜 파일(보안 차단)이 받아지므로
# aria2c 선택 여부와 무관하게 전용 다운로더(CdndaniaDownloader)를 써야 함.
# 대신 CdndaniaDownloader 내부에 멀티스레드(설정값 적용, 기본 16)를 구현하여 속도를 해결함.
if 'cdndania.com' in video_url:
logger.info("Detected cdndania.com URL - using CdndaniaDownloader (curl_cffi session)")
logger.info(f"Detected cdndania.com URL - using Optimized CdndaniaDownloader (curl_cffi + {download_threads} threads)")
download_method = "cdndania"
logger.info(f"Download method: {download_method}")
@@ -298,12 +304,13 @@ class FfmpegQueue(object):
output_path=output_file_ref,
referer_url="https://ani.ohli24.com/",
callback=progress_callback,
proxy=_proxy
proxy=_proxy,
threads=download_threads
)
elif method == "ytdlp":
# yt-dlp 사용
elif method == "ytdlp" or method == "aria2c":
# yt-dlp 사용 (aria2c 옵션 포함)
from .ytdlp_downloader import YtdlpDownloader
logger.info("Using yt-dlp downloader...")
logger.info(f"Using yt-dlp downloader (method={method})...")
# 엔티티에서 쿠키 파일 가져오기 (있는 경우)
_cookies_file = getattr(entity_ref, 'cookies_file', None)
downloader = YtdlpDownloader(
@@ -312,7 +319,9 @@ class FfmpegQueue(object):
headers=headers_ref,
callback=progress_callback,
proxy=_proxy,
cookies_file=_cookies_file
cookies_file=_cookies_file,
use_aria2c=(method == "aria2c"),
threads=download_threads
)
else:

View File

@@ -17,13 +17,15 @@ logger = logging.getLogger(__name__)
class YtdlpDownloader:
"""yt-dlp 기반 다운로더"""
def __init__(self, url, output_path, headers=None, callback=None, proxy=None, cookies_file=None):
def __init__(self, url, output_path, headers=None, callback=None, proxy=None, cookies_file=None, use_aria2c=False, threads=16):
self.url = url
self.output_path = output_path
self.headers = headers or {}
self.callback = callback # 진행 상황 콜백
self.proxy = proxy
self.cookies_file = cookies_file # CDN 세션 쿠키 파일 경로
self.use_aria2c = use_aria2c # Aria2c 사용 여부
self.threads = threads # 병렬 다운로드 스레드 수
self.cancelled = False
self.process = None
self.error_output = [] # 에러 메시지 저장
@@ -134,8 +136,9 @@ class YtdlpDownloader:
'--no-part',
]
if use_native_hls:
if use_native_hls or self.use_aria2c:
# hlz CDN: native HLS 다운로더 사용 (ffmpeg의 확장자 제한 우회)
# Aria2c 사용 시: Native HLS를 써야 프래그먼트 병렬 다운로드가 가능함 (ffmpeg 모드는 순차적)
cmd += ['--hls-prefer-native']
else:
# 기타 CDN: ffmpeg 사용 (더 안정적)
@@ -148,6 +151,26 @@ class YtdlpDownloader:
'--extractor-args', 'generic:force_hls', # HLS 강제 추출
'-o', self.output_path,
]
# 1.3 Aria2c 설정 (병렬 다운로드)
# 1.3 Aria2c / 고속 모드 설정
if self.use_aria2c:
# [최적화] HLS(m3u8)의 경우, 작은 파일 수백 개를 받는데 aria2c 프로세스를 매번 띄우는 것보다
# yt-dlp 내장 멀티스레드(-N)를 사용하는 것이 훨씬 빠르고 가볍습니다.
# 따라서 사용자가 'aria2c'를 선택했더라도 HLS 스트림에 대해서는 'Native Concurrent' 모드로 작동시켜 속도를 극대화합니다.
# 병렬 프래그먼트 다운로드 개수 (기본 1 -> 16 or 설정값)
cmd += ['--concurrent-fragments', str(self.threads)]
# 버퍼 크기 조절 (속도 향상 도움)
cmd += ['--buffer-size', '16M']
# DNS 캐싱 등 네트워크 타임아웃 완화
cmd += ['--socket-timeout', '30']
logger.info(f"High Speed Mode Active: Using Native Downloader with {self.threads} concurrent threads (Optimized for HLS)")
# 주의: --external-downloader aria2c는 HLS 프래그먼트에서 오버헤드가 크므로 제거함
# 1.5 환경별 브라우저 위장 설정 (Impersonate)
# macOS에서는 고급 위장 기능을 사용하되, 종속성 문제가 잦은 Linux/Docker에서는 UA 수동 지정
@@ -204,7 +227,10 @@ class YtdlpDownloader:
cmd.append(current_url)
logger.info(f"Executing refined browser-impersonated yt-dlp CLI (v16): {' '.join(cmd)}")
logger.info(f"Executing refined browser-impersonated yt-dlp CLI (v17): {' '.join(cmd)}")
if self.use_aria2c:
logger.info("ARIA2C ACTIVE: Forcing native HLS downloader for concurrency.")
# 4. subprocess 실행 및 파싱
self.process = subprocess.Popen(
@@ -316,6 +342,10 @@ class YtdlpDownloader:
if 'error' in line.lower() or 'security' in line.lower() or 'unable' in line.lower():
logger.warning(f"yt-dlp output notice: {line}")
self.error_output.append(line)
# Aria2c / 병렬 다운로드 로그 로깅
if 'aria2c' in line.lower() or 'fragment' in line.lower():
logger.info(f"yt-dlp: {line}")
self.process.wait()