diff --git a/README.md b/README.md
index 7206175..0150508 100644
--- a/README.md
+++ b/README.md
@@ -81,6 +81,16 @@
## π λ³κ²½ μ΄λ ₯ (Changelog)
+### v0.7.0 (2026-01-11)
+- **GDM(Gommi Downloader Manager) 통합 고도화**:
+ - **통합 큐 페이지**: 링크애니, 애니라이프, 오클리24의 큐 페이지에서 GDM 작업을 실시간으로 확인 및 중지/삭제 가능하도록 통합.
+ - **상태 자동 동기화**: GDM 다운로드 완료 시 콜백을 통해 로컬 DB 상태를 자동으로 '컴플리트'로 업데이트하여 목록 페이지(`list`)에 즉시 반영.
+ - **GDM 작업 매핑**: GDM의 다양한 상태 코드 및 진행률을 각 플러그인 UI 형식에 맞게 변환 처리.
+- **안정성 강화**:
+ - **백그라운드 DB 안전화**: 스케줄러 및 비동기 작업 중 데이터베이스 접근 시 `app_context` 오류 방지를 위해 정상적인 컨텍스트 래핑 적용.
+ - **자동 다운로드 로직 개선**: 링크애니 '전체(all)' 모드 모니터링 및 자동 에피소드 등록 로직 보강.
+- **알림 시스템**: 링크애니 새 에피소드 감지 시 Discord/Telegram 알림 기능 및 설정 UI 추가.
+
### v0.6.25 (2026-01-09)
- **μκ° μ
λ°μ΄νΈ κΈ°λ₯ μΆκ°**: λͺ¨λ μ€μ νμ΄μ§ (Ohli24, Anilife, Linkkf)μμ "μ
λ°μ΄νΈ" λ²νΌ ν΄λ¦μΌλ‘ Git Pull λ° νλ¬κ·ΈμΈ ν« λ¦¬λ‘λ μ§μ
- **λ²μ μ²΄ν¬ API**: GitHubμμ μ΅μ λ²μ μ 보λ₯Ό κ°μ Έμ μ
λ°μ΄νΈ μλ¦Ό νμ (1μκ° μΊμ±)
diff --git a/info.yaml b/info.yaml
index 20e6716..c7b9b3b 100644
--- a/info.yaml
+++ b/info.yaml
@@ -1,5 +1,5 @@
title: "애니 다운로드"
-version: 0.6.25
+version: 0.7.0
package_name: "anime_downloader"
developer: "projectdx"
description: "anime downloader"
diff --git a/mod_anilife.py b/mod_anilife.py
index 2b1755f..84d0f50 100644
--- a/mod_anilife.py
+++ b/mod_anilife.py
@@ -216,19 +216,58 @@ class LogicAniLife(AnimeModuleBase):
def process_command(self, command, arg1, arg2, arg3, req):
try:
if command == "list":
+ # 1. μ체 ν λͺ©λ‘ κ°μ Έμ€κΈ°
ret = self.queue.get_entity_list() if self.queue else []
+
+ # 2. GDM νμ€ν¬ κ°μ Έμ€κΈ° (μ€μΉλ κ²½μ°)
+ try:
+ from gommi_downloader_manager.mod_queue import ModuleQueue
+ if ModuleQueue:
+ gdm_tasks = ModuleQueue.get_all_downloads()
+ # μ΄ λͺ¨λ(anilife)μ΄ μΆκ°ν μμ
λ§ νν°λ§
+ anilife_tasks = [t for t in gdm_tasks if t.caller_plugin == f"{P.package_name}_{self.name}"]
+
+ for task in anilife_tasks:
+ # ν
νλ¦Ώ νΈν νμμΌλ‘ λ³ν
+ gdm_item = self._convert_gdm_task_to_queue_item(task)
+ ret.append(gdm_item)
+ except Exception as e:
+ logger.debug(f"GDM tasks fetch error: {e}")
+
return jsonify(ret)
- elif command == "stop":
- entity_id = int(arg1) if arg1 else -1
- result = self.queue.command("cancel", entity_id) if self.queue else {"ret": "error"}
- return jsonify(result)
- elif command == "remove":
- entity_id = int(arg1) if arg1 else -1
- result = self.queue.command("remove", entity_id) if self.queue else {"ret": "error"}
- return jsonify(result)
- elif command in ["reset", "delete_completed"]:
- result = self.queue.command(command, 0) if self.queue else {"ret": "error"}
+
+ elif command in ["stop", "remove", "cancel"]:
+ entity_id = arg1
+ if entity_id and str(entity_id).startswith("dl_"):
+ # GDM μμ
μ²λ¦¬
+ try:
+ from gommi_downloader_manager.mod_queue import ModuleQueue
+ if ModuleQueue:
+ if command == "stop" or command == "cancel":
+ task = ModuleQueue.get_download(entity_id)
+ if task:
+ task.cancel()
+ return jsonify({"ret": "success", "log": "GDM μμ
μ μ€μ§νμμ΅λλ€."})
+ elif command == "remove" or command == "delete":
+ # GDMμμ μμ μ²λ¦¬
+ class DummyReq:
+ def __init__(self, id):
+ self.form = {"id": id}
+ ModuleQueue.process_ajax("delete", DummyReq(entity_id))
+ return jsonify({"ret": "success", "log": "GDM μμ
μ μμ νμμ΅λλ€."})
+ except Exception as e:
+ logger.error(f"GDM command error: {e}")
+ return jsonify({"ret": "error", "log": f"GDM λͺ
λ Ή μ€ν¨: {e}"})
+
+ # μ체 ν μ²λ¦¬
+ entity_id = int(arg1) if arg1 and str(arg1).isdigit() else -1
+ command_to_call = "cancel" if command == "stop" else command
+ if self.queue:
+ result = self.queue.command(command_to_call, entity_id)
+ else:
+ result = {"ret": "error", "log": "Queue not initialized"}
return jsonify(result)
+
elif command == "merge_subtitle":
# AniUtil already imported at module level
db_id = int(arg1)
@@ -248,6 +287,73 @@ class LogicAniLife(AnimeModuleBase):
self.P.logger.error(traceback.format_exc())
return jsonify({'ret': 'fail', 'log': str(e)})
+ def _convert_gdm_task_to_queue_item(self, task):
+ """GDM DownloadTask κ°μ²΄λ₯Ό FfmpegQueueEntity.as_dict() νΈν νμμΌλ‘ λ³ν"""
+ status_kor_map = {
+ "pending": "λκΈ°μ€",
+ "extracting": "λΆμμ€",
+ "downloading": "λ€μ΄λ‘λμ€",
+ "paused": "μΌμμ μ§",
+ "completed": "μλ£",
+ "error": "μ€ν¨",
+ "cancelled": "μ·¨μλ¨"
+ }
+
+ status_str_map = {
+ "pending": "WAITING",
+ "extracting": "ANALYZING",
+ "downloading": "DOWNLOADING",
+ "paused": "PAUSED",
+ "completed": "COMPLETED",
+ "error": "FAILED",
+ "cancelled": "FAILED"
+ }
+
+ t_dict = task.as_dict()
+
+ return {
+ "entity_id": t_dict["id"],
+ "url": t_dict["url"],
+ "filename": t_dict["filename"] or t_dict["title"],
+ "ffmpeg_status_kor": status_kor_map.get(t_dict["status"], "μμμμ"),
+ "ffmpeg_percent": t_dict["progress"],
+ "created_time": t_dict["created_time"],
+ "current_speed": t_dict["speed"],
+ "download_time": t_dict["eta"],
+ "status_str": status_str_map.get(t_dict["status"], "WAITING"),
+ "idx": t_dict["id"],
+ "callback_id": "anilife",
+ "start_time": t_dict["start_time"] or t_dict["created_time"],
+ "percent": t_dict["progress"],
+ "save_fullpath": t_dict["filepath"],
+ "is_gdm": True
+ }
+
+ def plugin_callback(self, data):
+ """GDM λͺ¨λλ‘λΆν° λ€μ΄λ‘λ μν μ
λ°μ΄νΈ μμ """
+ try:
+ callback_id = data.get('callback_id')
+ status = data.get('status')
+
+ logger.info(f"[AniLife] Received GDM callback: id={callback_id}, status={status}")
+
+ if callback_id:
+ from framework import F
+ with F.app.app_context():
+ db_item = ModelAniLifeItem.get_by_anilife_id(callback_id)
+ if db_item:
+ if status == "completed":
+ db_item.status = "completed"
+ db_item.completed_time = datetime.now()
+ db_item.filepath = data.get('filepath')
+ db_item.save()
+ logger.info(f"[AniLife] Updated DB item {db_item.id} to COMPLETED via GDM callback")
+ elif status == "error":
+ pass
+ except Exception as e:
+ logger.error(f"[AniLife] Callback processing error: {e}")
+ logger.error(traceback.format_exc())
+
# @staticmethod
def get_html(
self,
@@ -971,6 +1077,35 @@ class LogicAniLife(AnimeModuleBase):
logger.error(f"reset_db error: {e}")
return jsonify({"ret": "error", "msg": str(e)})
+ elif sub == "add_schedule":
+ # μ€μΌμ₯΄ λ±λ‘ (μλ λ€μ΄λ‘λ λͺ©λ‘μ μ½λ μΆκ°)
+ try:
+ code = request.form.get("code", "")
+ title = request.form.get("title", "")
+ logger.debug(f"add_schedule: code={code}, title={title}")
+
+ if not code:
+ return jsonify({"ret": "error", "msg": "μ½λκ° μμ΅λλ€."})
+
+ # κΈ°μ‘΄ whitelist κ°μ Έμ€κΈ°
+ whitelist = P.ModelSetting.get("anilife_auto_code_list") or ""
+ code_list = [c.strip() for c in whitelist.replace("\n", "|").split("|") if c.strip()]
+
+ if code in code_list:
+ return jsonify({"ret": "exist", "msg": "μ΄λ―Έ λ±λ‘λμ΄ μμ΅λλ€."})
+
+ # μ½λ μΆκ°
+ code_list.append(code)
+ new_whitelist = "|".join(code_list)
+ P.ModelSetting.set("anilife_auto_code_list", new_whitelist)
+
+ logger.info(f"[Anilife] Schedule added: {code} ({title})")
+ return jsonify({"ret": "success", "msg": f"μ€μΌμ₯΄ λ±λ‘ μλ£: {title}"})
+ except Exception as e:
+ logger.error(f"add_schedule error: {e}")
+ logger.error(traceback.format_exc())
+ return jsonify({"ret": "error", "msg": str(e)})
+
# Fallback to base class for common subs (queue_command, entity_list, browse_dir, command, etc.)
return super().process_ajax(sub, req)
@@ -1087,12 +1222,73 @@ class LogicAniLife(AnimeModuleBase):
return False
def scheduler_function(self):
- logger.debug(f"ohli24 scheduler_function::=========================")
-
- content_code_list = P.ModelSetting.get_list("anilife_auto_code_list", "|")
- url = f'{P.ModelSetting.get("anilife_url")}/dailyani'
- if "all" in content_code_list:
- ret_data = LogicAniLife.get_auto_anime_info(self, url=url)
+ """μ€μΌμ€λ¬ ν¨μ - anilife μλ λ€μ΄λ‘λ μ²λ¦¬"""
+ logger.info("anilife scheduler_function::=========================")
+
+ try:
+ content_code_list = P.ModelSetting.get_list("anilife_auto_code_list", "|")
+ auto_mode_all = P.ModelSetting.get_bool("anilife_auto_mode_all")
+
+ logger.info(f"Auto-download codes: {content_code_list}")
+ logger.info(f"Auto mode all episodes: {auto_mode_all}")
+
+ if not content_code_list:
+ logger.info("[Scheduler] No auto-download codes configured")
+ return
+
+ # κ° μν μ½λλ³ μ²λ¦¬
+ for code in content_code_list:
+ code = code.strip()
+ if not code:
+ continue
+
+ if code.lower() == "all":
+ # TODO: μ 체 μ΅μ μνΌμλ μ€μΊ λ‘μ§ (μΆν ꡬν)
+ logger.info("[Scheduler] 'all' mode - skipping for now")
+ continue
+
+ logger.info(f"[Scheduler] Processing code: {code}")
+
+ try:
+ # μν μ 보 μ‘°ν
+ series_info = self.get_series_info(code)
+
+ if not series_info or "episode" not in series_info:
+ logger.warning(f"[Scheduler] No episode info for: {code}")
+ continue
+
+ episodes = series_info.get("episode", [])
+ logger.info(f"[Scheduler] Found {len(episodes)} episodes for: {series_info.get('title', code)}")
+
+ # μνΌμλ μν λ° μλ λ±λ‘
+ added_count = 0
+ for episode_info in episodes:
+ try:
+ result = self.add(episode_info)
+ if result and result.startswith("enqueue"):
+ added_count += 1
+ logger.info(f"[Scheduler] Auto-enqueued: {episode_info.get('title', 'Unknown')}")
+ self.socketio_callback("list_refresh", "")
+
+ # auto_mode_allμ΄ Falseλ©΄ μ΅μ 1κ°λ§ (리μ€νΈκ° μ΅μ μμ΄λΌκ³ κ°μ )
+ if not auto_mode_all and added_count > 0:
+ logger.info(f"[Scheduler] Auto mode: latest only - stopping after 1 episode")
+ break
+
+ except Exception as ep_err:
+ logger.error(f"[Scheduler] Episode add error: {ep_err}")
+ continue
+
+ logger.info(f"[Scheduler] Completed {code}: added {added_count} episodes")
+
+ except Exception as code_err:
+ logger.error(f"[Scheduler] Error processing {code}: {code_err}")
+ logger.error(traceback.format_exc())
+ continue
+
+ except Exception as e:
+ logger.error(f"[Scheduler] Fatal error: {e}")
+ logger.error(traceback.format_exc())
def reset_db(self):
db.session.query(ModelAniLifeItem).delete()
@@ -2073,118 +2269,137 @@ class ModelAniLifeItem(db.Model):
return ret
def save(self):
- db.session.add(self)
- db.session.commit()
+ from framework import F
+ with F.app.app_context():
+ db.session.add(self)
+ db.session.commit()
@classmethod
def get_by_id(cls, idx):
- return db.session.query(cls).filter_by(id=idx).first()
+ from framework import F
+ with F.app.app_context():
+ return db.session.query(cls).filter_by(id=idx).first()
@classmethod
def get_by_anilife_id(cls, anilife_id):
- return db.session.query(cls).filter_by(anilife_id=anilife_id).first()
+ from framework import F
+ with F.app.app_context():
+ return db.session.query(cls).filter_by(anilife_id=anilife_id).first()
@classmethod
def delete_by_id(cls, idx):
- try:
- logger.debug(f"delete_by_id: {idx} (type: {type(idx)})")
- if isinstance(idx, str) and ',' in idx:
- id_list = [int(x.strip()) for x in idx.split(',') if x.strip()]
- logger.debug(f"Batch delete: {id_list}")
- count = db.session.query(cls).filter(cls.id.in_(id_list)).delete(synchronize_session='fetch')
- logger.debug(f"Deleted count: {count}")
- else:
- db.session.query(cls).filter_by(id=int(idx)).delete()
- logger.debug(f"Single delete: {idx}")
- db.session.commit()
- return True
- except Exception as e:
- logger.error(f"Exception: {str(e)}")
- logger.error(traceback.format_exc())
- return False
+ from framework import F
+ with F.app.app_context():
+ try:
+ logger.debug(f"delete_by_id: {idx} (type: {type(idx)})")
+ if isinstance(idx, str) and ',' in idx:
+ id_list = [int(x.strip()) for x in idx.split(',') if x.strip()]
+ logger.debug(f"Batch delete: {id_list}")
+ count = db.session.query(cls).filter(cls.id.in_(id_list)).delete(synchronize_session='fetch')
+ logger.debug(f"Deleted count: {count}")
+ else:
+ db.session.query(cls).filter_by(id=int(idx)).delete()
+ logger.debug(f"Single delete: {idx}")
+ db.session.commit()
+ return True
+ except Exception as e:
+ logger.error(f"Exception: {str(e)}")
+ # logger.error(traceback.format_exc())
+ return False
@classmethod
def delete_all(cls):
- try:
- db.session.query(cls).delete()
- db.session.commit()
- return True
- except Exception as e:
- logger.error(f"Exception: {str(e)}")
- logger.error(traceback.format_exc())
- return False
+ from framework import F
+ with F.app.app_context():
+ try:
+ db.session.query(cls).delete()
+ db.session.commit()
+ return True
+ except Exception as e:
+ logger.error(f"Exception: {str(e)}")
+ # logger.error(traceback.format_exc())
+ return False
@classmethod
def web_list(cls, req):
- ret = {}
- page = int(req.form["page"]) if "page" in req.form else 1
- page_size = 30
- job_id = ""
- search = req.form["search_word"] if "search_word" in req.form else ""
- option = req.form["option"] if "option" in req.form else "all"
- order = req.form["order"] if "order" in req.form else "desc"
- query = cls.make_query(search=search, order=order, option=option)
- count = query.count()
- query = query.limit(page_size).offset((page - 1) * page_size)
- lists = query.all()
- ret["list"] = [item.as_dict() for item in lists]
- ret["paging"] = Util.get_paging_info(count, page, page_size)
- return ret
+ from framework import F
+ with F.app.app_context():
+ ret = {}
+ page = int(req.form["page"]) if "page" in req.form else 1
+ page_size = 30
+ job_id = ""
+ search = req.form["search_word"] if "search_word" in req.form else ""
+ option = req.form["option"] if "option" in req.form else "all"
+ order = req.form["order"] if "order" in req.form else "desc"
+ query = cls.make_query(search=search, order=order, option=option)
+ count = query.count()
+ query = query.limit(page_size).offset((page - 1) * page_size)
+ lists = query.all()
+ ret["list"] = [item.as_dict() for item in lists]
+ ret["paging"] = Util.get_paging_info(count, page, page_size)
+ return ret
@classmethod
def make_query(cls, search="", order="desc", option="all"):
- query = db.session.query(cls)
- if search is not None and search != "":
- if search.find("|") != -1:
- tmp = search.split("|")
- conditions = []
- for tt in tmp:
- if tt != "":
- conditions.append(cls.filename.like("%" + tt.strip() + "%"))
- query = query.filter(or_(*conditions))
- elif search.find(",") != -1:
- tmp = search.split(",")
- for tt in tmp:
- if tt != "":
- query = query.filter(cls.filename.like("%" + tt.strip() + "%"))
- else:
- query = query.filter(cls.filename.like("%" + search + "%"))
- if option == "completed":
- query = query.filter(cls.status == "completed")
+ from framework import F
+ with F.app.app_context():
+ query = db.session.query(cls)
+ if search is not None and search != "":
+ if search.find("|") != -1:
+ tmp = search.split("|")
+ conditions = []
+ for tt in tmp:
+ if tt != "":
+ conditions.append(cls.filename.like("%" + tt.strip() + "%"))
+ query = query.filter(or_(*conditions))
+ elif search.find(",") != -1:
+ tmp = search.split(",")
+ for tt in tmp:
+ if tt != "":
+ query = query.filter(cls.filename.like("%" + tt.strip() + "%"))
+ else:
+ query = query.filter(cls.filename.like("%" + search + "%"))
+ if option == "completed":
+ query = query.filter(cls.status == "completed")
- query = (
- query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id)
- )
- return query
+ query = (
+ query.order_by(desc(cls.id)) if order == "desc" else query.order_by(cls.id)
+ )
+ return query
@classmethod
def get_list_uncompleted(cls):
- return db.session.query(cls).filter(cls.status != "completed").all()
+ from framework import F
+ with F.app.app_context():
+ return db.session.query(cls).filter(cls.status != "completed").all()
@classmethod
def append(cls, q):
- # μ€λ³΅ 체ν¬
- existing = cls.get_by_anilife_id(q["_id"])
- if existing:
- logger.debug(f"Item already exists in DB: {q['_id']}")
- return existing
-
- item = ModelAniLifeItem()
- item.content_code = q["content_code"]
- item.season = q["season"]
- item.episode_no = q.get("epi_queue")
- item.title = q["content_title"]
- item.episode_title = q["title"]
- item.anilife_va = q.get("va")
- item.anilife_vi = q.get("_vi")
- item.anilife_id = q["_id"]
- item.quality = q["quality"]
- item.filepath = q.get("filepath")
- item.filename = q.get("filename")
- item.savepath = q.get("savepath")
- item.video_url = q.get("url")
- item.vtt_url = q.get("vtt")
- item.thumbnail = q.get("thumbnail")
- item.status = "wait"
- item.anilife_info = q.get("anilife_info")
- item.save()
+ from framework import F
+ with F.app.app_context():
+ # μ€λ³΅ 체ν¬
+ existing = cls.get_by_anilife_id(q["_id"])
+ if existing:
+ logger.debug(f"Item already exists in DB: {q['_id']}")
+ return existing
+
+ item = ModelAniLifeItem()
+ item.content_code = q["content_code"]
+ item.season = q["season"]
+ item.episode_no = q.get("epi_queue")
+ item.title = q["content_title"]
+ item.episode_title = q["title"]
+ item.anilife_va = q.get("va")
+ item.anilife_vi = q.get("_vi")
+ item.anilife_id = q["_id"]
+ item.quality = q["quality"]
+ item.filepath = q.get("filepath")
+ item.filename = q.get("filename")
+ item.savepath = q.get("savepath")
+ item.video_url = q.get("url")
+ item.vtt_url = q.get("vtt")
+ item.thumbnail = q.get("image", "")
+ item.status = "wait"
+ item.anilife_info = q["anilife_info"]
+ item.save()
+ return item
diff --git a/mod_base.py b/mod_base.py
index c03bcc2..9f6aa63 100644
--- a/mod_base.py
+++ b/mod_base.py
@@ -139,11 +139,23 @@ class AnimeModuleBase(PluginModuleBase):
# μκ° μ
λ°μ΄νΈ (Git Pull) λ° λͺ¨λ 리λ‘λ
try:
import subprocess
- plugin_path = os.path.dirname(os.path.dirname(__file__)) if '__file__' in dir() else os.path.dirname(__file__)
- # μ€μ νλ¬κ·ΈμΈ λ£¨νΈ λλ ν 리
plugin_path = os.path.dirname(__file__)
self.P.logger.info(f"μ λ λ€μ΄λ‘λ μκ° μ
λ°μ΄νΈ μμ: {plugin_path}")
+ # λ¨Όμ λ³κ²½λ νμΌ λͺ©λ‘ νμΈ (model νμΌ λ³κ²½ κ°μ§)
+ diff_cmd = ['git', '-C', plugin_path, 'diff', '--name-only', 'HEAD', 'origin/main']
+ subprocess.run(['git', '-C', plugin_path, 'fetch'], capture_output=True) # fetch first
+ diff_result = subprocess.run(diff_cmd, capture_output=True, text=True)
+ changed_files = diff_result.stdout.strip().split('\n') if diff_result.stdout.strip() else []
+
+ # λͺ¨λΈ νμΌ λ³κ²½ μ¬λΆ νμΈ
+ model_patterns = ['model', 'db', 'migration']
+ needs_restart = any(
+ any(pattern in f.lower() for pattern in model_patterns)
+ for f in changed_files if f
+ )
+
+ # Git Pull μ€ν
cmd = ['git', '-C', plugin_path, 'pull']
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
stdout, stderr = process.communicate()
@@ -153,10 +165,20 @@ class AnimeModuleBase(PluginModuleBase):
self.P.logger.info(f"Git pull κ²°κ³Ό: {stdout}")
- # λͺ¨λ 리λ‘λ
- self.reload_plugin()
+ # λͺ¨λΈ λ³κ²½ μμΌλ©΄ 리λ‘λ μλ
+ if not needs_restart:
+ self.reload_plugin()
+ msg = f"μ
λ°μ΄νΈ μλ£! μλ‘κ³ μΉ¨νμΈμ.
{stdout}"
+ else:
+ self.P.logger.warning("λͺ¨λΈ νμΌ λ³κ²½ κ°μ§ - μλ² μ¬μμ νμ")
+ msg = f"λͺ¨λΈ λ³κ²½ κ°μ§! μλ² μ¬μμμ΄ νμν©λλ€.{stdout}"
- return jsonify({'ret': 'success', 'msg': f"μ
λ°μ΄νΈ λ° λ¦¬λ‘λ μλ£!{stdout}", 'data': stdout})
+ return jsonify({
+ 'ret': 'success',
+ 'msg': msg,
+ 'data': stdout,
+ 'needs_restart': needs_restart
+ })
except Exception as e:
self.P.logger.error(f"μκ° μ
λ°μ΄νΈ μ€ μ€λ₯: {str(e)}")
self.P.logger.error(traceback.format_exc())
@@ -299,24 +321,33 @@ class AnimeModuleBase(PluginModuleBase):
package_name = self.P.package_name
self.P.logger.info(f"νλ¬κ·ΈμΈ 리λ‘λ μμ: {package_name}")
+ # 리λ‘λμμ μ μΈν ν¨ν΄ (λͺ¨λΈ/DB κ΄λ ¨ - SQLAlchemy μΆ©λ λ°©μ§)
+ skip_patterns = ['model', 'db', 'migration', 'setup', 'create_plugin']
+
# κ΄λ ¨ λͺ¨λ μ°ΎκΈ° λ° λ¦¬λ‘λ
modules_to_reload = []
for module_name in list(sys.modules.keys()):
if module_name.startswith(package_name):
- modules_to_reload.append(module_name)
+ # λͺ¨λΈ κ΄λ ¨ λͺ¨λμ 건λλ°κΈ°
+ should_skip = any(pattern in module_name.lower() for pattern in skip_patterns)
+ if not should_skip:
+ modules_to_reload.append(module_name)
# μμ‘΄μ± μμμΌλ‘ μ λ ¬ (κΉμ λͺ¨λ λ¨Όμ )
modules_to_reload.sort(key=lambda x: x.count('.'), reverse=True)
+ reloaded_count = 0
for module_name in modules_to_reload:
try:
module = sys.modules[module_name]
importlib.reload(module)
self.P.logger.debug(f"Reloaded: {module_name}")
+ reloaded_count += 1
except Exception as e:
- self.P.logger.warning(f"Failed to reload {module_name}: {e}")
+ self.P.logger.warning(f"Skip reload {module_name}: {e}")
- self.P.logger.info(f"νλ¬κ·ΈμΈ λͺ¨λ [{package_name}] 리λ‘λ μλ£")
+ self.P.logger.info(f"νλ¬κ·ΈμΈ [{package_name}] 리λ‘λ μλ£: {reloaded_count}κ° λͺ¨λ")
+ self.P.logger.info("ν
νλ¦Ώ/μ μ νμΌμ μλ‘κ³ μΉ¨ μ μλ μ μ©λ©λλ€.")
return True
except Exception as e:
self.P.logger.error(f"λͺ¨λ 리λ‘λ μ€ μ€ν¨: {str(e)}")
diff --git a/mod_linkkf.py b/mod_linkkf.py
index 990b3c2..63483fe 100644
--- a/mod_linkkf.py
+++ b/mod_linkkf.py
@@ -107,9 +107,13 @@ class LogicLinkkf(AnimeModuleBase):
"linkkf_uncompleted_auto_enqueue": "False",
"linkkf_image_url_prefix_series": "",
"linkkf_image_url_prefix_episode": "",
- "linkkf_discord_notify": "True",
"linkkf_download_method": "ffmpeg", # ffmpeg, ytdlp, aria2c
"linkkf_download_threads": "16", # yt-dlp/aria2c λ³λ ¬ μ°λ λ μ
+ # μλ¦Ό μ€μ
+ "linkkf_notify_enabled": "False",
+ "linkkf_discord_webhook_url": "",
+ "linkkf_telegram_bot_token": "",
+ "linkkf_telegram_chat_id": "",
}
# default_route_socketio(P, self)
self.web_list_model = ModelLinkkfItem
@@ -470,6 +474,32 @@ class LogicLinkkf(AnimeModuleBase):
logger.error(f"browse_dir error: {e}")
return jsonify({"ret": "error", "error": str(e)}), 500
+ elif sub == "test_notification":
+ # ν
μ€νΈ μλ¦Ό μ μ‘
+ try:
+ discord_url = P.ModelSetting.get("linkkf_discord_webhook_url")
+ telegram_token = P.ModelSetting.get("linkkf_telegram_bot_token")
+ telegram_chat_id = P.ModelSetting.get("linkkf_telegram_chat_id")
+
+ if not discord_url and not (telegram_token and telegram_chat_id):
+ return jsonify({"ret": "error", "msg": "Discord Webhook URL λλ Telegram μ€μ μ μ
λ ₯νμΈμ."})
+
+ test_message = "π **ν
μ€νΈ μλ¦Ό**\nLinkkf μλ¦Ό μ€μ μ΄ μλ£λμμ΅λλ€!\n\nμλ¦Όμ΄ μ μμ μΌλ‘ μμ λκ³ μμ΅λλ€."
+ sent_to = []
+
+ if discord_url:
+ self.send_discord_notification(discord_url, "ν
μ€νΈ", test_message)
+ sent_to.append("Discord")
+
+ if telegram_token and telegram_chat_id:
+ self.send_telegram_notification(telegram_token, telegram_chat_id, test_message)
+ sent_to.append("Telegram")
+
+ return jsonify({"ret": "success", "msg": f"{', '.join(sent_to)}μΌλ‘ μλ¦Ό μ μ‘ μλ£!"})
+ except Exception as e:
+ logger.error(f"test_notification error: {e}")
+ return jsonify({"ret": "error", "msg": str(e)})
+
return super().process_ajax(sub, req)
except Exception as e:
@@ -477,6 +507,144 @@ class LogicLinkkf(AnimeModuleBase):
P.logger.error(traceback.format_exc())
return jsonify({"ret": "error", "log": str(e)})
+ def process_command(self, command, arg1, arg2, arg3, req):
+ try:
+ if command == "list":
+ # 1. μ체 ν λͺ©λ‘ κ°μ Έμ€κΈ°
+ ret = self.queue.get_entity_list() if self.queue else []
+
+ # 2. GDM νμ€ν¬ κ°μ Έμ€κΈ° (μ€μΉλ κ²½μ°)
+ try:
+ from gommi_downloader_manager.mod_queue import ModuleQueue
+ if ModuleQueue:
+ gdm_tasks = ModuleQueue.get_all_downloads()
+ # μ΄ λͺ¨λ(linkkf)μ΄ μΆκ°ν μμ
λ§ νν°λ§
+ linkkf_tasks = [t for t in gdm_tasks if t.caller_plugin == f"{P.package_name}_{self.name}"]
+
+ for task in linkkf_tasks:
+ # ν
νλ¦Ώ νΈν νμμΌλ‘ λ³ν
+ gdm_item = self._convert_gdm_task_to_queue_item(task)
+ ret.append(gdm_item)
+ except Exception as e:
+ logger.debug(f"GDM tasks fetch error: {e}")
+
+ return jsonify(ret)
+
+ elif command in ["stop", "remove", "cancel"]:
+ entity_id = arg1
+ if entity_id and str(entity_id).startswith("dl_"):
+ # GDM μμ
μ²λ¦¬
+ try:
+ from gommi_downloader_manager.mod_queue import ModuleQueue
+ if ModuleQueue:
+ if command == "stop" or command == "cancel":
+ task = ModuleQueue.get_download(entity_id)
+ if task:
+ task.cancel()
+ return jsonify({"ret": "success", "log": "GDM μμ
μ μ€μ§νμμ΅λλ€."})
+ elif command == "remove":
+ # GDMμμ μμ μ²λ¦¬ (λͺ
λ Ήμ΄ 'delete' μ¬μ©)
+ # process_ajaxμ delete λ‘μ§ μ°Έκ³
+ class DummyReq:
+ def __init__(self, id):
+ self.form = {"id": id}
+ ModuleQueue.process_ajax("delete", DummyReq(entity_id))
+ return jsonify({"ret": "success", "log": "GDM μμ
μ μμ νμμ΅λλ€."})
+ except Exception as e:
+ logger.error(f"GDM command error: {e}")
+ return jsonify({"ret": "error", "log": f"GDM λͺ
λ Ή μ€ν¨: {e}"})
+
+ # μ체 ν μ²λ¦¬
+ return super().process_command(command, arg1, arg2, arg3, req)
+
+ return super().process_command(command, arg1, arg2, arg3, req)
+ except Exception as e:
+ logger.error(f"process_command Error: {e}")
+ logger.error(traceback.format_exc())
+ return jsonify({'ret': 'fail', 'log': str(e)})
+
+ def _convert_gdm_task_to_queue_item(self, task):
+ """GDM DownloadTask κ°μ²΄λ₯Ό FfmpegQueueEntity.as_dict() νΈν νμμΌλ‘ λ³ν"""
+ # μν λ§΅ν
+ status_kor_map = {
+ "pending": "λκΈ°μ€",
+ "extracting": "λΆμμ€",
+ "downloading": "λ€μ΄λ‘λμ€",
+ "paused": "μΌμμ μ§",
+ "completed": "μλ£",
+ "error": "μ€ν¨",
+ "cancelled": "μ·¨μλ¨"
+ }
+
+ status_str_map = {
+ "pending": "WAITING",
+ "extracting": "ANALYZING",
+ "downloading": "DOWNLOADING",
+ "paused": "PAUSED",
+ "completed": "COMPLETED",
+ "error": "FAILED",
+ "cancelled": "FAILED"
+ }
+
+ # GDM taskλ as_dict()λ₯Ό μ 곡ν¨
+ t_dict = task.as_dict()
+
+ return {
+ "entity_id": t_dict["id"],
+ "url": t_dict["url"],
+ "filename": t_dict["filename"] or t_dict["title"],
+ "ffmpeg_status_kor": status_kor_map.get(t_dict["status"], "μμμμ"),
+ "ffmpeg_percent": t_dict["progress"],
+ "created_time": t_dict["created_time"],
+ "current_speed": t_dict["speed"],
+ "download_time": t_dict["eta"],
+ "status_str": status_str_map.get(t_dict["status"], "WAITING"),
+ "idx": t_dict["id"],
+ "callback_id": "linkkf",
+ "start_time": t_dict["start_time"] or t_dict["created_time"],
+ "percent": t_dict["progress"],
+ "save_fullpath": t_dict["filepath"],
+ "is_gdm": True # GDM μμ
μμ νμ (λλ²κΉ
μ©)
+ }
+
+ def plugin_callback(self, data):
+ """
+ GDM λͺ¨λλ‘λΆν° λ€μ΄λ‘λ μν μ
λ°μ΄νΈ μμ
+ data = {
+ 'callback_id': self.callback_id,
+ 'status': self.status,
+ 'filepath': self.filepath,
+ 'filename': os.path.basename(self.filepath) if self.filepath else '',
+ 'error': self.error_message
+ }
+ """
+ try:
+ callback_id = data.get('callback_id')
+ status = data.get('status')
+
+ logger.info(f"[Linkkf] Received GDM callback: id={callback_id}, status={status}")
+
+ # DB μν μ
λ°μ΄νΈ
+ if callback_id:
+ from framework import F
+ with F.app.app_context():
+ db_item = ModelLinkkfItem.get_by_linkkf_id(callback_id)
+ if db_item:
+ if status == "completed":
+ db_item.status = "completed"
+ db_item.completed_time = datetime.now()
+ db_item.filepath = data.get('filepath')
+ db_item.save()
+ logger.info(f"[Linkkf] Updated DB item {db_item.id} to COMPLETED via GDM callback")
+
+ # μλ¦Ό μ μ‘ (νμ μ)
+ # self.socketio_callback("list_refresh", "")
+ elif status == "error":
+ # νμ μ μλ¬ μ²λ¦¬
+ pass
+ except Exception as e:
+ logger.error(f"[Linkkf] Callback processing error: {e}")
+ logger.error(traceback.format_exc())
def socketio_callback(self, refresh_type, data):
"""
@@ -1780,6 +1948,29 @@ class LogicLinkkf(AnimeModuleBase):
def plugin_load(self):
try:
logger.debug("%s plugin_load", P.package_name)
+
+ # μ μ€μ μ΄κΈ°ν (κΈ°μ‘΄ μ€μΉμμ λλ½λ μ€μ μΆκ°)
+ new_settings = {
+ "linkkf_notify_enabled": "False",
+ "linkkf_discord_webhook_url": "",
+ "linkkf_telegram_bot_token": "",
+ "linkkf_telegram_chat_id": "",
+ }
+ for key, default_value in new_settings.items():
+ if P.ModelSetting.get(key) is None:
+ P.ModelSetting.set(key, default_value)
+ logger.info(f"[Linkkf] Initialized new setting: {key}")
+
+ # μΆκ° μ€μ : μλ λ€μ΄λ‘λ vs μλ¦Όλ§
+ if P.ModelSetting.get("linkkf_auto_download_new") is None:
+ P.ModelSetting.set("linkkf_auto_download_new", "True")
+ logger.info("[Linkkf] Initialized setting: linkkf_auto_download_new")
+
+ # λͺ¨λν°λ§ μ£ΌκΈ° μ€μ (κΈ°λ³Έ 10λΆ)
+ if P.ModelSetting.get("linkkf_monitor_interval") is None:
+ P.ModelSetting.set("linkkf_monitor_interval", "10")
+ logger.info("[Linkkf] Initialized setting: linkkf_monitor_interval")
+
# ν΄λμ€ λ 벨 ν μ΄κΈ°ν
if LogicLinkkf.queue is None:
LogicLinkkf.queue = FfmpegQueue(
@@ -1806,6 +1997,229 @@ class LogicLinkkf(AnimeModuleBase):
def plugin_unload(self):
pass
+ def scheduler_function(self):
+ """μ€μΌμ€λ¬ ν¨μ - linkkf μλ λ€μ΄λ‘λ μ²λ¦¬"""
+ from framework import F
+ logger.info("linkkf scheduler_function::=========================")
+
+ # Flask μ± μ»¨ν
μ€νΈ λ΄μμ μ€ν (μ€μΌμ€λ¬λ λ³λ μ€λ λ)
+ with F.app.app_context():
+ try:
+ content_code_list = P.ModelSetting.get_list("linkkf_auto_code_list", "|")
+ auto_mode_all = P.ModelSetting.get_bool("linkkf_auto_mode_all")
+
+ logger.info(f"Auto-download codes: {content_code_list}")
+ logger.info(f"Auto mode all episodes: {auto_mode_all}")
+
+ if not content_code_list:
+ logger.info("[Scheduler] No auto-download codes configured")
+ return
+
+ # κ° μν μ½λλ³ μ²λ¦¬
+ for code in content_code_list:
+ code = code.strip()
+ if not code:
+ continue
+
+ if code.lower() == "all":
+ # μ¬μ΄νΈ μ 체 μ΅μ μνΌμλ μ€μΊ
+ logger.info("[Scheduler] 'all' mode - scanning latest episodes from site")
+ self.scan_latest_episodes(auto_mode_all)
+ continue
+
+ logger.info(f"[Scheduler] Processing code: {code}")
+
+ try:
+ # μν μ 보 μ‘°ν
+ series_info = self.get_series_info(code)
+
+ if not series_info or "episode" not in series_info:
+ logger.warning(f"[Scheduler] No episode info for: {code}")
+ continue
+
+ episodes = series_info.get("episode", [])
+ logger.info(f"[Scheduler] Found {len(episodes)} episodes for: {series_info.get('title', code)}")
+
+ # μνΌμλ μν λ° μλ λ±λ‘
+ added_count = 0
+ added_episodes = []
+ for episode_info in episodes:
+ try:
+ result = self.add(episode_info)
+ if result and result.startswith("enqueue"):
+ added_count += 1
+ added_episodes.append(episode_info.get('title', 'Unknown'))
+ logger.info(f"[Scheduler] Auto-enqueued: {episode_info.get('title', 'Unknown')}")
+ self.socketio_callback("list_refresh", "")
+
+ # auto_mode_allμ΄ Falseλ©΄ μ΅μ 1κ°λ§ (리μ€νΈκ° μ΅μ μμ΄λΌκ³ κ°μ )
+ if not auto_mode_all and added_count > 0:
+ logger.info(f"[Scheduler] Auto mode: latest only - stopping after 1 episode")
+ break
+
+ except Exception as ep_err:
+ logger.error(f"[Scheduler] Episode add error: {ep_err}")
+ continue
+
+ # μ μνΌμλ μΆκ°λ¨ β μλ¦Ό μ μ‘
+ if added_count > 0:
+ self.send_notification(
+ title=series_info.get('title', code),
+ episodes=added_episodes,
+ count=added_count
+ )
+
+ logger.info(f"[Scheduler] Completed {code}: added {added_count} episodes")
+
+ except Exception as code_err:
+ logger.error(f"[Scheduler] Error processing {code}: {code_err}")
+ logger.error(traceback.format_exc())
+ continue
+
+ except Exception as e:
+ logger.error(f"[Scheduler] Fatal error: {e}")
+ logger.error(traceback.format_exc())
+
+ def send_notification(self, title, episodes, count):
+ """Discord/Telegram μλ¦Ό μ μ‘"""
+ if not P.ModelSetting.get_bool("linkkf_notify_enabled"):
+ return
+
+ # λ©μμ§ μμ±
+ episode_list = "\n".join([f"β’ {ep}" for ep in episodes[:5]])
+ if count > 5:
+ episode_list += f"\n... μΈ {count - 5}κ°"
+
+ message = f"π¬ **{title}**\nμ μνΌμλ {count}κ°κ° λ€μ΄λ‘λ νμ μΆκ°λμμ΅λλ€!\n\n{episode_list}"
+
+ # Discord Webhook
+ discord_url = P.ModelSetting.get("linkkf_discord_webhook_url")
+ if discord_url:
+ self.send_discord_notification(discord_url, title, message)
+
+ # Telegram Bot
+ telegram_token = P.ModelSetting.get("linkkf_telegram_bot_token")
+ telegram_chat_id = P.ModelSetting.get("linkkf_telegram_chat_id")
+ if telegram_token and telegram_chat_id:
+ self.send_telegram_notification(telegram_token, telegram_chat_id, message)
+
+ def scan_latest_episodes(self, auto_mode_all):
+ """μ¬μ΄νΈμμ μ΅μ μνΌμλ λͺ©λ‘μ μ€μΊνκ³ μ μνΌμλ κ°μ§"""
+ try:
+ auto_download = P.ModelSetting.get_bool("linkkf_auto_download_new")
+
+ # μ΅μ λ°©μ λͺ©λ‘ κ°μ Έμ€κΈ° (1νμ΄μ§λ§ - κ°μ₯ μ΅μ )
+ latest_data = self.get_anime_info("ing", 1)
+
+ if not latest_data or "episode" not in latest_data:
+ logger.warning("[Scheduler] Failed to fetch latest anime list")
+ return
+
+ items = latest_data.get("episode", [])
+ logger.info(f"[Scheduler] Scanned {len(items)} items from 'ing' page")
+
+ total_added = 0
+ all_new_episodes = []
+
+ # κ° μνμ μ΅μ μνΌμλ νμΈ
+ for item in items[:20]: # μμ 20κ°λ§ μ²λ¦¬ (μ±λ₯ κ³ λ €)
+ try:
+ code = item.get("code")
+ if not code:
+ continue
+
+ # ν΄λΉ μνμ μνΌμλ λͺ©λ‘ μ‘°ν
+ series_info = self.get_series_info(code)
+ if not series_info or "episode" not in series_info:
+ continue
+
+ episodes = series_info.get("episode", [])
+ series_title = series_info.get("title", code)
+
+ # μ μνΌμλλ§ μΆκ° (add λ©μλκ° μ€λ³΅ 체ν¬ν¨)
+ for ep in episodes[:5]: # μ΅μ 5κ°λ§ νμΈ
+ try:
+ if auto_download:
+ result = self.add(ep)
+ if result and result.startswith("enqueue"):
+ total_added += 1
+ all_new_episodes.append(f"{series_title} - {ep.get('title', '')}")
+ self.socketio_callback("list_refresh", "")
+ else:
+ # μλ¦Όλ§ (λ€μ΄λ‘λ μν¨) - DB 체ν¬λ‘ μ μνΌμλμΈμ§ νμΈ
+ ep_code = ep.get("code", "")
+ existing = ModelLinkkfItem.get_by_code(ep_code) if ep_code else None
+ if not existing:
+ all_new_episodes.append(f"{series_title} - {ep.get('title', '')}")
+
+ if not auto_mode_all and total_added > 0:
+ break
+ except Exception:
+ continue
+
+ if not auto_mode_all and total_added > 0:
+ break
+
+ except Exception as e:
+ logger.debug(f"[Scheduler] Error scanning {item.get('code', 'unknown')}: {e}")
+ continue
+
+ # κ²°κ³Ό μλ¦Ό
+ if all_new_episodes:
+ mode_text = "μλ λ€μ΄λ‘λ" if auto_download else "μ μνΌμλ κ°μ§"
+ self.send_notification(
+ title=f"[{mode_text}] μ¬μ΄νΈ λͺ¨λν°λ§",
+ episodes=all_new_episodes,
+ count=len(all_new_episodes)
+ )
+ logger.info(f"[Scheduler] 'all' mode completed: {len(all_new_episodes)} new episodes found")
+ else:
+ logger.info("[Scheduler] 'all' mode: No new episodes found")
+
+ except Exception as e:
+ logger.error(f"[Scheduler] scan_latest_episodes error: {e}")
+ logger.error(traceback.format_exc())
+
+ def send_discord_notification(self, webhook_url, title, message):
+ """Discord WebhookμΌλ‘ μλ¦Ό μ μ‘"""
+ try:
+ payload = {
+ "embeds": [{
+ "title": f"πΊ Linkkf μλ λ€μ΄λ‘λ",
+ "description": message,
+ "color": 0x10B981, # μ΄λ‘μ
+ "footer": {"text": "FlaskFarm Anime Downloader"}
+ }]
+ }
+ response = requests.post(webhook_url, json=payload, timeout=10)
+ if response.status_code in [200, 204]:
+ logger.info(f"[Notify] Discord μλ¦Ό μ μ‘ μ±κ³΅: {title}")
+ else:
+ logger.warning(f"[Notify] Discord μλ¦Ό μ€ν¨: {response.status_code}")
+ except Exception as e:
+ logger.error(f"[Notify] Discord μλ¦Ό μ€λ₯: {e}")
+
+ def send_telegram_notification(self, bot_token, chat_id, message):
+ """Telegram Bot APIλ‘ μλ¦Ό μ μ‘"""
+ try:
+ # Markdown νμμΌλ‘ λ³ν (** -> *)
+ telegram_message = message.replace("**", "*")
+
+ url = f"https://api.telegram.org/bot{bot_token}/sendMessage"
+ payload = {
+ "chat_id": chat_id,
+ "text": telegram_message,
+ "parse_mode": "Markdown"
+ }
+ response = requests.post(url, json=payload, timeout=10)
+ result = response.json()
+ if result.get("ok"):
+ logger.info(f"[Notify] Telegram μλ¦Ό μ μ‘ μ±κ³΅")
+ else:
+ logger.warning(f"[Notify] Telegram μλ¦Ό μ€ν¨: {result.get('description', 'Unknown')}")
+ except Exception as e:
+ logger.error(f"[Notify] Telegram μλ¦Ό μ€λ₯: {e}")
+
def download_thread_function(self):
while True:
try:
@@ -2148,39 +2562,47 @@ class ModelLinkkfItem(db.Model):
return ret
def save(self):
- db.session.add(self)
- db.session.commit()
+ from framework import F
+ with F.app.app_context():
+ db.session.add(self)
+ db.session.commit()
@classmethod
def get_by_id(cls, idx):
- return db.session.query(cls).filter_by(id=idx).first()
+ from framework import F
+ with F.app.app_context():
+ return db.session.query(cls).filter_by(id=idx).first()
@classmethod
def get_by_linkkf_id(cls, linkkf_id):
- return db.session.query(cls).filter_by(linkkf_id=linkkf_id).first()
+ from framework import F
+ with F.app.app_context():
+ return db.session.query(cls).filter_by(linkkf_id=linkkf_id).first()
@classmethod
def append(cls, q):
- logger.debug(q)
- item = ModelLinkkfItem()
- item.content_code = q["program_code"]
- item.season = q["season"]
- item.episode_no = q["epi_queue"]
- item.title = q["content_title"]
- item.episode_title = q["title"]
- # item.linkkf_va = q["va"]
- item.linkkf_code = q["code"]
- item.linkkf_id = q["_id"]
- item.quality = q["quality"]
- item.filepath = q["filepath"]
- item.filename = q["filename"]
- item.savepath = q["savepath"]
- item.video_url = q["url"]
- item.vtt_url = q["vtt"]
- item.thumbnail = q.get("image", "")
- item.status = "wait"
- item.linkkf_info = q["linkkf_info"]
- item.save()
+ from framework import F
+ with F.app.app_context():
+ logger.debug(q)
+ item = ModelLinkkfItem()
+ item.content_code = q["program_code"]
+ item.season = q["season"]
+ item.episode_no = q["epi_queue"]
+ item.title = q["content_title"]
+ item.episode_title = q["title"]
+ # item.linkkf_va = q["va"]
+ item.linkkf_code = q["code"]
+ item.linkkf_id = q["_id"]
+ item.quality = q["quality"]
+ item.filepath = q["filepath"]
+ item.filename = q["filename"]
+ item.savepath = q["savepath"]
+ item.video_url = q["url"]
+ item.vtt_url = q["vtt"]
+ item.thumbnail = q.get("image", "")
+ item.status = "wait"
+ item.linkkf_info = q["linkkf_info"]
+ item.save()
@classmethod
def get_paging_info(cls, count, page, page_size):
@@ -2208,51 +2630,57 @@ class ModelLinkkfItem(db.Model):
@classmethod
def delete_by_id(cls, idx):
- db.session.query(cls).filter_by(id=idx).delete()
- db.session.commit()
+ from framework import F
+ with F.app.app_context():
+ db.session.query(cls).filter_by(id=idx).delete()
+ db.session.commit()
return True
@classmethod
def web_list(cls, req):
- ret = {}
- page = int(req.form["page"]) if "page" in req.form else 1
- page_size = 30
- job_id = ""
- search = req.form["search_word"] if "search_word" in req.form else ""
- option = req.form["option"] if "option" in req.form else "all"
- order = req.form["order"] if "order" in req.form else "desc"
- query = cls.make_query(search=search, order=order, option=option)
- count = query.count()
- query = query.limit(page_size).offset((page - 1) * page_size)
- lists = query.all()
- ret["list"] = [item.as_dict() for item in lists]
- ret["paging"] = cls.get_paging_info(count, page, page_size)
- return ret
+ from framework import F
+ with F.app.app_context():
+ ret = {}
+ page = int(req.form["page"]) if "page" in req.form else 1
+ page_size = 30
+ job_id = ""
+ search = req.form["search_word"] if "search_word" in req.form else ""
+ option = req.form["option"] if "option" in req.form else "all"
+ order = req.form["order"] if "order" in req.form else "desc"
+ query = cls.make_query(search=search, order=order, option=option)
+ count = query.count()
+ query = query.limit(page_size).offset((page - 1) * page_size)
+ lists = query.all()
+ ret["list"] = [item.as_dict() for item in lists]
+ ret["paging"] = cls.get_paging_info(count, page, page_size)
+ return ret
@classmethod
def make_query(cls, search="", order="desc", option="all"):
- query = db.session.query(cls)
- if search is not None and search != "":
- if search.find("|") != -1:
- tmp = search.split("|")
- conditions = []
- for tt in tmp:
- if tt != "":
- conditions.append(cls.filename.like("%" + tt.strip() + "%"))
- query = query.filter(or_(*conditions))
- elif search.find(",") != -1:
- tmp = search.split(",")
- for tt in tmp:
- if tt != "":
- query = query.filter(cls.filename.like("%" + tt.strip() + "%"))
- else:
- query = query.filter(cls.filename.like("%" + search + f"%"))
-
- if option == "completed":
- query = query.filter(cls.status == "completed")
+ from framework import F
+ with F.app.app_context():
+ query = db.session.query(cls)
+ if search is not None and search != "":
+ if search.find("|") != -1:
+ tmp = search.split("|")
+ conditions = []
+ for tt in tmp:
+ if tt != "":
+ conditions.append(cls.filename.like("%" + tt.strip() + "%"))
+ query = query.filter(or_(*conditions))
+ elif search.find(",") != -1:
+ tmp = search.split(",")
+ for tt in tmp:
+ if tt != "":
+ query = query.filter(cls.filename.like("%" + tt.strip() + "%"))
+ else:
+ query = query.filter(cls.filename.like("%" + search + f"%"))
- if order == "desc":
- query = query.order_by(desc(cls.id))
- else:
- query = query.order_by(cls.id)
- return query
+ if option == "completed":
+ query = query.filter(cls.status == "completed")
+
+ if order == "desc":
+ query = query.order_by(desc(cls.id))
+ else:
+ query = query.order_by(cls.id)
+ return query
diff --git a/mod_ohli24.py b/mod_ohli24.py
index 3a2c3c1..4ea7fa8 100644
--- a/mod_ohli24.py
+++ b/mod_ohli24.py
@@ -1186,31 +1186,149 @@ class LogicOhli24(AnimeModuleBase):
self, command: str, arg1: str, arg2: str, arg3: str, req: Any
) -> Any:
"""컀맨λ μ²λ¦¬."""
- ret: Dict[str, Any] = {"ret": "success"}
+ try:
+ if command == "list":
+ # 1. μ체 ν λͺ©λ‘ κ°μ Έμ€κΈ°
+ ret = self.queue.get_entity_list() if self.queue else []
+
+ # 2. GDM νμ€ν¬ κ°μ Έμ€κΈ° (μ€μΉλ κ²½μ°)
+ try:
+ from gommi_downloader_manager.mod_queue import ModuleQueue
+ if ModuleQueue:
+ gdm_tasks = ModuleQueue.get_all_downloads()
+ # 이 모듈(ohli24)이 추가한 작업만 필터링
+ ohli24_tasks = [t for t in gdm_tasks if t.caller_plugin == f"{P.package_name}_{self.name}"]
+
+ for task in ohli24_tasks:
+ # 템플릿 호환 형식으로 변환
+ gdm_item = self._convert_gdm_task_to_queue_item(task)
+ ret.append(gdm_item)
+ except Exception as e:
+ logger.debug(f"GDM tasks fetch error: {e}")
+
+ return jsonify(ret)
+
+ elif command in ["stop", "remove", "cancel"]:
+ entity_id = arg1
+ if entity_id and str(entity_id).startswith("dl_"):
+ # GDM 작업 처리
+ try:
+ from gommi_downloader_manager.mod_queue import ModuleQueue
+ if ModuleQueue:
+ if command == "stop" or command == "cancel":
+ task = ModuleQueue.get_download(entity_id)
+ if task:
+ task.cancel()
+ return jsonify({"ret": "success", "log": "GDM 작업을 중지하였습니다."})
+ elif command == "remove" or command == "delete":
+ # GDMμμ μμ μ²λ¦¬
+ class DummyReq:
+ def __init__(self, id):
+ self.form = {"id": id}
+ ModuleQueue.process_ajax("delete", DummyReq(entity_id))
+ return jsonify({"ret": "success", "log": "GDM μμ
μ μμ νμμ΅λλ€."})
+ except Exception as e:
+ logger.error(f"GDM command error: {e}")
+ return jsonify({"ret": "error", "log": f"GDM 명령 실패: {e}"})
+
+ # μ체 ν μ²λ¦¬
+ return super().process_command(command, arg1, arg2, arg3, req)
- if command == "download_program":
- _pass = arg2
- db_item = ModelOhli24Program.get(arg1)
- if _pass == "false" and db_item is not None:
- ret["ret"] = "warning"
- ret["msg"] = "이미 DB에 있는 항목 입니다."
- elif (
- _pass == "true"
- and db_item is not None
- and ModelOhli24Program.get_by_id_in_queue(db_item.id) is not None
- ):
- ret["ret"] = "warning"
- ret["msg"] = "이미 큐에 있는 항목 입니다."
- else:
- if db_item is None:
- db_item = ModelOhli24Program(arg1, self.get_episode(arg1))
- db_item.save()
- db_item.init_for_queue()
- self.download_queue.put(db_item)
- ret["msg"] = "λ€μ΄λ‘λλ₯Ό μΆκ° νμμ΅λλ€."
- return jsonify(ret)
+ if command == "download_program":
+ ret: Dict[str, Any] = {"ret": "success"}
+ _pass = arg2
+ db_item = ModelOhli24Program.get(arg1)
+ if _pass == "false" and db_item is not None:
+ ret["ret"] = "warning"
+ ret["msg"] = "이미 DB에 있는 항목 입니다."
+ elif (
+ _pass == "true"
+ and db_item is not None
+ and ModelOhli24Program.get_by_id_in_queue(db_item.id) is not None
+ ):
+ ret["ret"] = "warning"
+ ret["msg"] = "이미 큐에 있는 항목 입니다."
+ else:
+ if db_item is None:
+ db_item = ModelOhli24Program(arg1, self.get_episode(arg1))
+ db_item.save()
+ db_item.init_for_queue()
+ self.download_queue.put(db_item)
+ ret["msg"] = "λ€μ΄λ‘λλ₯Ό μΆκ° νμμ΅λλ€."
+ return jsonify(ret)
- return super().process_command(command, arg1, arg2, arg3, req)
+ return super().process_command(command, arg1, arg2, arg3, req)
+ except Exception as e:
+ logger.error(f"process_command Error: {e}")
+ logger.error(traceback.format_exc())
+ return jsonify({'ret': 'fail', 'log': str(e)})
+
+ def _convert_gdm_task_to_queue_item(self, task):
+ """GDM DownloadTask κ°μ²΄λ₯Ό FfmpegQueueEntity.as_dict() νΈν νμμΌλ‘ λ³ν"""
+ status_kor_map = {
+ "pending": "λκΈ°μ€",
+ "extracting": "λΆμμ€",
+ "downloading": "λ€μ΄λ‘λμ€",
+ "paused": "μΌμμ μ§",
+ "completed": "μλ£",
+ "error": "μ€ν¨",
+ "cancelled": "μ·¨μλ¨"
+ }
+
+ status_str_map = {
+ "pending": "WAITING",
+ "extracting": "ANALYZING",
+ "downloading": "DOWNLOADING",
+ "paused": "PAUSED",
+ "completed": "COMPLETED",
+ "error": "FAILED",
+ "cancelled": "FAILED"
+ }
+
+ t_dict = task.as_dict()
+
+ return {
+ "entity_id": t_dict["id"],
+ "url": t_dict["url"],
+ "filename": t_dict["filename"] or t_dict["title"],
+ "ffmpeg_status_kor": status_kor_map.get(t_dict["status"], "μμμμ"),
+ "ffmpeg_percent": t_dict["progress"],
+ "created_time": t_dict["created_time"],
+ "current_speed": t_dict["speed"],
+ "download_time": t_dict["eta"],
+ "status_str": status_str_map.get(t_dict["status"], "WAITING"),
+ "idx": t_dict["id"],
+ "callback_id": "ohli24",
+ "start_time": t_dict["start_time"] or t_dict["created_time"],
+ "percent": t_dict["progress"],
+ "save_fullpath": t_dict["filepath"],
+ "is_gdm": True
+ }
+
+ def plugin_callback(self, data):
+ """GDM 모듈로부터 다운로드 상태 업데이트 수신"""
+ try:
+ callback_id = data.get('callback_id')
+ status = data.get('status')
+
+ logger.info(f"[Ohli24] Received GDM callback: id={callback_id}, status={status}")
+
+ if callback_id:
+ from framework import F
+ with F.app.app_context():
+ db_item = ModelOhli24Item.get_by_ohli24_id(callback_id)
+ if db_item:
+ if status == "completed":
+ db_item.status = "completed"
+ db_item.completed_time = datetime.now()
+ db_item.filepath = data.get('filepath')
+ db_item.save()
+ logger.info(f"[Ohli24] Updated DB item {db_item.id} to COMPLETED via GDM callback")
+ elif status == "error":
+ pass
+ except Exception as e:
+ logger.error(f"[Ohli24] Callback processing error: {e}")
+ logger.error(traceback.format_exc())
@staticmethod
def add_whitelist(*args: str) -> Dict[str, Any]:
diff --git a/static/css/video_modal.css b/static/css/video_modal.css
index 4743a40..b74fc82 100644
--- a/static/css/video_modal.css
+++ b/static/css/video_modal.css
@@ -77,6 +77,56 @@
object-fit: cover !important;
}
+/* Artplayer Container */
+#artplayer-container {
+ width: 100%;
+ height: 100%;
+ min-height: 400px;
+}
+#artplayer-container.art-zoomed .art-video {
+ object-fit: cover !important;
+}
+
+/* Plyr Container */
+#plyr-container {
+ width: 100%;
+ height: 100%;
+}
+#plyr-container .plyr {
+ height: 100%;
+}
+#plyr-container .plyr--video {
+ height: 100%;
+}
+#plyr-container video.vjs-zoomed {
+ object-fit: cover !important;
+}
+
+/* Player Select Dropdown in Header */
+#player-select {
+ background: rgba(255, 255, 255, 0.1);
+ color: white;
+ border: 1px solid rgba(255, 255, 255, 0.2);
+ border-radius: 6px;
+ padding: 6px 12px;
+ font-size: 13px;
+ cursor: pointer;
+ transition: all 0.2s ease;
+}
+#player-select:hover {
+ background: rgba(255, 255, 255, 0.15);
+ border-color: rgba(255, 255, 255, 0.3);
+}
+#player-select:focus {
+ outline: none;
+ border-color: #3b82f6;
+ box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.3);
+}
+#player-select option {
+ background: #1e293b;
+ color: #f1f5f9;
+}
+
/* Zoom Button */
.video-zoom-btn {
position: absolute;
diff --git a/static/js/video_modal.js b/static/js/video_modal.js
index cade724..c92dc0b 100644
--- a/static/js/video_modal.js
+++ b/static/js/video_modal.js
@@ -1,12 +1,3 @@
-/**
- * Video Modal Component JavaScript
- * Reusable video player modal for Anime Downloader
- *
- * Usage:
- * VideoModal.init({ package_name: 'anime_downloader', sub: 'ohli24' });
- * VideoModal.openWithPath('/path/to/video.mp4');
- */
-
var VideoModal = (function() {
'use strict';
@@ -15,28 +6,45 @@ var VideoModal = (function() {
sub: 'ohli24'
};
- var videoPlayer = null;
+ var videoPlayer = null; // Video.js instance
+ var artPlayer = null; // Artplayer instance
+ var plyrPlayer = null; // Plyr instance
+ var currentPlayer = 'videojs'; // 'videojs', 'artplayer', 'plyr'
var playlist = [];
var currentPlaylistIndex = 0;
var currentPlayingPath = '';
+ var currentStreamUrl = '';
var isVideoZoomed = false;
/**
* Initialize the video modal
- * @param {Object} options - Configuration options
- * @param {string} options.package_name - Package name (default: 'anime_downloader')
- * @param {string} options.sub - Sub-module name (e.g., 'ohli24', 'linkkf')
*/
function init(options) {
config = Object.assign(config, options || {});
+
+ // Load saved player preference
+ var savedPlayer = localStorage.getItem('anime_downloader_preferred_player');
+ if (savedPlayer && ['videojs', 'artplayer', 'plyr'].indexOf(savedPlayer) >= 0) {
+ currentPlayer = savedPlayer;
+ $('#player-select').val(currentPlayer);
+ }
+
bindEvents();
- console.log('[VideoModal] Initialized with config:', config);
+ console.log('[VideoModal] Initialized with player:', currentPlayer);
}
/**
* Bind all event handlers
*/
function bindEvents() {
+ // Player selector change
+ $('#player-select').off('change').on('change', function() {
+ var newPlayer = $(this).val();
+ if (newPlayer !== currentPlayer) {
+ switchPlayer(newPlayer);
+ }
+ });
+
// Dropdown episode selection
$('#episode-dropdown').off('change').on('change', function() {
var index = parseInt($(this).val());
@@ -50,10 +58,12 @@ var VideoModal = (function() {
$('#btn-video-zoom').off('click').on('click', function() {
isVideoZoomed = !isVideoZoomed;
if (isVideoZoomed) {
- $('#video-player').addClass('vjs-zoomed');
+ $('#video-player, #plyr-player').addClass('vjs-zoomed');
+ $('#artplayer-container').addClass('art-zoomed');
$(this).addClass('active').find('i').removeClass('fa-expand').addClass('fa-compress');
} else {
- $('#video-player').removeClass('vjs-zoomed');
+ $('#video-player, #plyr-player').removeClass('vjs-zoomed');
+ $('#artplayer-container').removeClass('art-zoomed');
$(this).removeClass('active').find('i').removeClass('fa-compress').addClass('fa-expand');
}
});
@@ -64,87 +74,81 @@ var VideoModal = (function() {
});
$('#videoModal').off('hide.bs.modal').on('hide.bs.modal', function() {
- if (videoPlayer) {
- videoPlayer.pause();
- }
+ pauseAllPlayers();
});
$('#videoModal').off('hidden.bs.modal').on('hidden.bs.modal', function() {
$('body').removeClass('modal-video-open');
if (isVideoZoomed) {
isVideoZoomed = false;
- $('#video-player').removeClass('vjs-zoomed');
+ $('#video-player, #plyr-player').removeClass('vjs-zoomed');
+ $('#artplayer-container').removeClass('art-zoomed');
$('#btn-video-zoom').removeClass('active').find('i').removeClass('fa-compress').addClass('fa-expand');
}
});
}
/**
- * Open modal with a file path (fetches playlist from server)
- * @param {string} filePath - Path to the video file
+ * Switch between players
*/
- function openWithPath(filePath) {
- $.ajax({
- url: '/' + config.package_name + '/ajax/' + config.sub + '/get_playlist?path=' + encodeURIComponent(filePath),
- type: 'GET',
- dataType: 'json',
- success: function(data) {
- playlist = data.playlist || [];
- currentPlaylistIndex = data.current_index || 0;
- currentPlayingPath = filePath;
-
- var streamUrl = '/' + config.package_name + '/ajax/' + config.sub + '/stream_video?path=' + encodeURIComponent(filePath);
- initPlayer(streamUrl);
- updatePlaylistUI();
- $('#videoModal').modal('show');
- },
- error: function() {
- // Fallback: single file
- playlist = [{ name: filePath.split('/').pop(), path: filePath }];
- currentPlaylistIndex = 0;
- var streamUrl = '/' + config.package_name + '/ajax/' + config.sub + '/stream_video?path=' + encodeURIComponent(filePath);
- initPlayer(streamUrl);
- updatePlaylistUI();
- $('#videoModal').modal('show');
- }
- });
+ function switchPlayer(newPlayer) {
+ pauseAllPlayers();
+
+ currentPlayer = newPlayer;
+ localStorage.setItem('anime_downloader_preferred_player', newPlayer);
+
+ // Hide all player containers
+ $('#videojs-container').hide();
+ $('#artplayer-container').hide();
+ $('#plyr-container').hide();
+
+ // Show selected player and reinitialize with current URL
+ if (currentStreamUrl) {
+ initPlayerWithUrl(currentStreamUrl);
+ }
+
+ console.log('[VideoModal] Switched to:', newPlayer);
}
/**
- * Open modal with a direct stream URL
- * @param {string} streamUrl - Direct URL to stream
- * @param {string} title - Optional title
+ * Pause all players
*/
- function openWithUrl(streamUrl, title) {
- playlist = [{ name: title || 'Video', path: streamUrl }];
- currentPlaylistIndex = 0;
- initPlayer(streamUrl);
- updatePlaylistUI();
- $('#videoModal').modal('show');
+ function pauseAllPlayers() {
+ try {
+ if (videoPlayer) videoPlayer.pause();
+ } catch(e) {}
+ try {
+ if (artPlayer) artPlayer.pause();
+ } catch(e) {}
+ try {
+ if (plyrPlayer) plyrPlayer.pause();
+ } catch(e) {}
}
/**
- * Open modal with a playlist array
- * @param {Array} playlistData - Array of {name, path} objects
- * @param {number} startIndex - Index to start playing from
+ * Initialize player with URL based on current player selection
*/
- function openWithPlaylist(playlistData, startIndex) {
- playlist = playlistData || [];
- currentPlaylistIndex = startIndex || 0;
- if (playlist.length > 0) {
- var filePath = playlist[currentPlaylistIndex].path;
- var streamUrl = '/' + config.package_name + '/ajax/' + config.sub + '/stream_video?path=' + encodeURIComponent(filePath);
- initPlayer(streamUrl);
- updatePlaylistUI();
- $('#videoModal').modal('show');
+ function initPlayerWithUrl(streamUrl) {
+ currentStreamUrl = streamUrl;
+
+ if (currentPlayer === 'videojs') {
+ initVideoJS(streamUrl);
+ } else if (currentPlayer === 'artplayer') {
+ initArtplayer(streamUrl);
+ } else if (currentPlayer === 'plyr') {
+ initPlyr(streamUrl);
}
}
/**
- * Initialize or update Video.js player
- * @param {string} streamUrl - URL to play
+ * Initialize Video.js player
*/
- function initPlayer(streamUrl) {
+ function initVideoJS(streamUrl) {
+ // Hide other containers
+ $('#artplayer-container').hide();
+ $('#plyr-container').hide();
+ $('#videojs-container').show();
+
if (!videoPlayer) {
videoPlayer = videojs('video-player', {
controls: true,
@@ -157,22 +161,84 @@ var VideoModal = (function() {
}
});
- // Auto-next on video end
- videoPlayer.on('ended', function() {
- var autoNextEnabled = $('#auto-next-checkbox').is(':checked');
- if (autoNextEnabled && currentPlaylistIndex < playlist.length - 1) {
- currentPlaylistIndex++;
- playVideoAtIndex(currentPlaylistIndex);
- }
- });
+ videoPlayer.on('ended', handleVideoEnded);
}
videoPlayer.src({ type: 'video/mp4', src: streamUrl });
}
+ /**
+ * Initialize Artplayer
+ */
+ function initArtplayer(streamUrl) {
+ // Hide other containers
+ $('#videojs-container').hide();
+ $('#plyr-container').hide();
+ $('#artplayer-container').show().empty();
+
+ if (artPlayer) {
+ artPlayer.destroy();
+ artPlayer = null;
+ }
+
+ artPlayer = new Artplayer({
+ container: '#artplayer-container',
+ url: streamUrl,
+ autoplay: false,
+ pip: true,
+ screenshot: true,
+ setting: true,
+ playbackRate: true,
+ aspectRatio: true,
+ fullscreen: true,
+ fullscreenWeb: true,
+ theme: '#3b82f6'
+ });
+
+ artPlayer.on('video:ended', handleVideoEnded);
+ }
+
+ /**
+ * Initialize Plyr player
+ */
+ function initPlyr(streamUrl) {
+ // Hide other containers
+ $('#videojs-container').hide();
+ $('#artplayer-container').hide();
+ $('#plyr-container').show();
+
+ // Set source
+ $('#plyr-player').attr('src', streamUrl);
+
+ if (!plyrPlayer) {
+ plyrPlayer = new Plyr('#plyr-player', {
+ controls: ['play-large', 'play', 'progress', 'current-time', 'mute', 'volume', 'settings', 'pip', 'fullscreen'],
+ settings: ['quality', 'speed'],
+ speed: { selected: 1, options: [0.5, 0.75, 1, 1.25, 1.5, 2] }
+ });
+
+ plyrPlayer.on('ended', handleVideoEnded);
+ } else {
+ plyrPlayer.source = {
+ type: 'video',
+ sources: [{ src: streamUrl, type: 'video/mp4' }]
+ };
+ }
+ }
+
+ /**
+ * Handle video ended event (auto-next)
+ */
+ function handleVideoEnded() {
+ var autoNextEnabled = $('#auto-next-checkbox').is(':checked');
+ if (autoNextEnabled && currentPlaylistIndex < playlist.length - 1) {
+ currentPlaylistIndex++;
+ playVideoAtIndex(currentPlaylistIndex);
+ }
+ }
+
/**
* Play video at specific playlist index
- * @param {number} index - Playlist index
*/
function playVideoAtIndex(index) {
if (index < 0 || index >= playlist.length) return;
@@ -180,14 +246,73 @@ var VideoModal = (function() {
var item = playlist[index];
var streamUrl = '/' + config.package_name + '/ajax/' + config.sub + '/stream_video?path=' + encodeURIComponent(item.path);
- if (videoPlayer) {
- videoPlayer.src({ type: 'video/mp4', src: streamUrl });
- videoPlayer.play();
- }
+ initPlayerWithUrl(streamUrl);
+
+ // Try to auto-play
+ setTimeout(function() {
+ if (currentPlayer === 'videojs' && videoPlayer) videoPlayer.play();
+ else if (currentPlayer === 'artplayer' && artPlayer) artPlayer.play = true;
+ else if (currentPlayer === 'plyr' && plyrPlayer) plyrPlayer.play();
+ }, 100);
updatePlaylistUI();
}
+ /**
+ * Open modal with a file path (fetches playlist from server)
+ */
+ function openWithPath(filePath) {
+ $.ajax({
+ url: '/' + config.package_name + '/ajax/' + config.sub + '/get_playlist?path=' + encodeURIComponent(filePath),
+ type: 'GET',
+ dataType: 'json',
+ success: function(data) {
+ playlist = data.playlist || [];
+ currentPlaylistIndex = data.current_index || 0;
+ currentPlayingPath = filePath;
+
+ var streamUrl = '/' + config.package_name + '/ajax/' + config.sub + '/stream_video?path=' + encodeURIComponent(filePath);
+ initPlayerWithUrl(streamUrl);
+ updatePlaylistUI();
+ $('#videoModal').modal('show');
+ },
+ error: function() {
+ playlist = [{ name: filePath.split('/').pop(), path: filePath }];
+ currentPlaylistIndex = 0;
+ var streamUrl = '/' + config.package_name + '/ajax/' + config.sub + '/stream_video?path=' + encodeURIComponent(filePath);
+ initPlayerWithUrl(streamUrl);
+ updatePlaylistUI();
+ $('#videoModal').modal('show');
+ }
+ });
+ }
+
+ /**
+ * Open modal with a direct stream URL
+ */
+ function openWithUrl(streamUrl, title) {
+ playlist = [{ name: title || 'Video', path: streamUrl }];
+ currentPlaylistIndex = 0;
+ initPlayerWithUrl(streamUrl);
+ updatePlaylistUI();
+ $('#videoModal').modal('show');
+ }
+
+ /**
+ * Open modal with a playlist array
+ */
+ function openWithPlaylist(playlistData, startIndex) {
+ playlist = playlistData || [];
+ currentPlaylistIndex = startIndex || 0;
+ if (playlist.length > 0) {
+ var filePath = playlist[currentPlaylistIndex].path;
+ var streamUrl = '/' + config.package_name + '/ajax/' + config.sub + '/stream_video?path=' + encodeURIComponent(filePath);
+ initPlayerWithUrl(streamUrl);
+ updatePlaylistUI();
+ $('#videoModal').modal('show');
+ }
+ }
+
/**
* Update playlist UI (dropdown, external player buttons)
*/
diff --git a/templates/anime_downloader/components/video_modal.html b/templates/anime_downloader/components/video_modal.html
index db4dab0..1ce22ef 100644
--- a/templates/anime_downloader/components/video_modal.html
+++ b/templates/anime_downloader/components/video_modal.html
@@ -5,21 +5,44 @@
+
+
+
+
+
+
+
';
tmp += '';
@@ -314,7 +320,10 @@
tmp += '';
tmp += '