test
This commit is contained in:
7
lib/tool_expand/__init__.py
Normal file
7
lib/tool_expand/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from framework import logger
|
||||
from .fileprocess import ToolExpandFileProcess
|
||||
from .discord import ToolExpandDiscord
|
||||
from .telegram import ToolTelegram
|
||||
from .torrent_process import TorrentProcess
|
||||
from .fp_ktv import EntityKtv
|
||||
|
||||
178
lib/tool_expand/discord.py
Normal file
178
lib/tool_expand/discord.py
Normal file
@@ -0,0 +1,178 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#########################################################
|
||||
|
||||
import os
|
||||
import traceback, time
|
||||
import random
|
||||
|
||||
import requests
|
||||
from discord_webhook import DiscordWebhook, DiscordEmbed
|
||||
|
||||
from framework import app
|
||||
from . import logger
|
||||
|
||||
|
||||
webhook_list = app.config['DEFINE']['WEBHOOK_LIST_FOR_IMAGE_PROXY']
|
||||
|
||||
class ToolExpandDiscord(object):
    """Helpers for pushing messages and images through Discord webhooks.

    Relies on module globals: `webhook_list` (pool of webhook URLs for the
    image proxy) and `logger`.
    """

    @classmethod
    def send_discord_message(cls, text, image_url=None, webhook_url=None):
        """Send *text*, optionally with an embedded image, to a webhook.

        Falls back to the system setting 'notify_discord_webhook' when no
        *webhook_url* is given. Returns True on success, False on any error.
        """
        from system.model import ModelSetting as SystemModelSetting
        try:
            if webhook_url is None:
                webhook_url = SystemModelSetting.get('notify_discord_webhook')

            webhook = DiscordWebhook(url=webhook_url, content=text)
            if image_url is not None:
                embed = DiscordEmbed()
                embed.set_timestamp()
                embed.set_image(url=image_url)
                webhook.add_embed(embed)
            webhook.execute()
            return True
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            return False

    @classmethod
    def discord_proxy_set_target(cls, source, target):
        """Register a source->target mapping on the proxy server.

        Intentionally disabled upstream (the original body was an immediate
        `return` in front of commented-out server code). Kept for API
        compatibility; always returns None.
        """
        return

    @classmethod
    def discord_proxy_image(cls, image_url, webhook_url=None, retry=True):
        """Post *image_url* as an embed and return Discord's proxied URL.

        Returns the proxy URL when Discord serves it with HTTP 200, the
        original URL otherwise, or None for empty input. Retries once on
        failure with a fresh random webhook.
        """
        if image_url is None or image_url == '':
            return
        if webhook_url is None or webhook_url == '':
            # Random webhook from the reserved tail of the pool.
            # NOTE(review): assumes len(webhook_list) > 10 — confirm.
            webhook_url = webhook_list[random.randint(10, len(webhook_list) - 1)]

        try:
            webhook = DiscordWebhook(url=webhook_url, content='')
            embed = DiscordEmbed()
            embed.set_timestamp()
            embed.set_image(url=image_url)
            webhook.add_embed(embed)
            import io
            # Zero-byte dummy attachment: presumably forces Discord to return
            # the full message payload (with 'embeds') in the response.
            webhook.add_file(file=io.BytesIO().getvalue(), filename='dummy')
            response = webhook.execute()
            if isinstance(response, list):
                data = response[0].json() if response else None
            else:
                data = response.json()

            if data is not None and 'embeds' in data:
                target = data['embeds'][0]['image']['proxy_url']
                if requests.get(target).status_code == 200:
                    return target
                return image_url
            # Unexpected payload: fall into the retry logic below.
            raise Exception(str(data))
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            if retry:
                time.sleep(1)
                return cls.discord_proxy_image(image_url, webhook_url=None, retry=False)
            return image_url

    @classmethod
    def discord_proxy_image_localfile(cls, filepath, retry=True):
        """Upload a local image file; return its Discord attachment URL.

        Returns None when the upload fails even after one retry.
        """
        webhook_url = webhook_list[random.randint(0, 9)]  # general pool
        try:
            webhook = DiscordWebhook(url=webhook_url, content='')
            import io
            with open(filepath, 'rb') as fh:
                byteio = io.BytesIO(fh.read())
            webhook.add_file(file=byteio.getvalue(), filename='image.jpg')
            # NOTE(review): the embed is built but never attached via
            # add_embed() in the original; preserved as-is — confirm intent.
            embed = DiscordEmbed()
            embed.set_image(url="attachment://image.jpg")
            response = webhook.execute()
            if isinstance(response, list):
                data = response[0].json() if response else None
            else:
                data = response.json()

            if data is not None and 'attachments' in data:
                target = data['attachments'][0]['url']
                if requests.get(target).status_code == 200:
                    return target
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())

        if retry:
            time.sleep(1)
            return cls.discord_proxy_image_localfile(filepath, retry=False)

    # Used for subtitle files uploaded from RSS.
    @classmethod
    def discord_cdn(cls, byteio=None, filepath=None, filename=None, webhook_url=None, content='', retry=True):
        """Upload *byteio* (or the contents of *filepath*) as *filename*
        and return the resulting Discord CDN attachment URL, or None.
        """
        if webhook_url is None:
            webhook_url = webhook_list[random.randint(0, 9)]  # general pool

        try:
            webhook = DiscordWebhook(url=webhook_url, content=content)
            if byteio is None and filepath is not None:
                import io
                with open(filepath, 'rb') as fh:
                    byteio = io.BytesIO(fh.read())

            webhook.add_file(file=byteio.getvalue(), filename=filename)
            response = webhook.execute()
            if isinstance(response, list):
                data = response[0].json() if response else None
            else:
                data = response.json()

            if data is not None and 'attachments' in data:
                target = data['attachments'][0]['url']
                if requests.get(target).status_code == 200:
                    return target
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())

        if retry:
            time.sleep(1)
            # BUGFIX: the retry previously called discord_proxy_image_localfile(),
            # silently dropping byteio/filename/content; retry the same upload.
            return cls.discord_cdn(byteio=byteio, filepath=filepath, filename=filename,
                                   webhook_url=None, content=content, retry=False)
|
||||
324
lib/tool_expand/fileprocess.py
Normal file
324
lib/tool_expand/fileprocess.py
Normal file
@@ -0,0 +1,324 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#########################################################
|
||||
# python
|
||||
import os, re
|
||||
import traceback
|
||||
import time
|
||||
import threading
|
||||
import shutil
|
||||
|
||||
from framework import app
|
||||
from . import logger
|
||||
|
||||
EXTENSION = 'mp4|avi|mkv|ts|wmv|m2ts|smi|srt|ass|m4v|flv|asf|mpg|ogm'
|
||||
|
||||
class ToolExpandFileProcess(object):
    """File maintenance helpers: directory flattening/cleanup and
    censored-AV filename normalization.

    Relies on module globals: `EXTENSION` (alternation of media/subtitle
    extensions) and `logger`.
    """

    @classmethod
    def remove_extension(cls, filename):
        """Return *filename* without its trailing media/subtitle extension.

        Returns the input unchanged when it has no known extension.
        """
        regex = r'(.*?)\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            # BUGFIX: previously used str.replace('.'+ext, ''), which removed
            # *every* occurrence of the extension substring, not just the
            # trailing one (e.g. 'a.mp4.mp4' became 'a'). Use the stem.
            return match.group(1)
        return filename

    @classmethod
    def remove_small_file_and_move_target(cls, path, size, target=None, except_ext=None, small_move_path=None):
        """Recursively flatten *path*.

        Files larger than *size* MB (and subtitle files, regardless of size)
        are moved into *target* (defaults to *path*; duplicates at the
        destination are deleted instead of moved). Smaller files are deleted,
        or moved to *small_move_path* when given. Emptied directories are
        removed.

        Args:
            path: directory to clean up.
            size: threshold in megabytes.
            target: destination directory; None means *path* itself.
            except_ext: extensions always kept; defaults to
                ['.smi', '.srt', '.ass', '.sup'].
            small_move_path: where small files go instead of being deleted.

        Errors are logged and swallowed so one bad entry does not abort
        the scan.
        """
        try:
            if target is None:
                target = path
            if except_ext is None:
                except_ext = ['.smi', '.srt', '.ass', '.sup']
            for name in os.listdir(path):
                try:
                    file_path = os.path.join(path, name)
                    keep_always = os.path.splitext(file_path.lower())[1] in except_ext
                    if os.path.isdir(file_path):
                        # BUGFIX: recursion previously dropped small_move_path,
                        # so nested small files were deleted instead of moved.
                        cls.remove_small_file_and_move_target(
                            file_path, size, target=target,
                            except_ext=except_ext, small_move_path=small_move_path)
                        if not os.listdir(file_path):
                            logger.info('REMOVE DIR : %s', file_path)
                            os.rmdir(file_path)
                    elif os.stat(file_path).st_size > 1024 * 1024 * size or keep_always:
                        if path == target:
                            continue
                        try:
                            logger.info('MOVE : %s', os.path.join(target, name))
                        except:
                            # logging itself can fail on undecodable names
                            logger.info('MOVE')
                        if os.path.exists(os.path.join(target, name)):
                            logger.info(u'ALREADY in Target : %s', os.path.join(target, name))
                            os.remove(file_path)
                        else:
                            shutil.move(file_path, os.path.join(target, name))
                    elif small_move_path is None or small_move_path == '':
                        try:
                            logger.info(u'FILE REMOVE : %s %s', file_path, os.stat(file_path).st_size)
                        except:
                            logger.info(u'FILE REMOVE')
                        os.remove(file_path)
                    else:
                        logger.info(u'SMALL FILE MOVE : %s', file_path)
                        shutil.move(file_path, os.path.join(small_move_path, name))
                except UnicodeDecodeError:
                    pass
                except Exception as exception:
                    logger.error('Exception:%s', exception)
                    logger.error(traceback.format_exc())
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())

    @classmethod
    def change_filename_censored(cls, filename):
        """Normalize a censored-AV filename to 'label-NNN[cdX].ext',
        zero-padding the numeric part to three digits.

        Subtitle files are only lower-cased. Returns the (possibly partially
        normalized) name when the final padding step fails, or None when the
        first-pass normalization finds no match.
        """
        if os.path.splitext(filename)[-1].lower() in ['.smi', '.ass', '.srt', '.sup']:
            return filename.lower()

        # Labels like '24id' would be mangled by the digit-stripping rules;
        # temporarily swap them for a 'zzid' placeholder. (2021-06-30)
        match = re.compile(r'\d{2}id', re.I).search(filename.lower())
        id_before = None
        if match:
            id_before = match.group(0)
            filename = filename.lower().replace(id_before, 'zzid')

        try:
            filename = cls.change_filename_censored_old(filename)
            if filename is not None:
                if id_before is not None:
                    filename = filename.replace('zzid', id_before)
                base, ext = os.path.splitext(filename)
                parts = base.split('-')
                cd_parts = parts[1].split('cd')
                if len(cd_parts) == 1:
                    return '%s-%s%s' % (parts[0], str(int(parts[1])).zfill(3), ext)
                elif len(cd_parts) == 2:
                    return '%s-%scd%s%s' % (parts[0], str(int(cd_parts[0])).zfill(3), cd_parts[1], ext)
                # BUGFIX: with >2 'cd' segments the original returned a stale
                # os.path.splitext tuple; fall through and return the name.
        except Exception as exception:
            logger.debug('filename : %s', filename)
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
        return filename

    @classmethod
    def change_filename_censored_old(cls, filename):
        """First-pass normalization: strip site tags, codec/quality markers
        and release junk, then try a cascade of patterns to extract
        'label-number[cdX].ext'.

        Returns the lower-cased result, or None when nothing matched.
        """
        original_filename = filename
        filename = filename.lower()

        # strip codec tags
        filename = filename.replace('-h264', '')
        filename = filename.replace('-264', '')
        # 2019-10-06 : '-■-IBW-670Z_1080p.mkv' style quality suffixes
        filename = filename.replace('z_1080p', '').replace('z_720p', '')
        filename = filename.replace('z_', '')
        filename = filename.replace('-c.', '.')
        filename = filename.replace('c.', '.')
        # 2021-11-09 : leading site tag such as 'xxx.com@...' / 'xxx.com-...'
        for sp in ['.com@', '.com-']:
            tmp = filename.split(sp)
            logger.error(tmp)
            if len(tmp) == 2:
                filename = tmp[1]

        # '.1080p' suffix
        regex = r'^(?P<code>.*?)\.1080p\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            filename = '%s.%s' % (match.group('code'), match.group('ext'))

        # '_FHD' / '-fhd' suffix (2019-10-06: sdmu-676_FHD.mp4)
        regex = r'^(?P<code>.*?)(\_|\-)fhd\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            filename = '%s.%s' % (match.group('code'), match.group('ext'))

        # '[...]' prefix followed by digits.
        # BUGFIX: the pattern was missing '% EXTENSION', so the literal '%s'
        # could never match a real extension and the rule was dead.
        regex = r'^\[.*?\]\d+(?P<code>.*?)\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            filename = '%s.%s' % (match.group('code'), match.group('ext'))

        # '[...]' prefix
        regex = r'^\[.*?\](?P<code>.*?)\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            filename = '%s.%s' % (match.group('code'), match.group('ext'))

        # '(...)' prefix
        regex = r'^\(.*?\)(?P<code>.*?)\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            filename = '%s.%s' % (match.group('code'), match.group('ext'))

        # 3-4 leading digits
        regex = r'^\d{3,4}(?P<code>.*?)\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            filename = '%s.%s' % (match.group('code'), match.group('ext'))

        # '<site>.com-12-34@code.ext'
        regex = r'^.*\.com\-?\d*\-?\d*@?(?P<code>.*?)(\-h264)??\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            filename = '%s.%s' % (match.group('code'), match.group('ext'))

        # drop everything up to the first letters after a '.com' tag
        regex = r'^(?P<dummy>.*\.com.*?)(?P<code>[a-z]+)'
        match = re.compile(regex).match(filename)
        if match:
            filename = filename.replace(match.group('dummy'), '')

        # '-5' suffix
        regex = r'^(?P<code>.*?)\-5.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            filename = '%s.%s' % (match.group('code'), match.group('ext'))

        # s-cute label
        regex = r'^s-cute\s(?P<code>\d{3}).*?.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            ret = 'scute-%s.%s' % (match.group('code'), match.group('ext'))
            return ret.lower()

        logger.debug('5. %s', filename)
        # label + number (+ optional cd/part suffix), without / with site tag
        regex_list = [
            r'^(?P<name>[a-zA-Z]+)[-_]?(?P<no>\d+)(([-_]?(cd|part)?(?P<part_no>\d))|[-_]?(?P<part_char>\w))?\.(?P<ext>%s)$' % EXTENSION,
            r'^\w+.\w+@(?P<name>[a-zA-Z]+)[-_]?(?P<no>\d+)(([-_\.]?(cd|part)?(?P<part_no>\d))|[-_\.]?(?P<part_char>\w))?\.(?P<ext>%s)$' % EXTENSION
        ]
        for regex in regex_list:
            match = re.compile(regex).match(filename)
            if match:
                part = None
                if match.group('part_no') is not None:
                    part = 'cd%s' % match.group('part_no')
                elif match.group('part_char') is not None:
                    # disc letters 'a'/'b'/... -> cd1/cd2/...
                    part = 'cd%s' % (ord(match.group('part_char').lower()) - ord('a') + 1)
                if part is None:
                    ret = '%s-%s.%s' % (match.group('name').lower(), match.group('no'), match.group('ext'))
                else:
                    ret = '%s-%s%s.%s' % (match.group('name').lower(), match.group('no'), part, match.group('ext'))
                return ret.lower()

        # 'T28-123' style: label itself ends in digits; anchored match
        regex = r'(?P<name>[a-zA-Z]+\d+)\-(?P<no>\d+).*?\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            ret = '%s-%s.%s' % (match.group('name'), match.group('no'), match.group('ext'))
            return ret.lower()

        # 'ABC123' anchored, against the cleaned name then the original
        # (e.g. hjd2048.com-0113meyd466-264.mp4)
        regex = r'^(?P<name>[a-zA-Z]{3,})\-?(?P<no>\d+).*?\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(filename)
        if match:
            ret = '%s-%s.%s' % (match.group('name'), match.group('no'), match.group('ext'))
            return ret.lower()
        match = re.compile(regex).match(original_filename)
        if match:
            ret = '%s-%s.%s' % (match.group('name'), match.group('no'), match.group('ext'))
            return ret.lower()

        # looser unanchored search, cleaned name then original
        regex = r'(?P<name>[a-zA-Z]+)\-(?P<no>\d+).*?\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).search(filename)
        if match:
            ret = '%s-%s.%s' % (match.group('name'), match.group('no'), match.group('ext'))
            return ret.lower()
        match = re.compile(regex).search(original_filename)
        if match:
            ret = '%s-%s.%s' % (match.group('name'), match.group('no'), match.group('ext'))
            return ret.lower()

        # 21-01-08 : fbfb.me@sivr00103.part1.mp4
        regex = r'\w+.\w+@(?P<name>[a-zA-Z]+)(?P<no>\d{5})\.(cd|part)(?P<part_no>\d+)\.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).match(original_filename)
        if match:
            part = None
            if match.group('part_no') is not None:
                part = 'cd%s' % match.group('part_no')
            if part is None:
                ret = '%s-%s.%s' % (match.group('name').lower(), match.group('no'), match.group('ext'))
            else:
                ret = '%s-%s%s.%s' % (match.group('name').lower(), match.group('no'), part, match.group('ext'))
            return ret.lower()

        # 20-02-02 : '<site>@label00103...' -> strip the padding zeros
        regex = r'\w+.\w+@(?P<name>[a-zA-Z]+)(?P<no>\d{5}).*?.(?P<ext>%s)$' % EXTENSION
        match = re.compile(regex).search(original_filename)
        if match:
            no = match.group('no').replace('0', '').zfill(3)
            ret = '%s-%s.%s' % (match.group('name'), no, match.group('ext'))
            return ret.lower()

        return None
|
||||
|
||||
399
lib/tool_expand/fp_ktv.py
Normal file
399
lib/tool_expand/fp_ktv.py
Normal file
@@ -0,0 +1,399 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#########################################################
|
||||
# python
|
||||
import os, re
|
||||
import traceback
|
||||
import time
|
||||
import threading
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
|
||||
from tool_base import d
|
||||
from . import logger
|
||||
|
||||
|
||||
EXTENSION = 'mp4|avi|mkv|ts|wmv|m2ts|smi|srt|ass|m4v|flv|asf|mpg|ogm'
|
||||
|
||||
REGEXS = [
|
||||
r'^(?P<name>.*?)\.([sS](?P<sno>\d+))?[eE](?P<no>\d+)(\-E\d{1,4})?\.?(?P<a>.*?\.)?(?P<date>\d{6})\.(?P<etc>.*?)((?P<quality>\d+)[p|P])?(\-?(?P<release>.*?))?(\.(.*?))?$',
|
||||
r'^(?P<name>.*?)\s([sS](?P<sno>\d+))?[eE](?P<no>\d+)(\-E\d{1,4})?\.?(END\.)?(?P<date>\d{6})\.(?P<etc>.*?)(?P<quality>\d+)[p|P](?P<more>\..*?)(?P<ext>\.[\w|\d]{3})$',
|
||||
r'^(?P<name>.*?)\.([sS](?P<sno>\d+))?(E(?P<no>\d+)\.?)?(END\.)?(?P<date>\d{6})\.(?P<etc>.*?)(?P<quality>\d+)[p|P](\-?(?P<release>.*?))?(\.(.*?))?$',
|
||||
r'^(?P<name>.*?)([sS](?P<sno>\d+))?[eE](?P<no>\d+)', # 외국 릴
|
||||
r'^(?P<name>.*?)\.(Series\.(?P<sno>\d+)\.)?(?P<no>\d+)of', # 외국 릴
|
||||
r'^(?P<name>.*?)[\s\(](?P<no>\d+)[회화]',
|
||||
|
||||
]
|
||||
|
||||
#합본처리 제외
|
||||
#_REGEX_FILENAME_RENAME = r'(?P<title>.*?)[\s\.]E?(?P<no>\d{1,2})[\-\~\s\.]?E?\d{1,2}'
|
||||
|
||||
|
||||
|
||||
class EntityKtv(object):
|
||||
meta_cache = {}
|
||||
def __init__(self, filename, dirname=None, meta=False, is_title=False, config=None):
    """Parse *filename* into self.data (or treat it as a bare title when
    is_title=True); optionally search metadata and match the episode.
    """
    self.data = {
        'filename': {
            'original': filename,
            'dirname': dirname,
            'is_matched': False,
            'match_index': -1,
            'name': '',
            'original_name': '',
        },
        'meta': {
            'find': False,
        },
        'process_info': {
            'rebuild': '',
            'status': '',
        },
    }
    fd = self.data['filename']
    if is_title:
        fd['name'] = filename
        fd['is_matched'] = True
        fd['match_index'] = -1
        fd['date'] = ''
    else:
        self.analyze(config=config)
        fd['original_name'] = fd['name']
        if fd['name'] != '' and config is not None:
            rename_rules = config.get('검색어 변경', None)
            if rename_rules is not None:
                self.change_name(rename_rules)

    if meta and fd['is_matched']:
        searched = False
        # Foreign-release patterns (regex index 3/4): try TMDB first when
        # the title contains no Korean characters.
        if fd['match_index'] in [3, 4]:
            from tool_base import ToolHangul
            lang_info = ToolHangul.language_info(fd['name'])
            if lang_info[0] == 0:
                searched = True
                self.find_meta_tmdb()
        if not searched:
            self.find_meta()

        if self.data['meta']['find']:
            self.find_meta_season()
            try:
                if fd['date'] == '':
                    self.data['process_info']['status'] = 'no_date'
                else:
                    self.check_episode_no()
            except Exception as exception:
                logger.debug('Exception:%s', exception)
                logger.debug(traceback.format_exc())
|
||||
|
||||
|
||||
def analyze(self, config=None):
    """Match the original filename against REGEXS (first hit wins) and
    populate self.data['filename'] with the parsed fields.
    """
    def _field(groups, key):
        # groupdict() yields None for unmatched optional groups.
        value = groups.get(key)
        return value if value is not None else ''

    fd = self.data['filename']
    for idx, regex in enumerate(REGEXS):
        match = re.compile(regex).match(fd['original'])
        if not match:
            continue
        groups = match.groupdict()
        fd['is_matched'] = True
        fd['match_index'] = idx
        fd['name'] = _field(groups, 'name').replace('.', ' ').strip()
        sno = _field(groups, 'sno')
        fd['sno'] = int(sno) if sno != '' else 1
        no = _field(groups, 'no')
        try:
            fd['no'] = int(no) if no != '' else -1
            if fd['no'] == 0:
                # episode 0 is treated as "no episode number"
                raise Exception('0')
        except:
            self.data['process_info']['rebuild'] += 'remove_episode'
            fd['no'] = -1

        fd['date'] = _field(groups, 'date')
        fd['etc'] = _field(groups, 'etc')
        fd['quality'] = _field(groups, 'quality')
        fd['release'] = _field(groups, 'release')
        fd['more'] = _field(groups, 'more')
        fd['day_delta'] = 0
        if fd['date'] != '':
            today = datetime.now()
            try:
                raw = str(fd['date'])
                # two-digit years: 8x/9x -> 19xx, everything else -> 20xx
                century = '19' if raw[0] in ['8', '9'] else '20'
                aired = datetime.strptime(century + raw, '%Y%m%d')
            except:
                aired = today
            fd['day_delta'] = (today - aired).days
        break

    if config is not None:
        removal_names = config.get('에피소드 번호 삭제 목록', [])
        if fd['name'] in removal_names:
            self.data['process_info']['rebuild'] += 'remove_episode_by_rule'
            fd['no'] = -1
|
||||
|
||||
|
||||
def change_name(self, rules):
    """Apply '검색어 변경' substitution rules to the parsed show name.

    Args:
        rules: iterable of dicts with 'source' (regex) and 'target'
            (replacement); applied case-insensitively in order.
    A bad rule is logged and skipped; the remaining rules still run.
    """
    name = self.data['filename']['name']
    for rule in rules:
        try:
            name = re.sub(rule['source'], rule['target'], name, flags=re.I).strip()
        except Exception as e:
            # BUGFIX: previously referenced undefined 'P.logger' (NameError
            # whenever a rule failed); this module imports 'logger' directly.
            logger.error(f"Exception:{e}")
            logger.error(traceback.format_exc())
    self.data['filename']['name'] = name
|
||||
|
||||
|
||||
|
||||
def check_episode_no(self):
    """Reconcile the episode number parsed from the filename with the
    metadata episode list, preferring Daum air dates and falling back to
    OTT (non-Daum) dates. Writes the result into self.data['process_info'].
    """
    fd = self.data['filename']
    pi = self.data['process_info']
    episodes = self.data['meta']['info']['extra_info']['episodes']
    file_date = fd['date']
    file_no = fd['no']

    # air-date deltas still accepted as the same episode (one-day shifts
    # and month/era-boundary offsets observed in practice)
    DATE_SLACK = [1, 70, 71, 72, 73, 8870]

    def _aired(site_info):
        # 'YYYY-MM-DD' -> 'YYMMDD' to match the filename date format
        return site_info['premiered'].replace('-', '')[2:]

    # 1) Daum entry for the parsed episode number.
    if file_no > 0 and file_no in episodes:
        entry = episodes[file_no]
        if 'daum' in entry:
            site_info = entry['daum']
            aired = _aired(site_info)
            if file_date == aired:
                pi['status'] = 'number_and_date_match'
                pi['episode'] = site_info
                pi['episode']['no'] = file_no
                return
            if abs(int(file_date) - int(aired)) in DATE_SLACK:
                pi['status'] = 'number_and_date_match'
                pi['rebuild'] += 'change_date'
                pi['change_date'] = aired
                pi['episode'] = site_info
                pi['episode']['no'] = file_no
                return

    if len(episodes) == 0:
        # metadata was found but carries no episode list
        pi['status'] = 'meta_epi_empty'
        return

    # 2) Any episode whose Daum air date matches -> renumber.
    for epi_no, entry in episodes.items():
        if 'daum' in entry:
            if file_date == _aired(entry['daum']):
                pi['status'] = 'number_and_date_match'
                pi['rebuild'] += 'change_epi_number'
                pi['change_epi_number'] = epi_no
                pi['episode'] = entry['daum']
                pi['episode']['no'] = epi_no
                return

    # 3) Trusted OTT releases keep their own numbering; otherwise try to
    #    renumber from non-Daum (OTT) air dates.
    if file_no != -1:
        if fd['release'] in ['ST', 'SW', 'SWQ', 'STQ', 'ODK']:
            pi['status'] = 'number_and_date_match_by_release'
            return
        for epi_no, entry in episodes.items():
            for site, site_info in entry.items():
                if site == 'daum':
                    continue
                if file_date == _aired(site_info):
                    pi['status'] = 'number_and_date_match_ott'
                    pi['rebuild'] += 'change_epi_number'
                    pi['change_epi_number'] = epi_no
                    pi['episode'] = site_info
                    pi['episode']['no'] = epi_no
                    return

    # 4) OTT entries under the parsed episode number (exact or slack date).
    if file_no > 0 and file_no in episodes:
        for site, site_info in episodes[file_no].items():
            if site == 'daum':
                continue
            aired = _aired(site_info)
            if file_date == aired:
                pi['status'] = 'number_and_date_match'
                pi['episode'] = site_info
                pi['episode']['no'] = file_no
                return
            if abs(int(file_date) - int(aired)) in DATE_SLACK:
                pi['status'] = 'number_and_date_match'
                pi['rebuild'] += 'change_date'
                pi['change_date'] = aired
                pi['episode'] = site_info
                pi['episode']['no'] = file_no
                return

    # 5) Any OTT air date anywhere in the list (episode 0 excluded).
    for epi_no, entry in episodes.items():
        if epi_no == 0:
            continue
        for site, site_info in entry.items():
            if site == 'daum':
                continue
            if file_date == _aired(site_info):
                pi['status'] = 'number_and_date_match'
                pi['rebuild'] += 'change_epi_number'
                pi['change_epi_number'] = epi_no
                pi['episode'] = site_info
                pi['episode']['no'] = epi_no
                return
        if epi_no < file_no:
            pi['status'] = 'meta_epi_not_find'

    pi['status'] = 'meta_epi_not_find'
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def find_meta(self):
    """Look up show metadata for the parsed filename on Daum, Tving and Wavve.

    Tries each site in order and stops at the first confident hit.  Results
    (including misses) are cached per (title, site) in EntityKtv.meta_cache so
    repeated files of the same show do not trigger duplicate remote searches.
    Populates self.data['meta'] with 'search', 'info' and 'find' keys.
    """
    from lib_metadata import SiteDaumTv, SiteTvingTv, SiteWavveTv
    module_map = [('daum', SiteDaumTv), ('tving',SiteTvingTv), ('wavve',SiteWavveTv)]
    #if self.data['filename']['name'] in EntityKtv.meta_cache:
    #    self.data['meta'] = EntityKtv.meta_cache[self.data['filename']['name']]
    #    return
    #module_list = [SiteDaumTv, SiteTvingTv, SiteWavveTv]
    #module_list = [SiteDaumTv]

    for site, site_class in module_map:
        try:
            if self.data['filename']['name'] in EntityKtv.meta_cache and site in EntityKtv.meta_cache[self.data['filename']['name']]:
                self.data['meta'] = EntityKtv.meta_cache[self.data['filename']['name']][site]
                # Misses are cached as well, to avoid repeating failed searches.
                if self.data['meta']['find']:
                    return
            site_data = site_class.search(self.data['filename']['name'])
            #logger.warning(f"{site} {d(site_data)}")
            if site_data['ret'] == 'success':
                if site == 'daum':
                    self.data['meta']['search'] = site_data['data']
                    self.data['meta']['info'] = site_class.info(self.data['meta']['search']['code'], self.data['meta']['search']['title'])['data']
                    # If Daum's first genre is a foreign drama (contains "드라마" but is
                    # not exactly "드라마") and the release group is ST/SW, prefer OTT
                    # metadata over Daum's: skip Daum and try the next site.
                    if self.data['meta']['info']['genre'][0] != '드라마' and self.data['meta']['info']['genre'][0].find('드라마') != -1 and self.data['filename'].get('release', '') in ['ST', 'SW']:
                        continue

                    SiteTvingTv.apply_tv_by_search(self.data['meta']['info'], force_search_title=self.data['filename']['name'])
                    SiteWavveTv.apply_tv_by_search(self.data['meta']['info'], force_search_title=self.data['filename']['name'])
                    if self.data['meta']['info']['episode'] == -1:
                        # Unknown episode count: fall back to the number of listed episodes.
                        self.data['meta']['info']['episode'] = len(self.data['meta']['info']['extra_info']['episodes'].keys())
                    self.data['meta']['find'] = True

                else:
                    # Non-Daum sites: accept only confident matches (score > 90).
                    if len(site_data['data']) > 0 and site_data['data'][0]['score'] > 90:
                        self.data['meta']['search'] = site_data['data'][0]
                        self.data['meta']['info'] = site_class.info(self.data['meta']['search']['code'])['data']
                        self.data['meta']['find'] = True

                if self.data['meta']['find']:
                    if len(self.data['meta']['info']['genre']) == 0:
                        self.data['meta']['info']['genre'].append('기타')
                    if self.data['filename']['name'] not in EntityKtv.meta_cache:
                        EntityKtv.meta_cache[self.data['filename']['name']] = {}
                    EntityKtv.meta_cache[self.data['filename']['name']][site] = self.data['meta']
                    return
            else:
                # Cache the miss so the same title/site pair is not searched again.
                if self.data['filename']['name'] not in EntityKtv.meta_cache:
                    EntityKtv.meta_cache[self.data['filename']['name']] = {}
                EntityKtv.meta_cache[self.data['filename']['name']][site] = self.data['meta']
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
||||
def get_newfilename(self):
    """Return the output filename for this file based on the rebuild action.

    Reads the accumulated action string in self.data['process_info']['rebuild']
    and returns either the original filename (no rebuild needed), a rewritten
    filename, or None for an unknown action.

    Fixes vs. previous revision: regex patterns are raw strings (the old
    '\\.[eE]...' literals relied on deprecated invalid escapes), the dead
    ``time.sleep(100)`` that followed a ``return`` was removed, and the
    fallback branch now returns None explicitly instead of falling through.
    """
    if self.data['filename']['match_index'] == 2:
        self.data['process_info']['rebuild'] += f"match_{self.data['filename']['match_index']}"

    rebuild = self.data['process_info']['rebuild']
    original = self.data['filename']['original']

    if rebuild in ['', 'match_2', 'meta_epi_not_find', 'match_3']:
        # Nothing to rewrite.
        return original
    elif rebuild == 'remove_episode' or rebuild.find('remove_episode_by_rule') != -1:
        # Strip the ".Exx." episode token.
        return re.sub(r'\.[eE].*?\.', '.', original)
    elif rebuild == 'remove_episodechange_epi_number' and self.data['process_info']['change_epi_number'] == 0:
        # Episode resolved to 0: treat as "no episode" and strip the token.
        return re.sub(r'\.[eE].*?\.', '.', original)
    elif rebuild == 'change_epi_number':
        # Replace the episode token with the corrected, zero-padded number.
        return re.sub(r'\.[eE].*?\.', f".E{str(self.data['process_info']['change_epi_number']).zfill(2)}.", original)
    elif rebuild == 'change_epi_numbermatch_2':
        # Date-only filename: insert the resolved episode number before the date.
        return original.replace(f".{self.data['filename']['date']}.", f".E{str(self.data['process_info']['change_epi_number']).zfill(2)}.{self.data['filename']['date']}.")
    elif rebuild == 'change_date':
        # Swap the broadcast date for the corrected one.
        return original.replace(f".{self.data['filename']['date']}.", f".{self.data['process_info']['change_date']}.")
    else:
        # Unknown accumulated action: signal "no rename" to the caller.
        return None
||||
|
||||
|
||||
def find_meta_tmdb(self):
    """Fallback lookup: resolve the title via TMDB, then retry find_meta().

    If TMDB finds a confident match (score >= 80) the parsed filename title is
    replaced by TMDB's title and the regular site search runs again; if that
    still fails, the item is marked 'ftv' with TMDB's title/year for later
    handling.  Hits/misses are cached in EntityKtv.meta_cache per (title, site).
    """
    from lib_metadata import SiteTmdbFtv
    from tool_base import ToolBaseFile
    module_map = [('tmdb', SiteTmdbFtv)]

    for site, site_class in module_map:
        try:
            if self.data['filename']['name'] in EntityKtv.meta_cache and site in EntityKtv.meta_cache[self.data['filename']['name']]:
                self.data['meta'] = EntityKtv.meta_cache[self.data['filename']['name']][site]
                # Misses are cached as well, to avoid repeating failed searches.
                if self.data['meta']['find']:
                    return
            site_data = site_class.search(self.data['filename']['name'])
            #logger.warning(f"{site} {d(site_data)}")
            if site_data['ret'] == 'success':
                if len(site_data['data']) > 0 and site_data['data'][0]['score'] >= 80:
                    # Adopt TMDB's title and rerun the regular site search.
                    self.data['filename']['name'] = site_data['data'][0]['title']
                    self.find_meta()
                    if self.data['meta']['find'] == False:
                        self.data['process_info']['status'] = 'ftv'
                        self.data['process_info']['ftv_title'] = ToolBaseFile.text_for_filename(site_data['data'][0]['title'])
                        self.data['process_info']['ftv_year'] = site_data['data'][0]['year']
                    return
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
||||
|
||||
def find_meta_season(self):
    """Resolve the season index for the matched metadata.

    For Daum codes (second character 'D') the season becomes the 1-based
    position of the matching entry in the search result's series list (left
    untouched when no entry matches); for any other source the season is
    marked unknown (-1).
    """
    info = self.data['meta']['info']
    if info['code'][1] != 'D':
        info['season'] = -1
        return
    series_list = self.data['meta']['search']['series']
    for position, series_entry in enumerate(series_list, start=1):
        if series_entry['code'] == info['code']:
            info['season'] = position
            return
|
||||
50
lib/tool_expand/telegram.py
Normal file
50
lib/tool_expand/telegram.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import os
|
||||
import traceback
|
||||
import json
|
||||
import datetime
|
||||
import time
|
||||
|
||||
from telepot2 import Bot, glance
|
||||
from telepot2.loop import MessageLoop
|
||||
|
||||
from framework import app
|
||||
from . import logger
|
||||
|
||||
from tool_base import ToolAESCipher
|
||||
|
||||
class ToolTelegram(object):
    """Pushes (optionally AES-encrypted) broadcast messages to the SJVA bot channels."""

    # Shared telepot Bot instance, created lazily on first broadcast.
    SUPER_BOT = None
    SJVA_BOT_CHANNEL_CHAT_ID = app.config['DEFINE']['SJVA_BOT_CHANNEL_CHAT_ID']

    @classmethod
    def broadcast(cls, text, encrypted=True, only_last=False):
        """Send *text* to the configured bot channel(s).

        When *encrypted* is True the payload is AES-encrypted and prefixed
        with '^'.  With *only_last* only the last configured chat id is used;
        otherwise every chat id is tried and per-chat failures are logged
        without aborting the remaining sends.  Returns True on success,
        False when setup or the single only_last send fails.
        """
        try:
            if cls.SUPER_BOT is None:
                token = ToolAESCipher.decrypt(app.config['DEFINE']['SUPER_BOT_TOKEN']).decode('utf-8')
                cls.SUPER_BOT = Bot(token)
            #logger.debug(text)
            payload = ('^' + ToolAESCipher.encrypt(text)) if encrypted else text
            if only_last:
                cls.SUPER_BOT.sendMessage(cls.SJVA_BOT_CHANNEL_CHAT_ID[-1], payload)
            else:
                for chat_id in cls.SJVA_BOT_CHANNEL_CHAT_ID:
                    try:
                        cls.SUPER_BOT.sendMessage(chat_id, payload)
                    except Exception as exception:
                        logger.error('Exception:%s', exception)
                        logger.error('Chat ID : %s', chat_id)
                        logger.error(traceback.format_exc())
            return True
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
            return False
||||
|
||||
|
||||
#ToolTelegram().broadcast('1', encrypted=False)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
433
lib/tool_expand/torrent_process.py
Normal file
433
lib/tool_expand/torrent_process.py
Normal file
@@ -0,0 +1,433 @@
|
||||
|
||||
# -*- coding: utf-8 -*-
# NOTE(review): traceback, json and time are each imported twice below —
# harmless, but the duplicates could be dropped.
import requests, re, json, time
import traceback, unicodedata
from datetime import datetime
import traceback
import os
import json
import time
import copy

from framework import app, SystemModelSetting, py_urllib
from framework.util import Util
from tool_expand import ToolExpandFileProcess


# NOTE(review): `get_logger` is not defined by any import visible in this
# file — presumably it should come from framework (e.g. alongside `app`);
# confirm, otherwise this line raises NameError at import time.
logger = get_logger('torrent_process')
||||
|
||||
class TorrentProcess(object):
    """Server-side torrent broadcast pipeline.

    Receives entities from the bot-downloader plugins, enriches them with
    per-category metadata (KTV / movie / AV) and broadcasts the result as
    JSON through the Telegram super bot.
    """

    @classmethod
    def is_broadcast_member(cls):
        # Only the server itself (or a debug instance) is allowed to broadcast.
        if app.config['config']['is_server'] or app.config['config']['is_debug']:
            return True
        return False

    # Called after torrent info has been received from a bot-downloader plugin.
    @classmethod
    def receive_new_data(cls, entity, package_name):
        """Forward *entity* to the web endpoint matching its source plugin."""
        try:
            if not cls.is_broadcast_member():
                return
            if package_name == 'bot_downloader_ktv':
                cls.append('ktv', entity)
            elif package_name == 'bot_downloader_movie':
                cls.append('movie', entity)
            elif package_name == 'bot_downloader_av':
                cls.append('av', entity)

        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())


    @classmethod
    def append(cls, type, data):
        """POST *data* (serialised via as_dict) to the torrent_<type>.php endpoint.

        NOTE(review): the parameter name `type` shadows the builtin.
        """
        try:
            import requests
            import json
            response = requests.post(f"{app.config['DEFINE']['WEB_DIRECT_URL']}/sjva/torrent_%s.php" % type, data={'data':json.dumps(data.as_dict())})
            #logger.debug(response.text)
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())


    @classmethod
    def server_process(cls, save_list, category=None):
        """Dispatch *save_list* to the per-category processor (broadcast members only)."""
        if cls.is_broadcast_member():
            #logger.debug(category)
            if category == 'KTV':
                cls.server_process_ktv(save_list)
            elif category == 'MOVIE':
                return cls.server_process_movie(save_list)

            elif category == 'AV':
                return cls.server_process_av(save_list, 'censored')


    # save_list items are ModelBbs2 instances.
    @classmethod
    def server_process_ktv(cls, save_list):
        """Analyse KTV torrents, attach Daum show metadata and broadcast each one."""
        #logger.debug(save_list)

        # Fetch Daum information for every saved item.
        for item in save_list:
            item = item.as_dict()
            if item['torrent_info'] is not None:
                # One magnet at a time.
                try:
                    for info in item['torrent_info']:
                        logger.debug('Magnet : %s', info['magnet_uri'])
                        logger.debug('Name : %s', info['name'])
                        info['video_count'] = 0
                        # Keep an untouched copy; analyse_torrent_info_file mutates entries.
                        info['files_original'] = copy.deepcopy(info['files'])
                        for f in info['files']:
                            cls.analyse_torrent_info_file(f)
                            if f['type'] == 'video':
                                import ktv
                                entity = ktv.EntityShow(f['filename'], by='only_filename')
                                f['ktv'] = {}
                                f['ktv']['filename_rule'] = entity.filename
                                f['ktv']['name'] = entity.filename_name
                                f['ktv']['date'] = entity.filename_date
                                f['ktv']['number'] = entity.filename_no
                                f['ktv']['quality'] = entity.filename_quality
                                f['ktv']['release'] = entity.filename_release
                                if entity.daum_info is not None:
                                    daum = entity.daum_info.as_dict()
                                    f['daum'] = {
                                        'daum_id' : str(daum['daum_id']),
                                        'poster_url' : daum['poster_url'],
                                        'genre' : daum['genre'],
                                        'title' : daum['title'],
                                    }
                                else:
                                    f['daum'] = None
                                info['video_count'] += 1
                        # Broadcast only single-video magnets.
                        if info['video_count'] == 1:
                            ret = {}
                            ret['server_id'] = item['id']
                            ret['broadcast_type'] = 'auto'
                            ret['hash'] = info['info_hash']
                            ret['file_count'] = info['num_files']
                            ret['files'] = info['files_original']
                            ret['total_size'] = info['total_size']
                            ret['video_count'] = info['video_count']
                            for f in info['files']:
                                if f['type'] == 'video':
                                    ret['filename'] = f['filename'] # filename from the magnet info
                                    ret['ktv'] = f['ktv']
                                    ret['daum'] = f['daum']
                            info['broadcast'] = ret

                            telegram = {}
                            telegram['plugin'] = 'bot_downloader_ktv'
                            telegram['sub'] = 'torrent'
                            telegram['data'] = ret

                            text = json.dumps(telegram, indent=2)
                            from framework.common.telegram_bot import TelegramBot
                            TelegramBot.super_send_message(text)
                            time.sleep(0.5)  # throttle between broadcasts

                except Exception as exception:
                    logger.error('Exception:%s', exception)
                    logger.error(traceback.format_exc())


    @classmethod
    def server_process_movie(cls, save_list):
        """Analyse movie torrents, attach movie metadata and broadcast; returns the list of payloads."""
        from framework.common.torrent.process_movie import ProcessMovie
        lists = []
        for item in save_list:
            item = item.as_dict()
            # Collect subtitle files (index 1 of each entry is the path).
            sub = []
            if item['files']:
                for tmp in item['files']:
                    ext = os.path.splitext(tmp[1])[1].lower()
                    if ext in ['.smi', '.srt', '.ass']:
                        sub.append(tmp)

            if item['torrent_info'] is not None:
                # One magnet at a time.
                try:
                    for info in item['torrent_info']:

                        # Identify the movie from the largest file's name.
                        fileinfo = cls.get_max_size_fileinfo(info)
                        movie = ProcessMovie.get_info_from_rss(fileinfo['filename'])
                        #logger.debug(fileinfo)
                        #logger.debug(movie)

                        torrent_info = {}
                        torrent_info['name'] = info['name']
                        torrent_info['size'] = info['total_size']
                        torrent_info['num'] = info['num_files']
                        torrent_info['hash'] = info['info_hash']
                        torrent_info['filename'] = fileinfo['filename']
                        torrent_info['dirname'] = fileinfo['dirname']
                        torrent_info['url'] = item['url']

                        movie_info = {}
                        if movie['movie'] is not None:
                            movie_info['title'] = movie['movie']['title']
                            movie_info['target'] = movie['target'].replace('sub_x', 'sub')
                            movie_info['kor'] = movie['is_include_kor']
                            if movie_info['target'] == 'imdb':
                                movie_info['id'] = movie['movie']['id']
                                movie_info['year'] = movie['movie']['year']
                            else:
                                movie_info['daum'] = {}
                                movie_info['id'] = movie['movie']['id']
                                movie_info['daum']['country'] = movie['movie']['country']
                                movie_info['year'] = movie['movie']['year']
                                movie_info['daum']['poster'] = movie['movie']['more']['poster']
                                movie_info['daum']['eng'] = movie['movie']['more']['eng_title']
                                movie_info['daum']['rate'] = movie['movie']['more']['rate']
                                movie_info['daum']['genre'] = movie['movie']['more']['genre']
                        else:
                            movie_info = None
                        ret = {}
                        ret['server_id'] = item['id']
                        if len(sub) > 0 :
                            ret['s'] = sub
                        if movie_info is not None:
                            ret['m'] = movie_info
                        ret['t'] = torrent_info

                        #logger.debug(ret)
                        lists.append(ret)
                        #return ret

                        # Broadcast.
                        telegram = {}
                        telegram['plugin'] = 'bot_downloader_movie'
                        telegram['data'] = ret

                        text = json.dumps(telegram, indent=2)
                        from framework.common.telegram_bot import TelegramBot
                        TelegramBot.super_send_message(text)
                        time.sleep(0.5)  # throttle between broadcasts
                        #return lists
                except Exception as exception:
                    logger.error('Exception:%s', exception)
                    logger.error(traceback.format_exc())

        return lists


    @classmethod
    def server_process_av(cls, save_list, av_type):
        """Analyse AV torrents, attach jav metadata and broadcast; returns the list of payloads."""
        lists = []
        for item in save_list:
            item = item.as_dict()
            logger.debug(item['title'])
            #av_type = item['board']
            # 2020-05-31 javdb,141jav-NONE, avnori-torrent_ymav, javnet censored_tor
            #av_type = 'censored' if av_type in ['NONE', 'torrent_ymav', 'censored_tor'] else av_type
            #av_type = 'uncensored' if av_type in ['torrent_nmav', 'uncensored_tor'] else av_type
            #av_type = 'western' if av_type in ['torrent_amav', 'white_tor'] else av_type

            #logger.debug(json.dumps(item, indent=4))

            # No torrent info yet: resolve each magnet locally (single attempt).
            if item['torrent_info'] is None:
                from torrent_info import Logic as TorrentInfoLogic
                for m in item['magnet']:
                    logger.debug('Get_torrent_info:%s', m)
                    for i in range(1):
                        tmp = None
                        try:
                            tmp = TorrentInfoLogic.parse_magnet_uri(m, no_cache=True)
                        except:
                            logger.debug('Timeout..')
                        if tmp is not None:
                            break
                    if tmp is not None:
                        if item['torrent_info'] is None:
                            item['torrent_info'] = []
                        item['torrent_info'].append(tmp)



            if item['torrent_info'] is not None:
                # One magnet at a time.
                try:
                    for info in item['torrent_info']:


                        # Identify the title from the largest file's name.
                        fileinfo = cls.get_max_size_fileinfo(info)
                        av = cls.server_process_av2(fileinfo['filename'], av_type)
                        #logger.debug(fileinfo)
                        #logger.debug(json.dumps(av, indent=4))

                        # 2020-05-31: if the AV search fails, simply do not broadcast.
                        if av is None:
                            logger.debug(u'AV 검색 실패')
                            logger.debug(fileinfo['filename'])
                            #logger.debug(av_type)
                            continue

                        # Skip suspiciously large bundles and known low-quality rips.
                        if info['num_files'] > 30:
                            continue
                        try:
                            if fileinfo['filename'].lower().find('ch_sd') != -1:
                                continue
                        except: pass
                        torrent_info = {}
                        torrent_info['name'] = info['name']
                        torrent_info['size'] = info['total_size']
                        torrent_info['num'] = info['num_files']
                        torrent_info['hash'] = info['info_hash']
                        torrent_info['filename'] = fileinfo['filename']
                        torrent_info['dirname'] = fileinfo['dirname']
                        torrent_info['url'] = item['url']

                        # NOTE(review): server_process_av2 can return {'type': av_type}
                        # without a 'data' key; the KeyErrors below would then be
                        # swallowed by the outer except — confirm intended.
                        av_info = None
                        if av is not None:
                            av_info = {}
                            av_info['meta'] = av['type']
                            av_info['code_show'] = av['data']['originaltitle']
                            av_info['title'] = av['data']['title']
                            try:
                                av_info['poster'] = av['data']['thumb'][1]['value']
                            except:
                                try:
                                    av_info['poster'] = av['data']['thumb'][0]['value']
                                except:
                                    av_info['poster'] = None
                            av_info['genre'] = av['data']['genre']
                            if av_info['genre'] is None:
                                av_info['genre'] = []
                            av_info['performer'] = []
                            if av['data']['actor'] is not None:
                                for actor in av['data']['actor']:
                                    av_info['performer'].append(actor['name'])
                            av_info['studio'] = av['data']['studio']
                            av_info['date'] = av['data']['premiered']
                            av_info['trailer'] = ''
                            if av['data']['extras'] is not None and len(av['data']['extras']) > 0:
                                av_info['trailer'] = av['data']['extras'][0]['content_url']
                        else:
                            logger.debug('AV 검색 실패')
                            logger.debug(fileinfo['filename'])
                            #av_info = {}
                            #av_info['title'] = info['name']

                        ret = {'av_type' :av_type}
                        ret['server_id'] = item['id']
                        if av_info is not None:
                            ret['av'] = av_info
                        ret['t'] = torrent_info
                        lists.append(ret)

                        # Broadcast.
                        telegram = {}
                        telegram['plugin'] = 'bot_downloader_av'
                        telegram['data'] = ret

                        text = json.dumps(telegram, indent=2)
                        from framework.common.telegram_bot import TelegramBot
                        TelegramBot.super_send_message(text)
                        time.sleep(0.5)  # throttle between broadcasts
                        #return lists

                except Exception as exception:
                    logger.error('Exception:%s', exception)
                    logger.error(traceback.format_exc())



        return lists




    # torrent_info > files > one file entry; each entry has 'path' and 'size'.
    @classmethod
    def analyse_torrent_info_file(cls, file_info):
        """Mutate *file_info* in place, deriving filename parts and a coarse type.

        Adds 'dirs', 'filename', 'filename_except_ext', 'ext' and 'type'
        ('video' / 'sub' / None, by extension).  Returns the mutated dict, or
        None when an error was logged.
        """
        try:
            file_info['dirs'] = os.path.split(file_info['path'])
            file_info['filename'] = os.path.basename(file_info['dirs'][-1])
            file_info['filename_except_ext'], file_info['ext'] = os.path.splitext(file_info['filename'] )
            if file_info['ext'].lower() in ['.mp4', '.mkv', '.avi', '.wmv']:
                file_info['type'] = 'video'
            elif file_info['ext'].lower() in ['.srt', '.smi', '.ass']:
                file_info['type'] = 'sub'
            else:
                file_info['type'] = None
            return file_info
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())


    @classmethod
    def get_max_size_fileinfo(cls, torrent_info):
        """Return {'filename', 'dirname', 'max_size'} for the largest file in the torrent.

        Paths are split on '/'; 'dirname' is '' for a bare filename, the single
        directory for one level of nesting, or the full remaining path otherwise.
        Returns None when an error was logged.
        """
        try:
            ret = {}
            max_size = -1
            max_filename = None
            for t in torrent_info['files']:
                if t['size'] > max_size:
                    max_size = t['size']
                    max_filename = str(t['path'])
            t = max_filename.split('/')
            ret['filename'] = t[-1]
            if len(t) == 1:
                ret['dirname'] = ''
            elif len(t) == 2:
                ret['dirname'] = t[0]
            else:
                ret['dirname'] = max_filename.replace('/%s' % ret['filename'], '')
            ret['max_size'] = max_size
            return ret
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())



    @classmethod
    def server_process_av2(cls, filename, av_type):
        """Resolve jav metadata for *filename*.

        Returns {'type': 'dvd'|'ama', 'data': meta} on a confident match,
        {'type': av_type} when both searches miss, or None on error or for
        non-'censored' av_type values.

        NOTE(review): when the ama search matches but info() returns None,
        `ret` is never assigned — the resulting NameError is swallowed by the
        except below and None is returned.  `process_no_meta` is assigned but
        never used, and the log string 'Exxception' is a typo (runtime string,
        left unchanged here).
        """
        try:
            logger.debug('filename :%s, av_type:%s', filename, av_type)
            if av_type == 'censored':
                tmp = ToolExpandFileProcess.change_filename_censored(filename)
                logger.debug('TMP1: %s', tmp)
                tmp = ToolExpandFileProcess.remove_extension(tmp)
                logger.debug('TMP2: %s', tmp)
                from metadata import Logic as MetadataLogic
                data = MetadataLogic.get_module('jav_censored').search(tmp, manual=False)
                logger.debug(data)

                if len(data) > 0 and data[0]['score'] > 95:
                    meta_info = MetadataLogic.get_module('jav_censored').info(data[0]['code'])
                    ret = {'type':'dvd', 'data':meta_info}
                else:
                    data = MetadataLogic.get_module('jav_censored_ama').search(tmp, manual=False)
                    process_no_meta = False
                    logger.debug(data)
                    if data is not None and len(data) > 0 and data[0]['score'] > 95:
                        meta_info = MetadataLogic.get_module('jav_censored_ama').info(data[0]['code'])
                        if meta_info is not None:
                            ret = {'type':'ama', 'data':meta_info}
                        #else:
                        #    ret = {'type':'etc', 'data':None}
                    else:
                        ret = {'type':av_type}
                return ret
        except Exception as exception:
            logger.error('Exxception:%s', exception)
            logger.error(traceback.format_exc())
||||
|
||||
|
||||
"""
|
||||
import ktv
|
||||
entity = ktv.EntityShow('결혼작사 이혼작곡 시즌2 E05.210626.1080p.WEB-DL.x264.AAC-Deresisi.mp4', by='only_filename')
|
||||
logger.error(entity)
|
||||
logger.error(entity.daum_info)
|
||||
"""
|
||||
Reference in New Issue
Block a user