def initialize():
    """Register this plugin's sqlite bind, persist info.json and build the module list.

    Called once at plugin load.  Any failure is logged via the plugin logger
    and swallowed so a broken plugin does not take down the host application.
    """
    try:
        # Dedicated sqlite database for this plugin, keyed by its package name.
        app.config['SQLALCHEMY_BINDS'][P.package_name] = (
            f"sqlite:///{os.path.join(path_data, 'db', f'{P.package_name}.db')}")
        # BUG FIX: the original imported `Util` from framework.util but then
        # called the undefined name `ToolUtil`, which would raise NameError.
        # Import ToolUtil from tool_base, as the rest of this file does
        # (see make_info_json / save_from_dict_to_json).
        from tool_base import ToolUtil
        ToolUtil.save_dict(
            P.plugin_info,
            os.path.join(os.path.dirname(__file__), 'info.json'))
        from .ktv_basic import LogicKtvBasic
        from .ktv_yaml import LogicKtvYaml
        from .ktv_simple import LogicKtvSimple
        from .ktv_analysis import LogicKtvAnalysis
        #from .ktv_finish import LogicKtvFinish
        P.module_list = [
            LogicKtvBasic(P),
            LogicKtvYaml(P),
            LogicKtvSimple(P),
            LogicKtvAnalysis(P),
        ]
        P.logic = Logic(P)
        default_route(P)
    except Exception as e:
        P.logger.error(f'Exception:{e}')
        P.logger.error(traceback.format_exc())
def make_info_json(cls, info, plugin_py_filepath):
    """Persist *info* as info.json beside the plugin's .py file.

    The file is only written when the plugin's developer id matches the
    configured `sjva_me_user_id`; all failures are logged and ignored.
    """
    try:
        from framework import SystemModelSetting
        # Guard clause: only the matching developer writes the metadata file.
        if info['developer'] != SystemModelSetting.get('sjva_me_user_id'):
            return
        from tool_base import ToolUtil
        target = os.path.join(os.path.dirname(plugin_py_filepath), 'info.json')
        ToolUtil.save_dict(info, target)
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def process_menu(self, sub, req):
    """Render the html template for sub-menu *sub* of this module."""
    arg = P.ModelSetting.to_dict()
    arg['sub'] = self.name
    arg['proxy_url'] = ToolUtil.make_apikey_url(f'/{package_name}/api/proxy')
    # The literal string 'None' arrives when no sub-menu was selected.
    if sub == 'None':
        sub = 'itemlist'
    if sub == 'itemlist':
        arg['categories'] = ','.join(ScmUtil.get_rule_names(self.name))
        arg['agent_types'] = ','.join(ScmUtil.get_agent_types(self.name))
    template_name = '{package_name}_{module_name}_{sub}.html'.format(
        package_name=P.package_name,
        module_name=self.name,
        sub=sub,
    )
    return render_template(template_name, arg=arg)
def save_from_dict_to_json(d, filename):
    """Serialize dict *d* to *filename* via the shared ToolUtil helper."""
    from tool_base import ToolUtil
    ToolUtil.save_dict(d, filename)
def process_api(self, sub, req):
    """Dispatch remote-copy API calls by *sub* command name.

    Handles four commands from the request form data:
      - 'add_copy': queue a folder copy after validating ddns and version.
      - 'vod_copy': copy a single file to the caller's configured remote path.
      - 'plex_copy_prepare': resolve a gds folder id/size via rclone.
      - 'plex_copy': queue a plex folder copy.

    Returns a jsonify()-ed dict on success; on a validation failure returns a
    plain dict (NOTE(review): inconsistent with the jsonify branches — confirm
    the framework serializes plain dicts the same way).  Exceptions are logged
    and swallowed, returning None.
    """
    try:
        if sub == 'add_copy':
            folder_id = req.form['folder_id']
            folder_name = req.form['folder_name']
            board_type = req.form['board_type']
            category_type = req.form['category_type']
            logger.debug(board_type)
            logger.debug(category_type)
            size = int(req.form['size'])
            count = int(req.form['count'])
            ddns = req.form['ddns']
            need_version = req.form['version']
            copy_type = req.form['copy_type']
            # Reject requests whose ddns does not match this server's ddns.
            if ddns != SystemModelSetting.get('ddns'):
                return {'ret':'wrong_ddns'}
            # Version strings look dotted; only components [2] and [3] are
            # compared (e.g. x.y.MAJOR.MINOR -> MAJOR*100 + MINOR).
            tmp1 = need_version.split('.')
            tmp2 = version.split('.')
            # NOTE(review): need_version is overwritten with its numeric form,
            # so the 'need_update' response below carries an int, not the
            # original version string — confirm clients expect that.
            need_version = int(tmp1[2]) * 100 + int(tmp1[3])
            current_version = int(tmp2[2]) * 100 + int(tmp2[3])
            if need_version > current_version:
                return {'ret':'need_update', 'current_version':version, 'need_version':need_version}
            ret = self.add_copy(folder_id, folder_name, board_type, category_type, size, count, copy_type=copy_type)
            ret['current_version'] = version
            logger.debug(ret)
            return jsonify(ret)
        elif sub == 'vod_copy':
            fileid = req.form['fileid']
            board_type = req.form['board_type']
            category_type = req.form['category_type']
            # Destination is the user's configured copy path for this board/category.
            my_remote_path = self.get_my_copy_path(board_type, category_type)
            ret = {}
            if my_remote_path is None:
                ret['ret'] = 'fail'
                ret['data'] = 'remote path is None!!'
            else:
                ret['ret'] = 'success'
                ret['data'] = my_remote_path
                self.vod_copy(fileid, my_remote_path)
            return jsonify(ret)
        elif sub == 'plex_copy_prepare':
            #array('folder'=>$plexInfo['folder'], 'apikey'=>$_POST["apikey"], 'board_type'=>'plex', 'category_type'=>$plexInfo['section_title'], 'type'=>$plexInfo['ret']);
            folder = req.form['folder']
            category_type = req.form['category_type']
            content_type = req.form['content_type']
            logger.error(folder)
            # Strip the local mount prefix to get the remote-relative path.
            remote = folder.replace('/mnt/gds', '')
            # rclone logs "NOTICE: Config file \"/root/.config/rclone/rclone.conf\" not found - using defaults"
            # without an explicit config, so pass --config here.
            command = [ModelSetting.get('rclone_path'), '--config', ModelSetting.get('rclone_config_path'), '--drive-gds-userid=A', '--drive-gds-apikey=A', '--drive-gds-mode=meta', 'backend', 'getid', f':drive:{remote}']
            logger.warning(command)
            ret = {}
            ret['folderid'] = ToolSubprocess.execute_command_return(command)
            # A valid Google Drive folder id is 33 characters long
            # (NOTE(review): id lengths have varied historically — confirm).
            if len(ret['folderid']) == 33:
                command = [ModelSetting.get('rclone_path'), '--config', ModelSetting.get('rclone_config_path'), '--drive-gds-userid=A', '--drive-gds-apikey=A', '--drive-gds-mode=meta', 'size', f':drive:{remote}', '--json']
                tmp = ToolSubprocess.execute_command_return(command, format='json')
                logger.error(tmp)
                ret['folder_name'] = remote.split('/')[-1]
                ret['count'] = tmp['count']
                ret['size'] = tmp['bytes']
                ret['size_str'] = ToolUtil.sizeof_fmt(ret['size'])
                ret['remote_path'] = self.get_my_copy_path('gds_plex', category_type)
            return jsonify(ret)
        elif sub == 'plex_copy':
            #logger.warning(req.form)
            ddns = req.form['ddns']
            if ddns != SystemModelSetting.get('ddns'):
                return {'ret':'wrong_ddns'}
            ret = self.add_copy(req.form['folder_id'], req.form['folder_name'], 'gds_plex', req.form['category_type'], int(req.form['size']), int(req.form['count']), remote_path=req.form['remote_path'])
            #ret['current_version'] = version
            #logger.debug(ret)
            return jsonify(ret)
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def analysis(data):
    """Analyse one TV-show folder and decide where it should be moved.

    Mutates and returns nothing; all results are written into *data*:
    keyword/year parsed from the folder name, per-file entities, min/max
    episode numbers and dates, missing-episode list, and finally
    'move_result' / 'episode_result' / 'target_fodler' describing whether
    the show is finished and which target path it should be moved to.

    NOTE(review): the keys 'target_fodler' (sic) and the values
    'onalr_all'/'onalr_part' (sic) are misspelled but are runtime strings
    consumed elsewhere (templates/callers) — do not rename without auditing
    every consumer.
    """
    logger.warning(f"분석시작 : {data['folder_path']}")
    data['folder_name'] = os.path.basename(data['folder_path'])
    # Strip bracketed tags like "[tag]" from the folder name to get a search keyword.
    data['keyword'] = re.sub('\[.*?\]', '', data['folder_name'])
    # A "(YYYY)" suffix, if present and plausible (1940-2030), becomes the year.
    match = re.search('\((?P<year>\d+)\)', data['keyword'])
    try:
        data['year'] = int(match.group('year')) if match else -1
        if data['year'] < 1940 or data['year'] > 2030:
            raise Exception()
    except:
        data['year'] = -1
    data['keyword'] = re.sub('\((?P<year>\d+)\)', '', data['keyword']).strip()
    entity = EntityKtv(data['keyword'], meta=True, is_title=True)
    data['entity'] = entity.data
    # Only analyse further when metadata lookup succeeded.
    if entity.data['meta']['find']:
        data['listdir'] = os.listdir(data['folder_path'])
        data['files'] = []
        data['folders'] = []
        # Sentinels chosen so any real value replaces them on first comparison.
        data['min_date'] = {'value': 999999, 'file': ''}
        data['max_date'] = {'value': 0, 'file': ''}
        data['min_no'] = {'value': 9999, 'file': ''}
        data['max_no'] = {'value': -1, 'file': ''}
        data['episode_keys'] = []
        for f in data['listdir']:
            if os.path.isdir(os.path.join(data['folder_path'], f)):
                data['folders'].append(f)
                continue
            else:
                #data['files'][f] = {}
                tmp_entity = EntityKtv(f)
                # Skip files whose name doesn't match the expected episode pattern.
                if tmp_entity.data['filename']['is_matched'] == False:
                    continue
                if tmp_entity.data['filename']['no'] not in data['episode_keys']:
                    data['episode_keys'].append(tmp_entity.data['filename']['no'])
                #logger.warning(d(tmp.data))
                stat = os.stat(os.path.join(data['folder_path'], f))
                tmp_entity.data['size'] = ToolUtil.sizeof_fmt(stat.st_size)
                tmp_entity.data['ctime'] = ToolUtil.timestamp_to_datestr(stat.st_ctime)
                #logger.warning(tmp_entity.data)
                data['files'].append(tmp_entity.data)
                # Track min/max broadcast date; parse failures are simply skipped.
                try:
                    tmp = int(tmp_entity.data['filename']['date'])
                    if tmp > data['max_date']['value']:
                        data['max_date']['value'] = tmp
                        data['max_date']['file'] = f
                    if tmp < data['min_date']['value']:
                        data['min_date']['value'] = tmp
                        data['min_date']['file'] = f
                except:
                    pass
                # Track min/max episode number.
                try:
                    tmp = tmp_entity.data['filename']['no']
                    if tmp > data['max_no']['value']:
                        data['max_no']['value'] = tmp
                        data['max_no']['file'] = f
                    if tmp < data['min_no']['value']:
                        data['min_no']['value'] = tmp
                        data['min_no']['file'] = f
                except Exception as exception:
                    logger.error('Exception:%s', exception)
                    logger.error(traceback.format_exc())
        data['files'] = sorted(data['files'], key=lambda k: k['filename']['no'])
        data['episode_keys'] = list(sorted(data['episode_keys']))
        # Collect episode numbers missing between the first owned episode and
        # the max of (metadata's last episode, last owned episode).
        data['episode_keys_empty'] = []
        try:
            meta_max = 0
            # status == 2 appears to mean "finished airing" (see the
            # 'finish' branch below) — TODO confirm against EntityKtv.
            if entity.data['meta']['info']['status'] == 2 and len(data['episode_keys']) > 0:
                keys = entity.data['meta']['info']['extra_info']['episodes'].keys()
                if len(keys) > 0:
                    # NOTE(review): max() over dict keys — assumes the episode
                    # keys compare like the ints in episode_keys; verify they
                    # are not strings.
                    meta_max = max(keys)
                #logger.warning(meta_max)
                last_max = max([meta_max, data['episode_keys'][-1]])
                #logger.warning(last_max)
                for i in range(data['episode_keys'][0] + 1, last_max + 1):
                    if i not in data['episode_keys']:
                        data['episode_keys_empty'].append(i)
        except Exception as e:
            logger.error('Exception:%s', e)
            logger.error(traceback.format_exc())
        #logger.warning(data['episode_keys_empty'])
        #logger.warning(f"보유 에피소드 수 : {len(data['episode_keys'])}")
        #logger.warning(f"보유 에피소드 : {data['episode_keys']}")
        #logger.warning(f"최대날짜 : {data['max_date']}")
        #logger.warning(f"파일수 : {len(data['listdir'])} files 수 : {len(data['episode_keys'])}")
        #today = int(datetime.now().strftime('%Y%m%d')[2:])
        today = datetime.now()
        # max_date is stored as a 6-digit YYMMDD int; years starting with
        # '8'/'9' are treated as 19xx, everything else as 20xx.
        try:
            tmp = str(data['max_date']['value'])
            if tmp[0] in ['8', '9']:
                tmp = '19' + tmp
            else:
                tmp = '20' + tmp
            max_date = datetime.strptime(tmp, '%Y%m%d')
        except:
            max_date = today
        data['day_delta'] = (today - max_date).days
        #if entity.data['meta']['info']['status'] == 2 and (data['day_delta'] > ModelSetting.get_int(f'{name}_not_movie_day') or ModelSetting.get_int(f'{name}_not_movie_day') == 0):
        if entity.data['meta']['info']['status'] == 2:
            data['move_result'] = 'finish'
        else:
            data['move_result'] = 'is_onair'
        data['target_fodler'] = ''
        if data['move_result'] == 'finish':
            if entity.data['meta']['info']['episode'] > 0:
                if len(data['episode_keys']) >= entity.data['meta']['info']['episode']:
                    # Finished; meta has an episode count and all episodes are present.
                    data['episode_result'] = 'finish_all'
                    data['target_fodler'] = data['folder_path'].replace(
                        ModelSetting.get(f'{name}_path_source'),
                        ModelSetting.get(f'{name}_path_finish'))
                else:
                    # Finished; meta has an episode count but only some are present.
                    data['episode_result'] = 'finish_part'
                    data['target_fodler'] = data['folder_path'].replace(
                        ModelSetting.get(f'{name}_path_source'),
                        ModelSetting.get(f'{name}_path_incomplete'))
            elif data['max_no']['value'] - data['min_no']['value'] + 1 == len(data['episode_keys']):
                # Finished; meta has no episode count but min..max are all present.
                data['episode_result'] = 'meta_no_epi_count_all'
                data['target_fodler'] = data['folder_path'].replace(
                    ModelSetting.get(f'{name}_path_source'),
                    ModelSetting.get(f'{name}_path_finish'))
            else:
                # Finished; meta has no episode count and some of min..max are missing.
                data['episode_result'] = 'meta_no_epi_count_part'
                data['target_fodler'] = data['folder_path'].replace(
                    ModelSetting.get(f'{name}_path_source'),
                    ModelSetting.get(f'{name}_path_incomplete'))
                # Both candidate targets are exposed so the caller/UI can choose.
                data['target_fodler1'] = data['folder_path'].replace(
                    ModelSetting.get(f'{name}_path_source'),
                    ModelSetting.get(f'{name}_path_finish'))
                data['target_fodler2'] = data['folder_path'].replace(
                    ModelSetting.get(f'{name}_path_source'),
                    ModelSetting.get(f'{name}_path_incomplete'))
        else:
            if data['max_no']['value'] - data['min_no']['value'] + 1 == len(data['episode_keys']):
                # On-air and min..max are all present.
                data['episode_result'] = 'onalr_all'
            else:
                # On-air and some of min..max are missing.
                data['episode_result'] = 'onalr_part'
__author__ = 'Stephen Brown (Little Fish Solutions LTD)'


def plugin_load():
    """Initialize the file manager against the host Flask app."""
    from framework import app, path_app_root
    #app.config['FLASKFILEMANAGER_FILE_PATH'] = path_app_root
    # NOTE(review): rooting the file manager at '/' exposes the entire host
    # filesystem through the web UI — confirm this is intended and that the
    # route is access-controlled.
    app.config['FLASKFILEMANAGER_FILE_PATH'] = '/'
    init(app)


def plugin_unload():
    """No teardown required for this plugin."""
    pass


# Plugin metadata consumed by the host framework's plugin registry.
plugin_info = {
    'version': '1.0.0',
    'name': '파일 매니저',
    'category_name': 'system',
    'icon': '',
    'developer': 'soju6jan',
    'description': 'RichFilemanager를 Flask에서 동작하도록 한 FlaskFileManager 포크',
    'home': 'https://github.com/soju6jan/flaskfilemanager',
    'more': '',
}

import os
from tool_base import ToolUtil
# Persist the metadata as info.json beside this module at import time.
ToolUtil.save_dict(plugin_info, os.path.join(os.path.dirname(__file__), 'info.json'))