def create_info_xml(metadata_item, metadata_type):
    # Restore a cached Info.xml blob (from the backup `metadata` table on
    # Task.source_con) into the Plex metadata bundle on disk.
    # metadata_type: 1 == movie, otherwise treated as a TV show.
    row_ce = Task.source_con.execute(
        'SELECT hash, data FROM metadata WHERE hash = ?',
        (metadata_item['hash'], ))
    row_ce.row_factory = dict_factory
    row = row_ce.fetchall()
    if len(row) == 1:
        # Plex bundle layout:
        # <Metadata>/<Movies|TV Shows>/<hash[0]>/<hash[1:]>.bundle/Contents/_combined/Info.xml
        metapath = os.path.join(
            ModelSetting.get('base_path_metadata'),
            'Movies' if metadata_type == 1 else 'TV Shows',
            metadata_item['hash'][0], f"{metadata_item['hash'][1:]}.bundle",
            'Contents', '_combined', 'Info.xml')
        if os.path.exists(metapath):
            # Never overwrite an existing Info.xml.
            logger.warning(
                f"{metadata_item['title']} Info.xml already exist..")
        else:
            folder_path = os.path.dirname(metapath)
            if os.path.exists(folder_path) == False:
                os.makedirs(folder_path)
            ToolBaseFile.write(row[0]['data'], metapath)
            logger.debug(metapath)
            logger.warning(
                f"{metadata_item['title']} Info.xml write..")
    else:
        # Zero (or unexpectedly many) cached rows for this hash.
        logger.warning('info.xml data not exist')
def execute_query2(cls, sql, sql_filepath=None):
    """Run *sql* through the Plex SQLite binary via a temporary .sql file.

    The SQL is written to *sql_filepath* (a timestamped file under
    path_data/tmp when not given) and executed with ``.read``.
    Retries up to 10 times, sleeping 5s, while the database is locked.

    Returns the subprocess output string, or '' on exception.
    """
    try:
        if sql_filepath is None:
            sql_filepath = os.path.join(
                path_data, 'tmp', f"{str(time.time()).split('.')[0]}.sql")
        ToolBaseFile.write(sql, sql_filepath)
        # Build the platform-specific command once; the retry loop below is
        # shared (the original duplicated the whole loop per platform).
        if platform.system() == 'Windows':
            # Windows needs the backslashes escaped inside the .read argument.
            tmp = sql_filepath.replace('\\', '\\\\')
            cmd = f'"{ModelSetting.get("base_bin_sqlite")}" "{ModelSetting.get("base_path_db")}" ".read {tmp}"'
        else:
            cmd = [
                ModelSetting.get('base_bin_sqlite'),
                ModelSetting.get('base_path_db'),
                f".read {sql_filepath}"
            ]
        ret = ''
        for _ in range(10):  # retry while Plex holds the DB lock
            ret = ToolSubprocess.execute_command_return(cmd)
            if ret.find('database is locked') != -1:
                time.sleep(5)
            else:
                break
        return ret
    except Exception as e:
        logger.error(f'Exception:{str(e)}')
        logger.error(traceback.format_exc())
        return ''
def api(sub):
    # API endpoint dispatcher.
    #  - 'command_add': download a script from file_url, save it under
    #    path_data/command/<filename>, and mark it executable.
    #  - 'execute': run a stored job by id, forwarding extra query args,
    #    and return the result as json / plain string / redirect per `mode`.
    ret = {}
    try:
        if sub == 'command_add':
            filename = request.form['filename']
            file_url = request.form['file_url']
            logger.debug(filename)
            logger.debug(file_url)
            r = requests.get(file_url)
            download_path = os.path.join(path_data, 'command', filename)
            update = False
            if os.path.exists(download_path):
                # Existing script: replace it and report "updated".
                os.remove(download_path)
                update = True
            # Normalize CRLF so the script runs on POSIX shells.
            ToolBaseFile.write(r.text.replace('\r\n', '\n'), download_path)
            try:
                # chmod may fail on Windows; best-effort only.
                os.system('chmod 777 "%s"' % download_path)
            except Exception as exception:
                logger.error('Exception:%s', exception)
                logger.error(traceback.format_exc())
            ret['ret'] = 'success'
            if update:
                ret['log'] = u'정상적으로 설치하였습니다.<br>파일을 업데이트 하였습니다.'
            else:
                ret['log'] = u'정상적으로 설치하였습니다.'
        elif sub == 'execute':
            command_id = request.args.get('id')
            mode = request.args.get('mode')
            if mode is None:
                mode = 'json'
            # Forward every query parameter except apikey/mode to the job.
            kwargs = {}
            for key, value in request.args.items():
                if key in ['apikey', 'mode']:
                    continue
                if key not in kwargs:
                    kwargs[key] = value
            ret = LogicNormal.execute_thread_function_job(
                int(command_id), **kwargs)
            if mode == 'json':
                return jsonify(ret)
            elif mode == 'return':
                return str(ret)
            elif mode == 'redirect':
                return redirect(ret)
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
        ret['ret'] = 'exception'
        ret['log'] = str(exception)
    return jsonify(ret)
def move_file(config, entity, source_path, target_folder, data, is_dry):
    """Move a matched file into its program folder under *target_folder*.

    Builds the folder name from the config's '타겟 폴더 구조' template
    (title/year/studio/genre/release), applies genre prefix and rename
    rules, records the result in *data*, and performs the move unless
    *is_dry* is set.
    """
    # NOTE: the original guarded this body with a status filter that was
    # disabled (`if True:`); the dead wrapper has been removed.
    year_tmp = entity.data['meta']['info']['year']
    if year_tmp == 0 or year_tmp == '0':
        year_tmp = ''
    genre = entity.data['meta']['info']['genre'][0].split('/')[0]
    # Second char of the meta code identifies the site; prefix the genre
    # with the configured per-site tag.
    site_by_code = {'D': 'daum', 'W': 'wavve', 'V': 'tving'}
    site = site_by_code.get(entity.data['meta']['info']['code'][1])
    if site is not None:
        genre = config['메타 사이트별 장르 접두사'][site] + ' ' + genre
    genre = genre.strip()
    genre = config['장르 변경 규칙'].get(genre, genre)
    program_folder = config['타겟 폴더 구조'].format(
        title=ToolBaseFile.text_for_filename(
            entity.data['meta']['info']['title']),
        year=year_tmp,
        studio=entity.data['meta']['info']['studio'],
        genre=genre,
        release=entity.data['filename']['release'],
    )
    # Clean up placeholder years / empty brackets and collapse repeats.
    tmps = program_folder.replace('(1900)', '').replace('()', '').replace(
        '[]', '').strip()
    tmps = re.sub(r"\s{2,}", ' ', tmps)  # raw strings: was "\s{2,}" (invalid escape)
    tmps = re.sub(r"/{2,}", '/', tmps)
    tmps = tmps.split('/')
    program_folder = os.path.join(target_folder, *tmps)
    program_folder = Task.get_prefer_folder(config, entity, program_folder)
    target_filename = entity.get_newfilename()
    if target_filename is not None:
        data['result_folder'] = program_folder
        data['result_filename'] = target_filename
        if is_dry == False:
            ToolBaseFile.file_move(source_path, program_folder,
                                   target_filename)
    else:
        logger.error(f"타겟 파일 None")
def find_meta_tmdb(self):
    # Fallback metadata search via TMDB's FTV site module: if a cached
    # result exists for this filename, reuse it; otherwise search TMDB,
    # and on a strong match (score >= 80) retry the normal find_meta()
    # under the TMDB title.
    from lib_metadata import SiteTmdbFtv
    from tool_base import ToolBaseFile
    module_map = [('tmdb', SiteTmdbFtv)]
    for site, site_class in module_map:
        try:
            if self.data['filename'][
                    'name'] in EntityKtv.meta_cache and site in EntityKtv.meta_cache[
                        self.data['filename']['name']]:
                self.data['meta'] = EntityKtv.meta_cache[
                    self.data['filename']['name']][site]
                # Misses are cached too, to avoid repeated searches.
                if self.data['meta']['find']:
                    return
            site_data = site_class.search(self.data['filename']['name'])
            if site_data['ret'] == 'success':
                if len(site_data['data']
                       ) > 0 and site_data['data'][0]['score'] >= 80:
                    # Adopt the TMDB title and retry the normal search.
                    self.data['filename']['name'] = site_data['data'][0][
                        'title']
                    self.find_meta()
                    if self.data['meta']['find'] == False:
                        # Still no meta: record TMDB's title/year so the
                        # caller can file it under an 'ftv' folder.
                        self.data['process_info']['status'] = 'ftv'
                        self.data['process_info'][
                            'ftv_title'] = ToolBaseFile.text_for_filename(
                                site_data['data'][0]['title'])
                        self.data['process_info']['ftv_year'] = site_data[
                            'data'][0]['year']
                    # NOTE(review): return placement reconstructed from a
                    # mangled source — presumed to end the high-score branch.
                    return
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
def make_nfo_movie(cls, info, output='text', filename='movie.nfo', savepath=None):
    """Build a movie NFO document and deliver it in the requested form.

    output:
        'text' - return the NFO as a string.
        'xml'  - return it as a Flask XML response.
        'file' - return it as a download attachment named *filename*.
        'save' - write it to *savepath* via ToolBaseFile (no-op when
                 savepath is None).
    Returns None for an unrecognized *output* value.
    """
    text = cls._make_nfo_movie(info)
    if output == 'text':
        return text
    elif output == 'xml':
        return app.response_class(text, mimetype='application/xml')
    elif output == 'file':
        from io import StringIO
        output_stream = StringIO(u'%s' % text)
        response = Response(
            output_stream.getvalue().encode('utf-8'),
            mimetype='application/xml',
            content_type='application/octet-stream',
        )
        response.headers[
            "Content-Disposition"] = "attachment; filename=%s" % filename
        return response
    elif output == 'save':
        if savepath is not None:
            from tool_base import ToolBaseFile
            return ToolBaseFile.write(text, savepath)
def manual(path):
    """Render a manual page located under the plugin root at *path*."""
    try:
        from tool_base import ToolBaseFile
        plugin_root = os.path.dirname(P.blueprint.template_folder)
        target = os.path.join(plugin_root, *path.split('/'))
        return render_template('manual.html', data=ToolBaseFile.read(target))
    except Exception as exception:
        P.logger.error('Exception:%s', exception)
        P.logger.error(traceback.format_exc())
def execute_query(cls, sql, sql_filepath=None):
    """Execute *sql* against the Plex DB with the Plex SQLite binary.

    Appends an explicit ``commit;``, writes the script to a temp file and
    runs it via ``.read``. Returns True on success, False on exception.
    """
    try:
        # BUG FIX: the original did `sql += f"{sql}\ncommit;"`, which
        # duplicated the entire script (every statement ran twice).
        sql += "\ncommit;"
        if sql_filepath is None:
            sql_filepath = os.path.join(
                path_data, 'tmp', f"{str(time.time()).split('.')[0]}.sql")
        ToolBaseFile.write(sql, sql_filepath)
        if platform.system() == 'Windows':
            # Windows needs backslashes escaped inside the .read argument.
            tmp = sql_filepath.replace('\\', '\\\\')
            cmd = f'"{ModelSetting.get("base_bin_sqlite")}" "{ModelSetting.get("base_path_db")}" ".read {tmp}"'
            ToolSubprocess.execute_command_return(cmd)
        else:
            ret = ToolSubprocess.execute_command_return([
                ModelSetting.get('base_bin_sqlite'),
                ModelSetting.get('base_path_db'),
                f".read {sql_filepath}"
            ])
        return True
    except Exception as e:
        logger.error(f'Exception:{str(e)}')
        logger.error(traceback.format_exc())
        return False
def edit_file():
    """Serve the editor view for the file given by the ?path= parameter."""
    path = request.args.get('path')
    if path is None:
        return error('No path in request')
    from tool_base import ToolBaseFile
    # Translate the web path to an OS path and load the file contents.
    os_file_path = web_path_to_os_path(path)
    content = ToolBaseFile.read(os_file_path)
    return get_file(path=path, content=content)
def process_ajax(self, sub, req):
    # AJAX dispatcher for the DB tool page: run a query, trigger Plex
    # refresh/analyze (web API or scanner binary), or delete a metadata
    # bundle. Always answers JSON.
    try:
        ret = {'ret': 'success'}
        if sub == 'command':
            command = req.form['command']
            logger.error(f"sub : {sub} / command : {command}")
            if command == 'select':
                # Persist the last query in settings, then run it.
                ModelSetting.set(f'{self.parent.name}_{self.name}_query',
                                 req.form['arg1'])
                ret['select'] = PlexDBHandle.tool_select(req.form['arg1'])
            elif command == 'refresh_web':
                PlexWebHandle.refresh_by_id(req.form['arg1'])
                ret['msg'] = '명령을 전송하였습니다.'
            elif command == 'refresh_bin':
                # arg1: section id, arg2: a file path whose directory is scanned.
                PlexBinaryScanner.scan_refresh2(
                    req.form['arg1'], os.path.dirname(req.form['arg2']))
                ret['msg'] = '완료'
            elif command == 'analyze_web':
                PlexWebHandle.analyze_by_id(req.form['arg1'])
                ret['msg'] = '명령을 전송하였습니다.'
            elif command == 'analyze_bin':
                PlexBinaryScanner.analyze(
                    req.form['arg1'], metadata_item_id=req.form['arg2'])
                ret['msg'] = '완료'
            elif command == 'remove_metadata':
                # arg1: metadata_type ('1' == movie), arg2: bundle hash.
                folder_path = os.path.join(
                    ModelSetting.get('base_path_metadata'),
                    'Movies' if req.form['arg1'] == '1' else 'TV Shows',
                    req.form['arg2'][0], f"{req.form['arg2'][1:]}.bundle")
                if os.path.exists(folder_path):
                    if ToolBaseFile.rmtree(folder_path):
                        ret['msg'] = '삭제하였습니다.'
                    else:
                        ret['ret'] = 'warning'
                        ret['msg'] = '삭제 실패'
                else:
                    ret['ret'] = 'warning'
                    ret['msg'] = f'{folder_path} 없음'
        elif sub == 'get_preset':
            ret['preset'] = self.preset
        return jsonify(ret)
    except Exception as e:
        P.logger.error(f'Exception:{str(e)}')
        P.logger.error(traceback.format_exc())
        return jsonify({'ret': 'danger', 'msg': str(e)})
def download_file():
    """Send the file at ?path= as an attachment; directories are zipped first."""
    web_path = request.args.get('path')
    if not web_path:
        abort(400)
    os_path = web_path_to_os_path(web_path)
    if not os.path.exists(os_path):
        abort(404)
    if os.path.isdir(os_path):
        # Pack the directory into a zip under tmp/ and serve that instead.
        from tool_base import ToolBaseFile
        zip_path = ToolBaseFile.makezip(os_path, zip_folder='tmp')
        web_path = zip_path.replace(_FILE_PATH, '')
    return send_from_directory(_FILE_PATH,
                               web_path_to_local(web_path),
                               as_attachment=True)
def insert_info_xml(db_filepath, metadata_type):
    # Backfill the `metadata` backup table: for every movie/show item in
    # metadata_items, read its _combined/Info.xml from the Plex bundle on
    # disk and insert it (hash, data) unless a row already exists.
    con = sqlite3.connect(db_filepath)
    ce = con.execute(
        'SELECT title, hash FROM metadata_items WHERE metadata_type BETWEEN 1 AND 2'
    )
    ce.row_factory = dict_factory
    datas = ce.fetchall()
    count = len(datas)
    for idx, item in enumerate(datas):
        row_ce = con.execute('SELECT hash FROM metadata WHERE hash = ?',
                             (item['hash'], ))
        row_ce.row_factory = dict_factory
        row = row_ce.fetchall()
        if len(row) == 1:
            # Already backed up; skip.
            logger.warning(
                f"{idx+1} / {count} : {item['title']} Already Info.xml saved"
            )
        elif len(row) == 0:
            # Bundle path: <Metadata>/<type>/<hash[0]>/<hash[1:]>.bundle/...
            metapath = os.path.join(
                ModelSetting.get('base_path_metadata'),
                'Movies' if metadata_type == 1 else 'TV Shows',
                item['hash'][0], f"{item['hash'][1:]}.bundle", 'Contents',
                '_combined', 'Info.xml')
            if os.path.exists(metapath):
                xml = ToolBaseFile.read(metapath)
                insert_ce = con.execute(
                    'INSERT INTO metadata (hash, data) VALUES (?,?)',
                    (item['hash'], xml))
                logger.warning(
                    f"{idx+1} / {count} : {item['title']} insert..")
            else:
                logger.warning(
                    f"{idx+1} / {count} : {item['title']} Not exist Info.xml file"
                )
    # Single commit for the whole batch, then close.
    con.commit()
    con.close()
def start(self):
    # Analysis task: walk the configured folder tree, optionally removing
    # empty folders, and run Task.analysis on every folder that contains
    # files (skipping Plex "extras" subfolders). Progress is pushed either
    # through celery state or directly to the module.
    logger.warning(f"Analysis Task.start")
    folder_path = ModelSetting.get(f'{name}_path')
    logger.warning(f"분석 폴더 : {folder_path}")
    for base, dirs, files in os.walk(folder_path):
        if ModelSetting.get_bool(f"{name}_task_stop_flag"):
            logger.warning("사용자 중지")
            return 'stop'
        if len(dirs) == 0 and len(files) == 0:
            # Empty leaf folder: remove when the option is on.
            if ModelSetting.get_bool(f"{name}_remove_empty_folder"):
                ret = ToolBaseFile.rmtree(base)
                logger.error(f"폴더 삭제 : {base} {ret}")
        if len(files) > 0:
            # Folder that actually contains files.
            # Skip Plex local-extras folders (trailers, interviews, ...).
            if base.split('/')[-1] in [
                    'behindthescenes', 'deleted', 'featurette', 'interview',
                    'scene', 'short', 'trailer', 'other'
            ]:
                logger.warning(f"base : {base}")
                continue
            data = {'folder_path': base}
            Task.analysis(data)
            if app.config['config']['use_celery']:
                self.update_state(state='PROGRESS', meta=data)
            else:
                P.logic.get_module(f'{name}').receive_from_task(
                    data, celery=False)
    logger.warning(f"종료")
    return 'wait'
def start(self, location, meta_type, folder, dryrun):
    """Scan Metadata/Media bundle folders and remove orphaned bundles.

    A bundle is orphaned when no metadata_items / media_parts row matches
    its hash. Progress and per-bundle results are pushed via celery state
    or receive_from_task. Returns 'stop' on user abort, 'wait' when done.

    dryrun arrives as the string 'true'/'false' from the UI; no files are
    removed in dry-run mode.
    """
    dryrun = True if dryrun == 'true' else False
    if location == 'Metadata':
        root_path = os.path.join(ModelSetting.get('base_path_metadata'),
                                 meta_type)
    elif location == 'Media':
        root_path = os.path.join(ModelSetting.get('base_path_media'),
                                 'localhost')
    if folder == 'all':
        folders = os.listdir(root_path)
    else:
        folders = [folder]
    db_file = ModelSetting.get('base_path_db')
    con = sqlite3.connect(db_file)
    cur = con.cursor()
    status = {
        'is_working': 'run',
        'remove_count': 0,
        'remove_size': 0,
        'count': 0,
        'current': 0
    }
    # FIX: the connection was never closed (leaked on both the 'stop'
    # early return and the normal path); try/finally guarantees cleanup.
    try:
        for folder in folders:
            folder_path = os.path.join(root_path, folder)
            if os.path.exists(folder_path) == False:
                continue
            bundle_list = os.listdir(folder_path)
            status['count'] += len(bundle_list)
            for bundle in bundle_list:
                try:
                    if ModelSetting.get_bool('clear_bundle_task_stop_flag'):
                        return 'stop'
                    time.sleep(0.05)  # keep the UI responsive
                    status['current'] += 1
                    data = {
                        'folder': folder,
                        'bundle': bundle,
                        'status': status
                    }
                    bundle_path = os.path.join(folder_path, bundle)
                    # Plex stores a bundle as <h[0]>/<h[1:]>.bundle.
                    hash_value = folder + bundle.split('.')[0]
                    if location == 'Metadata':
                        ce = con.execute(
                            'SELECT * FROM metadata_items WHERE hash = ?',
                            (hash_value, ))
                    else:
                        ce = con.execute(
                            'SELECT * FROM media_parts WHERE hash = ?',
                            (hash_value, ))
                    ce.row_factory = dict_factory
                    fetch = ce.fetchall()
                    if len(fetch) == 1:
                        # Bundle is still referenced; just report it.
                        if location == 'Metadata':
                            data['title'] = fetch[0]['title']
                        else:
                            data['file'] = fetch[0]['file']
                    elif len(fetch) == 0:
                        # Orphan: account for its size and remove it.
                        tmp = ToolBaseFile.size(start_path=bundle_path)
                        data['remove'] = tmp
                        status['remove_size'] += tmp
                        status['remove_count'] += 1
                        if dryrun == False:
                            ToolBaseFile.rmtree(bundle_path)
                    if app.config['config']['use_celery']:
                        self.update_state(state='PROGRESS', meta=data)
                    else:
                        self.receive_from_task(data, celery=False)
                except Exception as e:
                    logger.error(f'Exception:{str(e)}')
                    logger.error(traceback.format_exc())
        return 'wait'
    finally:
        con.close()
def ajax(sub):
    # AJAX dispatcher for the command plugin: job CRUD, scheduler toggling,
    # foreground/background execution, and job script/log access.
    # Every branch answers JSON; unknown subs fall through to None.
    logger.debug('AJAX %s %s', package_name, sub)
    try:
        if sub == 'foreground_command':
            command = request.form['command']
            ret = LogicNormal.foreground_command(command)
            return jsonify(ret)
        elif sub == 'foreground_command_close':
            ret = LogicNormal.foreground_command_close()
            return jsonify(ret)
        elif sub == 'job_new':
            ret = {}
            ret['ret'] = ModelCommand.job_new(request)
            ret['list'] = ModelCommand.job_list()
            return jsonify(ret)
        elif sub == 'job_save':
            ret = {}
            ret['ret'] = ModelCommand.job_save(request)
            ret['list'] = ModelCommand.job_list()
            return jsonify(ret)
        elif sub == 'scheduler_switch':
            ret = {}
            ret['ret'] = LogicNormal.scheduler_switch0(request)
            ret['list'] = ModelCommand.job_list()
            return jsonify(ret)
        elif sub == 'job_remove':
            ret = {}
            ret['ret'] = ModelCommand.job_remove(request)
            ret['list'] = ModelCommand.job_list()
            return jsonify(ret)
        elif sub == 'job_log_show':
            ret = {}
            job_id = request.form['job_id']
            ret['filename'] = '%s_%s.log' % (package_name, job_id)
            # True when the per-job log file exists under data/log.
            ret['ret'] = os.path.exists(
                os.path.join(path_data, 'log', ret['filename']))
            return jsonify(ret)
        elif sub == 'job_background':
            ret = {}
            job_id = request.form['job_id']
            ret['ret'] = LogicNormal.job_background(job_id)
            return jsonify(ret)
        elif sub == 'job_file_edit':
            # Load the job's script file for the editor.
            ret = {}
            job_id = request.form['job_id']
            job = ModelCommand.get_job_by_id(job_id)
            ret['data'] = ToolBaseFile.read(job.filename)
            ret['ret'] = True
            return jsonify(ret)
        elif sub == 'file_save':
            ret = {}
            job_id = request.form['file_job_id']
            logger.debug(job_id)
            data = request.form['file_textarea']
            job = ModelCommand.get_job_by_id(job_id)
            # Normalize CRLF before writing the script back to disk.
            ToolBaseFile.write(data.replace('\r\n', '\n'), job.filename)
            ret['ret'] = True
            return jsonify(ret)
        elif sub == 'foreground_command_by_job':
            ret = {}
            job_id = request.form['job_id']
            job = ModelCommand.get_job_by_id(job_id)
            ret['ret'] = LogicNormal.foreground_command(job.command,
                                                        job_id=job_id)
            return jsonify(ret)
        elif sub == 'process_close':
            ret = {'ret': 'fail'}
            job_id = request.form['job_id']
            if LogicNormal.process_close(
                    LogicNormal.process_list[int(job_id)]):
                ret['ret'] = 'success'
            return jsonify(ret)
        elif sub == 'send_process_command':
            ret = LogicNormal.send_process_command(request)
            return jsonify(ret)
        elif sub == 'command_list':
            ret = {}
            ret['list'] = ModelCommand.job_list()
            return jsonify(ret)
        elif sub == 'save':
            ret = {}
            ret['ret'] = LogicNormal.save(request)
            ret['list'] = ModelCommand.job_list()
            return jsonify(ret)
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def start(self, config, call_module):
    """Simple sort task: walk the source folder, classify each file with
    EntityKtv, and move matched files to the target folder (unmatched go
    to the error folder). Empty folders are pruned afterwards.

    call_module ending in '_dry' enables dry-run (no file moves/removes).
    Returns 'stop' on user abort, 'wait' when finished.
    """
    logger.warning(f"Simple Task.start")
    is_dry = True if call_module.find('_dry') != -1 else False
    source = ModelSetting.get(f'{name}_path_source')
    target = ModelSetting.get(f'{name}_path_target')
    error = ModelSetting.get(f'{name}_path_error')
    logger.debug(f"소스 : {source}")
    logger.debug(f"target : {target}")
    logger.debug(f"error : {error}")
    for base, dirs, files in os.walk(source):
        # BUG FIX: was logger.warning("BASE : {base}") — missing f prefix,
        # which logged the literal placeholder instead of the path.
        logger.warning(f"BASE : {base}")
        for idx, original_filename in enumerate(files):
            if ModelSetting.get_bool(f"{call_module}_task_stop_flag"):
                logger.warning("사용자 중지")
                return 'stop'
            try:
                data = {
                    'filename': original_filename,
                    'foldername': base,
                    'log': []
                }
                filename = original_filename
                logger.warning(f"{idx} / {len(files)} : (unknown)")
                # Pre-processing may rename or reject the file.
                filename = DownloadProcessTask.process_pre(
                    config, base, filename, is_dry, data)
                data['filename_pre'] = filename
                if filename is None:
                    continue
                entity = EntityKtv(filename,
                                   dirname=base,
                                   meta=False,
                                   config=config)
                data['entity'] = entity.data
                if entity.data['filename']['is_matched']:
                    # Matched: file under target/<program name>.
                    data['result_folder'] = os.path.join(
                        target, entity.data['filename']['name'])
                    data['result_filename'] = entity.data['filename'][
                        'original']
                else:
                    # Unmatched: dump into the error folder unchanged.
                    data['result_folder'] = error
                    data['result_filename'] = original_filename
                if is_dry == False:
                    ToolBaseFile.file_move(
                        os.path.join(base, original_filename),
                        data['result_folder'], data['result_filename'])
            except Exception as e:
                P.logger.error(f"Exception:{e}")
                P.logger.error(traceback.format_exc())
            finally:
                # Always report per-file progress, even after an exception.
                if app.config['config']['use_celery']:
                    self.update_state(state='PROGRESS', meta=data)
                else:
                    P.logic.get_module(call_module.replace(
                        '_dry', '')).receive_from_task(data, celery=False)
        # Prune this folder if the moves emptied it.
        if base != source and len(os.listdir(base)) == 0:
            try:
                if is_dry == False:
                    os.rmdir(base)
            except Exception as e:
                P.logger.error(f"Exception:{e}")
                P.logger.error(traceback.format_exc())
    # Second pass: remove any remaining empty subfolders.
    for base, dirs, files in os.walk(source):
        if base != source and len(dirs) == 0 and len(files) == 0:
            try:
                if is_dry == False:
                    os.rmdir(base)
            except Exception as e:
                P.logger.error(f"Exception:{e}")
                P.logger.error(traceback.format_exc())
    logger.error(f"종료")
    return 'wait'
def show_process(data, con, cur):
    # Analyze one TV show's metadata bundle: parse the _combined Info.xml
    # for the show / each season / each episode, decide which artwork files
    # are still referenced, rewrite thumb/art/banner URLs in the Plex DB,
    # and delete unreferenced files under the bundle.
    #
    # `data` comes from the caller and is assumed to already contain 'db'
    # (a metadata_items row), 'command', 'dryrun', 'file_count' and
    # 'remove_count' — TODO confirm exact caller contract.
    # NOTE(review): `cur` is accepted but never used here.
    data['meta'] = {'remove': 0}
    data['meta']['metapath'] = os.path.join(
        ModelSetting.get('base_path_metadata'), 'TV Shows',
        data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")
    data['meta']['total'] = ToolBaseFile.size(
        start_path=data['meta']['metapath'])
    if data['command'] == 'start0':
        # start0: size report only.
        return
    combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents',
                                    '_combined', 'Info.xml')
    if os.path.exists(combined_xmlpath) == False:
        return
    data['use_filepath'] = []      # files referenced by the XML (to keep)
    data['remove_filepath'] = []   # files scheduled for removal
    data['seasons'] = {}
    data['media'] = {'total': 0, 'remove': 0}
    ret = Task.xml_analysis(combined_xmlpath, data, data)
    if ret == False:
        logger.warning(f"{data['db']['title']} 쇼 분석 실패")
        return
    # Walk seasons (metadata_type 3) and their episodes (metadata_type 4).
    season_cs = con.execute(
        'SELECT * FROM metadata_items WHERE metadata_type = 3 and parent_id = ? ORDER BY "index"',
        (data['db']['id'], ))
    season_cs.row_factory = dict_factory
    for season in season_cs.fetchall():
        episode_cs = con.execute(
            'SELECT * FROM metadata_items WHERE metadata_type = 4 and parent_id = ? ORDER BY "index"',
            (season['id'], ))
        episode_cs.row_factory = dict_factory
        for episode in episode_cs.fetchall():
            season_index = season['index']
            episode_index = episode['index']
            if episode['index'] == -1:
                # Date-based episode: use the air date as its index.
                if episode['available_at'] is not None:
                    episode_index = episode['available_at'].split(' ')[0]
                else:
                    episode_index = episode[
                        'originally_available_at'].split(' ')[0]
            if season_index not in data['seasons']:
                # First episode of this season: analyze the season XML.
                data['seasons'][season_index] = {'db': season}
                combined_xmlpath = os.path.join(data['meta']['metapath'],
                                                'Contents', '_combined',
                                                'seasons',
                                                f"{season_index}.xml")
                ret = Task.xml_analysis(combined_xmlpath,
                                        data['seasons'][season_index], data)
                if ret == False:
                    logger.warning(combined_xmlpath)
                    logger.warning(
                        f"{data['db']['title']} 시즌 분석 실패 : season_index - {season_index}"
                    )
                data['seasons'][season_index]['episodes'] = {}
            data['seasons'][season_index]['episodes'][episode_index] = {
                'db': episode
            }
            combined_xmlpath = os.path.join(data['meta']['metapath'],
                                            'Contents', '_combined',
                                            'seasons', f"{season_index}",
                                            "episodes",
                                            f"{episode_index}.xml")
            ret = Task.xml_analysis(
                combined_xmlpath,
                data['seasons'][season_index]['episodes'][episode_index],
                data,
                is_episode=True)
            if ret == False:
                logger.warning(combined_xmlpath)
                logger.warning(f"{data['db']['title']} 에피소드 분석 실패")
    query = ""
    if data['command'] in ['start22', 'start3', 'start4']:
        # Show-level artwork: point the DB columns at the http URLs and
        # remove the now-redundant local files from the keep-list.
        sql = 'UPDATE metadata_items SET '
        if data['process']['poster']['url'] != '':
            sql += ' user_thumb_url = "{}", '.format(
                data['process']['poster']['url'])
            try:
                data['use_filepath'].remove(
                    data['process']['poster']['localpath'])
            except:
                pass
            try:
                data['use_filepath'].remove(
                    data['process']['poster']['realpath'])
            except:
                pass
        if data['process']['art']['url'] != '':
            sql += ' user_art_url = "{}", '.format(
                data['process']['art']['url'])
            try:
                data['use_filepath'].remove(
                    data['process']['art']['localpath'])
            except:
                pass
            try:
                data['use_filepath'].remove(
                    data['process']['art']['realpath'])
            except:
                pass
        if data['process']['banner']['url'] != '':
            sql += ' user_banner_url = "{}", '.format(
                data['process']['banner']['url'])
            try:
                data['use_filepath'].remove(
                    data['process']['banner']['localpath'])
            except:
                pass
            try:
                data['use_filepath'].remove(
                    data['process']['banner']['realpath'])
            except:
                pass
        if data['process']['theme']['url'] != '':
            sql += ' user_music_url = "{}", '.format(
                data['process']['theme']['url'])
        if sql != 'UPDATE metadata_items SET ':
            # At least one column was added; finalize the statement.
            sql = sql.strip().rstrip(',')
            sql += ' WHERE id = {} ;\n'.format(data['db']['id'])
            query += sql
        # Season-level artwork, same pattern as the show above.
        for season_index, season in data['seasons'].items():
            if 'process' not in season:
                continue
            sql = 'UPDATE metadata_items SET '
            if season['process']['poster']['url'] != '':
                sql += ' user_thumb_url = "{}", '.format(
                    season['process']['poster']['url'])
                try:
                    data['use_filepath'].remove(
                        season['process']['poster']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        season['process']['poster']['realpath'])
                except:
                    pass
            if season['process']['art']['url'] != '':
                sql += ' user_art_url = "{}", '.format(
                    season['process']['art']['url'])
                try:
                    data['use_filepath'].remove(
                        season['process']['art']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        season['process']['art']['realpath'])
                except:
                    pass
            if season['process']['banner']['url'] != '':
                sql += ' user_banner_url = "{}", '.format(
                    season['process']['banner']['url'])
                try:
                    data['use_filepath'].remove(
                        season['process']['banner']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        season['process']['banner']['realpath'])
                except:
                    pass
            if sql != 'UPDATE metadata_items SET ':
                sql = sql.strip().rstrip(',')
                sql += ' WHERE id = {} ;\n'.format(season['db']['id'])
                query += sql
    if data['command'] in ['start21', 'start22', 'start3', 'start4']:
        # Episode thumbnails: collect media thumb files, optionally proxy
        # them through Discord (start4), and rewrite user_thumb_url.
        for season_index, season in data['seasons'].items():
            for episode_index, episode in season['episodes'].items():
                media_item_cs = con.execute(
                    'SELECT * FROM media_items WHERE metadata_item_id = ? ORDER BY id',
                    (episode['db']['id'], ))
                media_item_cs.row_factory = dict_factory
                episode['media_list'] = []
                for media_item in media_item_cs.fetchall():
                    media_part_cs = con.execute(
                        'SELECT * FROM media_parts WHERE media_item_id = ? ORDER BY id',
                        (media_item['id'], ))
                    media_part_cs.row_factory = dict_factory
                    for media_part in media_part_cs.fetchall():
                        media_hash = media_part['hash']
                        mediapath = os.path.join(
                            ModelSetting.get('base_path_media'),
                            'localhost', media_hash[0],
                            f"{media_hash[1:]}.bundle", 'Contents',
                            'Thumbnails', 'thumb1.jpg')
                        if os.path.exists(mediapath):
                            episode['media_list'].append(mediapath)
                            # NOTE(review): `=` (not `+=`) — looks like it
                            # should accumulate; confirm intent.
                            data['media']['total'] = os.path.getsize(
                                mediapath)
                # 2021-11-01: step 4 uploads the media thumb to Discord and
                # substitutes that URL.
                if data['command'] == 'start4' and episode['process'][
                        'thumb']['db_type'] == 'media':
                    localpath = os.path.join(
                        ModelSetting.get('base_path_media'), 'localhost',
                        episode['process']['thumb']['db'].replace(
                            'media://', ''))
                    if localpath[0] != '/':
                        # Presumably a Windows path — switch separators.
                        localpath = localpath.replace('/', '\\')
                    if os.path.exists(localpath):
                        if data['dryrun'] == False:
                            discord_url = ToolExpandDiscord.discord_proxy_image_localfile(
                                localpath)
                            if discord_url is not None:
                                episode['process']['thumb'][
                                    'url'] = discord_url
                                logger.warning(discord_url)
                    else:
                        # Thumb file missing: ask Plex to re-analyze.
                        logger.warning(f"썸네일 없음 1: {episode['db']['id']}")
                        PlexWebHandle.analyze_by_id(episode['db']['id'])
                if data['command'] == 'start4' and episode['process'][
                        'thumb']['db'] == '':
                    logger.warning(f"썸네일 없음 분석 2: {episode['db']['id']}")
                    PlexWebHandle.analyze_by_id(episode['db']['id'])
                if episode['process']['thumb']['url'] != '':
                    query += f'UPDATE metadata_items SET user_thumb_url = "{episode["process"]["thumb"]["url"]}" WHERE id = {episode["db"]["id"]};\n'
                    try:
                        data['use_filepath'].remove(
                            episode['process']['thumb']['localpath'])
                    except:
                        pass
                    try:
                        data['use_filepath'].remove(
                            episode['process']['thumb']['realpath'])
                    except:
                        pass
                    if data['command'] in ['start3', 'start4']:
                        # NOTE(review): loop variable is `mediafilepath`
                        # but the body uses `mediapath` (last value from
                        # the scan above) — likely a latent bug; preserved
                        # as-is, confirm before changing.
                        for mediafilepath in episode['media_list']:
                            if os.path.exists(mediapath):
                                data['media'][
                                    'remove'] += os.path.getsize(mediapath)
                                if data['dryrun'] == False:
                                    os.remove(mediapath)
                elif episode['process']['thumb']['db'] == '':
                    # No thumb in DB: fall back to the first media thumb.
                    if len(episode['media_list']) > 0:
                        tmp = f"media://{episode['media_list'][0].split('localhost/')[1]}"
                        query += f'UPDATE metadata_items SET user_thumb_url = "{tmp}" WHERE id = {episode["db"]["id"]};\n'
                if data['dryrun'] == False and data['command'] in [
                        'start3', 'start4'
                ]:
                    # Remove now-empty media Contents folders.
                    for mediafilepath in episode['media_list']:
                        content_folder = os.path.dirname(
                            os.path.dirname(mediafilepath))
                        for base, folders, files in os.walk(
                                content_folder):
                            if not folders and not files:
                                os.removedirs(base)
    if query != '' and data['dryrun'] == False:
        PlexDBHandle.execute_query(query)
    # Sweep the bundle: delete every file the XML analysis did not claim.
    for base, folders, files in os.walk(data['meta']['metapath']):
        for f in files:
            data['file_count'] += 1
            filepath = os.path.join(base, f)
            if filepath not in data['use_filepath']:
                if os.path.exists(filepath):
                    data['remove_count'] += 1
                    if filepath not in data['remove_filepath']:
                        data['remove_filepath'].append(filepath)
                    if os.path.islink(filepath) == False:
                        data['meta']['remove'] += os.path.getsize(filepath)
                    if data['dryrun'] == False:
                        os.remove(filepath)
    # Remove any directories emptied by the sweep.
    for base, folders, files in os.walk(data['meta']['metapath']):
        if not folders and not files:
            os.removedirs(base)
    if data['command'] == 'start1':
        return
def change_text_for_use_filename(text):
    """Sanitize *text* so it can safely be used as a filename."""
    from tool_base import ToolBaseFile
    sanitized = ToolBaseFile.text_for_filename(text)
    return sanitized
def process_ajax(self, sub, req):
    """AJAX dispatcher for the base tool page.

    sub == 'command': size/backup/clear tasks, scanner/sqlite execution,
    DB queries, agent listing, version check, SQL generation.
    sub == 'plex_folder_test': validate the Plex program/data folder pair
    and derive binary/DB/metadata paths plus the server token.
    Always answers JSON; exceptions return {'ret': 'danger'}.
    """
    try:
        ret = {}
        if sub == 'command':
            command = req.form['command']
            if command == 'size':
                path = req.form['arg2']
                self.task_interface('size', (path, ))
                ret = {
                    'ret': 'success',
                    'msg': '명령을 전달하였습니다. 잠시 후 결과 알림을 확인하세요.'
                }
            elif command == 'execute':
                if req.form['arg1'] == 'scanner':
                    data = ToolSubprocess.execute_command_return(
                        [req.form['arg2']])
                    data = data.replace('\n',
                                        '<br>').lstrip('"').rstrip('"')
                    ret['modal'] = data
                elif req.form['arg1'] == 'sqlite':
                    data = []
                    data.append(f"SQLite 버전")
                    data.append(
                        f" - {ToolSubprocess.execute_command_return([req.form['arg2'], '-version'])}"
                    )
                    data.append("")
                    data.append(f"Plex Media Server 버전")
                    data.append(
                        f" - {ToolSubprocess.execute_command_return([req.form['arg2'], '--version'])}"
                    )
                    data = '<br>'.join(data)
                    ret['modal'] = data
            elif command == 'backup':
                if req.form['arg1'] == 'plex_db':
                    self.task_interface('backup', (req.form['arg2'], ))
                    ret = {
                        'ret': 'success',
                        'msg': '명령을 전달하였습니다. 잠시 후 결과 알림을 확인하세요.'
                    }
            elif command == 'db':
                if req.form['arg1'] == 'library_sections':
                    data = PlexDBHandle.library_sections(req.form['arg2'])
                    ret['modal'] = json.dumps(data,
                                              indent=4,
                                              ensure_ascii=False)
            elif command == 'clear':
                if req.form['arg1'] == 'plex_phototranscode':
                    path = req.form['arg2']
                    self.task_interface('clear', (path, ))
                    ret = {
                        'ret': 'success',
                        'msg': '명령을 전달하였습니다. 잠시 후 결과 알림을 확인하세요.'
                    }
            elif command == 'system_agents':
                data = PlexWebHandle.system_agents(url=req.form['arg1'],
                                                   token=req.form['arg2'])
                data = json.loads(json.dumps(xmltodict.parse(data)))
                ret['modal'] = json.dumps(data,
                                          indent=4,
                                          ensure_ascii=False)
            elif command == 'version':
                # Compare installed bundle versions against GitHub master.
                url = req.form['arg1']
                token = req.form['arg2']
                msg = f"SJVA.bundle : {PlexWebHandle.get_sjva_version(url=url, token=token)}<br>SjvaAgent : {PlexWebHandle.get_sjva_agent_version(url=url, token=token)}<br>"
                # Raw string: was "VERSION\s=\s..." (invalid escape sequence).
                regex = re.compile(r"VERSION\s=\s'(?P<version>.*?)'")
                text = requests.get(
                    'https://raw.githubusercontent.com/soju6jan/SJVA.bundle/master/SJVA.bundle/Contents/Code/version.py'
                ).text
                match = regex.search(text)
                if match:
                    msg += u'SJVA.bundle (최신) : ' + match.group('version')
                text = requests.get(
                    'https://raw.githubusercontent.com/soju6jan/SjvaAgent.bundle/main/Contents/Code/version.py'
                ).text
                match = regex.search(text)
                if match:
                    msg += u'<br>SjvaAgent (최신) : ' + match.group('version')
                return jsonify({'ret': 'success', 'msg': msg})
            elif command == 'make_sql':
                self.make_sql()
        elif sub == 'plex_folder_test':
            program_path = req.form['program_path']
            data_path = req.form['data_path']
            # BUG FIX: the two failure messages were swapped — a missing
            # program folder reported "데이터 폴더가 없습니다." and vice versa.
            if os.path.exists(program_path) == False:
                ret = {'ret': 'fail', 'msg': '프로그램 폴더가 없습니다.'}
            elif os.path.exists(data_path) == False:
                ret = {'ret': 'fail', 'msg': '데이터 폴더가 없습니다.'}
            else:
                # Derive the standard Plex paths from the two roots.
                ret['data'] = {}
                ret['data']['bin_scanner'] = os.path.join(
                    program_path, 'Plex Media Scanner')
                ret['data']['bin_sqlite'] = os.path.join(
                    program_path, 'Plex SQLite')
                ret['data']['path_db'] = os.path.join(
                    data_path, 'Plug-in Support', 'Databases',
                    'com.plexapp.plugins.library.db')
                ret['data']['path_metadata'] = os.path.join(
                    data_path, 'Metadata')
                ret['data']['path_media'] = os.path.join(data_path, 'Media')
                ret['data']['path_phototranscoder'] = os.path.join(
                    data_path, 'Cache', 'PhotoTranscoder')
                if platform.system() == 'Windows':
                    ret['data']['bin_scanner'] += '.exe'
                    ret['data']['bin_sqlite'] += '.exe'
                    # On Windows the token comes from settings.
                    ret['data']['token'] = ModelSetting.get(f'{name}_token')
                else:
                    # Elsewhere, read it from Preferences.xml.
                    xml_string = ToolBaseFile.read(
                        os.path.join(data_path, 'Preferences.xml'))
                    result = xmltodict.parse(xml_string)
                    prefs = json.loads(json.dumps(result))
                    logger.warning(d(prefs))
                    ret['data']['token'] = prefs['Preferences'][
                        '@PlexOnlineToken']
                    ret['data']['machine'] = prefs['Preferences'][
                        '@ProcessedMachineIdentifier']
                # Every derived path (except token/machine) must exist.
                for key, value in ret['data'].items():
                    if key not in ['token', 'machine']:
                        if os.path.exists(value) == False:
                            ret = {
                                'ret': 'fail',
                                'msg': '올바른 경로가 아닙니다.<br>' + value
                            }
                            return jsonify(ret)
                ret['ret'] = 'success'
                ret['msg'] = '설정을 저장하세요.'
        return jsonify(ret)
    except Exception as e:
        P.logger.error(f'Exception:{str(e)}')
        P.logger.error(traceback.format_exc())
        return jsonify({'ret': 'danger', 'msg': str(e)})
def func():
    """Delete *folder* after a short delay, then publish the removal.

    Closure over ``folder``, ``index`` and ``self`` from the enclosing
    scope: replaces the tracked entry at ``index`` with a 'remove'
    status record and triggers a data refresh for that index.
    """
    # Short grace period before removing the (possibly large) tree.
    time.sleep(1)
    ToolBaseFile.rmtree(folder)
    self.data['data'][index] = {
        'index': index,
        'status': 'remove',
        'target': folder,
    }
    self.refresh_data(index=index)
def start(self, configs, call_module):
    """Walk every configured source folder and sort its files.

    For each file: run pre-processing rules, parse it into an
    ``EntityKtv``, then move it to the target / error folder depending
    on whether the filename matched and metadata was found.  Progress
    for every file is reported via celery state or directly to the
    owning module.

    Returns ``'stop'`` when the user requested a stop, ``'wait'``
    otherwise.  ``call_module`` ending in ``'_dry'`` enables dry-run
    mode (no actual file moves / folder removals).
    """
    #logger.warning(f"Task.start : {call_module}")
    # Dry-run flag is encoded in the module name suffix.
    is_dry = True if call_module.find('_dry') != -1 else False
    for config in configs:
        # NOTE: config keys are Korean labels from the user's YAML/JSON
        # config ('소스 폴더' = source, '타겟 폴더' = target, '에러 폴더' = error).
        source = config['소스 폴더']
        target = config['타겟 폴더']
        error = config['에러 폴더']
        for base, dirs, files in os.walk(source):
            for idx, original_filename in enumerate(files):
                #if idx>0:return
                # Cooperative cancellation: flag is set from the UI.
                if ModelSetting.get_bool(f"{call_module}_task_stop_flag"):
                    logger.warning("사용자 중지")
                    return 'stop'
                try:
                    data = {
                        'filename': original_filename,
                        'foldername': base,
                        'log': []
                    }
                    filename = original_filename
                    #logger.warning(f"{idx} / {len(files)} : (unknown)")
                    # Pre-processing may rename, move, or delete the file;
                    # None means it was consumed (deleted/moved) already.
                    filename = Task.process_pre(config, base, filename,
                                                is_dry, data)
                    data['filename_pre'] = filename
                    if filename is None:
                        continue
                    entity = EntityKtv(filename,
                                       dirname=base,
                                       meta=True,
                                       config=config)
                    data['entity'] = entity.data
                    if entity.data['filename']['is_matched']:
                        if entity.data['meta']['find']:
                            # Matched and metadata found: normal move.
                            Task.move_file(
                                config, entity,
                                os.path.join(base, original_filename),
                                target, data, is_dry)
                        else:
                            # Matched but no metadata: route to the
                            # ftv / no_meta error destinations.
                            if entity.data['process_info'][
                                    'status'] == 'ftv':
                                data['result_folder'] = os.path.join(
                                    config['경로 설정']['ftv'].format(
                                        error=error),
                                    f"{entity.data['process_info']['ftv_title']} ({entity.data['process_info']['ftv_year']})",
                                    f"Season {entity.data['filename']['sno']}"
                                )
                            else:
                                data['result_folder'] = config['경로 설정'][
                                    'no_meta'].format(error=error)
                                # Optionally group no-meta files per show.
                                if config['메타 검색 실패시 방송별 폴더 생성']:
                                    data['result_folder'] = os.path.join(
                                        data['result_folder'],
                                        entity.data['filename']['name'])
                            data['result_filename'] = original_filename
                            if is_dry == False:
                                ToolBaseFile.file_move(
                                    os.path.join(base, original_filename),
                                    data['result_folder'],
                                    data['result_filename'])
                    else:
                        # Filename did not match at all: no_tv bucket.
                        data['result_folder'] = config['경로 설정'][
                            'no_tv'].format(error=error)
                        data['result_filename'] = original_filename
                        if is_dry == False:
                            ToolBaseFile.file_move(
                                os.path.join(base, original_filename),
                                data['result_folder'],
                                data['result_filename'])
                except Exception as e:
                    P.logger.error(f"Exception:{e}")
                    P.logger.error(traceback.format_exc())
                finally:
                    # Always report per-file progress, even on failure.
                    if app.config['config']['use_celery']:
                        self.update_state(state='PROGRESS', meta=data)
                    else:
                        P.logic.get_module(call_module.replace(
                            '_dry', '')).receive_from_task(data,
                                                           celery=False)
            # Remove the folder if it became empty after processing.
            if base != source and len(os.listdir(base)) == 0:
                try:
                    if is_dry == False:
                        os.rmdir(base)
                except Exception as e:
                    P.logger.error(f"Exception:{e}")
                    P.logger.error(traceback.format_exc())
        # Second pass: sweep any remaining fully-empty subfolders.
        for base, dirs, files in os.walk(source):
            if base != source and len(dirs) == 0 and len(files) == 0:
                try:
                    if is_dry == False:
                        os.rmdir(base)
                except Exception as e:
                    P.logger.error(f"Exception:{e}")
                    P.logger.error(traceback.format_exc())
    logger.debug(f"task {call_module} 종료")
    return 'wait'
def analysis(data, con, cur):
    """Analyze one movie/show item: fix artwork URLs, prune metadata,
    then prune media-bundle thumbnails/art.

    ``data['command']`` gates how far processing goes ('start1' stops
    after thumb processing, 'start2' stops before media pruning).
    ``data['dryrun']`` suppresses all destructive operations while
    still accumulating size counters.  ``cur`` is accepted but unused
    here — presumably kept for signature parity with sibling tasks.
    """
    #logger.warning(f"분석시작 : {data['db']['title']}")
    Task.thumb_process(data)
    if data['command'] == 'start1':
        return
    # Stage 2: point DB artwork columns at the per-TAG URLs and remove
    # everything except the xml files.
    if data['dryrun'] == False:
        #sql = 'UPDATE metadata_items SET user_thumb_url = "{}", user_art_url = "{}", user_banner_url = "{}" WHERE id = {} ;'.format(
        #    data['process']['poster']['url'],
        #    data['process']['art']['url'],
        #    data['process']['banner']['url'],
        #    data['db']['id']
        #)
        # thumb_process may bail before filling data['process'].
        if 'poster' not in data['process']:
            return
        # Build the UPDATE incrementally; only columns with a URL are set.
        # NOTE(review): values are interpolated, not parameterized — ids
        # and URLs come from the local Plex DB, not untrusted input.
        sql = 'UPDATE metadata_items SET '
        if data['process']['poster']['url'] != '':
            sql += ' user_thumb_url = "{}", '.format(
                data['process']['poster']['url'])
        if data['process']['art']['url'] != '':
            sql += ' user_art_url = "{}", '.format(
                data['process']['art']['url'])
        if data['process']['banner']['url'] != '':
            sql += ' user_banner_url = "{}", '.format(
                data['process']['banner']['url'])
        if sql != 'UPDATE metadata_items SET ':
            # Drop the trailing ", " left by the last appended column.
            sql = sql.strip().rstrip(',')
            sql += ' WHERE id = {} ;'.format(data['db']['id'])
            sql_filepath = os.path.join(
                path_data, 'tmp', f"movie_{data['db']['id']}.sql")
            PlexDBHandle.execute_query(sql, sql_filepath=sql_filepath)
    c_metapath = os.path.join(data['meta']['metapath'], 'Contents')
    if os.path.exists(c_metapath):
        for f in os.listdir(c_metapath):
            _path = os.path.join(c_metapath, f)
            if f == '_combined':
                # Remove each artwork subfolder (posters/art/banners...).
                for tag, value in TAG.items():
                    tag_path = os.path.join(_path, value[1])
                    if os.path.exists(tag_path):
                        if data['dryrun'] == False:
                            data['meta']['remove'] += ToolBaseFile.size(
                                start_path=tag_path)
                            ToolBaseFile.rmtree(tag_path)
                # Empty extras folder is dropped too.
                tmp = os.path.join(_path, 'extras')
                if os.path.exists(tmp) and len(os.listdir(tmp)) == 0:
                    if data['dryrun'] == False:
                        ToolBaseFile.rmtree(tmp)
                tmp = os.path.join(_path, 'extras.xml')
                if os.path.exists(tmp):
                    # NOTE(review): this inner exists() check is redundant
                    # with the one above.
                    if os.path.exists(tmp):
                        data['meta']['remove'] += os.path.getsize(tmp)
                    if data['dryrun'] == False:
                        os.remove(tmp)
            else:
                # Anything that is not _combined is removed wholesale;
                # in dry-run mode only _stored contributes to the counter.
                tmp = ToolBaseFile.size(start_path=_path)
                if data['dryrun'] == False:
                    data['meta']['remove'] += tmp
                    ToolBaseFile.rmtree(_path)
                else:
                    if f == '_stored':
                        data['meta']['remove'] += tmp
    if data['command'] == 'start2':
        return
    # Stage 3: prune per-media thumbnails/art that the DB no longer uses
    # (user_*_url not starting with 'media' means the bundled image is
    # not referenced).
    media_ce = con.execute('SELECT user_thumb_url, user_art_url, media_parts.file, media_parts.hash FROM metadata_items, media_items, media_parts WHERE metadata_items.id = media_items.metadata_item_id AND media_items.id = media_parts.media_item_id AND metadata_items.id = ?;', (data['db']['id'],))
    media_ce.row_factory = dict_factory
    data['media'] = {'total':0, 'remove':0}
    for item in media_ce.fetchall():
        #logger.warning(d(item))
        if item['hash'] == '':
            continue
        # Media bundle path is derived from the part hash: <h>/<ash>.bundle
        mediapath = os.path.join(
            ModelSetting.get('base_path_media'), 'localhost',
            item['hash'][0], f"{item['hash'][1:]}.bundle")
        if os.path.exists(mediapath) == False:
            continue
        data['media']['total'] += ToolBaseFile.size(start_path=mediapath)
        if item['user_thumb_url'].startswith('media') == False:
            img = os.path.join(mediapath, 'Contents', 'Thumbnails',
                               'thumb1.jpg')
            if os.path.exists(img):
                data['media']['remove'] += os.path.getsize(img)
                if data['dryrun'] == False:
                    os.remove(img)
        if item['user_art_url'].startswith('media') == False:
            img = os.path.join(mediapath, 'Contents', 'Art', 'art1.jpg')
            if os.path.exists(img):
                data['media']['remove'] += os.path.getsize(img)
                if data['dryrun'] == False:
                    os.remove(img)
def clear(args):
    """Remove the tree at ``args[0]`` and return the remaining size.

    ``args`` is forwarded unchanged to ``Task.get_size``; the return
    value of ``rmtree2`` is intentionally ignored.
    """
    folder = args[0]
    ToolBaseFile.rmtree2(folder)
    return Task.get_size(args)
def plugin_install_by_api(plugin_git, zip_url, zip_filename):
    """Install a plugin either from a git repository or from a zip file.

    Order of operations: resolve the plugin name, fetch ``info.json``
    (from raw.githubusercontent for git installs, or from inside the
    extracted zip), enforce the install policy (OS / running type /
    member level / points), perform the install (git clone or move of
    the extracted tree), then recursively install dependencies.

    Returns a dict with 'ret' ('success', 'already_exist',
    'not_support_os', 'not_support_running_type', 'policy_level',
    'policy_point', 'exception') and a human-readable 'log'.
    """
    logger.debug('plugin_git : %s', plugin_git)
    logger.debug('zip_url : %s', zip_url)
    logger.debug('zip_filename : %s', zip_filename)
    is_git = plugin_git is not None and plugin_git != ''
    ret = {}
    try:
        # Plugin name: last path segment for git, basename for zip.
        if is_git:
            name = plugin_git.split('/')[-1]
        else:
            name = zip_filename.split('.')[0]
        custom_path = os.path.join(path_data, 'custom')
        plugin_path = os.path.join(custom_path, name)
        logger.debug(plugin_path)
        plugin_info = None
        if os.path.exists(plugin_path):
            ret['ret'] = 'already_exist'
            ret['log'] = '이미 설치되어 있습니다.'
        else:
            if plugin_git and plugin_git.startswith('http'):
                # Try both default branch names for info.json.
                for tag in ['main', 'master']:
                    try:
                        info_url = plugin_git.replace(
                            'github.com', 'raw.githubusercontent.com'
                        ) + '/%s/info.json' % tag
                        plugin_info = requests.get(info_url).json()
                        if plugin_info is not None:
                            break
                    except Exception:
                        # Branch may not exist / JSON may be invalid;
                        # fall through to the next candidate branch.
                        pass
            if zip_filename and zip_filename != '':
                import zipfile
                from tool_base import ToolBaseFile
                zip_filepath = os.path.join(path_data, 'tmp', zip_filename)
                extract_filepath = os.path.join(path_data, 'tmp', name)
                logger.error(zip_url)
                logger.warning(zip_filepath)
                if ToolBaseFile.download(zip_url, zip_filepath):
                    #logger.warning(os.path.exists(zip_filepath))
                    with zipfile.ZipFile(zip_filepath, 'r') as zip_ref:
                        zip_ref.extractall(extract_filepath)
                    plugin_info_filepath = os.path.join(
                        extract_filepath, 'info.json')
                    if os.path.exists(plugin_info_filepath):
                        plugin_info = ToolBaseFile.read_json(
                            plugin_info_filepath)
            if plugin_info is None:
                plugin_info = {}
            # Policy gates: the first failing check wins and stops install.
            flag = True
            if 'platform' in plugin_info:
                if platform.system() not in plugin_info['platform']:
                    ret['ret'] = 'not_support_os'
                    ret['log'] = '설치 가능한 OS가 아닙니다.'
                    flag = False
            if flag and 'running_type' in plugin_info:
                if app.config['config']['running_type'] not in plugin_info[
                        'running_type']:
                    ret['ret'] = 'not_support_running_type'
                    ret['log'] = '설치 가능한 실행타입이 아닙니다.'
                    flag = False
            if flag and 'policy_level' in plugin_info:
                if plugin_info['policy_level'] > app.config['config'][
                        'level']:
                    ret['ret'] = 'policy_level'
                    ret['log'] = '설치 가능 회원등급보다 낮습니다.'
                    flag = False
            if flag and 'policy_point' in plugin_info:
                # BUGFIX: previously compared plugin_info['policy_level']
                # (and reported ret='policy_level'), so the point
                # requirement was never actually enforced.
                if plugin_info['policy_point'] > app.config['config'][
                        'point']:
                    ret['ret'] = 'policy_point'
                    ret['log'] = '설치 가능 포인트보다 낮습니다.'
                    flag = False
            if flag:
                # Initialize so the success path below never sees an
                # unbound 'log' even if neither install branch ran.
                log = ''
                if plugin_git and plugin_git.startswith('http'):
                    command = [
                        'git', '-C', custom_path, 'clone',
                        plugin_git + '.git', '--depth', '1'
                    ]
                    log = Util.execute_command(command)
                if zip_filename and zip_filename != '':
                    import shutil
                    if os.path.exists(plugin_path) == False:
                        shutil.move(extract_filepath, plugin_path)
                    else:
                        # Destination exists (partial install): move the
                        # extracted entries into it one by one.
                        for tmp in os.listdir(extract_filepath):
                            shutil.move(
                                os.path.join(extract_filepath, tmp),
                                plugin_path)
                    log = ''
                logger.debug(plugin_info)
                # 2021-12-31: recursively install declared dependencies
                # that are not already present.
                if 'dependency' in plugin_info:
                    for dep in plugin_info['dependency']:
                        for key, value in LogicPlugin.get_plugin_list(
                        ).items():
                            if key == dep['name']:
                                logger.debug(
                                    f"Dependency 설치 - 이미 설치됨 : {dep['name']}"
                                )
                                break
                        else:
                            # for/else: only runs when no installed plugin
                            # matched the dependency name.
                            logger.debug(f"Dependency 설치 : {dep['home']}")
                            LogicPlugin.plugin_install_by_api(
                                dep['home'], dep.get('zip_url'),
                                dep.get('zip_filename'))
                ret['ret'] = 'success'
                ret['log'] = [u'정상적으로 설치하였습니다. 재시작시 적용됩니다.', log]
                ret['log'] = '<br>'.join(ret['log'])
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
        ret['ret'] = 'exception'
        ret['log'] = str(exception)
    return ret
def artist_process(data, con, cur):
    """Analyze one music artist plus all of its albums.

    Parses the artist's combined Info.xml, then every album's, building
    ``use_filepath`` lists of files referenced by the metadata.  At
    'start2' the artwork URLs are written into the Plex DB (album
    posters are re-hosted on Discord first when they are local files),
    then every metadata file not in a ``use_filepath`` list is removed
    and empty folders are cleaned up.  ``cur`` is unused here.
    """
    data['meta'] = {'remove': 0}
    # Artist metadata bundle path is derived from the hash: <h>/<ash>.bundle
    data['meta']['metapath'] = os.path.join(
        ModelSetting.get('base_path_metadata'), 'Artists',
        data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")
    data['meta']['total'] = ToolBaseFile.size(
        start_path=data['meta']['metapath'])
    if data['command'] == 'start0':
        return
    combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents',
                                    '_combined', 'Info.xml')
    if os.path.exists(combined_xmlpath) == False:
        return
    data['use_filepath'] = []
    data['remove_filepath'] = []
    data['albums'] = {}
    ret = Task.xml_analysis(combined_xmlpath, data)
    if ret == False:
        logger.warning(f"{data['db']['title']} 아티스트 분석 실패")
        return
    # 2022-05-11: albums all have index 1; tracks are in order,
    # so no ORDER BY is needed here.
    #album_cs = con.execute('SELECT * FROM metadata_items WHERE metadata_type = 9 and parent_id = ? ORDER BY "index"', (data['db']['id'],))
    album_cs = con.execute(
        'SELECT * FROM metadata_items WHERE metadata_type = 9 and parent_id = ?',
        (data['db']['id'], ))
    album_cs.row_factory = dict_factory
    data['albums'] = []
    for album in album_cs.fetchall():
        #album_index = album['index']
        #logger.warning(album_index)
        #if album_index not in data['albums']:
        #data['albums'][album_index] = {'db':album, 'use_filepath':[], 'remove_filepath':[]}
        album_data = {
            'db': album,
            'use_filepath': [],
            'remove_filepath': []
        }
        album_data['meta'] = {'remove': 0}
        album_data['meta']['metapath'] = os.path.join(
            ModelSetting.get('base_path_metadata'), 'Albums',
            album_data['db']['hash'][0],
            f"{album_data['db']['hash'][1:]}.bundle")
        data['meta']['total'] += ToolBaseFile.size(
            start_path=album_data['meta']['metapath'])
        combined_xmlpath = os.path.join(album_data['meta']['metapath'],
                                        'Contents', '_combined', 'Info.xml')
        ret = Task.xml_analysis(combined_xmlpath, album_data)
        if ret == False:
            # Album without a parsable Info.xml is skipped entirely.
            logger.warning(combined_xmlpath)
            logger.warning(f"{album_data['db']['title']} 앨범 분석 실패")
        else:
            data['albums'].append(album_data)
    query = ""
    #logger.debug(d(data))
    if data['command'] == 'start2':
        # Point the show at http URLs; build one multi-statement script.
        sql = 'UPDATE metadata_items SET '
        if data['process']['poster']['url'] != '':
            sql += ' user_thumb_url = "{}", '.format(
                data['process']['poster']['url'])
            # The referenced local files are no longer needed once the
            # DB points at the URL — drop them from the keep-list.
            try:
                data['use_filepath'].remove(
                    data['process']['poster']['localpath'])
            except:
                pass
            try:
                data['use_filepath'].remove(
                    data['process']['poster']['realpath'])
            except:
                pass
        if data['process']['art']['url'] != '':
            sql += ' user_art_url = "{}", '.format(
                data['process']['art']['url'])
            try:
                data['use_filepath'].remove(
                    data['process']['art']['localpath'])
            except:
                pass
            try:
                data['use_filepath'].remove(
                    data['process']['art']['realpath'])
            except:
                pass
        if sql != 'UPDATE metadata_items SET ':
            sql = sql.strip().rstrip(',')
            sql += ' WHERE id = {} ;\n'.format(data['db']['id'])
            query += sql
        for album in data['albums']:
            if 'process' not in album:
                continue
            sql = 'UPDATE metadata_items SET '
            # Files created by localmedia get a url set.
            #if album['process']['poster']['url'] != '':
            # 2022-05-11: albums created from tags get their poster
            # uploaded to Discord and the URL set from there.
            if album['process']['poster']['url'] != '' and album[
                    'process']['poster']['url'].startswith(
                        'http') == False:
                if 'localpath' in album['process']['poster'] and album[
                        'process']['poster']['localpath'] != '':
                    localpath = album['process']['poster']['localpath']
                    # Not an absolute POSIX path: assume Windows and
                    # flip separators. TODO confirm this heuristic.
                    if localpath[0] != '/':
                        localpath = localpath.replace('/', '\\')
                    if os.path.exists(localpath):
                        if data['dryrun'] == False:
                            discord_url = ToolExpandDiscord.discord_proxy_image_localfile(
                                localpath)
                            if discord_url is not None:
                                album['process']['poster'][
                                    'url'] = discord_url
                                logger.warning(discord_url)
            if album['process']['poster']['url'].startswith('http'):
                sql += ' user_thumb_url = "{}", '.format(
                    album['process']['poster']['url'])
                try:
                    data['use_filepath'].remove(
                        album['process']['poster']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        album['process']['poster']['realpath'])
                except:
                    pass
            if album['process']['art']['url'] != '':
                sql += ' user_art_url = "{}", '.format(
                    album['process']['art']['url'])
                try:
                    data['use_filepath'].remove(
                        album['process']['art']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        album['process']['art']['realpath'])
                except:
                    pass
            if sql != 'UPDATE metadata_items SET ':
                sql = sql.strip().rstrip(',')
                sql += ' WHERE id = {} ;\n'.format(album['db']['id'])
                query += sql
    #logger.error(data['command'])
    #logger.error(query)
    if query != '' and data['dryrun'] == False:
        PlexDBHandle.execute_query(query)
    #logger.warning(data['meta']['remove'] )
    # Sweep the artist bundle: delete every file not on the keep-list.
    for base, folders, files in os.walk(data['meta']['metapath']):
        for f in files:
            data['file_count'] += 1
            filepath = os.path.join(base, f)
            if filepath not in data['use_filepath']:
                # Dangling symlink: always removed, even in dry-run.
                if os.path.islink(filepath) and os.path.exists(
                        filepath) == False:
                    os.remove(filepath)
                elif os.path.exists(filepath):
                    data['remove_count'] += 1
                    if filepath not in data['remove_filepath']:
                        data['remove_filepath'].append(filepath)
                    # Only count real files toward the freed-size total.
                    if os.path.islink(filepath) == False:
                        data['meta']['remove'] += os.path.getsize(filepath)
                    if data['dryrun'] == False:
                        os.remove(filepath)
    # Same sweep per album bundle.
    for album in data['albums']:
        for base, folders, files in os.walk(album['meta']['metapath']):
            for f in files:
                #logger.warning(data['file_count'])
                #logger.warning(f)
                data['file_count'] += 1
                filepath = os.path.join(base, f)
                if filepath not in album['use_filepath']:
                    if os.path.islink(filepath) and os.path.exists(
                            filepath) == False:
                        os.remove(filepath)
                    elif os.path.exists(filepath):
                        data['remove_count'] += 1
                        if filepath not in album['remove_filepath']:
                            data['remove_filepath'].append(filepath)
                        if os.path.islink(filepath) == False:
                            data['meta']['remove'] += os.path.getsize(
                                filepath)
                        if data['dryrun'] == False:
                            os.remove(filepath)
                else:
                    # NOTE(review): kept album files are appended to the
                    # artist-level use list — presumably for reporting;
                    # verify against the caller.
                    data['use_filepath'].append(filepath)
    # Finally drop any now-empty directories (removedirs also prunes
    # empty parents).
    for base, folders, files in os.walk(data['meta']['metapath']):
        if not folders and not files:
            os.removedirs(base)
    for album in data['albums']:
        for base, folders, files in os.walk(album['meta']['metapath']):
            if not folders and not files:
                os.removedirs(base)
def process_pre(config, base, original_filename, is_dry, data):
    """Apply pre-processing rules to a filename before sorting.

    Rules come from ``config['전처리']`` ('pre-processing') keyed by
    Korean rule names: '변환' (regex rename), '삭제' (delete the file),
    '이동' (move the file to a mapped folder).

    Returns the (possibly rewritten) filename, or ``None`` when the
    file was consumed by a delete/move rule — the caller skips the
    file in that case.
    """
    filename = original_filename
    if '전처리' not in config:
        return filename
    for key, value in config['전처리'].items():
        if key == '변환':
            if value is None:
                continue
            # Each rule rewrites the working filename in sequence.
            for rule in value:
                try:
                    filename = re.sub(rule['source'], rule['target'],
                                      filename).strip()
                except Exception as e:
                    P.logger.error(f"Exception:{e}")
                    P.logger.error(traceback.format_exc())
        elif key == '삭제':
            if value is None:
                continue
            for regex in value:
                try:
                    if re.search(regex, filename):
                        try:
                            data['result_folder'] = 'REMOVE'
                            if is_dry == False:
                                os.remove(
                                    os.path.join(base, original_filename))
                        except Exception as e:
                            P.logger.error(f"Exception:{e}")
                            P.logger.error(traceback.format_exc())
                        finally:
                            # Deliberate: return None even if the delete
                            # failed, so the caller skips this file.
                            # (return-in-finally also swallows any
                            # re-raised exception here.)
                            return
                except Exception as e:
                    P.logger.error(f"Exception:{e}")
                    P.logger.error(traceback.format_exc())
        elif key == '이동':
            if value is None:
                continue
            for target, regex_list in value.items():
                for regex in regex_list:
                    try:
                        if re.search(regex, filename):
                            # '/x' or 'C:' style targets are absolute;
                            # NOTE(review): a 1-char relative target would
                            # raise IndexError on target[1] (caught below).
                            if target[0] == '/' or target[1] == ':':
                                # absolute path
                                target_folder = target
                            else:
                                # Named targets resolve through the
                                # '경로 설정' (path settings) map, else land
                                # under the error folder.
                                if target in config['경로 설정']:
                                    target_folder = config['경로 설정'][
                                        target].format(
                                            error=config['에러 폴더'])
                                else:
                                    target_folder = os.path.join(
                                        config['에러 폴더'], target)
                            data['result_folder'] = target
                            data['result_filename'] = original_filename
                            if is_dry == False:
                                ToolBaseFile.file_move(
                                    os.path.join(base, original_filename),
                                    target_folder, original_filename)
                            # File handled: signal the caller to skip it.
                            return
                    except Exception as e:
                        P.logger.error(f"Exception:{e}")
                        P.logger.error(traceback.format_exc())
    return filename
def thumb_process(data):
    """Stage 1 of artwork cleanup for a movie (metadata_type 1) or
    show (metadata_type 2) bundle.

    Parses the combined Info.xml, maps each TAG (poster/art/banner...)
    to the artwork the DB actually references, then prunes the
    _combined folder down to the referenced files and removes all
    per-agent folders.  Size counters accumulate in ``data['meta']``;
    ``data['dryrun']`` suppresses deletions.
    """
    data['meta'] = {'remove':0}
    #logger.warning(data['db'])
    # NOTE(review): if metadata_type is neither 1 nor 2, 'metapath' and
    # combined_xmlpath stay unbound and the lines below raise — callers
    # presumably filter to movies/shows only; confirm.
    if data['db']['metadata_type'] == 1:
        data['meta']['metapath'] = os.path.join(ModelSetting.get('base_path_metadata'), 'Movies', data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")
        combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents', '_combined', 'Info.xml')
    elif data['db']['metadata_type'] == 2:
        data['meta']['metapath'] = os.path.join(ModelSetting.get('base_path_metadata'), 'TV Shows', data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")
        combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents', '_combined', 'Info.xml')
    data['meta']['total'] = ToolBaseFile.size(start_path=data['meta']['metapath'])
    if data['command'] == 'start0':
        return
    if os.path.exists(combined_xmlpath) == False:
        return
    Task.xml_analysis(combined_xmlpath, data)
    # Build one record per TAG from the DB's user_*_url columns.
    data['process'] = {}
    for tag, value in TAG.items():
        data['process'][tag] = {
            'db' : data['db'][f'user_{value[0]}_url'],
            'db_type' : '',
            'url' : '',
            'filename' : '',
            'location' : '',
        }
    # Match each DB artwork reference to an entry parsed from Info.xml
    # by its trailing filename component.
    for tag, value in TAG.items():
        if data['process'][tag]['db'] != '':
            data['process'][tag]['db_type'] = data['process'][tag]['db'].split('//')[0]
            data['process'][tag]['filename'] = data['process'][tag]['db'].split('/')[-1]
            for item in data['info'][value[1]]:
                if data['process'][tag]['filename'] == item['filename']:
                    data['process'][tag]['url'] = item['url']
                    break
    #logger.error(d(data['process']))
    # Stage 1: in _combined, keep only links to _stored / the referenced
    # files.
    not_remove_filelist = []
    c_metapath = os.path.join(data['meta']['metapath'], 'Contents')
    if os.path.exists(c_metapath):
        for f in os.listdir(c_metapath):
            _path = os.path.join(c_metapath, f)
            # On Windows the data lives directly in _combined, so
            # _stored is unconditionally deleted there?
            if f == '_stored':
                tmp = ToolBaseFile.size(start_path=_path)
                data['meta']['stored'] = tmp
                if platform.system() == 'Windows':
                    data['meta']['remove'] += tmp
                    if data['dryrun'] == False:
                        ToolBaseFile.rmtree(_path)
            elif f == '_combined':
                for tag, value in TAG.items():
                    tag_path = os.path.join(_path, value[1])
                    #logger.warning(tag_path)
                    if os.path.exists(tag_path) == False:
                        continue
                    for img_file in os.listdir(tag_path):
                        img_path = os.path.join(tag_path, img_file)
                        if os.path.islink(img_path):
                            if os.path.realpath(img_path).find('_stored') == -1:
                                # Not a link into the stored files → delete.
                                # The DB url sometimes points at an agent
                                # folder instead of _stored; keep those.
                                #logger.warning(img_file)
                                if img_file == data['process'][tag]['filename']:
                                    logger.error(data['process'][tag]['filename'])
                                    not_remove_filelist.append(data['process'][tag]['filename'])
                                    continue
                                if data['dryrun'] == False:# and os.path.exists(img_path) == True:
                                    os.remove(img_path)
                        else:
                            # Windows: real files, not symlinks.
                            if img_file != data['process'][tag]['filename']:
                                # Not the referenced file → delete.
                                data['meta']['remove'] += os.path.getsize(img_path)
                                if data['dryrun'] == False and os.path.exists(img_path) == True:
                                    os.remove(img_path)
        #if len(not_remove_filelist) == 0:
        # Remove every per-agent folder (everything except the two
        # special directories).
        for f in os.listdir(c_metapath):
            _path = os.path.join(c_metapath, f)
            if f == '_stored' or f == '_combined':
                continue
            tmp = ToolBaseFile.size(start_path=_path)
            data['meta']['remove'] += tmp
            if data['dryrun'] == False:
                ToolBaseFile.rmtree(_path)
        #else:
        if not_remove_filelist:
            logger.error(not_remove_filelist)