def queue_append(queue_list):
    try:
        logger.debug(queue_list)
        new_queue_list = []
        for q in queue_list:
            src, tar = q.split('|')
            tmps = tar.split('/')
            if len(tmps) > 1:
                for i in range(1, len(tmps)):
                    # sanitize each sub-folder name and collapse repeated spaces
                    tmps[i] = Util.change_text_for_use_filename(
                        tmps[i]).replace('   ', ' ').replace(
                            '  ', ' ').rstrip('.').strip()
                new_queue_list.append('%s|%s/%s' % (src, tmps[0], '/'.join(tmps[1:])))
            else:
                new_queue_list.append(q)
        logger.debug(new_queue_list)
        tmp = ModelSetting.get('gclone_queue_list')
        tmp += '\n' + '\n'.join(new_queue_list)
        ModelSetting.set('gclone_queue_list', tmp)
        socketio_callback('refresh_queue', ModelSetting.get('gclone_queue_list'))
        return LogicGclone.start()
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
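# Illustrative sketch (not part of the plugin): how a single queue entry of the
# form "source|remote:{folder_id}/sub/dirs" is normalised by queue_append().
# "sanitize" stands in for Util.change_text_for_use_filename; the remote prefix
# (everything before the first '/') is left untouched.
def _normalize_queue_entry_example(entry, sanitize=lambda s: s):
    src, tar = entry.split('|')
    parts = tar.split('/')
    cleaned = [sanitize(p).replace('  ', ' ').rstrip('.').strip() for p in parts[1:]]
    return '%s|%s' % (src, '/'.join([parts[0]] + cleaned))

# e.g. _normalize_queue_entry_example('gc:{SRC_ID}|gc:{DEST_ID}/Show Name./Season 1 ')
# returns 'gc:{SRC_ID}|gc:{DEST_ID}/Show Name/Season 1'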
def gclone_execute(source, target):
    # Example invocation:
    # ./gclone --config ./gclone.conf copy gc:{1Qs6xsVJF7TkMk00s6W28HjdZ8onx2C4O} gc:{1BhTY6WLPRUkqKukNtQTIDMyjLO_UKMzP} --drive-server-side-across-configs -vvv --progress --tpslimit 3 --transfers 3 --stats 1s
    try:
        data = {'type': 'success', 'msg': u'Target:%s 작업을 시작합니다.' % target}
        socketio.emit("notify", data, namespace='/framework', broadcast=True)
        command = [
            ModelSetting.get('gclone_path'),
            '--config', ModelSetting.get('gclone_config_path'),
            'copy', source, target
        ]
        is_fclone = LogicGclone.is_fclone()
        # force the log level when the binary is fclone
        if is_fclone:
            command += [
                '--stats', '1s',
                '--log-level', 'NOTICE',
                '--stats-log-level', 'NOTICE'
            ]
        else:
            command += ModelSetting.get_list('gclone_fix_option', ' ')
        command += ModelSetting.get_list('gclone_user_option', ' ')
        logger.debug(command)
        if app.config['config']['is_py2']:
            LogicGclone.current_process = subprocess.Popen(
                command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                universal_newlines=True, bufsize=1)
        else:
            LogicGclone.current_process = subprocess.Popen(
                command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                universal_newlines=True)
        LogicGclone.current_data['command'] = ' '.join(command)
        LogicGclone.current_data['log'] = []
        LogicGclone.current_data['files'] = []
        LogicGclone.trans_callback('start')
        if is_fclone:
            LogicGclone.current_log_thread = threading.Thread(
                target=LogicGclone.fclone_log_thread_fuction, args=())
        else:
            LogicGclone.current_log_thread = threading.Thread(
                target=LogicGclone.log_thread_fuction, args=())
        LogicGclone.current_log_thread.start()
        logger.debug('normally process wait()')
        ret = LogicGclone.current_process.wait()
        LogicGclone.current_process = None
        return ret
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
        return 'fail'
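# Illustrative sketch (not part of the plugin): the kind of argv list
# gclone_execute() assembles before Popen, using hypothetical paths and ids.
_example_command = [
    '/app/data/gclone', '--config', '/app/data/gclone.conf',
    'copy', 'gc:{SRC_FOLDER_ID}', 'gc:{DEST_FOLDER_ID}/TV/Show',
    '--drive-server-side-across-configs', '-vvv', '--progress',
    '--tpslimit', '3', '--transfers', '3', '--stats', '1s',
]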
def func():
    LogicGclone.current_data['status'] = 'is_running'
    while True:
        count = 0
        job_list = ModelSetting.get_list('gclone_queue_list', '\n')
        for job in job_list:
            try:
                if LogicGclone.current_data['user_stop']:
                    break
                # a queue line is "source|target", optionally followed by a '#' comment
                tmp = job.split('#')[0].split('|')
                if len(tmp) == 2:
                    target = tmp[1].strip()
                    # substitute the default folder id for an empty {} placeholder
                    target = target.replace(
                        '{}', '{%s}' % ModelSetting.get('gclone_default_folderid'))
                    if target.find('{}') != -1:
                        continue
                    if target.find(':') == -1:
                        continue
                    return_code = LogicGclone.gclone_execute(tmp[0].strip(), target)
                    # 0 means success
                    logger.debug('return_code:%s', return_code)
                    if return_code == 0:
                        # remove the finished job from the persisted queue
                        tmp2 = ModelSetting.get('gclone_queue_list')
                        for t in tmp2.split('\n'):
                            if t.strip().startswith('%s|%s' % (tmp[0], tmp[1])):
                                ModelSetting.set('gclone_queue_list', tmp2.replace(t, ''))
                        socketio_callback('refresh_queue',
                                          ModelSetting.get('gclone_queue_list'))
                        count += 1
            except Exception as e:
                logger.error('Exception:%s', e)
                logger.error(traceback.format_exc())
        if LogicGclone.current_data['user_stop']:
            break
        if count == 0:
            break
    LogicGclone.current_data['status'] = 'ready'
    LogicGclone.current_data['user_stop'] = False
    data = {'type': 'success', 'msg': u'gclone 작업을 완료하였습니다.'}
    socketio.emit("notify", data, namespace='/framework', broadcast=True)
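# Illustrative sketch (not part of the plugin): how the worker loop above
# interprets one queue line. A line is "source|target", an optional trailing
# "#..." comment is stripped, and an empty "{}" in the target is replaced with
# the configured default folder id ("DEFAULT_ID" here is a stand-in value).
def _parse_queue_line_example(line, default_folder_id='DEFAULT_ID'):
    parts = line.split('#')[0].split('|')
    if len(parts) != 2:
        return None  # malformed line: ignored by the worker
    source = parts[0].strip()
    target = parts[1].strip().replace('{}', '{%s}' % default_folder_id)
    if '{}' in target or ':' not in target:
        return None  # still unresolved, or not a remote path
    return source, target

# e.g. _parse_queue_line_example('gc:{SRC_ID}|gc:{}/TV/Show  # queued by gsheet')
# returns ('gc:{SRC_ID}', 'gc:{DEFAULT_ID}/TV/Show')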
def process_ajax(sub, req):
    try:
        if sub == 'start':
            ret = LogicGclone.start()
            return jsonify(ret)
        elif sub == 'stop':
            LogicGclone.current_data['user_stop'] = True
            ret = LogicGclone.kill()
            return jsonify(ret)
        elif sub == 'version':
            command = [ModelSetting.get('gclone_path'), 'version']
            process = subprocess.Popen(command,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT,
                                       universal_newlines=True,
                                       bufsize=1)
            ret = []
            with process.stdout:
                # universal_newlines=True yields str lines, so the sentinel must be ''
                for line in iter(process.stdout.readline, ''):
                    ret.append(line)
            process.wait()  # wait for the subprocess to exit
            return jsonify(ret)
        elif sub == 'view_config':
            from framework.common.util import read_file
            data = read_file(ModelSetting.get('gclone_config_path'))
            return jsonify({'ret': True, 'data': data})
        elif sub == 'gen_config':
            default = '''
[gc]
type = drive
scope = drive
service_account_file = {first_json}
service_account_file_path = {accounts_dir}/
'''
            import glob
            accounts_dir = ModelSetting.get('path_accounts')
            sa_files = glob.glob(os.path.join(accounts_dir, '*.json'))
            if len(sa_files) == 0:
                ret = {'ret': False, 'log': u'json 파일이 없습니다.'}
            else:
                # glob already returns paths that include accounts_dir
                first_json = sa_files[0]
                default = default.format(first_json=first_json,
                                         accounts_dir=accounts_dir)
                logger.debug(default)
                from framework.common.util import read_file, write_file
                config_path = ModelSetting.get('gclone_config_path')
                write_file(default, config_path)
                ret = {'ret': True, 'data': read_file(config_path)}
            return jsonify(ret)
        elif sub == 'log_reset':
            LogicGclone.current_data['log'] = []
            return jsonify('')
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def get_first_json():
    accounts_dir = ModelSetting.get('path_accounts')
    for (path, dirs, files) in os.walk(accounts_dir):
        for fname in files:
            if os.path.splitext(fname)[-1] == '.json':
                return os.path.join(path, fname)
    return None
def get_first_json():
    import glob
    accounts_dir = ModelSetting.get('path_accounts')
    sa_files = glob.glob(os.path.join(accounts_dir, '*.json'))
    if len(sa_files) == 0:
        return None
    # glob already returns paths that include accounts_dir
    return sa_files[0]
def get_random_json():
    import random
    accounts_dir = ModelSetting.get('path_accounts')
    sa_files = []
    for (path, dirs, files) in os.walk(accounts_dir):
        for fname in files:
            if os.path.splitext(fname)[-1] == '.json':
                # keep the full path so files in sub-directories resolve correctly
                sa_files.append(os.path.join(path, fname))
    if len(sa_files) == 0:
        return None
    return random.choice(sa_files)
def get_size(id):
    try:
        entity = ListModelItem.get(id)
        if entity is None:
            return {'ret': False, 'data': '유효한 아이템이 없습니다'}
        command = [
            ModelSetting.get('gclone_path'),
            '--config', ModelSetting.get('gclone_config_path'),
            'size', 'gc:{%s}' % entity.folder_id
        ]
        data = SystemLogicCommand.execute_command_return(command)
        if data.find('Failed') > 0:
            logger.error('failed to get size! (%s)' % (''.join(data)))
            return {'ret': False, 'data': ''.join(data)}
        # "size" prints two lines: "Total objects: N" and "Total size: X (Y Bytes)"
        data = data.split('\n')
        entity.obj_num = int(data[0].split(':')[1].strip())
        entity.str_size = data[1].split(':')[1].split('(')[0].strip()
        entity.byte_size = LogicGSheet.get_byte_size(entity.str_size)
        entity.updated_time = datetime.now()
        logger.debug('getsize: folder_id:%s obj_num: %d, size: %s',
                     entity.folder_id, entity.obj_num, entity.str_size)
        entity.save()
        info_str = '<br>파일수: {obj_num}<br>사이즈: {str_size}'.format(
            obj_num=entity.obj_num, str_size=entity.str_size)

        def func():
            ret = LogicGSheet.update_size(entity.id)

        thread = threading.Thread(target=func, args=())
        thread.daemon = True
        thread.start()
        return {'ret': True, 'data': info_str}
    except Exception as e:
        logger.error('Exception %s', e)
        logger.error(traceback.format_exc())
        return {'ret': False, 'data': '{e}'.format(e=e)}
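# Illustrative sketch (not part of the plugin): parsing the two-line output of
# the "size" command the same way get_size() does. The sample output below is
# an assumption about the rclone-style format ("Total objects" / "Total size").
def _parse_size_output_example(output):
    lines = output.split('\n')
    obj_num = int(lines[0].split(':')[1].strip())
    str_size = lines[1].split(':')[1].split('(')[0].strip()
    return obj_num, str_size

# e.g. _parse_size_output_example('Total objects: 42\nTotal size: 9.626 GBytes (10336019376 Bytes)')
# returns (42, '9.626 GBytes')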
def process_api(sub, req):
    try:
        if sub == 'append':
            ret = {}
            cmd = req.form['cmd']
            tmp = ModelSetting.get('gclone_queue_list')
            if tmp.find(cmd) != -1:
                ret['status'] = 'already_exist'
            else:
                ret['status'] = LogicGclone.current_data['status']
                LogicGclone.queue_append([cmd])
            logger.debug('process_api:%s', ret)
            logger.debug('process_api:%s', cmd)
            return jsonify(ret)
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def gclone_copy(id):
    try:
        entity = ListModelItem.get(id)
        if entity is None:
            return {'ret': False, 'data': '유효한 아이템이 없습니다'}
        category = LogicGSheet.get_user_copy_dest(entity.category)
        logger.debug('category: %s -> %s', entity.category, category)
        from gd_share_client.logic_user import LogicUser
        #logger.debug(category)
        my_remote = LogicUser.instance.get_my_copy_path('gsheet', category)
        logger.debug('my_remote(%s)', my_remote)
        if ModelSetting.get_bool('use_user_setting'):
            dest_folder = entity.title2 if entity.title2 != u'' else entity.title
            gcstring = 'gc:{%s}|%s/%s' % (entity.folder_id, my_remote, dest_folder)
        else:
            # prefix the category to whichever title is available
            dest_folder = entity.category + '/' + (
                entity.title2 if entity.title2 != u'' else entity.title)
            gcstring = 'gc:{%s}|%s/%s' % (entity.folder_id, "gc:{}", dest_folder)
        tmp = ModelSetting.get('gclone_queue_list')
        if tmp.find(gcstring) != -1:
            return {'ret': True, 'data': '이미 큐에 존재합니다.'}
        else:
            LogicGclone.queue_append([gcstring])
            entity.copied_time = datetime.now()
            # update the sheet-level counter only on the first copy of this item
            if entity.copy_count == 0:
                wsentity = WSModelItem.get(entity.sheet_id)
                if wsentity is not None:
                    wsentity.copy_count += 1
                    wsentity.save()
            entity.copy_count += 1
            entity.save()
            return {'ret': True, 'data': '큐에 추가하였습니다.'}
    except Exception as e:
        logger.error('Exception %s', e)
        logger.error(traceback.format_exc())
        return {'ret': False, 'data': 'Exception'}
def search_gsheet(doc_id):
    try:
        ret = []
        logger.debug('start to search_gsheet: %s', doc_id)
        json_file = LogicGSheet.get_first_json()
        if json_file is None:
            logger.error(
                'failed to get json file. please check json file in (%s)',
                ModelSetting.get('path_accounts'))
            return []
        if doc_id.startswith(u'http'):
            doc_url = doc_id
        else:
            doc_url = 'https://docs.google.com/spreadsheets/d/{doc_id}'.format(
                doc_id=doc_id)
        logger.debug('url(%s)', doc_url)
        try:
            import gspread
        except ImportError:
            # install gspread on demand, then retry the import
            os.system("{} install gspread".format(app.config['config']['pip']))
            import gspread
        gsp = gspread.authorize(LogicGSheet.credentials)
        doc = gsp.open_by_url(doc_url)
        for ws in doc.worksheets():
            if LogicGSheet.validate_sheet(ws):
                ret.append({
                    'doc_id': doc.id,
                    'doc_title': doc.title,
                    'doc_url': doc_url,
                    'ws_id': ws.id,
                    'ws_title': ws.title
                })
        return ret
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
        return []
def search_gsheet(doc_id):
    try:
        ret = []
        logger.debug('start to search_gsheet: %s', doc_id)
        json_file = LogicGSheet.get_first_json()
        if json_file is None:
            logger.error(
                'failed to get json file. please check json file in (%s)',
                ModelSetting.get('path_accounts'))
            return []
        scope = [
            'https://spreadsheets.google.com/feeds',
            'https://www.googleapis.com/auth/drive'
        ]
        if doc_id.startswith(u'http'):
            doc_url = doc_id
        else:
            doc_url = 'https://docs.google.com/spreadsheets/d/{doc_id}'.format(
                doc_id=doc_id)
        logger.debug('url(%s)', doc_url)
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            json_file, scope)
        gsp = gspread.authorize(credentials)
        doc = gsp.open_by_url(doc_url)
        for ws in doc.worksheets():
            if LogicGSheet.validate_sheet(ws):
                ret.append({
                    'doc_id': doc.id,
                    'doc_title': doc.title,
                    'doc_url': doc_url,
                    'ws_id': ws.id,
                    'ws_title': ws.title
                })
        return ret
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
        return []
def update_size(entity_id):
    try:
        entity = ListModelItem.get(entity_id)
        wsentity = WSModelItem.get(entity.sheet_id)
        doc_id = wsentity.doc_id
        ws_id = wsentity.ws_id
        logger.debug('start to get item from gsheet: %s, ws:%d', doc_id, ws_id)
        json_file = LogicGSheet.get_random_json()
        if json_file is None:
            logger.error(
                'failed to get json file. please check json file in (%s)',
                ModelSetting.get('path_accounts'))
            return None
        doc_url = wsentity.doc_url
        try:
            import gspread
        except ImportError:
            os.system("{} install gspread".format(app.config['config']['pip']))
            import gspread
        gsp = gspread.authorize(LogicGSheet.credentials)
        doc = gsp.open_by_url(doc_url)
        ws = LogicGSheet.get_worksheet(doc, ws_id)
        # header row gives the columns for size (사이즈) and file count (파일수)
        cols = ws.row_values(1)
        index_size = cols.index(u'사이즈')
        index_obj_num = cols.index(u'파일수')
        cell = ws.find(entity.folder_id)
        ws.update_cell(cell.row, index_size + 1, entity.str_size)
        ws.update_cell(cell.row, index_obj_num + 1, entity.obj_num)
    except Exception as e:
        logger.error('Exception %s', e)
        logger.error(traceback.format_exc())
def is_fclone():
    try:
        command = [ModelSetting.get('gclone_path'), 'version']
        if app.config['config']['is_py2']:
            process = subprocess.Popen(command,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT,
                                       universal_newlines=True,
                                       bufsize=1)
        else:
            process = subprocess.Popen(command,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT,
                                       universal_newlines=True)
        iter_arg = b'' if app.config['config']['is_py2'] else ''
        with process.stdout:
            for line in iter(process.stdout.readline, iter_arg):
                if line.find('fclone') != -1:
                    return True
        return False
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def update_size(entity_id):
    try:
        entity = ListModelItem.get(entity_id)
        wsentity = WSModelItem.get(entity.sheet_id)
        doc_id = wsentity.doc_id
        ws_id = wsentity.ws_id
        logger.debug('start to get item from gsheet: %s, ws:%d', doc_id, ws_id)
        json_file = LogicGSheet.get_first_json()
        if json_file is None:
            logger.error(
                'failed to get json file. please check json file in (%s)',
                ModelSetting.get('path_accounts'))
            return None
        scope = [
            'https://spreadsheets.google.com/feeds',
            'https://www.googleapis.com/auth/drive'
        ]
        doc_url = wsentity.doc_url
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            json_file, scope)
        gsp = gspread.authorize(credentials)
        doc = gsp.open_by_url(doc_url)
        ws = LogicGSheet.get_worksheet(doc, ws_id)
        # header row gives the columns for size (사이즈) and file count (파일수)
        cols = ws.row_values(1)
        index_size = cols.index(u'사이즈')
        index_obj_num = cols.index(u'파일수')
        cell = ws.find(entity.folder_id)
        ws.update_cell(cell.row, index_size + 1, entity.str_size)
        ws.update_cell(cell.row, index_obj_num + 1, entity.obj_num)
    except Exception as e:
        logger.error('Exception %s', e)
        logger.error(traceback.format_exc())
def load_items(wsmodel_id):
    try:
        ret = []
        wsentity = WSModelItem.get(wsmodel_id)
        if wsentity is None:
            return None
        # refresh the counters in case items were deleted from the list
        if wsentity.total_count > 0:
            wsentity.total_count = ListModelItem.get_total_count(wsentity.id)
            wsentity.copy_count = ListModelItem.get_copy_count(wsentity.id)
            wsentity.save()
        doc_id = wsentity.doc_id
        ws_id = wsentity.ws_id
        logger.debug('start to get items from gsheet: %s, ws:%d', doc_id, ws_id)
        json_file = LogicGSheet.get_first_json()
        if json_file is None:
            logger.error(
                'failed to get json file. please check json file in (%s)',
                ModelSetting.get('path_accounts'))
            return ret
        scope = [
            'https://spreadsheets.google.com/feeds',
            'https://www.googleapis.com/auth/drive'
        ]
        doc_url = wsentity.doc_url
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            json_file, scope)
        gsp = gspread.authorize(credentials)
        doc = gsp.open_by_url(doc_url)
        ws = LogicGSheet.get_worksheet(doc, ws_id)
        count = 0
        scount = 0
        for r in ws.get_all_records(head=1):
            try:
                # skip rows without a folder id or a category
                if r[u'분류'] == '' or r[u'폴더 ID'] == '':
                    scount += 1
                    continue
                # tolerate missing file-count / size cells
                if r[u'파일수'] == '':
                    obj_num = 0
                else:
                    obj_num = int(r[u'파일수'])
                if r[u'사이즈'] == '':
                    str_size = '-'
                else:
                    str_size = r[u'사이즈']
                # skip empty folders (0 files, 0 Bytes)
                if obj_num == 0 and str_size == u'0 Bytes':
                    scount += 1
                    continue
                info = {
                    'sheet_id': wsmodel_id,
                    'title': r[u'제목'],
                    'folder_id': r[u'폴더 ID'],
                    'category': r[u'분류'],
                    'title2': r[u'제목 매핑'],
                    'obj_num': obj_num,
                    'str_size': str_size
                }
                entity = ListModelItem.create(info)
                if entity is None:
                    #logger.debug('already exist item(folder_id:%s)', info['folder_id'])
                    scount += 1
                    continue
                count += 1
            except KeyError:
                logger.error('failed to get item info')
                logger.error(r)
                continue
        wsentity.updated_time = datetime.now()
        wsentity.total_count += count
        wsentity.save()
        logger.info('{count} 항목을 추가하였습니다(스킵: {scount}건)'.format(
            count=count, scount=scount))
        ret = {
            'ret': True,
            'data': '{count} 항목을 추가하였습니다(스킵: {scount}건)'.format(
                count=count, scount=scount)
        }
        return ret
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
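# Illustrative sketch (not part of the plugin): the shape of one record returned
# by ws.get_all_records(head=1) for load_items() above, and the info dict it is
# mapped to. Column headers match the sheet; the values are hypothetical sample data.
_example_sheet_row = {
    u'제목': u'Some Show',        # title
    u'제목 매핑': u'',            # optional mapped title (title2)
    u'분류': u'TV',               # category
    u'폴더 ID': u'1AbCdEfGh',     # Google Drive folder id
    u'파일수': u'120',            # file count
    u'사이즈': u'9.6 GBytes',     # human-readable size
}

_example_info = {
    'sheet_id': 1,                # wsmodel_id of the worksheet entry
    'title': _example_sheet_row[u'제목'],
    'folder_id': _example_sheet_row[u'폴더 ID'],
    'category': _example_sheet_row[u'분류'],
    'title2': _example_sheet_row[u'제목 매핑'],
    'obj_num': int(_example_sheet_row[u'파일수']),
    'str_size': _example_sheet_row[u'사이즈'],
}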