def executeByPlan(self, planXml):
    """Execute every case group described by *planXml*, in order.

    For each group: run its precondition hook (if any), execute the
    group's cases module by module (sorted by module name), then run its
    postcondition hook. The whole plan is aborted when a condition's
    module cannot be resolved.
    """
    for i in range(0, planXml.getGroupCount()):
        #TODO: execute precondition
        precondStr = planXml.getGroupCondition(i)[0]
        postcondStr = planXml.getGroupCondition(i)[1]
        # Precondition failures matter, so run with the strict handler.
        if precondStr != 'None':
            if not self._runPlanCondition(precondStr, Global.DEFAULT_ERROR_HANDLER):
                return
        for moduleName in sorted(planXml.getCaseList(i).keys()):
            #print(moduleName+" : ")
            #print(planXml.getCaseList(i)[moduleName])
            self.executeCase(moduleName, planXml.getCaseList(i)[moduleName])
        # Postconditions are cleanup; their errors are ignored by the handler.
        if postcondStr != 'None':
            if not self._runPlanCondition(postcondStr, Global.IGNORE_ERROR_HANDLER):
                return

def _runPlanCondition(self, condStr, errorHandler):
    """Resolve ``module.func`` from *condStr* and invoke it with *errorHandler*.

    Returns False (after logging) when the target module is not loaded,
    True once the condition function has been called. Extracted because the
    precondition and postcondition paths were identical apart from the handler.
    """
    (moduleName, funcName) = Util.getModuleAndAPI(condStr)
    if Global.CASE_FOLDER + "." + moduleName not in self.mModuleObjs:
        print("[Error] Cannot find module %s under %s" %
              (moduleName, Global.CASE_FOLDER))
        return False
    condFunc = getattr(
        self.mModuleObjs[Global.CASE_FOLDER + "." + moduleName], funcName)
    condFunc(errorHandler)
    return True
def handler(self, params):
    """Default error handler: grab a screenshot, record the call stack and
    error message on the case manager, and optionally pause for inspection.

    params -- dict keyed by ``Global.ERROR_*`` constants; values may be None.
    """
    # Screenshot is best-effort; its failure must not mask the original error.
    try:
        Util.taskScreenshot(Global.mCaseManager.getLogPath() + "\\failed.bmp")
    except Exception:
        print("\n" + traceback.format_exc())
        print("\nCannot task screenshot")
    if params[Global.ERROR_CALLSTACK] is not None:
        # Flatten the captured stack into one string for the report
        # (join avoids the quadratic += concatenation loop).
        stackStr = "".join(
            line.strip()
            for line in params[Global.ERROR_CALLSTACK].format_stack())
        Global.mCaseManager.setTraceStack(stackStr)
        print("\n" + stackStr)
    if params[Global.ERROR_MESSAGE] is not None:
        Global.mCaseManager.setErrorMessage(params[Global.ERROR_MESSAGE])
        # Printing may fail on consoles whose encoding can't render the text.
        try:
            print("\n" + params[Global.ERROR_MESSAGE])
        except Exception:
            print("\nFail to print error message")
    # A 'wait' flag keeps the failed state on screen until the user confirms.
    if 'wait' in Global.ADD_FLAG:
        input("Wait for click ...")
def isExecute(self, caseInfo):
    """Decide whether *caseInfo* should run in this session.

    A case is skipped when its declared platform bitness does not match the
    host, or when it is a stress case and stress runs were not requested.
    """
    declaredPlatform = caseInfo[CaseInfo.PLATFORM]
    if declaredPlatform != 'all':
        # The case must match the host bitness exactly.
        hostBitness = '64' if Util.is64Platform() else '32'
        if declaredPlatform != hostBitness:
            return False
    if Global.mExecuteType == 'all':
        return True
    # Stress cases only run when the execute type mentions "stress".
    isStressCase = bool(caseInfo[CaseInfo.TYPE] & CaseInfo.STRESS)
    if isStressCase and Global.mExecuteType.find("stress") == -1:
        return False
    return True
def process_menu(self, sub, req):
    """Render the page for menu *sub* ('setting' or 'list').

    *req* is accepted for interface compatibility but not used here.
    Unknown menus fall back to sample.html.
    """
    arg = P.ModelSetting.to_dict()
    arg['sub'] = self.name
    if sub == 'setting':
        arg['scheduler'] = str(
            scheduler.is_include(self.get_scheduler_name()))
        arg['is_running'] = str(
            scheduler.is_running(self.get_scheduler_name()))
        arg['rss_api'] = '%s/%s/api/%s/rss' % (
            SystemModelSetting.get('ddns'), package_name, self.name)
        arg['rss_api'] = Util.make_apikey(arg['rss_api'])
        return render_template(
            '{package_name}_{module_name}_{sub}.html'.format(
                package_name=package_name, module_name=self.name, sub=sub),
            arg=arg)
    elif sub == 'list':
        # Probe for the optional torrent_info plugin; only its absence is
        # expected here, so catch ImportError rather than everything.
        arg['is_torrent_info_installed'] = False
        try:
            import torrent_info
            arg['is_torrent_info_installed'] = True
        except ImportError:
            pass
        arg['ddns'] = SystemModelSetting.get('ddns')
        arg['show_log'] = ModelSetting.get_bool('show_log')
        arg['show_poster'] = ModelSetting.get('show_poster')
        return render_template(
            '{package_name}_{module_name}_{sub}.html'.format(
                package_name=package_name, module_name=self.name, sub=sub),
            arg=arg)
    return render_template('sample.html',
                           title='%s - %s' % (package_name, sub))
def process_telegram_data(self, data, target=None):
    """Handle an incoming telegram payload: persist it, optionally notify,
    and forward new data to the server-side torrent processor.

    *target* is accepted for interface compatibility but not used here.
    """
    try:
        ret = ModelItem.process_telegram_data(data)
        logger.debug(data)
        #ret = None
        if ret is not None:
            if ModelSetting.get_bool('receive_send_notify'):
                msg = '😉 AV 정보 수신\n'
                msg += '제목 : [%s] %s (%s)\n' % (ret.code, ret.title, ret.date)
                msg += '파일 : %s\n' % ret.filename
                msg += '폴더 : %s\n' % ret.dirname
                msg += '크기 : %s\n' % Util.sizeof_fmt(ret.total_size)
                url = '%s/%s/api/%s/add_download?id=%s' % (
                    SystemModelSetting.get('ddns'), package_name, self.name, ret.id)
                if SystemModelSetting.get_bool('auth_use_apikey'):
                    url += '&apikey=%s' % SystemModelSetting.get('auth_apikey')
                # Servers get the raw magnet; clients get a clickable add-download link.
                if app.config['config']['is_server']:
                    msg += '\n' + ret.magnet + '\n'
                else:
                    msg += '\n➕ 다운로드 추가\n<%s>\n' % url
                    #msg += '\n➕ 다운로드 추가\n<%s>\n' % url
                poster = ret.poster if ModelSetting.get_bool('show_poster_notify') else None
                ToolBaseNotify.send_message(msg, image_url=poster,
                                            message_id='bot_downloader_av_receive')
            self.invoke()
            # Best-effort forwarding; tool_expand may be absent on clients.
            try:
                if app.config['config']['is_server']:
                    from tool_expand import TorrentProcess
                    TorrentProcess.receive_new_data(ret, package_name)
            except Exception:
                pass
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def __call__(self, f):
    """Decorator protocol: register *f* as a test case, return it unchanged.

    The case id is the function's own name; the owning package is derived
    from the source file the function was defined in.
    """
    self.mCase[CaseInfo.ID] = f.__name__
    sourceFile = inspect.getmodule(f).__file__
    packageName = Util.convertToPackageName(sourceFile)
    Global.mCaseManager.addCase(packageName, self.mCase)
    return f
def dumpHtml(self, fileName, systemInfo):
    """Write the full test-run report as XML (styled via charmer_result.xsl).

    fileName   -- output path, written as UTF-8.
    systemInfo -- dict of host/device facts keyed by self.PLAT/APP_VER/... .

    Emits, in order: run header, device/feature info, host/tool settings,
    a pass/fail summary, one <TestPackage> per module with a <Test> per
    executed case, and finally per-process peak-performance data collected
    while the cases ran.
    """
    # NOTE(review): no try/finally around the file handle — an exception
    # mid-dump leaks fout and leaves a truncated report.
    fout = codecs.open(fileName, 'w', encoding='utf8')
    fout.write('<?xml version="1.0" encoding="UTF-8" standalone="no"?>' + "\n")
    fout.write(
        '<?xml-stylesheet type="text/xsl" href="charmer_result.xsl"?>' + "\n")
    fout.write("<TestResult endtime=\"%s\" starttime=\"%s\" testPlan=\"%s\" version=\"%s\">\n"% \
        (datetime.fromtimestamp(Global.mEndTime), datetime.fromtimestamp(Global.mStartTime),Global.TEST_PLAN, Global.VERSION))
    fout.write("<DeviceInfo>\n")
    fout.write("<BuildInfo build_model=\"Charmer\" deviceID=\"%s\" buildVersion=\"%s\" locales=\"%s;\" network=\"%s\" partitions=\"%s\"/>\n"% \
        (systemInfo[self.PLAT], systemInfo[self.APP_VER], systemInfo[self.LOCALES], systemInfo[self.NETIP], systemInfo[self.PART]))
    fout.write("<FeatureInfo>\n")
    #fout.write("<FeatureInfo>\n")
    for feature in Global.mFeatureList.keys():
        fout.write(
            "<Feature available=\"%s\" name=\"%s\" type=\"sdk\"/>\n" %
            (Global.mFeatureList[feature], feature))
    fout.write("</FeatureInfo>\n")
    fout.write("</DeviceInfo>\n")
    fout.write("<HostInfo name=\"%s\">\n" % systemInfo[self.NETNAME])
    fout.write("<Charmer version=\"%s\">\n" % Global.VERSION)
    fout.write("<IntValue name=\"testStatusTimeoutMs\" value=\"%d\"/>\n" %
               Global.CASE_TIMEOUT)
    # Serialize the monitored-process boundary table as "name<TAB>MB;" pairs.
    perfStr = "ProcessName\tBoundary(MB);"
    for monitorObj in Global.MONITOR_PROC.keys():
        perfStr = perfStr + monitorObj + "\t" + str(
            Global.MONITOR_PROC[monitorObj]) + ";"
    fout.write("<IntValue name=\"testMonitorPerf\" value=\"%s\"/>\n" % perfStr)
    fout.write("</Charmer>\n")
    fout.write("</HostInfo>\n")
    fout.write("<Summary failed=\"%d\" notExecuted=\"%d\" pass=\"%d\" timeout=\"%d\"/>\n"% \
        (Global.mTestResult[CaseInfo.RESULT_FAIL], Global.mTestResult[CaseInfo.RESULT_SKIP], \
        Global.mTestResult[CaseInfo.RESULT_PASS], Global.mTestResult[CaseInfo.RESULT_TIMEOUT]))
    # procPerf: process name -> ProcessPerf holding, per category, the peak
    # value seen across all cases. procCaseName: process name -> the case id
    # (module.case) that produced each category's current peak.
    procPerf = {}
    procCaseName = {}
    for moduleName in Global.mCaseManager.getCaseMap().keys():
        fout.write("<TestPackage name=\"%s\">\n" % moduleName)
        fout.write("<TestSuite>\n")
        for caseInfo in Global.mCaseManager.getCaseMap()[moduleName]:
            # Cases that never started are not reported.
            if CaseInfo.START not in caseInfo:
                continue
            if CaseInfo.PERF in caseInfo:
                perfResultMap = caseInfo[CaseInfo.PERF]
                for pidName in perfResultMap.keys():
                    processPerfObj = perfResultMap[pidName]
                    if not processPerfObj.mProcessName in procPerf:
                        # First sighting of this process: seed the per-category
                        # maxima and attribute them all to this case.
                        procPerf[processPerfObj.
                                 mProcessName] = PerfMonitor.ProcessPerf()
                        procPerf[
                            processPerfObj.
                            mProcessName].mCategoryList = processPerfObj.mCategoryList
                        procCaseName[processPerfObj.mProcessName] = []
                        for aCategory in processPerfObj.mCategoryList:
                            procCaseName[
                                processPerfObj.mProcessName].append(
                                    moduleName + "." + caseInfo[CaseInfo.ID])
                    elif procPerf[processPerfObj.mProcessName] != None:
                        # Already tracked: keep whichever per-category max is larger.
                        count = 0
                        for aPerfObj in processPerfObj.mCategoryList:
                            if procPerf[
                                    processPerfObj.
                                    mProcessName].mCategoryList[
                                        count].mMaxValue < aPerfObj.mMaxValue:
                                procPerf[processPerfObj.
                                         mProcessName].mCategoryList[
                                             count] = aPerfObj
                                procCaseName[processPerfObj.mProcessName][
                                    count] = moduleName + "." + caseInfo[
                                        CaseInfo.ID]
                            count += 1
            '''
            if not processPerfObj.mProcessName in procPerf :
                procPerf[processPerfObj.mProcessName] = processPerfObj
                procPackage[processPerfObj.mProcessName] = moduleName+"."+caseInfo[CaseInfo.ID]
            elif procPerf[processPerfObj.mProcessName].mMaxMemory < processPerfObj.mMaxMemory :
                procPerf[processPerfObj.mProcessName] = processPerfObj
                procPackage[processPerfObj.mProcessName] = moduleName+"."+caseInfo[CaseInfo.ID]
            '''
            startTime = ""
            endTime = ""
            if CaseInfo.END in caseInfo:
                endTime = datetime.fromtimestamp(caseInfo[CaseInfo.END])
            if CaseInfo.START in caseInfo:
                startTime = datetime.fromtimestamp(
                    caseInfo[CaseInfo.START])
            logPath = Util.convertToPath(moduleName)
            logPath = logPath + os.sep + caseInfo[
                CaseInfo.ID] + os.sep + "detailResult.xml"
            # Failed cases carry an extra message attribute with the
            # XML-escaped error text.
            if caseInfo[CaseInfo.RESULT] == CaseInfo.RESULT_FAIL:
                message = ''
                if CaseInfo.ERROR in caseInfo:
                    message = FileUtil.replaceXmlRemainedKey(
                        caseInfo[CaseInfo.ERROR])
                fout.write("<Test endtime=\"%s\" name=\"%s\" result=\"%s\" starttime=\"%s\" logPath=\"%s\" message=\"%s\"/>\n"% \
                    (endTime, caseInfo[CaseInfo.ID]+": "+FileUtil.replaceXmlRemainedKey(caseInfo[CaseInfo.DESCRP]), CaseInfo.resultToString(caseInfo[CaseInfo.RESULT]), startTime, logPath, message))
            else:
                fout.write("<Test endtime=\"%s\" name=\"%s\" result=\"%s\" starttime=\"%s\" logPath=\"%s\"/>\n"% \
                    (endTime, caseInfo[CaseInfo.ID]+": "+FileUtil.replaceXmlRemainedKey(caseInfo[CaseInfo.DESCRP]), CaseInfo.resultToString(caseInfo[CaseInfo.RESULT]), startTime, logPath))
        fout.write("</TestSuite>\n")
        fout.write("</TestPackage>\n")
    # Performance section: one <Proc> per monitored process, each category's
    # recorded peak tagged with the case that produced it.
    if len(procPerf.keys()) > 0:
        fout.write("<Performance>\n")
        for procName in procPerf.keys():
            fout.write("\t<Proc name=\"%s\">" % procName)
            count = 0
            for aCategory in procPerf[procName].mCategoryList:
                fout.write(
                    "\t\t<Category maximum=\"%f (%s)\" average=\"%f\"/>\n" %
                    (aCategory.mMaxValue, procCaseName[procName][count],
                     aCategory.mAveValue))
                count += 1
            fout.write("\t</Proc>")
            fout.write(" <Category>\n")
            for aCategory in procPerf[procName].mCategoryList:
                fout.write("\t<Descrip name=\"%s\"/>\n" % aCategory.mName)
            fout.write(" </Category>\n")
        fout.write("</Performance>\n")
    fout.write("</TestResult>\n")
    fout.close()
def executeCase(self, moduleName, caseIds=None):
    """Run the cases of one module, with per-module prolog/epilog hooks.

    moduleName -- loaded module key; if unknown, it is re-parsed as a
                  "module.case" string via Util.getModuleAndAPI.
    caseIds    -- None (run all), a substring to match against case ids,
                  or a list of exact case ids (a list containing None
                  disables the filter).
    Returns False when the module cannot be resolved; otherwise returns
    None after executing (result recording happens via updateCaseInfo).
    """
    #if moduleName in self.mModuleObjs :
    if moduleName not in self.mModuleObjs:
        (moduleName, caseIds) = Util.getModuleAndAPI(moduleName)
        if moduleName not in self.mModuleObjs or moduleName not in Global.mCaseManager.getCaseMap(
        ):
            return False
    print("---[%s]----------------------------" % moduleName)
    errorHandler = None
    envFailedFlag = False
    # Set default error handler
    # NOTE(review): getattr with no default raises AttributeError when the
    # module lacks getErrorHandler — the None check below suggests a default
    # was intended; confirm against BaseCases.
    getErrorHandlerFunc = getattr(self.mModuleObjs[moduleName],
                                  "getErrorHandler")
    if getErrorHandlerFunc != None:
        errorHandler = getErrorHandlerFunc()
    if errorHandler == None:
        errorHandler = Global.DEFAULT_ERROR_HANDLER
    # Module-level setup; a failing prolog marks the environment bad so the
    # remaining cases are failed without being run.
    prologFunc = getattr(self.mModuleObjs[moduleName], BaseCases.PROLOG_FUNC)
    if prologFunc != None:
        try:
            prologFunc(errorHandler)
        except Exception:
            print(traceback.format_exc())
            envFailedFlag = True
    for caseInfo in Global.mCaseManager.getCaseMap()[moduleName]:
        # Filter: platform/type gate, then the optional caseIds selector.
        if not self.isExecute(caseInfo):
            continue
        elif caseIds != None:
            if not isinstance(caseIds, list):
                # Single string: substring match against the case id.
                if caseInfo[CaseInfo.ID].find(caseIds) == -1:
                    continue
            else:
                # List: exact membership; a None entry means "run all".
                if None in caseIds:
                    pass
                elif caseInfo[CaseInfo.ID] not in caseIds:
                    continue
        elif envFailedFlag:
            # Prolog failed: record the case as failed without executing it.
            self.updateCaseInfo(caseInfo, result=CaseInfo.RESULT_FAIL)
            continue
        Global.mCaseManager.setWorkingCase(caseInfo)
        logPath = Global.OUT_LOG + os.sep + Util.convertToPath(
            moduleName) + os.sep + caseInfo[CaseInfo.ID]
        caseInfo[CaseInfo.LOGPATH] = logPath
        if not os.path.exists(logPath):
            os.makedirs(logPath)
        # A per-case error handler overrides the module default.
        if caseInfo[CaseInfo.HERROR] != None:
            errorHandler = caseInfo[CaseInfo.HERROR]
        caseFunc = getattr(self.mModuleObjs[moduleName],
                           caseInfo[CaseInfo.ID])
        setupFunc = getattr(self.mModuleObjs[moduleName],
                            BaseCases.SETUP_FUNC)
        tearDownFunc = getattr(self.mModuleObjs[moduleName],
                               BaseCases.TEARDOWN_FUNC)
        descrp = ""
        timeout = Global.CASE_TIMEOUT
        if CaseInfo.TIMEOUT in caseInfo:
            timeout = caseInfo[CaseInfo.TIMEOUT]
        if caseInfo[CaseInfo.DESCRP] != None:
            descrp = caseInfo[CaseInfo.DESCRP]
        print(caseInfo[CaseInfo.ID] + " : " + descrp, end="")
        caseInfo[CaseInfo.START] = time.time()
        # Start the performance monitor before the case so memoryStat captures
        # the whole run.
        perfMonitor = PerfMonitor.PerformanceThread(
            Global.MONITOR_PROC, logPath)
        memoryStat = {}
        perfMonitor.setPerformanceStatistics(memoryStat)
        perfMonitor.start()
        finalResult = CaseInfo.RESULT_SKIP
        # Launch executionThread to execute case, and tolerate failed count
        for failedCount in range(0, caseInfo[CaseInfo.TOLERATE]):
            if setupFunc != None:
                try:
                    setupFunc(errorHandler)
                except:
                    print(traceback.format_exc())
                    self.updateCaseInfo(caseInfo,
                                        result=CaseInfo.RESULT_FAIL)
                    break
            caseThread = ExecutionThread(caseFunc)
            caseThread.setErrorHandler(errorHandler)
            # Non-manual cases run on the worker thread under the timeout;
            # a still-alive thread is force-stopped.
            # NOTE(review): manual cases appear to skip start/join entirely —
            # confirm ExecutionThread.getResult is valid in that state.
            if not caseInfo[CaseInfo.TYPE] & CaseInfo.MANUAL:
                caseThread.start()
                caseThread.join(timeout)
                if caseThread.isAlive():
                    caseThread._stop()
            finalResult = caseThread.getResult()
            if tearDownFunc != None:
                try:
                    tearDownFunc(errorHandler)
                except:
                    print(traceback.format_exc())
            # Only retry (up to TOLERATE times) after an outright failure.
            if finalResult != CaseInfo.RESULT_FAIL:
                break
            time.sleep(2)
        perfMonitor.stopMonitor()
        perfMonitor.join(timeout)
        caseInfo[CaseInfo.PERF] = memoryStat
        # A non-empty boundary report means a monitored process exceeded its
        # memory limit: the case is failed and the report appended to ERROR.
        perfResult = self.checkProcessBoundary(caseInfo[CaseInfo.PERF])
        print("\n" + perfResult)
        if perfResult != '':
            finalResult = CaseInfo.RESULT_FAIL
            if CaseInfo.ERROR not in caseInfo or caseInfo[
                    CaseInfo.ERROR] == None:
                caseInfo[CaseInfo.ERROR] = ''
            caseInfo[
                CaseInfo.ERROR] = caseInfo[CaseInfo.ERROR] + perfResult
        self.updateCaseInfo(caseInfo, result=finalResult)
        DumpInfo.dumpCaseAsHtml(caseInfo, logPath)
    # Module-level cleanup runs regardless of individual case outcomes.
    epilogFunc = getattr(self.mModuleObjs[moduleName], BaseCases.EPILOG_FUNC)
    if epilogFunc != None:
        try:
            epilogFunc(errorHandler)
        except:
            print(traceback.format_exc())
def __init__(self):
    """Load every case module found under the configured case folder."""
    caseFolder = "." + os.sep + Global.CASE_FOLDER
    self.mModuleObjs = Util.load_from_folder(caseFolder)
def scheduler_function_torrent_check(self):
    """Scan ModelItem rows newer than the last processed id and decide,
    per item, whether to download it.

    The decision pipeline (each step appends to item.log): auto-download
    setting, censored meta type, blacklist/whitelist filters (folder name,
    file name, label, genre, performer), size limits, censored file-count
    limits, and a server-id modulus check. Matching items are handed to the
    downloader (or only status-marked), then the watermark 'last_id' is
    advanced and everything committed.
    """
    try:
        last_id = ModelSetting.get_int('last_id')
        flag_first = False
        if last_id == -1:
            flag_first = True
            # First run is signalled by -1 (0 would mean the bot was never
            # configured); seed with the last 7 days of items.
            query = db.session.query(ModelItem) \
                .filter(ModelItem.created_time > datetime.datetime.now() + datetime.timedelta(days=-7))
            items = query.all()
        else:
            query = db.session.query(ModelItem) \
                .filter(ModelItem.id > last_id )
            items = query.all()
        # Judge each new item one by one.
        logger.debug('New Feed.. last_id:%s count :%s', last_id, len(items))
        for item in items:
            try:
                flag_download = False
                item.download_status = ''
                item.downloader_item_id = None
                item.log = ''
                option_auto_download = ModelSetting.get(
                    '%s_auto_download' % item.av_type)
                if option_auto_download == '0':
                    item.download_status = 'no'
                else:
                    # censored - meta condition only.
                    flag_download = True
                    if flag_download and item.av_type == 'censored':
                        if ModelSetting.get(
                                'censored_option_meta'
                        ) == '1' and item.meta_type == 'ama':
                            flag_download = False
                            item.log += u'0. censored mode : False\n'
                        if ModelSetting.get(
                                'censored_option_meta'
                        ) == '2' and item.meta_type == 'dvd':
                            flag_download = False
                            item.log += u'0. censored mode : False\n'
                    if flag_download:
                        # Blacklist: download unless a filter matches.
                        # Whitelist: skip unless a filter matches.
                        mode = 'blacklist' if ModelSetting.get(
                            '%s_option_mode' % item.av_type) == '0' else 'whitelist'
                        item.log += u'1. 모드 - %s. 다운여부 : ' % (mode)
                        if mode == 'blacklist':
                            flag_download = True
                            item.log += u'%s\n' % flag_download
                            # 2020-07-20 western folder-name condition.
                            if flag_download and item.av_type == 'western':
                                ret = self.check_option(
                                    '%s_option_foldername_filter' % item.av_type,
                                    item.dirname)
                                if ret is not None:
                                    flag_download = not ret
                                    item.log += u'0. 폴더명 - %s : %s\n' % (
                                        item.dirname, flag_download)
                            if flag_download:
                                ret = self.check_option(
                                    '%s_option_filter' % item.av_type,
                                    item.filename)
                                if ret is not None:
                                    flag_download = not ret
                                    item.log += u'1. 파일명 - %s : %s\n' % (
                                        item.filename, flag_download)
                            if flag_download:
                                ret = self.check_option(
                                    '%s_option_label' % item.av_type,
                                    item.code)
                                if ret is not None:
                                    flag_download = not ret
                                    item.log += u'2. 레이블 - %s : %s\n' % (
                                        item.code, flag_download)
                            if flag_download:
                                ret = self.check_option(
                                    '%s_option_genre' % item.av_type,
                                    item.genre)
                                if ret is not None:
                                    flag_download = not ret
                                    item.log += u'3. 장르 - %s : %s\n' % (
                                        item.genre, flag_download)
                            if flag_download:
                                ret = self.check_option(
                                    '%s_option_performer' % item.av_type,
                                    item.performer)
                                if ret is not None:
                                    flag_download = not ret
                                    item.log += u'4. 배우 - %s : %s\n' % (
                                        item.performer, flag_download)
                        else:
                            flag_download = False
                            item.log += u'%s\n' % flag_download
                            if not flag_download and item.av_type == 'western':
                                ret = self.check_option(
                                    '%s_option_foldername_filter' % item.av_type,
                                    item.dirname)
                                if ret is not None:
                                    flag_download = ret
                                    item.log += u'0. 폴더명 - %s : %s\n' % (
                                        item.dirname, flag_download)
                            if not flag_download:
                                ret = self.check_option(
                                    '%s_option_filter' % item.av_type,
                                    item.filename)
                                if ret is not None:
                                    flag_download = ret
                                    item.log += u'1. 파일명 - %s : %s\n' % (
                                        item.filename, flag_download)
                            if not flag_download:
                                ret = self.check_option(
                                    '%s_option_label' % item.av_type,
                                    item.code)
                                if ret is not None:
                                    flag_download = ret
                                    item.log += u'2. 레이블 - %s : %s\n' % (
                                        item.code, flag_download)
                            if not flag_download:
                                ret = self.check_option(
                                    '%s_option_genre' % item.av_type,
                                    item.genre)
                                if ret is not None:
                                    flag_download = ret
                                    item.log += u'3. 장르 - %s : %s\n' % (
                                        item.genre, flag_download)
                            if not flag_download:
                                ret = self.check_option(
                                    '%s_option_performer' % item.av_type,
                                    item.performer)
                                if ret is not None:
                                    flag_download = ret
                                    item.log += u'4. 배우 - %s : %s\n' % (
                                        item.performer, flag_download)
                    if flag_download:  # and item.av_type == 'censored':
                        # Size limits are configured in GiB; 0 disables a bound.
                        try:
                            option_min_size = float(
                                str(
                                    ModelSetting.get('%s_option_min_size' %
                                                     item.av_type))) * (2**30)
                            option_max_size = float(
                                str(
                                    ModelSetting.get('%s_option_max_size' %
                                                     item.av_type))) * (2**30)
                            if option_min_size != 0 and item.total_size < option_min_size:
                                flag_download = False
                                item.log += u'5. 최소크기 - %s : %s\n' % (
                                    Util.sizeof_fmt(item.total_size,
                                                    suffix='B'), flag_download)
                            if option_max_size != 0 and item.total_size > option_max_size:
                                flag_download = False
                                item.log += u'5. 최대크기 - %s : %s\n' % (
                                    Util.sizeof_fmt(item.total_size,
                                                    suffix='B'), flag_download)
                            if flag_download:
                                item.log += u'5. 크기 - %s : %s\n' % (
                                    Util.sizeof_fmt(item.total_size,
                                                    suffix='B'), flag_download)
                        except Exception as e:
                            logger.error('Exception:%s', e)
                            logger.error(traceback.format_exc())
                    if flag_download and item.av_type == 'censored':
                        file_count = ModelSetting.get_int(
                            'censored_option_file_count_min')
                        if file_count != 0 and item.file_count < file_count:
                            flag_download = False
                            item.log += u'6. 파일 수 min - %s : %s\n' % (
                                item.file_count, flag_download)
                    if flag_download and item.av_type == 'censored':
                        file_count = ModelSetting.get_int(
                            'censored_option_file_count_max')
                        if file_count != 0 and item.file_count > file_count:
                            flag_download = False
                            item.log += u'6. 파일 수 max - %s : %s\n' % (
                                item.file_count, flag_download)
                    if flag_download:
                        flag_download = self.check_option_server_id_mod(item)
                        item.log += u'8. 다운여부 : %s' % (flag_download)
                    # Download.
                    if flag_download:
                        if option_auto_download == '1':
                            import downloader
                            downloader_item_id = downloader.Logic.add_download2(
                                item.magnet,
                                ModelSetting.get('%s_torrent_program' %
                                                 item.av_type),
                                ModelSetting.get('%s_path' % item.av_type),
                                request_type=package_name,
                                request_sub_type='',
                                server_id='av_%s_%s_%s' %
                                (item.server_id, item.file_count,
                                 item.total_size))['downloader_item_id']
                            item.downloader_item_id = downloader_item_id
                            item.download_status = 'true'
                        else:
                            item.download_status = 'true_only_status'
                    else:
                        if option_auto_download == '1':
                            item.download_status = 'false'
                        else:
                            item.download_status = 'false_only_status'
                if ModelSetting.get_bool('result_send_notify'):
                    self.send_telegram_message(item)
                item.download_check_time = datetime.datetime.now()
                db.session.add(item)
                logger.debug('%s - %s %s', item.code, flag_download, item.log)
            except Exception as e:
                logger.error('Exception:%s', e)
                logger.error(traceback.format_exc())
        # Advance the watermark: '0' on an empty first run, otherwise the id
        # of the newest processed item.
        new_last_id = last_id
        if flag_first and len(items) == 0:
            new_last_id = '0'
        else:
            if len(items) > 0:
                new_last_id = '%s' % items[len(items) - 1].id
        if new_last_id != last_id:
            ModelSetting.set('last_id', str(new_last_id))
        db.session.commit()
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def scheduler_function_rss_request():
    """Scheduler pass: ingest RSS feeds into the DB, then drive each feed
    item through the Offcloud request state machine.

    status values observed here: <6 / 15 = not yet requested; 1/2/3 = held
    back because the quota was exceeded ('over'); 6/7/8 = remote request
    sent; 11 downloaded, 12 cache-checked only, 13 error, 14 no status;
    -1 = added to cloud for cache generation.
    """
    logger.debug('1. RSS to DB')
    LogicRss.process_insert_feed()
    try:
        over_flag = False
        apikey = ModelSetting.get('apikey')
        job_list = ModelOffcloud2Job.get_list()
        # cloud_history = Offcloud.get_history(apikey, 'cloud')
        # cloud_history = cloud_history['history_status']
        # remote_history = Offcloud.get_history(apikey, 'remote')
        # remote_history = remote_history['history_status']
        for job in job_list:
            # logger.debug('add-to-cloud feature test %s', job.add_to_cloud)
            # logger.debug('add-to-cloud feature test %s', type(job.add_to_cloud))
            account = ModelOffcloud2Account.get(job.username)
            rss_list = ModelOffcloud2Item.get_rss_list_by_scheduler(job)
            cached_list = LogicRss.process_cached_list(rss_list)
            logger.debug('2. job name:%s count:%s, cache count:%s', job.name,
                         len(rss_list), len(cached_list))
            i = 0  # index used to spread requests across accounts
            for feed in rss_list:
                try:
                    i = i + 1
                    if job.username2:
                        # Account load-balancing: rotate through the extra
                        # accounts listed (newline/'||'-separated) in username2.
                        # account2 = ModelOffcloud2Account.get(job.username2)
                        account2 = [
                            x.strip().replace(' ', '').strip()
                            for x in job.username2.replace('\n', '||').split('||')
                        ]
                        account2 = Util.get_list_except_empty(account2)
                        ii = i % len(account2)
                        account = ModelOffcloud2Account.get(account2[ii])
                    # Not yet requested: status below 6 means no request was
                    # made; 15 means it was only added to cloud to generate a
                    # cache, so it is not in a download-requested state.
                    if feed.status < 6 or feed.status == 15:
                        feed.oc_folderid = job.folderid
                        if feed.link.startswith('magnet'):
                            # cached_list holds info-hashes; link[20:60] is the
                            # 40-char hash portion of the magnet URI.
                            if feed.link[20:60] in cached_list:
                                # Cache exists for this item.
                                LogicRss.process_cached_feed(feed)
                                feed.oc_cached = True
                                if job.mode == '0' or job.mode == '1':
                                    if over_flag:
                                        feed.status = 1
                                    else:
                                        feed.remote_time = datetime.datetime.now(
                                        )
                                        ret = Offcloud.add_remote(
                                            apikey, feed, account.option_id)
                                        if feed.job.use_tracer:
                                            feed.make_torrent_info()
                                        #logger.debubg("request : %s", feed.title)
                                        if ret == 'over':
                                            over_flag = True
                                            feed.status = 1
                                        else:
                                            feed.status = 6
                                elif job.mode == '2':
                                    # Cache check only.
                                    feed.status = 12
                            else:
                                # No cache yet.
                                if job.mode == '1':
                                    # Mode that downloads even without cache.
                                    if over_flag:
                                        feed.status = 2
                                    else:
                                        feed.remote_time = datetime.datetime.now(
                                        )
                                        ret = Offcloud.add_remote(
                                            apikey, feed, account.option_id)
                                        if feed.job.use_tracer:
                                            feed.make_torrent_info()
                                        if ret == 'over':
                                            over_flag = True
                                            feed.status = 2
                                        else:
                                            feed.status = 7
                                # Add the cache-less feed to cloud (after the
                                # configured delay) so Offcloud builds a cache.
                                try:
                                    if feed.status < 6 and feed.status > -1:
                                        if job.add_to_cloud is not None and isinstance(
                                                job.add_to_cloud, int):
                                            if datetime.datetime.now(
                                            ) > feed.created_time + datetime.timedelta(
                                                    hours=int(
                                                        job.add_to_cloud)):
                                                # logger.debug('feed created at %s', feed.created_time)
                                                # logger.debug('feed created + delay %s', feed.created_time + datetime.timedelta(hours=job.add_to_cloud))
                                                res_add_to_cloud = "https://offcloud.com/api/cloud?apikey=" + apikey + "&url=" + feed.link
                                                res_add_to_cloud = requests.get(
                                                    res_add_to_cloud)
                                                # "https://offcloud.com/api/cloud?apikey=Q8wFNqlCWGjNEB2xicIb8RpiIPRONfSR&url={{url}}"
                                                logger.debug(
                                                    'Cloud로 추가: %s %s',
                                                    res_add_to_cloud,
                                                    feed.link)
                                                if 'success' in res_add_to_cloud.json(
                                                ):
                                                    logger.debug(
                                                        res_add_to_cloud.json())
                                                    logger.debug(
                                                        datetime.datetime.now()
                                                        + datetime.timedelta(
                                                            hours=job.add_to_cloud))
                                                    feed.status = -1  # added to cloud
                                                elif 'not_available' in res_add_to_cloud.json(
                                                ):
                                                    logger.debug(
                                                        'Cloud로 추가 실패')
                                except Exception as e:
                                    logger.error('Exception:%s', e)
                                    logger.error(traceback.format_exc())
                        elif feed.link.startswith('http'):
                            if ModelSetting.get_bool(
                                    'request_http_start_link'):
                                if not feed.link.endswith('=.torrent'):
                                    feed.remote_time = datetime.datetime.now()
                                    ret = Offcloud.add_remote(
                                        apikey, feed, account.option_id)
                                    if ret == 'over':
                                        over_flag = True
                                        feed.status = 3
                                    else:
                                        feed.status = 8
                    else:
                        # Already requested: refresh in-flight transfers and
                        # map the Offcloud status onto our status codes.
                        if feed.oc_status == 'created' or feed.oc_status == 'uploading' or feed.oc_status == 'downloading':
                            Offcloud.refresh_status(apikey, feed)
                        if feed.oc_status == 'downloaded':
                            feed.status = 11
                            feed.completed_time = datetime.datetime.now()
                        if feed.oc_status == 'error':
                            feed.status = 13
                        if feed.oc_status == 'NOSTATUS':
                            feed.status = 14
                except Exception as e:
                    logger.error(e)
                    logger.error(traceback.format_exc())
                finally:
                    # Persist whatever state the feed reached, even on error.
                    db.session.add(feed)
                    db.session.commit()
    except Exception as e:
        logger.error(e)
        logger.error(traceback.format_exc())
    finally:
        logger.debug('==================================')
def process_insert_feed():
    """Poll each job's RSS feed and insert new, regex-filtered entries.

    For every job: fetch its RSS, build the job's regex list, and append
    any feed entry not already stored (matched by job_id + link) to the
    job's rss_list, honoring whitelist/blacklist mode. Commits once per
    job when anything was added. Errors are logged and never propagate.
    """
    try:
        job_list = ModelOffcloud2Job.get_list()
        for job in job_list:
            try:
                logger.debug('Offcloud job:%s', job.id)
                feed_list = RssUtil.get_rss(job.rss_url)
                if not feed_list:
                    continue
                flag_commit = False
                count = 0
                # Parse the job's regex list; fall back to "no filters" on
                # any error. (The original source had this `try:` commented
                # out, leaving an orphaned `except:` — restored here.)
                try:
                    values = [x.strip() for x in job.rss_regex.split('\n')]
                    regex_list = Util.get_list_except_empty(values)
                except Exception:
                    regex_list = []
                #logger.warning(regex_list)
                #logger.warning(job.rss_mode)
                # Oldest first so DB insertion order follows publication order.
                for feed in reversed(feed_list):
                    if db.session.query(ModelOffcloud2Item).filter_by(
                            job_id=job.id, link=feed.link).first() is None:
                        # 2021-05-21
                        # Does any configured regex match the title? A broken
                        # pattern aborts the scan and counts as "no match".
                        matched = False
                        try:
                            for regex in regex_list:
                                if re.compile(regex).search(feed.title):
                                    matched = True
                                    break
                        except Exception as e:
                            logger.error(e)
                            logger.error(traceback.format_exc())
                        # Whitelist (rss_mode truthy) keeps only matches;
                        # blacklist drops matches.
                        if job.rss_mode:
                            if not matched:
                                continue
                        else:
                            if matched:
                                continue
                        r = ModelOffcloud2Item()
                        r.title = u'%s' % feed.title
                        r.link = feed.link
                        #db.session.add(r)
                        job.rss_list.append(r)
                        flag_commit = True
                        count += 1
                if flag_commit:
                    db.session.commit()
                logger.debug('Offcloud job:%s flag_commit:%s count:%s',
                             job.id, flag_commit, count)
            except Exception as e:
                logger.error(e)
                logger.error(traceback.format_exc())
    except Exception as e:
        logger.error(e)
        logger.error(traceback.format_exc())