def insert_matches(matches=None):
    """Insert the given matches into the `matches` table, skipping any
    match_id that is already stored.

    :param matches: list of dicts whose keys match the INSERT columns
                    (match_id, date, mode, ...). Defaults to a no-op.
    """
    # BUGFIX: the original used a mutable default argument (matches=[]),
    # which is shared across calls; None is the safe sentinel.
    if not matches:
        return
    cursor = db.cursor(pymysql.cursors.DictCursor)
    try:
        ids = make_ids_string(matches)
        # NOTE(review): `ids` is interpolated directly into the SQL; this is
        # only safe if make_ids_string() emits numeric ids — confirm, or
        # switch to parameterized placeholders.
        sql = 'SELECT * FROM matches WHERE match_id IN (' + ids + ')'
        cursor.execute(sql)
        results = cursor.fetchall()
        # ids already present in the DB must not be inserted again
        except_ids = {db_match['match_id'] for db_match in results}
        filtered_matches = [
            m for m in matches if int(m['match_id']) not in except_ids
        ]
        if filtered_matches:
            cursor.executemany(
                """
                INSERT INTO matches (match_id, date, mode, range_type,
                 winner_type, winner_region, duration_string, duration,
                 radiant_heroes, dire_heroes)
                VALUES (%(match_id)s, %(date)s, %(mode)s, %(range_type)s,
                 %(winner_type)s, %(winner_region)s, %(duration_string)s,
                 %(duration)s, %(radiant_heroes)s, %(dire_heroes)s)""",
                filtered_matches)
            db.commit()
    except BaseException as err:
        log_error(err)
    finally:
        cursor.close()
        db.close()
def localize(where, lang_id):
    """Resolve a localized string from *where* using language key *lang_id*.

    *where* may be a plain string (returned as-is), a dict of
    language-id -> text, or a list of such dicts (merged). Falls back to
    English, then to the first available entry with a warning.
    """
    if where is None:
        log_error('localize(): "where" is None, nothing to localize!')
        return '[ERROR]'

    # A bare string carries no translations; use it unchanged.
    if isinstance(where, str):
        return where

    if isinstance(where, list):
        # Merge the list of dicts into one lookup table (later wins).
        merged = {}
        for part in where:
            merged.update(part)
        where = merged

    if lang_id in where:
        return str(where[lang_id])

    # English is the default fallback language.
    if 'en' in where:
        return str(where['en'])

    # Last resort: emit whatever single translation exists, with a warning.
    log_warn('localize(): missing localization for "{0}" in "{1}"'.
             format(lang_id, where))
    return str(where[list(where)[0]])
def report_incident_by_email(file_path, rules_matched, yara_rules_file_name, event_time):
    """Send an e-mail alert describing a Yara rule match on a file.

    No-op when e-mail alerts are disabled in settings. Errors are printed
    and logged, never raised.

    :param file_path: path of the file that triggered the match
    :param rules_matched: the rules that matched the file
    :param yara_rules_file_name: name of the Yara rules file that matched
    :param event_time: time the incident occurred
    """
    if not settings.email_alerts_enabled:
        return
    try:
        file_name = os.path.basename(file_path)
        # Keep the subject line short for long file names.
        # NOTE(review): this truncates to 39 chars although the guard tests
        # > 40 — confirm the intended maximum length.
        short_file_name = file_name
        if file_name is not None and len(file_name) > 40:
            short_file_name = file_name[0:39]
        smtp_mailer_param = build_smtp_config_dict()
        smtp_mailer_param['message_body'] = build_incident_email_message_body(
            file_name, file_path, rules_matched, yara_rules_file_name,
            event_time)
        smtp_mailer_param['subject'] = 'Match Found: {}'.format(
            short_file_name)
        print('[+] Sending incident info to {}'.format(
            settings.email_alert_recipients))
        email_sender.send_message(smtp_mailer_param)
        print('[+] Incident info sent to {}'.format(
            settings.email_alert_recipients))
    except Exception as e:
        print('[-] ERROR: {}'.format(e))
        logger.log_error(e, module_name)
def handle_request():
    """POST handler that kicks off a test run for a team.

    Expects form fields `ip`, `port` and `group_id`, plus an optional
    `test_order` (an int or list literal).

    :return: a message string, or a (message, http_status) tuple on error
    """
    request_data = request.form
    # Validate the required form fields up front.
    if ('ip' not in request_data or 'group_id' not in request_data
            or 'port' not in request_data):
        logger.log_error('malformed post request data.')
        return 'malformed post request data.', 400
    group_id = request_data['group_id']
    # test_and_set_active() acts as a per-group lock: one test at a time.
    if test_and_set_active(group_id):
        logger.log_info('lock acquired for team "{}" with group_id {}'.format(
            team_names[int(group_id)], group_id))
        ip = 'http://{}:{}'.format(request_data['ip'], request_data['port'])
        test_order = None
        if 'test_order' in request_data:
            # literal_eval safely parses the client-supplied literal.
            test_order = literal_eval(request_data['test_order'])
            logger.log_info(
                'custom test order {} was given for team "{}" with group_id {}'
                .format(test_order, team_names[int(group_id)], group_id))
            # Normalize a single int into a one-element list.
            # BUGFIX/idiom: use isinstance() instead of `type(x) == int`.
            if isinstance(test_order, int):
                test_order = [test_order]
        process_request(ip, group_id, test_order)
        logger.log_success(
            'test for team "{}" with group_id {} initiated successfully'.
            format(team_names[int(group_id)], group_id))
        return "success - test initiated"
    else:
        logger.log_error(
            'another test for team "{}" with group_id {} is in progress'.
            format(team_names[int(group_id)], group_id))
        return "error - existing test in progress", 406
def read_preprocessed(filepath):
    """Read a preprocessed trace file and yield tuples.

    The file (commonly named trace_parsed.gz) was produced by
    preprocess.py, so no memory or BBID mapping information is needed;
    the same data as disasm_pt_file() is yielded.

    Keyword arguments:
    filepath -- The path to a preprocessed trace.

    Yields:
    The tuples described in disasm_pt_file() until EoF is reached, after
    which None is yielded forever.
    """
    if not path.isfile(filepath):
        logger.log_error(module_name, str(filepath) + ' is not a file')
        return

    with gzip.open(filepath, 'rb') as ifile:
        while True:
            # Each packet starts with a 2-byte length header (struct "H").
            head = ifile.read(2)
            if len(head) != 2:
                break  # EoF
            packet_len = unpack("H", head)[0]

            # Get packet contents.
            body = ifile.read(packet_len)
            # BUGFIX: gzip opened in binary mode yields bytes, so the
            # original `body == ''` EoF check was never true on Python 3;
            # compare lengths instead (also guards truncated packets).
            if len(body) != packet_len:
                break  # EoF or truncated trace
            yield preprocess.unpack_instr(body)

    # Keep yielding None after EoF so callers can poll indefinitely.
    while True:
        yield None
def simulatorgetnetwork(self, activity):
    '''Collect the emulator's network traffic (upload + download, in MB).

    Reads /proc/<pid>/net/dev for the app's process via adb and sums the
    eth0 rx/tx byte counters into the module-global `total`.

    :param activity: unused here; kept for interface compatibility
    '''
    global total
    adc = AdbCommon(self.dev)
    try:
        uid = adc.get_app_pid(self.db.packagename)
        # Dump per-interface traffic counters for the app's process.
        cmd = 'adb -s %s shell cat /proc/%s/net/dev' % (self.dev, uid)
        logger.log_debug(cmd)
        pipe = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout
        for index in pipe.readlines():
            # if index.startswith(' wlan0'):  # physical device
            if 'eth0' in index:  # emulator interface
                down = index.split()[1]   # received bytes
                send = index.split()[9]   # transmitted bytes
                total = (int(down) + int(send)) / (1024 * 1024)  # in MB
    # BUGFIX: `except Exception, e` is Python-2-only; use the `as` form.
    except Exception as e:
        # BUGFIX: the original message contained a stray '%e' placeholder
        # that was never formatted; concatenate the exception text only.
        logger.log_error('获取模拟器流量失败:' + str(e))
        total = 0
def goto(self, key, value, stepname):
    """Locate *stepname* among the remaining actions when key == value.

    :param key: value to compare against *value*; the search only runs
                when they are equal
    :param stepname: STEPNAME to look for in self.act_list
    :return: dict with __RETCODE__ (0 found / -1 not found), __STEP__
             (relative offset from the current step, 0 when not jumping)
             and __RETMSG__ (error text, empty on success)
    """
    retval = {}
    retcode = 0
    act_list = self.act_list
    step = self.step
    i = 0
    if (key == value):
        step_len = len(act_list[step:])
        found = False
        # BUGFIX: the original read act_list[step + 1] on every iteration,
        # so only one action was ever compared — and the index overran the
        # list on the final iteration; walk with the loop counter instead
        # and stay inside the remaining slice.
        while i < step_len - 1:
            i += 1
            action = act_list[step + i]
            if (action["STEPNAME"] == stepname):
                found = True
                break
        if not found:
            logger.log_error("step[%s] not found in act_list[%s]" %
                             (stepname, act_list))
            retcode = -1
    retval[__RETCODE__] = retcode
    if (retcode != 0):
        retval[__STEP__] = 0
        retval[__RETMSG__] = "step['%s'] not found in act_list[%s]" % (
            stepname, act_list)
    else:
        retval[__STEP__] = i
        retval[__RETMSG__] = ""
    return retval
def __puavo_conf(name, params):
    """Puavo-conf variable check."""
    if 'name' not in params:
        log_error('Conditional "{0}" is missing a required '
                  'parameter "name"'.format(name))
        return (False, False)

    import subprocess

    proc = subprocess.Popen(['puavo-conf', params['name']],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    proc.wait()

    # A return code of 1 is treated as "variable not set". Failed
    # puavo-conf calls and unknown/mistyped variable names cannot be
    # distinguished from this case.
    exists = proc.returncode != 1
    present = params.get('present', True)

    if present != exists:
        return (True, False)

    # Presence matches expectations; optionally compare the value too.
    if 'value' in params:
        actual = proc.stdout.read().decode('utf-8').strip()
        if params['value'] != actual:
            return (True, False)
    return (True, True)
def replaceword(self, path, oldstart, new):
    '''Replace every line in *path* that starts with *oldstart* by *new*.

    :param path: file path
    :param oldstart: prefix identifying the line(s) to replace
    :param new: replacement line content (a newline is appended)
    :return: 0 on success, 1 on failure
    '''
    try:
        with open(path, "r") as f:
            lines = f.readlines()

        # Rewrite the file, swapping out every matching line.
        with open(path, "w") as f_w:
            for line in lines:
                if line.startswith(oldstart):
                    line = new + '\n'
                f_w.writelines(line)

        # Verify the replacement actually landed.
        with open(path, "r") as f:
            content = f.read()
        if new in content:
            return 0
        logger.log_info("修改%s中%s行失败" % (path, oldstart))
        return 1
    # BUGFIX: `except Exception, e` is Python-2-only syntax; the `as`
    # form is valid on both Python 2.6+ and Python 3.
    except Exception as e:
        logger.log_error("修改%s中%s行失败:%s" % (path, oldstart, str(e)))
        return 1
def run_test(self, testcase_dict):
    """Run a single sheet-manipulation testcase.

    @param (dict) testcase_dict
        Testcase configuration; keys include "name", "skip", "times",
        optional overrides ("requires", "function_binds", "variables")
        and a "request" section describing the sheet operation
        (scope, columns to add, sheet name, source/target files).
    @return None; errors during the sheet change are logged, not raised.
    """
    config = self.init_config(testcase_dict, level="testcase")
    try:
        logger.log_info("%s" % (config))
        self.change_sheet(config)
    except Exception:
        logger.log_error("run error[%s]" % (config), exc_info=True)
def whitelistrun(self, activity, whitelist):
    '''Whitelist guard: only whitelisted activities may run.

    If the current activity is not on the whitelist, jump to a random
    whitelisted activity via adb.

    :param activity: the currently running activity
    :param whitelist: comma-separated string, or list/tuple, of allowed
                      activities
    :return: 0 when within the whitelist, 1 after jumping back;
             None when the whitelist is unconfigured or the jump fails
    '''
    global activitylist
    # BUGFIX: the original tested `whitelist != '' or len(whitelist) != 0`,
    # which let an empty list through (it compares unequal to ''); a
    # simple truthiness check covers '', [], () and None.
    if whitelist:
        if isinstance(whitelist, str):
            activitylist = whitelist.split(',')
        # BUGFIX: `isinstance(x, dict or tuple)` evaluates to
        # `isinstance(x, dict)` only; list the accepted types explicitly.
        elif isinstance(whitelist, (list, tuple, dict)):
            activitylist = whitelist
        if re.findall(activity, str(activitylist)):
            logger.log_info('monke运行未溢出白名单范围')
            return 0
        else:
            try:
                # Pick a random whitelisted activity and jump to it.
                randomactivity = random.randint(0, len(activitylist) - 1)
                logger.log_info('monke运行溢出activity范围,跳转到%s' % activitylist[randomactivity])
                cmd = "adb -s %s shell am start -n %s/%s" % (self.dev, self.pck, activitylist[randomactivity])
                logger.log_info('monkey跳转命令:%s' % cmd)
                os.system(cmd)
                return 1
            except Exception as e:
                logger.log_error('monke运行跳转到白名单异常: ' + str(e))
    else:
        logger.log_info('monke运行白名单未配置')
def gethtml(monkeycmd):
    '''Fetch the HTML report from the report server and save it locally.

    :param monkeycmd: monkey command string posted to the report server
    :return: 0 when the html was fetched and written, 1 on failure
    '''
    logger.log_info("start gethtml")
    error = False
    r = None
    try:
        data = {'monkeycmd': monkeycmd}
        r = requests.post('http://%s:%d/' % (host, port), data=data)
        with open(htmlpath, 'wb+') as f:
            f.write(r.content)
        logger.log_info('performance.html write complete' + '\n' \
                        + 'path is: %s' % htmlpath)
    except Exception as e:
        logger.log_error('performance.html write fail' + '\n' + str(e))
        error = True
    finally:
        # BUGFIX: the original called r.close() unconditionally after the
        # try block, raising NameError when requests.post() itself failed;
        # close only when a response exists, and do it in `finally`.
        if r is not None:
            r.close()
    stopflask()
    if error:
        return 1
    else:
        return 0
def installapp(self, apkname, apkpath):
    '''Install an app (currently stubbed out: always reports success).

    The uninstall/reinstall logic below is disabled; see the active
    implementation elsewhere in the project.

    :param apkname: package name of the apk
    :param apkpath: path to the apk file
    :return: 0 for success, 1 for failure
    '''
    try:
        # if self.inspectapp(apkname) == 0:
        #     cmd = 'adb -s %s uninstall %s' % (self.dev, apkname)
        #     logger.log_debug(cmd)
        #     os.system(cmd)
        #
        #     cmd = 'adb -s %s install %s' % (self.dev, apkpath)
        #     logger.log_debug(cmd)
        #     os.system(cmd)
        #
        #     if self.inspectapp(apkname) == 0:
        #         return 0
        #     else:
        #         return 1
        return 0
    # BUGFIX: `except Exception, e` is Python-2-only syntax.
    except Exception as e:
        logger.log_error('安装%s失败' % apkname + '\n' + '异常原因:%s' % e)
        return 1
def _get_windows_img(self):
    """Capture a browser screenshot into <reportdir>/<date>/screenshots/.

    Prints the screenshot path wrapped in the SCREENSHOT_*_TAG markers so
    the report tooling can pick it up.

    :return: 0 on success, -1 when the screenshot could not be taken
    """
    browser = self.browser
    reportdir = self.conf_dict.get("reportdir", os.getcwd())
    browser.switch_to_default_content()
    # Build <reportdir>/<yyyymmdd>/screenshots/, creating it if missing.
    date = time.strftime("%Y%m%d")
    file_path = reportdir + "/" + date + "/screenshots/"
    fullpath = os.path.abspath(file_path)
    if not os.path.exists(fullpath):
        os.makedirs(fullpath)
    # Timestamped file name for this screenshot.
    rq = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
    screen_name = fullpath + '/' + rq + '.png'
    screen_name = os.path.abspath(screen_name)
    print("%s%s%s" % (self.SCREENSHOT_B_TAG, screen_name,
                      self.SCREENSHOT_E_TAG))
    try:
        browser.get_screenshot_as_file(screen_name)
        # logger.log_info("save screenshot to [%s]" %(screen_name))
    # BUGFIX: the original caught NameError, which a failing screenshot
    # call does not raise; catch Exception so driver errors are logged.
    except Exception:
        logger.log_error("Failed to take screenshot! [%s]" % (screen_name),
                         exc_info=True)
        return -1
    return 0
def open_file(file_path):
    """Open *file_path* for text reading.

    :param file_path: path of the file to open
    :return: the open file object, or None when the file cannot be
             opened (the error is printed and logged)
    """
    try:
        stream = open(file_path, "r")
    except IOError as e:
        print('[-] ERROR {}'.format(e))
        logger.log_error(e, module_name)
        return None
    return stream
def fill_queue(self):
    """
    Attempt to GET enough SMSs from the URL to fill the internal queue
    back to MAX_SMS. If there are 5 in the queue, and MAX_SMS is 12,
    7 will be requested.

    Sends an HTTP GET request to the URL with the parameters:
        max_sms, key

    Returns the new length of the sms_queue (unchanged length on error).
    """
    current_length = len(self.sms_queue)
    # Only request the shortfall, never more than MAX_SMS total.
    desired_sms = self.MAX_SMS - current_length
    data_dict = {
        'max_sms' : desired_sms,
        'key' : self.key
    }
    # Make the request
    result = requests.get(self.url, params=data_dict)
    if result.status_code == requests.codes.ok:
        # NOTE(review): `result.json` is used as a mapping (not called) —
        # this relies on a pre-1.0 requests API; confirm library version.
        for sms in result.json['sms']:
            new_sms = SMS.from_dictionary(sms)
            self.sms_queue.append(new_sms)
        count = result.json['sms_count']
        if VERBOSE:
            logger.log(self, "grabbed %s messages from django queue" % count)
        return current_length + count
    else:
        # NOTE(review): `result.error` is not a standard requests
        # attribute; presumably provided by the same legacy version.
        logger.log_error(self, "error filling queue: %d - %s" % (result.status_code, result.error))
        save_error_response(result)
        return current_length
def export(lines_of_data, filepath):
    """Append rows to an existing Excel workbook and style the new cells.

    Each row from *lines_of_data* is appended to the active sheet of the
    workbook at *filepath*; columns A-D of every appended row receive a
    solid fill and a hairline border. Errors are logged, not raised.
    """
    try:
        if filepath is None:
            return
        # Load the workbook and work on its active sheet.
        wb = load_workbook(filepath)
        sheet = wb.active

        # Row pointer into the sheet for the styling pass.
        row_index = EXCEL_DATA_ROW_START_INDEX
        hairline = Side(style='hair', color='C8C8C8')
        border = Border(left=hairline, right=hairline,
                        top=hairline, bottom=hairline)

        for data in lines_of_data:
            sheet.append(data)
            # Colour and frame columns A-D of the freshly appended row.
            for row_cells in sheet['A' + str(row_index):'D' + str(row_index)]:
                for cell in row_cells:
                    cell.fill = PatternFill('solid', 'DFDEE8')
                    cell.border = border
            row_index += 1

        wb.save(filepath)
    except Exception as e:
        logger.log_error(str(e))
def build_watch_option(line):
    """
    Build a structured options dictionary from a watch-list line.

    Line format: directory path, bool: scan sub folders,
    list: excluded extensions, int: max file size in byte unit.
    Only the path is mandatory; the remaining fields are best-effort.

    :param line: a line extracted from WATCH_LIST_FILE_PATH (watch_list.txt)
    :return: dict with watch_path, exists_on_disk, (path_type,)
             is_recursive, excluded_extensions, max_file_size;
             None when no path could be parsed
    """
    try:
        parts = line.split(',')
        directory_info = {}

        # Try parse path (required)
        try:
            path = parts[0].strip().rstrip('/')
            directory_info["watch_path"] = path
            # BUGFIX: the original hard-coded exists_on_disk = True;
            # record what the filesystem actually reports.
            directory_info["exists_on_disk"] = os.path.exists(path)
            if os.path.isfile(path):
                directory_info['path_type'] = 'file'
            elif os.path.isdir(path):
                directory_info['path_type'] = 'dir'
        except Exception:
            return None  # at least, a path is required

        # Try parse is_recursive option (best-effort).
        # BUGFIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # narrowed to Exception throughout.
        try:
            directory_info["is_recursive"] = parts[1].strip() == 'true'
        except Exception:
            pass

        # Try parse excluded extensions (best-effort)
        try:
            directory_info['excluded_extensions'] = parts[2].strip().split('|')
        except Exception:
            pass

        # Try parse max file length option (best-effort)
        try:
            directory_info['max_file_size'] = int(parts[3])
        except Exception:
            pass

        # Defaults for any optional field that failed to parse above.
        directory_info.setdefault("is_recursive", False)
        directory_info.setdefault("excluded_extensions", [])
        directory_info.setdefault("max_file_size", -1)

        return directory_info
    except Exception as e:
        logger.log_error("build_watch_option(): {}".format(e), module_name)
        return None
def filter_file(file_path, disallowed_extensions, max_size):
    """
    Check whether a file passes the provided filters.

    :param file_path: path to the file
    :param disallowed_extensions: iterable of forbidden suffixes
        (e.g. ['.txt', '.png']), matched against the end of the path;
        may be None to skip the check
    :param max_size: maximum allowed size in bytes; values <= 0 disable
        the size check
    :return: file_path when every filter passes, otherwise None
    """
    try:
        # Missing files never pass.
        if not os.path.exists(file_path):
            return None

        # Size filter (only when a positive limit is configured).
        if max_size > 0 and os.path.getsize(file_path) > max_size:
            return None

        # Extension filter.
        if disallowed_extensions is not None and \
                any(file_path.endswith(ext) for ext in disallowed_extensions):
            return None

        return file_path
    except Exception as e:
        logger.log_error(
            "filter_file(): An error has occurred while filtering the file '{}' Error: {}"
            .format(file_path, e), module_name)
        return None
def fill_queue(self):
    """
    Fill the queue back up toward MAX_SMS, requesting only the shortfall.
    Makes a single attempt.

    :return: the new queue length, or 0 when the HTTP request failed
    """
    desired_sms = self.MAX_SMS - len(self.sms_queue)
    data_dict = {
        'max_sms' : desired_sms,
        'key' : self.key
    }
    result = requests.get(self.url, params=data_dict)
    if not result.status_code == requests.codes.ok:
        # NOTE(review): `result.error` is not a standard requests
        # attribute; presumably a legacy/patched requests version.
        logger.log_error(self, "error filling queue: %d - %s" % (result.status_code, result.error))
        log_response(result)
        return 0

    # NOTE(review): `result.json` used as a mapping (not called) —
    # this relies on a pre-1.0 requests API; confirm library version.
    count = result.json['sms_count']
    for sms in result.json['sms']:
        new_sms = SMS.from_dictionary(sms)
        self.sms_queue.append(new_sms)
    if VERBOSE:
        logger.log(self, "grabbed %s messages from django queue" % count)
    return len(self.sms_queue)
def getproductinfo(self):
    '''Read model/brand/OS-version info from the connected device.

    Parses `adb shell cat /system/build.prop`, matching each line against
    the module-level pattern globals `model`, `brand` and `version`, and
    stores the extracted values back into the globals.

    :return: (model, board, osv) tuple, or an 'undefined' triple on error
    '''
    try:
        global model
        global board
        global osv
        # BUGFIX: the original used `model` both as the regex pattern and
        # as the destination of the extracted value, so after the first
        # hit the pattern silently changed; snapshot the patterns first.
        model_pat, brand_pat, version_pat = model, brand, version
        result = os.popen('adb -s %s shell cat /system/build.prop' % self.dev)
        for line in result.readlines():
            if re.findall(model_pat, line):
                model = line.split('=')[1].replace('\n', '').replace('\r', '')
            if re.findall(brand_pat, line):
                board = line.split('=')[1].replace('\n', '').replace('\r', '')
            if re.findall(version_pat, line):
                osv = line.split('=')[1].replace('\n', '').replace('\r', '')
        return model, board, osv
    # BUGFIX: `except Exception, e` is Python-2-only syntax.
    except Exception as e:
        logger.log_error("getproductinfo error! " + str(e))
        return 'undefined', 'undefined', 'undefined'
def scan_file(file_path):
    """Scan a single file against all compiled Yara rules.

    :param file_path: path of the file to scan
    :return: the match list produced by match()
    :raises Exception: when the path is invalid or the scan fails
    """
    file_path = u"{}".format(file_path)
    # NOTE(review): after the format() above file_path can no longer be
    # None (it becomes the string "None"); the isfile() check is what
    # actually fires for bad input.
    if file_path is None or not os.path.isfile(file_path):
        msg = "The provided path '{}' is invalid.".format(file_path)
        logger.log_error(msg, module_name)
        print('[-] ERROR: {}'.format(msg))
        raise Exception(msg)

    # Check if there are any rules in yara-rules-src dir and compile them
    common_functions.compile_yara_rules_src_dir()
    try:
        logger.log_info('Single file scan started', module_name)
        print('[+] Single file scan started')
        logger.log_debug('Getting Yara-Rules', module_name)
        common_functions.print_verbose('[+] Getting Yara-Rules..')
        yara_rule_path_list = get_file_path_list(settings.yara_rules_directory,
                                                 True, '*.yar')
        match_list = match([file_path], yara_rule_path_list)
        print('[+] File scan complete.')
        logger.log_info('File scan complete', module_name)
        return match_list
    except Exception as e:
        common_functions.print_verbose('[-] ERROR: {}'.format(e))
        logger.log_error(e, module_name)
        raise
def installapp(self, apkpackagename, apkpath):
    '''Install an app, uninstalling any existing copy first.

    :param apkpackagename: package name used for uninstall/verification
    :param apkpath: path to the apk file
    :return: 0 for success, 1 for failure
    '''
    try:
        if self.inspectapp(apkpackagename) == 0:
            # App already present: remove it before reinstalling.
            cmd1 = 'adb -s %s uninstall %s' % (self.dev, apkpackagename)
            logger.log_debug(cmd1)
            os.system(cmd1)
            logger.log_info('卸载完成,重新安装')
        # Install and give the device a moment to register the package.
        # (The original duplicated this install/verify sequence in both
        # branches; it is now shared.)
        cmd2 = 'adb -s %s install %s' % (self.dev, apkpath)
        logger.log_debug(cmd2)
        os.system(cmd2)
        time.sleep(3)
        # Verify the package is now present.
        if self.inspectapp(apkpackagename) == 0:
            return 0
        return 1
    # BUGFIX: `except Exception, e` is Python-2-only syntax.
    except Exception as e:
        logger.log_error('安装%s失败' % apkpackagename + '\n' + '异常原因:%s' % e)
        return 1
def scan_access_logs(access_logs_file_path, www_dir_path, tail=0):
    """
    Attempt to match accessed files access logs with Yara-Rules

    :param access_logs_file_path: path to access log file
    :param www_dir_path: path to public web directory ex; www, public_html
    :param tail: read last n lines from access log. if value is 0 then
        will read the whole file
    :return: list of dictionaries containing match details for each file,
        or None on invalid input / error. example:
        {"file": file_path, "yara_rules_file": rule_path,
         "match_list": matches}
    """
    try:
        # Validate the log path before doing any work.
        if access_logs_file_path is None or not os.path.isfile(
                access_logs_file_path):
            logger.log_error(
                'The provided path "{}" is invalid '.format(
                    access_logs_file_path), module_name)
            print('[-] ERROR: The provided path "{}" is invalid.'.format(
                access_logs_file_path))
            return None

        logger.log_info('Access logs scan started', module_name)
        print('[+] Access logs scan started')

        logger.log_debug('Reading access logs file', module_name)
        common_functions.print_verbose('[+] Reading access logs file..')
        # Either tail the last N lines or read the entire log file.
        if tail > 0:
            lines = common_functions.tail(access_logs_file_path, tail)
        else:
            lines = common_functions.read_file_lines(access_logs_file_path)

        logger.log_debug(
            'Attempting to parse accessed files path(s) from access logs',
            module_name)
        common_functions.print_verbose(
            '[+] Attempting to parse accessed files path(s) from access logs..'
        )
        # combine file path with www dir path
        file_path_set = combine_file_path_list_with_dir(
            access_log_parser.get_accessed_files_list(lines), www_dir_path)
        logger.log_debug('[+] {} File to process'.format(len(file_path_set)),
                         module_name)
        print('[+] {} File to process.'.format(len(file_path_set)))

        logger.log_debug('Getting Yara-Rules', module_name)
        common_functions.print_verbose('[+] Getting Yara-Rules..')
        yara_rule_path_list = get_file_path_list(settings.yara_rules_directory,
                                                 True, ['*.yar'])
        match_list = match(file_path_set, yara_rule_path_list)
        print('[+] Access logs scan complete.')
        logger.log_info('Access logs scan complete', module_name)
        return match_list
    except Exception as e:
        print('[-] ERROR: {}'.format(e))
        logger.log_error(e, module_name)
        return None
def run_test(self, testcase_dict): """ run single testcase. @param (dict) testcase_dict { "name": "testcase description", "skip": "skip this test unconditionally", "times": 3, "requires": [], # optional, override "function_binds": {}, # optional, override "variables": [], # optional, override "request": { "steps": [ {}, {} ] }, } @return True or raise exception during test """ # parsed_request = self.init_config(testcase_dict, level="testcase") try: t_level = "testcase" # convert keys in request headers to lowercase config_dict = utils.lower_config_dict_key(testcase_dict) self.context.init_context(level=t_level) self.context.config_context(config_dict, level=t_level) request_config = config_dict.get('request', {}) self.act_list = self._parse_steps(request_config["steps"]) self.step = 0 step_len = len(self.act_list) while self.step < step_len: action = self.act_list[self.step] logger.log_info("STEP[%d], ACTION[%s]" % (self.step + 1, action)) self.do_action(action) retcode, retmsg, n_step = self._parse_func_return(self.retval) if (retcode == 0 and n_step != 0): self.step += n_step elif (retcode != 0): logger.log_error("execute action[%s] error[%s]" % (action, retmsg)) break self.step += 1 except Exception as e: logger.log_error( "run do_action error, step[%d], act_list[%s], e[%s]" % (self.step + 1, self.act_list[self.step], e), exc_info=True) retcode = -1 raise finally: self._get_windows_img() # self.assertEqual(retcode, 0, "retcode[%d], step[%d]" %(retcode, step+1)) return retcode
def close_file(file_stream):
    """Close *file_stream*.

    :return: True on success, False when closing raised an IOError
             (which is printed and logged)
    """
    try:
        file_stream.close()
    except IOError as e:
        print('[-] ERROR {}'.format(e))
        logger.log_error(e, module_name)
        return False
    return True
def download_single(i: int):
    """Download MIDI #i from ninsheetmusic.org into ninsheetmusic/<i>.mid.

    Errors are logged via log_error, never raised.
    """
    try:
        url = 'https://www.ninsheetmusic.org/download/mid/{}'.format(i)
        r = requests.get(url, allow_redirects=True)
        # BUGFIX: the original left the file handle open
        # (open(...).write(...)); use a context manager instead.
        with open('ninsheetmusic/{}.mid'.format(i), 'wb') as f:
            f.write(r.content)
        log_ok('Success at {}'.format(i))
    except Exception as e:
        log_error('Error at {}: {}'.format(i, str(e)))
def load_json_file(json_file):
    """Load and validate a JSON file.

    :param json_file: path to a UTF-8 encoded JSON file
    :return: the parsed JSON content
    :raises Exception: re-raises the parse error after logging it
    """
    with open(json_file, encoding='utf-8') as data_file:
        try:
            json_content = json.load(data_file)
        except Exception as result:
            logger.log_error(result)
            # BUGFIX: the original swallowed the parse error and then hit
            # NameError on the unbound json_content below; re-raise so
            # callers see the real failure.
            raise
        _check_format(json_file, json_content)
        return json_content
def send(self, sms):
    """Serialize *sms* (via str()) and write it to the outgoing socket.

    A socket.error is logged; on success the send itself is logged.
    """
    try:
        payload = str(sms)
        self.out_.write(payload)
        self.out_.flush()
    except socket.error:
        logger.log_error(self, "Error writing to android socket")
    else:
        logger.log_send(self, sms)
def install(path, udid):
    """Install an ipa onto the device with the given udid via ios-deploy.

    :param path: path to the .ipa file
    :param udid: target device identifier
    :raises: re-raises any error from launching the command
    """
    # BUGFIX: the original ignored the `path` parameter (it referenced a
    # module global `ipa_path`) and passed an en-dash '–r' instead of the
    # ASCII flag '-r'.
    cmd = 'ios-deploy -r -b ' + '"' + path + '"' + ' -i ' + udid
    logger.log_info("安装ipa命令:%s" % cmd)
    try:
        os.system(cmd)
    except Exception as msg:
        logger.log_error(msg)
        raise
def load_avatar(self, path):
    """Load the avatar image at *path* into self.icon.

    On any failure the error is logged and self.icon is set to None.
    """
    try:
        icon = load_image_at_size(path, self.ICON_SIZE, self.ICON_SIZE)
    except Exception as e:
        log_error('Could not load avatar image "{0}": {1}'.format(
            path, str(e)))
        icon = None
    self.icon = icon
def get_log(descpath, udid):
    """Copy device crash logs into *descpath* using idevicecrashreport.

    :param descpath: destination directory for the logs
    :param udid: target device identifier
    :raises: re-raises any error from launching the command
    """
    cmd = "idevicecrashreport -e -u %s %s" % (udid, descpath)
    logger.log_info("拷贝log命令:%s" % cmd)
    try:
        os.system(cmd)
    except Exception as err:
        logger.log_error(err)
        raise
def json_load(json):
    """Parse a JSON string with simplejson.

    :param json: JSON text; falsy input short-circuits to None
    :return: the parsed object, or None on empty/invalid input
    """
    if not json:
        return None
    try:
        obj = simplejson.loads(json, encoding='utf-8')
    # BUGFIX: `except Exception, e` is Python-2-only syntax.
    except Exception as e:
        obj = None
        log_error("load json failed: %s" % e)
    # BUGFIX: the original never returned the parsed object, so every
    # call yielded None.
    return obj
def cleanup_and_quit(*pipes):
    """Best-effort cleanup of every pipe, then hard-exit the process."""
    for pipe in pipes:
        try:
            pipe.cleanup()
        except Exception as e:
            logger.log_error(root, "Error while shutting %s - %s"
                             % (pipe.DEVICE, e.message))
    logger.log_highlight(root, "bye")
    # os._exit bypasses normal interpreter shutdown (no atexit/finally).
    os._exit(0)
def send(self, sms):
    """
    Write the given SMS to the outgoing socket.

    Serialization is handled by the __str__ method of the SMS object.
    A socket.error is logged as an error; a successful write is logged
    as a send.
    """
    wrote_ok = True
    try:
        self.out_.write(str(sms))
        self.out_.flush()
    except socket.error:
        wrote_ok = False
        logger.log_error(self, "Error writing to android socket")
    if wrote_ok:
        logger.log_send(self, sms)
def ssh_remote_execute(host, cmd):
    """Execute *cmd* on *host* over SSH (paramiko) and return stdout lines.

    Authentication uses the private key from ssh_config when one is
    configured, otherwise falls back to password auth.

    :param host: host name or address to connect to
    :param cmd: shell command to run remotely
    :return: list of stdout lines, or None when cmd is empty, on any SSH
             failure, or when paramiko is not installed
    """
    try:
        import paramiko
        if not cmd:
            log_error("cmd is None! Failed!")
            return None
        try:
            client = paramiko.SSHClient()
            private_key_file = ssh_config.get('private_key_file')
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            if private_key_file is not None and private_key_file != '':
                # Key-based authentication.
                k = paramiko.RSAKey.from_private_key_file(
                    filename=ssh_config.get('private_key_file'))
                client.connect(
                    host,
                    username=ssh_config.get('user'),
                    port=ssh_config.get('port'),
                    timeout=ssh_config.get('timeout'),
                    pkey=k)
            else:
                # Password authentication fallback.
                client.connect(
                    host,
                    username=ssh_config.get('user'),
                    password=ssh_config.get('password'),
                    port=ssh_config.get('port'),
                    timeout=ssh_config.get('timeout'))
            # 300 second cap on the remote command execution.
            stdin, stdout, stderr = client.exec_command(cmd, timeout=300)
            result = stdout.readlines()
            log_debug(result)
            return result
        except Exception as exc:
            log_error("failed: %s" % cmd)
            log_error(exc)
            return None
        finally:
            # Best-effort channel/connection cleanup; the names may be
            # unbound when connect() failed, hence the blanket except.
            try:
                stdin.close()
                stdout.close()
                stderr.close()
                client.close()
            except:
                pass
    except ImportError as exc:
        log_error("load module 'paramiko', donnot exist!")
        log_error(exc)
        return None
def send(self, sms):
    """POST the SMS fields to self.url.

    :return: True on HTTP 200, False otherwise (failure is logged and
             the raw response recorded)
    """
    data_dict = {
        'to_number' : sms.to_number,
        'from_number' : sms.from_number,
        'body' : sms.body,
        'key' : self.key
    }
    result = requests.post(self.url, data=data_dict)
    if result.status_code == requests.codes.ok:
        logger.log_send(self, sms)
        return True
    # Non-OK status: log the failure details and the raw response.
    logger.log_error(self, "error %d - %s posting sms to %s" %
                     (result.status_code, result.error, self.url))
    log_response(result)
    return False
def handle_args(*args, **kwargs):
    """Cache-aware dispatch wrapper around the decorated function.

    Behavior depends on the enclosing `op`:
      - 'select': return the cached JSON value when present; otherwise
        call the function, cache the result for `ttl` seconds (best
        effort), and return it.
      - 'del'/'delete'/'remove': drop the cache entry.
      - 'insert'/'update': call the function and refresh the cache.
    """
    ckey = get_cache_key(prefix + func.__name__, *args, **kwargs)
    if op == 'select':
        obj = r.get(ckey)
        # BUGFIX/idiom: compare against None with `is`, not `==`.
        if obj is None:
            result = func(*args, **kwargs)
            try:
                r.setex(ckey, ttl, json.dumps(result))
            except Exception as e:
                # Caching is best-effort; still return the live result.
                log_error(e)
            return result
        else:
            return json.loads(obj)
    elif op in ('del', 'delete', 'remove'):
        r.delete(ckey)
    elif op in ('insert', 'update'):
        result = func(*args, **kwargs)
        r.setex(ckey, ttl, json.dumps(result))
        return result
def sanity_check_request(self, file_sha1, page):
    """Check the request against common errors:
    - file can't be found in wiki commons?
    - file is not of the right type?
    - requested page is > than file pagecount?

    :return: (error_response, fileinfo); error_response is {} when the
             request is valid, fileinfo is {} when it is not
    """
    # Get file information from the wikicommons API.
    response = wiki_api.query_file_information(file_sha1)
    fileinfo = wiki_api.process_query_response(response)
    # BUGFIX: narrowed the bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit) to the lookup errors it guards.
    try:
        filename = fileinfo['filename']
    except (KeyError, TypeError):
        filename = 'unknown filename'
    # The API came back with an error or an empty result.
    if 'error' in fileinfo:
        error_msg = fileinfo['error']
        log_error(file_sha1, filename, page, error_msg)
        return self.build_error_response(error_msg, 1), {}
    # The file does not have the right mime type for this processor.
    elif self.filetype not in fileinfo['mime'].lower():
        error_msg = "Not a valid {} file.".format(self.filetype)
        log_error(file_sha1, filename, page, error_msg)
        return self.build_error_response(error_msg, 2), {}
    # The desired page is outside the file's page range.
    elif page > fileinfo['pagecount'] or page < 1:
        error_msg = "Page doesn't exist."
        log_error(file_sha1, filename, page, error_msg)
        return self.build_error_response(error_msg, 3), {}
    else:
        return {}, fileinfo
def send(self, sms):
    """
    Send an HTTP POST request to the configured url with the parameters:
    to_number, from_number, body, key.

    :return: True when the post succeeded (HTTP 200), False otherwise
    """
    data_dict = {
        'to_number' : sms.to_number,
        'from_number' : sms.from_number,
        'body' : sms.body,
        'key' : self.key
    }
    # Make the request
    result = requests.post(self.url, data=data_dict)
    if result.status_code == requests.codes.ok:
        logger.log_send(self, sms)
        # BUGFIX: the original fell off the end here, returning None
        # (falsy) even on success; the sibling send() implementation
        # returns True, so do the same for consistency.
        return True
    else:
        logger.log_error(self, "error %d - %s posting sms to %s" %
                         (result.status_code, result.error, self.url))
        save_error_response(result)
        return False
def request_page(sha1, page, processor, filetype):
    """Serve (or schedule rendering of) one page of a requested file.

    :param sha1: file identifier forwarded to the processor/worker
    :param page: page number to render
    :param processor: object providing cache lookup, sanity checks and
        error-response building
    :param filetype: file type, forwarded to the async worker
    :return: a JSON response (cached page, error, or "processing" notice)
    """
    #check if the desired page is already cached
    if processor.page_is_cached(sha1, page):
        return jsonify(processor.return_cached_page(sha1, page))
    #if it is not yet cached
    #check if celery is active
    elif get_celery_status() is None:
        response = jsonify(processor.build_error_response("Celery is not active.", 4))
        log_error(error_msg="Celery is not active.")
        # Error responses must not be cached by clients/proxies.
        set_no_cache(response)
        return response
    else:
        error_json, fileinfo = processor.sanity_check_request(sha1, page)
        #when something is not right with the request, return an error
        if error_json:
            return jsonify(error_json)
        #when the request is valid
        else:
            #asynchronous call to invoke processing the file
            process_request_async.delay(sha1, page, fileinfo, filetype)
            response = jsonify(processor.build_error_response("Processing the file, check back in a minute.", 0))
            set_no_cache(response)
            return response
def listen(self):
    """Read one SMS from the text parser and dispatch it.

    Parse errors (RuntimeError) are logged and reported as failure.
    A None SMS is treated as a broken device connection: cleanup() is
    called and False returned.
    """
    try:
        sms = self.text_parser.one()
    except RuntimeError as e:
        logger.log_error(self, e.message)
        return False

    if sms is not None:
        logger.log_receive(self, sms)
        self.receive_callback(sms)
        return

    # A None SMS means the device connection has gone away.
    logger.log_error(self, "Connection to android device is broken")
    logger.log_error(self, "Exiting read loop, closing socket")
    self.cleanup()
    return False
def GET(self):
    """Exchange the one-time youtube token for a session token, then
    build a playlist for the requested artist/album.

    Query parameters: token (required), artist, album.
    """
    params = urlparse.parse_qs(web.ctx.query[1:])
    if 'token' not in params.keys():
        return 'No youtube session token given'
    one_time_token = params['token'][0]
    # Exchange the one-time token for a reusable session token.
    youtube.upgrade_token(one_time_token)
    if 'artist' not in params.keys():
        return "please enter an artist"
    if 'album' not in params.keys():
        return "please enter an album"
    artist = params['artist'][0]
    album = params['album'][0]
    logger.log_request("%s, %s, %s, %s" % (datetime.now(), web.ctx.ip,
                                           artist, album))
    title = "%s %s" % (artist, album)
    summary = "%s by %s. Playlist generated by AlbumFinder" % (album, artist)
    try:
        album_videos = list(get_album_videos(artist, album))
    except amazonproduct.api.NoExactMatchesFound as e:
        # Album lookup failed: report and bounce back to the front page.
        logger.log_error("not found: %s, %s" % (artist, album))
        error_messages.append('Sorry, that album could not be found.')
        return web.seeother('/')
    try:
        playlist_id = youtube.add_playlist(title, summary)
        for video_id in album_videos:
            youtube.add_video_to_playlist(video_id, playlist_id)
        web.seeother("/show_playlist?playlist_id=%s" % playlist_id)
    except gdata.service.RequestError as e:
        if re.search('Playlist already exists', str(e)):
            logger.log_error("already exists: %s" % title)
            error_messages.append('Sorry, that playlist already exists.')
            # get playlist id and show it
        else:
            logger.log_error(str(e))
            raise e
    web.seeother('/')
def listen(self):
    """
    Obtain one SMS from the TextParser and dispatch it.

    Parse errors (RuntimeError) are handled and reported as failure;
    socket errors are NOT caught here. A None SMS is assumed to mean a
    broken connection: cleanup() is called and False is returned, with
    no further value judgement made.
    """
    try:
        incoming = self.text_parser.one()
    except RuntimeError as e:
        logger.log_error(self, e.message)
        return False

    if incoming is None:
        logger.log_error(self, "Connection to android device is broken")
        logger.log_error(self, "Exiting read loop, closing socket")
        self.cleanup()
        return False

    logger.log_receive(self, incoming)
    self.receive_callback(incoming)
def run(pipe_templates, connector):
    """Instantiate, connect and start a set of pipes, then supervise them.

    :param pipe_templates: iterable of either a pipe class or a tuple
        (class, args_tuple, kwargs_dict) used to construct each pipe
    :param connector: callable invoked with all constructed pipes to
        wire them together before starting
    """
    # creating the pipes
    pipes = []
    for pipe_template in pipe_templates:
        try:
            # A template is either a bare class or (class, args, kwargs).
            if isinstance(pipe_template, tuple):
                klass = pipe_template[0]
                if len(pipe_template) > 1:
                    args = pipe_template[1]
                else:
                    args = ()
                if len(pipe_template) > 2:
                    kwargs = pipe_template[2]
                else:
                    kwargs = {}
            else:
                klass = pipe_template
                args = ()
                kwargs = {}
        except Exception as e:
            # NOTE(review): this logs "Aborting start." but does not
            # actually abort the loop; klass/args/kwargs may be unbound
            # below when this branch fires — confirm intended behavior.
            logger.log_error(root, "Unable to extract initialization parameters from")
            logger.log_error(root, repr(pipe_template))
            logger.log_error(root, "Aborting start.")
        try:
            pipe = klass(*args, **kwargs)
        except Exception as e:
            logger.log_error(root, "Error initializing %s with args %s and kwargs %s" %
                             (repr(klass), repr(args), repr(kwargs)))
            logger.log_error(root, repr(e))
            logger.log_error(root, "Aborting start and shutting down")
            cleanup_and_quit(*pipes)
        pipes.append(pipe)
    connector(*pipes)
    # Start every pipe; any failure aborts the whole run.
    for pipe in pipes:
        try:
            pipe.start()
            logger.log(root, "started device %s" % pipe.DEVICE)
        except Exception as e:
            logger.log_error(root, "unable to start %s:" % pipe.DEVICE)
            logger.log_error(root, e.message)
            logger.log_error(root, "shutting down.")
            cleanup_and_quit(*pipes)
    # Supervision loop: shut everything down as soon as an upstream pipe
    # dies; Ctrl-C also triggers a clean shutdown.
    try:
        while True:
            for pipe in pipes:
                pipe.join(.2)
                if not pipe.isAlive() and pipe.upstream:
                    logger.log_error(root, "%s has terminated - shutting down"
                                     % pipe.DEVICE)
                    cleanup_and_quit(*pipes)
    except KeyboardInterrupt:
        logger.log_error(root, "KeyboardInterrupt - shutting down")
        cleanup_and_quit(*pipes)
def handle_error_message(self, bus, message):
    """Bus error callback: log the parsed error, then stop the main loop."""
    error, debug_info = message.parse_error()
    logger.log_error(error, debug_info)
    self.main_loop.quit()
def json_dump(obj):
    """Serialize *obj* to a compact JSON string with simplejson.

    :param obj: object to serialize
    :return: the JSON string, or None when serialization fails
    """
    try:
        jstr = simplejson.dumps(obj, separators=(',', ':'))
    # BUGFIX: `except Exception, e` is Python-2-only syntax.
    except Exception as e:
        jstr = None
        log_error("dump json failed: %s" % e)
    # BUGFIX: the original never returned the serialized string, so
    # every call yielded None.
    return jstr