def save_to_remote(js_dir, mission_id, save_list):
    """Copy report asset files into the cache under short-lived keys.

    :param js_dir: Directory containing the files to upload.
    :param mission_id: Mission identifier used to namespace the cache keys.
    :param save_list: Iterable of file names (relative to *js_dir*) to store.

    Each file is stored under the key ``"<name with dots as underscores>-<mission_id>"``
    with a 60-second TTL.  This is deliberately best-effort: any failure is
    reported and swallowed so the caller is never aborted.
    """
    try:
        for name in save_list:
            with open(os.path.join(js_dir, name), "rb") as f:
                # Dots are replaced so "dashboard.js" keys as "dashboard_js-<id>".
                cache.setex("%s-%s" % (name.replace(".", "_"), mission_id), 60, f.read())
    except Exception as e:
        # Best-effort by design; `print(e)` replaces the Python 2-only
        # `print e` statement, which is a syntax error on Python 3.
        print(e)
def get_pgp_armor_key(keyid: str) -> str:
    """Lookup a PGP key.

    :param keyid: The key ID to lookup — a 40-char fingerprint, an 8-char
        short ID, or a UID fragment; an optional leading ``0x`` is ignored.
    :return: The armored version of the PGP key, ``""`` if the key exists but
        has no armored form, or ``None`` if the key does not exist in the DB.
    """
    # Normalize BEFORE the cache check so "0xDEADBEEF" and "DEADBEEF" share
    # one cache entry.  (Previously the 0x-form was checked against the cache
    # unstripped, while entries are always written under the stripped ID, so
    # those lookups could never hit the cache.)  Slicing only removes the
    # prefix, unlike replace("0x", "") which would strip "0x" anywhere.
    if keyid.startswith("0x"):
        keyid = keyid[2:]
    cache_key = keyid + "-armor"
    if cache.exists(cache_key):
        return cache.get(cache_key).decode()
    if len(keyid) == 40:
        # Full fingerprint lookup.
        key = db.Key.query.filter(db.Key.fingerprint == keyid).first()
    elif len(keyid) == 8:
        # Short key-ID lookup.
        key = db.Key.query.filter(db.Key.key_fp_id == keyid).first()
    else:
        # Fall back to a substring match against the UID.
        key = db.Key.query.filter(db.Key.uid.ilike("%{}%".format(keyid))).first()
    if key is None:
        return None
    if not key.armored:
        return ""
    # Cache the armored key for a day before returning it.
    cache.setex(cache_key, 60 * 60 * 24, key.armored)
    return key.armored
def _setup_keybase(self, username):
    """Populate ``self.keybase`` and ``self.api_keybase`` for *username*.

    Serves from the cache when possible; on a miss, fetches from Keybase
    and caches both the raw user data (1-day TTL) and the result of proof
    verification (3-day TTL).
    """
    data_key = "keybase_" + username
    ver_key = data_key + "_ver"
    cached = cache.exists(data_key)
    if cached:
        # Rebuild the user object from the cached JSON without autofetching.
        user = keybaseapi.User(username, autofetch=False)
        raw = json.loads(cache.get(data_key).decode())
        user.raw_keybase_data = user._translate_into_configkey(raw)
        user._map_data()
    else:
        # Autofetch from the API, then cache the dumped structure for a day.
        # Note: StrictRedis uses name,time,value. Normal redis uses name,value,time.
        user = keybaseapi.User(username)
        cache.setex(data_key, 60 * 60 * 24, json.dumps(user.raw_keybase_data.dump()))
    self.api_keybase = user.raw_keybase_data.dump()
    if cached:
        # Verification outcome was stored alongside the data ("1"/"0").
        verified = bool(int(cache.get(ver_key)))
    else:
        # Fresh fetch: verify the proofs now and cache the outcome.
        try:
            user.verify_proofs()
        except keybaseapi.VerificationError:
            verified = False
        else:
            verified = True
        cache.setex(ver_key, 60 * 60 * 24 * 3, "1" if verified else "0")
    self.keybase = (user, verified)
def set_cachex(key, value, expires):
    """Store *value* under *key* with a TTL of *expires* seconds.

    Thin adapter over ``cache.setex`` that accepts arguments in
    (key, value, expires) order and passes its result through.
    """
    # cache.setex expects (name, time, value) — note the reordering.
    result = cache.setex(key, expires, value)
    return result
def worker_run_job(loadtool, user, script_name, script, mission_id, agent_list=None):
    """Run a load-test job with gatling or jmeter and publish its results.

    :param loadtool: Either ``"gatling"`` or ``"jmeter"``.
    :param user: Owner of the script; used as a sub-folder name.
    :param script_name: Base name of the script file.
    :param script: Raw script contents (bytes) to write to disk.
    :param mission_id: Identifier used to namespace logs, cache keys and reports.
    :param agent_list: Optional list of remote jmeter agents (``-R`` hosts).
    :return: Always ``"ok"`` — errors are logged and reported via the cache.
    """
    # Avoid the shared-mutable-default pitfall (the old `agent_list=list()`
    # default was evaluated once and shared across all calls).
    if agent_list is None:
        agent_list = []
    logger = get_logger("job", "INFO", "logs/job_%s.log" % mission_id)

    def create_script():
        # Write the script into the per-user folder; for jmeter, also create
        # the per-mission report output folder up front.
        folder = os.path.join(
            Config.SCRIPT_FOLDER if loadtool == "gatling" else Config.JMETER_SCRIPT_FOLDER,
            user)
        if loadtool == "jmeter":
            report_folder = os.path.join(
                Config.REPORT_FOLDER,
                "report-{mission_id}-jmeter".format(mission_id=mission_id))
            os.makedirs(report_folder)
        if not os.path.exists(folder):
            os.makedirs(folder)
        # gatling: <name>.scala ; jmeter: <name>-<mission_id>.jmx
        filename = os.path.join(
            folder,
            "%s.%s" % ("%s-%s" % (script_name, mission_id) if loadtool != "gatling" else script_name,
                       "scala" if loadtool == "gatling" else "jmx"))
        with open(filename, "wb") as f:
            f.write(script)

    def save_to_remote(js_dir, mission_id, save_list):
        # Best-effort upload of report JS assets into the cache (60 s TTL).
        try:
            for file in save_list:
                with open(os.path.join(js_dir, file), "rb") as f:
                    cache.setex("%s-%s" % (file.replace(".", "_"), mission_id), 60, f.read())
        except Exception as e:
            # `print(e)` replaces the Python 2-only `print e` statement.
            print(e)

    try:
        create_script()
        exec_cmd = ""
        if loadtool == "gatling":
            gatling_bin = os.path.join(Config.GATLING_HOME, "bin")
            exec_cmd = "sh {gatling_bin}/gatling.sh -s {user}.{script_name} -rf {report_folder} -on {result_dir_name}".format(
                gatling_bin=gatling_bin,
                user=user,
                script_name=script_name,
                mission_id=mission_id,
                report_folder=Config.REPORT_FOLDER,
                result_dir_name="report-%s" % mission_id)
        elif loadtool == "jmeter":
            if len(agent_list):
                # Distributed run across the remote agents (-R).
                exec_cmd = "sh {jmeter_home}/bin/jmeter.sh -n -t {jmeter_script_folder}/{user}/{script_name}-{mission_id}.jmx -R {agent_list} -l {jmeter_script_folder}/{user}/{script_name}.{mission_id}.jtl -e -o {report_folder}/report-{mission_id}-jmeter".format(
                    debug_detail_folder=Config.DEBUG_DETAIL_FOLDER,
                    jmeter_home=Config.JMETER_HOME,
                    jmeter_script_folder=Config.JMETER_SCRIPT_FOLDER,
                    user=user,
                    script_name=script_name,
                    report_folder=Config.REPORT_FOLDER,
                    mission_id=mission_id,
                    agent_list=",".join(agent_list))
            else:
                # Local, single-machine run.
                exec_cmd = "sh {jmeter_home}/bin/jmeter.sh -n -t {jmeter_script_folder}/{user}/{script_name}-{mission_id}.jmx -l {jmeter_script_folder}/{user}/{script_name}.{mission_id}.jtl -e -o {report_folder}/report-{mission_id}-jmeter".format(
                    debug_detail_folder=Config.DEBUG_DETAIL_FOLDER,
                    jmeter_home=Config.JMETER_HOME,
                    jmeter_script_folder=Config.JMETER_SCRIPT_FOLDER,
                    user=user,
                    script_name=script_name,
                    report_folder=Config.REPORT_FOLDER,
                    mission_id=mission_id)
        cache.append("mission_%s_log" % mission_id, exec_cmd)
        try:
            p = pexpect.spawn(exec_cmd, timeout=120)
        except Exception as e:
            cache.set("mission_%s_error" % mission_id, str(e))
            logger.error(str(e))
            # Nothing was spawned: bail out instead of falling through to the
            # polling loop with `p` unbound (previously a NameError here was
            # caught by the outer handler).  The `finally` still returns "ok".
            return "ok"
        # Stream the child's output line-by-line into the mission log.
        while p.isalive():
            p.expect(['\n', pexpect.EOF, pexpect.TIMEOUT])
            info = p.before
            cache.append("mission_%s_log" % mission_id, "<br>" + info)
        else:
            # while/else with no `break`: runs once the process has exited.
            if loadtool == "jmeter":
                save_to_remote(
                    os.path.join(Config.REPORT_FOLDER, "report-%s-jmeter" % mission_id,
                                 "content", "js"),
                    mission_id,
                    ["dashboard.js", "graph.js"])
            elif loadtool == "gatling":
                report_dir = get_gatling_report_dir(mission_id)
                source_dir = os.path.join(Config.REPORT_FOLDER, report_dir)
                report_file = os.path.join(Config.REPORT_FOLDER, "report-gatling",
                                           "report-%s.tar.gz" % mission_id)
                # Publish the stats summary, then archive and save the report.
                with open(
                        os.path.join(Config.REPORT_FOLDER, report_dir, "js",
                                     "stats.json")) as f:
                    cache.setex("mission_%s_stats" % mission_id, 60, f.read())
                with tarfile.open(report_file, "w:gz") as tar:
                    tar.add(source_dir, arcname=os.path.basename(source_dir))
                Domino.save(report_file, "report-%s.tar.gz" % mission_id)
            cache.setex("mission_%s_finish" % mission_id, 60, 1)
            cache.delete("mission_%s_log" % mission_id)
            send_wechat_notice(mission_id, user)
    except Exception:
        logger.error(traceback.format_exc())
    finally:
        # NOTE(review): returning from `finally` suppresses any in-flight
        # exception; kept because callers rely on an unconditional "ok".
        return "ok"