def run(config_path):
    local_path = './experimentos/producao/'
    filename = 'config.yaml'
    # Bucket path used to download the config file to the local machine
    bucket_name_download = config_path[5:].split('/')[0]
    Utils.download_file_from_gcp(config_path,
                                 local_path=local_path,
                                 filename=filename,
                                 bucket_name=bucket_name_download)
    # Environment settings for either development or production
    config = config_pre_tratamento(local_path + filename)
    project = config['project']
    config['caminho_saida_dados'] = local_path
    d6tflow.set_dir(config['caminho_saida_dados'])
    params = get_tasks(config)
    t = tasks.TaskPrdReport(**params)
    d6tflow.preview(t)
    d6tflow.run(t, workers=config['workers'])
    model = tasks.TaskTrainModel(
        task_engineer_params=params['task_engineer_params'],
        task_te_params=params['task_te_params'],
        task_ps_params=params['task_ps_params'],
        task_model_params=params['task_model_params']).output().load()
    salvar_modelo(t, model, config)
    return True
def __windows(self):
    # TODO: the Windows implementation is unfinished
    for line in U.cmd("netstat -aon|findstr 4700").stdout.readlines():
        pid = line.strip().split(' ')[-1]
        process_name = U.cmd(
            'tasklist|findstr {}'.format(pid)).stdout.read().split(' ')[0]
        U.cmd('taskkill /f /t /im {}'.format(process_name))
def deploy(list_alerts_file, passwords, dry_run):
    # print("Alerts Directory: {}".format(list_alerts_file))
    # print("Password file path: {}".format(password_file_path))

    # Create deployment folder
    Utils.create_folder(overwrite=True, folder_path=_path_deployment_plugin)

    # Create all cloudformation templates
    # Template: Bucket S3 for configuration
    # buckets3_template = create_cloudformation_template_init_buckets3()
    # write_cloudformation_template_to_file(
    #     buckets3_template,
    #     os.path.join(_path_deployment_plugin, "bonfire_init_buckets3.yml"))
    # aws_keys = get_aws_keys(passwords)["AWS"]

    # Create cloudformation templates
    for alert_file in list_alerts_file:
        alert_file_parsed = Utils.read_yml_file(alert_file)
        create_cloudformation_template_alerts(alert_yml_data=alert_file_parsed)

    # Deploy cloudformation templates
    if dry_run:
        deploy_cloudformation_template_alerts(alert_yml_data=None, dry_run=True)
def clean_appium(port, device):
    # for line in U.cmd('netstat -aon | findstr %d' % port).stdout.readlines():
    line = U.cmd('netstat -aon | findstr %d' % port).stdout.readline()
    pid = line.strip().split(' ')[-1]
    U.cmd('taskkill /f /pid {}'.format(pid))
    L.Logging.success("killed appium %s" % port)
    clean_logcat(device)
    reconnect_device(device)
def __set_pkg_info(self):
    # Get the apk file name
    self.apk_name = os.path.basename(self.apk_path)
    aaptpath = os.path.join(os.path.abspath(os.path.join(os.getcwd())), aapt)
    # Get the package name
    cmd = '{} dump badging "{}" | {} package'.format(
        aaptpath, self.apk_path, find_util)
    process = U.cmd(cmd)
    stdout, stderr = process.communicate()
    if stdout is None:
        U.Logging.error("[pkg_info] time out: {}".format(cmd))
    elif "ERROR" in stderr or "error" in stderr:
        U.Logging.error("[pkg_info] cannot execute: {}".format(cmd))
        U.Logging.error("[pkg_info] result: {}".format(stderr))
    else:
        try:
            package_name = re.findall(r"name='([a-zA-Z0-9.*]+)'", stdout)
            self.name = package_name[0]
            self.version_code = re.findall(r"versionCode='([0-9]+)'", stdout)[0]
        except Exception as e:
            U.Logging.error(
                "[pkg_info] failed to regex package name from {}. {}".format(
                    stdout, e))
    # Get the launchable activity
    cmd = '{} dump badging "{}" | {} launchable-activity'.format(
        aaptpath, self.apk_path, find_util)
    process = U.cmd(cmd)
    stdout, stderr = process.communicate()
    if stdout is None:
        U.Logging.error("[pkg_info] time out: {}".format(cmd))
    elif "ERROR" in stderr or "error" in stderr:
        U.Logging.error("[pkg_info] cannot execute: {}".format(cmd))
        U.Logging.error("[pkg_info] result: {}".format(stderr))
    else:
        try:
            activity_list = re.findall(r"name='(.+?)'", stdout)
            main_activity = ""
            for activity in activity_list:
                if not activity.startswith("com.squareup") and \
                        not activity.startswith("com.github"):
                    main_activity = activity
                    break
            self.activity = main_activity
        except Exception as e:
            U.Logging.error(
                "[pkg_info] failed to regex main activity from {}. {}".format(
                    stdout, e))
    if self.name and self.activity:
        return True
    return False
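# Small, self-contained illustration of the regexes used above, applied to a
# sample line of `aapt dump badging` output (the sample text is made up for
# this demo and does not come from the snippet itself).
import re

sample = "package: name='com.example.app' versionCode='42' versionName='1.0'"
package_name = re.findall(r"name='([a-zA-Z0-9.*]+)'", sample)[0]
version_code = re.findall(r"versionCode='([0-9]+)'", sample)[0]
print(package_name, version_code)  # com.example.app 42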
def __darwin(self, port, device):
    # for line in U.cmd(
    #         "lsof -i tcp:%s | grep node|awk '{print $2}'" %
    #         str(port)).stdout.readlines():
    #     U.cmd('kill -9 %s' % line.strip())
    #     U.Logging.debug('CleanProcess:Darwin:kill appium')
    for line in U.cmd(
            "ps -A | grep logcat | grep %s" % device).stdout.readlines():
        U.cmd('kill -9 %s' % line.strip())
    U.Logging.debug('CleanProcess:Darwin:kill logcat')
def main(self):
    """
    Execution steps:
    1. Start the test
    2. Generate the test report
    :return:
    """
    U.sleep(5)
    self.__load_analysis()
    U.sleep(1)
    self.__save_android_result()
def __linux(self, port, device):
    # On Linux, root privileges are required to query the port
    # for line in U.cmd(
    #         "lsof -i:%s |awk '{print $2}'" %
    #         str(port)).stdout.readlines():
    #     U.cmd('kill -9 %s' % line.strip())
    #     U.Logging.debug('CleanProcess:linux:kill appium')
    for line in U.cmd(
            "ps -ef | grep logcat | grep %s|awk '{print $2}'" %
            device).stdout.readlines():
        U.cmd('kill -9 %s' % line.strip())
    U.Logging.debug('CleanProcess:linux:kill logcat')
def start_appium(self):
    try:
        self.driver = webdriver.Remote(
            'http://127.0.0.1:%s/wd/hub' % self.__get_appium_port(),
            self.device_list)
        L.Logging.debug('appium start %s success' % self.device)
        U.sleep(10)
        return self.driver
    except Exception as e:
        L.Logging.error('Failed to start appium: {}'.format(e))
        L.Logging.error('Trying to restart appium: {}'.format(self.device))
def _getgeneseqs(genes_df_f, db_fasta, gene_names, cachedir):
    cache_f = join(cachedir, basename(genes_df_f).replace('.df', '.genes.dat'))
    if exists(cache_f):
        return Utils.Load(cache_f)
    ret = {}
    for rec in SeqIO.parse(db_fasta, 'fasta'):
        if rec.id in gene_names:
            ret[rec.id] = str(rec.seq)
            if len(ret) == len(gene_names):
                break
    Utils.Write(cache_f, ret)
    return ret
def load_alert_matrix(self, alert_folder):
    SetupLogger.logger.debug(
        "Creating alert matrix list object with alert_folder '{}'".format(
            alert_folder))
    matrix = []
    alerts = []
    alert_folder_list = Utils.list_files_in_directory(alert_folder)
    # Add each alert object
    for alert_file_path in alert_folder_list:
        # Parse alert yml file
        alert_file_data = Utils.read_yml_file(alert_file_path)
        if type(alert_file_data) is dict:
            category = list(alert_file_data.keys())[0]
            for tool, val in list(alert_file_data.values())[0].items():
                alert_file_obj = AlertFile(alert_file_path, val, True,
                                           category, tool)
                alerts.append(alert_file_obj)
            SetupLogger.logger.debug(
                'Alert file loaded: {}'.format(alert_file_path))
        else:
            alert_file_obj = AlertFile(alert_file_path, "", False, "", "")
            alerts.append(alert_file_obj)
            SetupLogger.logger.info(
                'Error loading file: {}'.format(alert_file_path))
    # Sort by category and tool
    alerts.sort(key=operator.attrgetter('category', 'tool'))
    if type(alerts) is list:
        for alert in alerts:
            if alert.parsed:
                try:
                    # Parse AWS alerts
                    if alert.tool.lower() == "aws":
                        self.parse_cloudwatch_alerts(alert, matrix)
                    # Parse GCP alerts
                    elif alert.tool.lower() == "gcp":
                        self.parse_stackdriver_alerts(alert, matrix)
                except Exception as e:
                    print('[error] Could not parse alert: {}'.format(e))
            else:
                print("[error] Error parsing file '{}'".format(alert.path))
    # Sort by category and subcategory
    matrix.sort(key=operator.attrgetter('category', 'subcategory'))
    SetupLogger.logger.info("Number of parsed files: {}".format(len(alerts)))
    SetupLogger.logger.info("Number of alerts found: {}".format(len(matrix)))
    return matrix
def test_addperms(self):
    cwd = os.getcwd()
    try:
        os.mkdir(cwd + "/build")
    except:
        pass
    path = cwd + "/build/permstest"
    with io.open(path, "w") as f:
        f.write(u"abc")
    os.chmod(path, 0o700)
    self.assertEqual(0, os.stat(path)[0] & 0o7)
    Utils.addperms(path, 0o7)
    self.assertEqual(7, os.stat(path)[0] & 0o7)
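# The helper under test is not shown in this snippet. A minimal sketch of what
# Utils.addperms could look like, inferred only from the assertions above
# (hypothetical, not the project's actual implementation):
import os
import stat

def addperms(path, mask):
    # OR the requested permission bits into the file's current mode.
    mode = stat.S_IMODE(os.stat(path).st_mode)
    os.chmod(path, mode | mask)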
def keyevent(self, key):
    key_dict = {
        'up': 19,
        'down': 20,
        'left': 21,
        'right': 22,
        'ok': 23,
        'back': 111
    }
    keys = key.split(',')
    for key in keys:
        L.Logging.success("keyevent {}".format(key_dict[key]))
        self.driver.keyevent(key_dict[key])
        U.sleep(1)
def start_appium(self):
    aport = random.randint(4700, 4900)
    bpport = random.randint(4700, 4900)
    self.__start_appium(aport, bpport)
    count = 20
    for i in range(count):
        appium = U.cmd('netstat -aon | findstr %d' % aport).stdout.readline()
        if appium:
            L.Logging.debug('start appium :p %s bp %s device: %s' %
                            (aport, bpport, self.device))
            return aport
        else:
            L.Logging.info('waiting start appium 3 seconds')
            U.sleep(3)
def case_start(self):
    test_case_yaml = public.getcase.get_case_yaml_path().items()
    if not test_case_yaml:
        L.Logging.error('yaml not found!!!')
    else:
        for yaml_name, yaml_path in test_case_yaml:
            L.Logging.success('yaml path:{}'.format(yaml_path))
            self.run_case(yaml_name, yaml_path)
        try:
            self.driver.quit()
            L.Logging.success('quit driver %s' % self.appium_port)
            U.sleep(5)
        except:
            L.Logging.error('quit driver error %s' % self.appium_port)
        clean.clean_appium(self.appium_port, self.device)
def forward(self, x):
    residual = x
    out = self.conv1(x)
    #out = self.bn1(out)
    out = BFPActivation.transform_activation_online(out, self.exp_bit,
                                                    self.mantisa_bit, -1)
    out = self.relu(out)
    out = self.conv2(out)
    #out = self.bn2(out)
    out = BFPActivation.transform_activation_online(out, self.exp_bit,
                                                    self.mantisa_bit, -1)
    out = self.relu(out)
    out = self.conv3(out)
    #out = self.bn3(out)
    out = BFPActivation.transform_activation_online(out, self.exp_bit,
                                                    self.mantisa_bit, -1)
    if self.downsample is not None:
        # Get a max of the two lists
        #max_exp_act_list = np.maximum.reduce([self.opt_exp_act_list[self.start_exp_ind+2], self.opt_exp_act_list[self.start_exp_ind+3]]).tolist()
        residual = self.downsample(x)
        # bfp quantize both tensors for the shortcut using the max exponent list;
        # since they have the same exp list, no need for realignment
        # residual = BFPActivation.transform_activation_online(residual, self.exp_bit,
        #     self.mantisa_bit, self.opt_exp_act_list[self.start_exp_ind+3])
        #out = BFPActivation.transform_activation_offline(out, self.exp_bit, self.mantisa_bit, max_exp_act_list)
    # else:
    #     bfp quantize both tensors for the shortcut using the third exponent list
    #     residual = BFPActivation.transform_activation_online(residual, self.exp_bit, self.mantisa_bit, self.opt_exp_act_list[self.start_exp_ind+2])

    # Get the exponent from out
    out_exp = Utils.find_exponent(out, self.exp_bit)
    out_exp = Utils.find_max_exponent(out_exp, quant_dim=len(out.shape) - 1)
    out_exp = Utils.find_max_exponent(out_exp, quant_dim=len(out.shape) - 2)
    out_exp = Utils.find_max_exponent(out_exp, quant_dim=0)
    out_exp = out_exp.int().cpu().data.tolist()
    # Get the exponent from the input
    in_exp = Utils.find_exponent(residual, self.exp_bit)
    in_exp = Utils.find_max_exponent(in_exp, quant_dim=len(residual.shape) - 1)
    in_exp = Utils.find_max_exponent(in_exp, quant_dim=len(residual.shape) - 2)
    in_exp = Utils.find_max_exponent(in_exp, quant_dim=0)
    in_exp = in_exp.int().cpu().data.tolist()
    # Quantize according to the max exponent
    max_exp = np.maximum.reduce([out_exp, in_exp]).tolist()
    residual = BFPActivation.transform_activation_offline(
        residual, self.exp_bit, self.mantisa_bit, max_exp)
    out = BFPActivation.transform_activation_offline(
        out, self.exp_bit, self.mantisa_bit, max_exp)
    out += residual
    out = self.relu(out)
    return out
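# Illustrative, self-contained sketch of the shared-exponent idea behind the
# shortcut handling above, written with plain NumPy. It does not reproduce the
# project's Utils/BFPActivation API; the names and bit widths here are assumptions.
import numpy as np

def block_exponent(x):
    # Exponent of the largest magnitude in the block.
    return int(np.floor(np.log2(np.max(np.abs(x)) + 1e-30)))

def quantize_to_exponent(x, exp, mantissa_bits):
    # Round each mantissa to `mantissa_bits` bits at the shared block exponent.
    scale = 2.0 ** (exp - (mantissa_bits - 1))
    return np.round(x / scale) * scale

main_branch = np.random.randn(8).astype(np.float32)
shortcut = np.random.randn(8).astype(np.float32) * 4.0
# Both operands are re-quantized to the larger of their two block exponents
# before the residual addition, mirroring the max_exp logic above.
shared_exp = max(block_exponent(main_branch), block_exponent(shortcut))
main_q = quantize_to_exponent(main_branch, shared_exp, mantissa_bits=8)
shortcut_q = quantize_to_exponent(shortcut, shared_exp, mantissa_bits=8)
out = main_q + shortcut_q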
def run(args):
    domains = map(lambda domain: '-d ' + domain, args.domains)
    domains = ' '.join(domains)
    Logger.info('obtain domains: ' + domains)
    deploy_hook = ('--deploy-hook "python ' + deploy_path + '"'
                   if Utils.is_enable_deployment() else '')
    cert_name = '--cert-name ' + args.cert if args.cert else ''
    force_renewal = '--force-renewal' if args.force else ''
    challenge_alias = '--challenge-alias ' + args.alias if args.alias else ''
    certbot_cmd = certbot_cmd_template % {
        'email': Config['base']['email'],
        'cert_name': cert_name,
        'force_renewal': force_renewal,
        'manual_path': manual_path,
        'dns': args.dns,
        'deploy_hook': deploy_hook,
        'domains': domains,
        'challenge_alias': challenge_alias
    }
    Logger.info('certbot obtain: ' + certbot_cmd)
    os.system(certbot_cmd)
def main(self):
    """
    Main entry point for generating the test report.
    The report is built from the status yaml files.
    :return:
    """
    import GetHtml
    self.__analyze_log()
    result = self.__yaml_file(self.all_result_path, '.yaml')
    lst = []
    for case_name, confirm_status in result.items():
        case_name = str(case_name).split('.')[0]
        case_result = self.__open_yaml(confirm_status)
        case_img = self.__confirm_file(
            str(confirm_status).replace('status', 'img').replace('yaml', 'png'))
        case_per = self.__confirm_file(
            str(confirm_status).replace('status', 'per').replace('yaml', 'png'))
        case_log = self.__confirm_file(
            str(confirm_status).replace('status', 'log').replace('yaml', 'log'))
        case_filter = self.__confirm_file(
            str(confirm_status).replace('status', 'log').replace(
                'yaml', 'log').replace(case_name, case_name + 'filter'))
        if case_per is None:
            # Fall back to the error image
            ini = U.ConfigIni()
            case_per = ini.get_ini('test_case', 'error_img')
        lst.append(
            GetHtml.get_html_tr(case_name, case_result, case_img, case_per,
                                case_log, case_filter))
    GetHtml.get_html(''.join(lst), self.__app_info(), self.__device_info(),
                     self.__test_case_execution_status(), self.all_result_path)
def main(self):
    """
    Start two threads:
    Thread 1: install the application
    Thread 2: tap any clickable buttons found on the current page
    :return:
    """
    ini = U.ConfigIni()
    install_file = ini.get_ini('test_install_path', 'path')
    package_name = ini.get_ini('test_package_name', 'package_name')
    threads = []
    click_button = threading.Thread(target=self.tap_all, args=())
    threads.append(click_button)
    install_app = threading.Thread(
        target=self.__install_app, args=(package_name, install_file))
    threads.append(install_app)
    process_list = range(len(threads))
    for i in process_list:
        threads[i].start()
    for i in process_list:
        threads[i].join()
    self.adb.shell('"rm -r /data/local/tmp/*.xml"')
def test_Helper_functions_2(self):
    msg = 'blah\n#TSI_foo NONE\nblah blah'
    param = Utils.extract_number(msg, "foo")
    self.assertEqual(-1, param)
    msg = 'blah\n#TSI_foo 123\nblah blah'
    param = Utils.extract_number(msg, "foo")
    self.assertEqual(123, param)
    msg = 'blah\nblah blah\n'
    param = Utils.extract_number(msg, "foo")
    self.assertEqual(-1, param)
    msg = 'blah\n#TSI_foo NONE\nblah blah\n'
    param = Utils.extract_number(msg, "foo")
    self.assertEqual(-1, param)
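# Utils.extract_number is not defined in this snippet. A hypothetical helper
# consistent with the assertions above (the real implementation may differ):
# it looks for a "#TSI_<name> <value>" line and returns the integer value,
# or -1 when the line is missing or the value is not numeric.
def extract_number(message, name):
    for line in message.splitlines():
        if line.startswith('#TSI_' + name):
            parts = line.split()
            if len(parts) > 1 and parts[1].isdigit():
                return int(parts[1])
    return -1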
def start_appium(self):
    """
    Start appium.
    p: appium port
    bp: bootstrap port
    :return: the appium port
    """
    aport = random.randint(4700, 4900)
    bpport = random.randint(4700, 4900)
    self.__start_driver(aport, bpport)
    U.Logging.debug('start appium :p %s bp %s device:%s' %
                    (aport, bpport, self.device))
    U.sleep(10)
    return aport
def plot_code_histograms(compiled_f, outdir):
    ret = Utils.Load(compiled_f)
    npr = np.array(ret['n+_risk'])
    cpr = np.array(ret['c+_risk'])
    hydr = np.array(ret['hyd_risk'])
    prr = np.array(ret['PR_risk'])
    for nm, riskarr, color in zip(['N_plus', 'C_plus', 'hyd', 'PR'],
                                  [npr, cpr, hydr, prr],
                                  ['#0d4c7c', '#151515', '#018571', '#660099']):
        for nm1, riskarr1 in zip(['N_plus', 'C_plus', 'hyd', 'PR'],
                                 [npr, cpr, hydr, prr]):
            if nm == nm1:
                locarr = riskarr[1:]
                stan = riskarr[0]
            else:
                locarr = riskarr[1:][tuple([riskarr1[1:] <= riskarr1[0]])]
                stan = riskarr[0]
            _, ax = plt.subplots(1, figsize=(3.5, 2.333), dpi=144)
            ax.hist(locarr, color=color, bins=100, density=True)
            ax.axvline(stan, color='yellow', lw=1)
            ax.axvline(stan, color='k', lw=0.6)
            print('{} given {} {} p={}'.format(
                nm, nm1, stan, sum(locarr <= stan) / len(locarr)))
            plt.savefig(join(
                outdir, 'Code_cost_million_hist_{}_{}.png'.format(nm, nm1)),
                dpi=144)
            plt.close('all')
def __save_sql(self, case_name, device_name, cpu_list, mem_list,
               execution_status):
    sql = U.Asql()
    sql.insert_per(case_name, device_name, cpu_list, mem_list,
                   execution_status)
    sql.close()
def start_appium(self):
    """
    Start appium.
    p: appium port
    bp: bootstrap port
    :return: the appium port
    """
    aport = random.randint(4700, 4900)
    bpport = random.randint(4700, 4900)
    # Launch the appium server
    U.cmd("appium -p %s -bp %s -U %s" % (aport, bpport, self.device))
    U.Logging.debug('start appium :p %s bp %s device:%s' %
                    (aport, bpport, self.device))
    U.sleep(10)
    return aport
def get_device():
    android_devices_list = []
    for device in U.cmd('adb devices').stdout.readlines():
        if 'device' in device and 'devices' not in device:
            device = device.split('\t')[0]
            android_devices_list.append(device)
    return android_devices_list
def config_initialize():
    ini = U.ConfigIni()
    ini.set_ini('test_device', 'device',
                project_path + '/data/device_info.yaml')
    ini.set_ini('test_info', 'info',
                project_path + '/data/appium_parameter.yaml')
    ini.set_ini('test_case', 'case_xlsx', project_path + '/data/keyword.xlsx')
    ini.set_ini('test_case', 'report_file', project_path + '/report')
def __init__(self, log_file_name):
    """Initialize logger variables."""
    # Log to the temp dir if it is writable; otherwise fall back to the current dir
    log_dir = tempfile.gettempdir()
    if not Utils.dir_is_writable(log_dir):
        log_dir = os.path.abspath(os.getcwd())
    self.log_file_path = os.path.join(log_dir, log_file_name)
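# Utils.dir_is_writable is external to this snippet. A hypothetical stand-in,
# shown only to make the fallback above concrete; the project's helper may differ.
import os

def dir_is_writable(path):
    # True when the path is an existing directory the current user can write to.
    return os.path.isdir(path) and os.access(path, os.W_OK)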
def case_yaml_file():
    """
    :return: the list of yaml test cases for the current device
    """
    ini = U.ConfigIni()
    yaml_path = ini.get_ini('test_case', 'case')
    return GetFilePath.all_file_path(yaml_path, '.yaml')
def save_report_html(pickle_local_path, artifact_bucket, artifact_folder):
    import visualizacao.frontend as frontend
    import matplotlib.pyplot as plt
    import base64
    from io import BytesIO

    t4 = frontend.Production(pickle_local_path)
    t4.load_data(None)

    t4.train_eval(6, 'elasticidade')
    fig = plt.gcf()
    tmpfile = BytesIO()
    fig.savefig(tmpfile, format='png')
    fig_1 = base64.b64encode(tmpfile.getvalue()).decode('utf-8')

    t4.train_eval(6, 'AUC')
    fig = plt.gcf()
    tmpfile = BytesIO()
    fig.savefig(tmpfile, format='png')
    fig_2 = base64.b64encode(tmpfile.getvalue()).decode('utf-8')

    t4.real_predict(6)
    fig = plt.gcf()
    tmpfile = BytesIO()
    fig.savefig(tmpfile, format='png')
    fig_3 = base64.b64encode(tmpfile.getvalue()).decode('utf-8')

    html = """
    <img src='data:image/png;base64,{0}'>
    <img src='data:image/png;base64,{1}'>
    <img src='data:image/png;base64,{2}'>
    """.format(fig_1, fig_2, fig_3)
    with open('report.html', 'w') as f:
        f.write(html)

    file_path = 'gs://{0}/{1}/modelagem/prd_report_{2}.html'.format(
        artifact_bucket, artifact_folder,
        datetime.datetime.now().strftime("%Y%m%d%H%M%f"))
    Utils.upload_file_to_gcp(local_path='./', local_filename='report.html',
                             bucket_name=artifact_bucket, gcs_path=file_path)
    return file_path
def clean_device_yaml():
    ini = U.ConfigIni()
    device_yaml = ini.get_ini('test_device', 'device')
    if os.path.getsize(device_yaml):
        with open(device_yaml, 'w') as f:
            f.truncate()
    return
import sys
import os

mydir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(mydir)
sys.path.append(parent_dir)

from lib import Cliqr
from lib import CliqrConsts
from lib import Utils

host = 'sgtest.cliqr.com'
port = 443
user = '******'
access_key = '6B3145FC9799DF5A'
api_version = 'v1'

api = Cliqr(host, port, user, access_key, api_version)
Utils.print_ds(api.get_tenants())