def fetchPatchInfo(self):
    """Locate the CI patch-info file for this build and extract owner/subject."""
    if self.test_type in ['PIT_LITE', 'CIT']:
        # fetch build info
        for path in WIN_CI_BUILD_PATH_LIST:
            dir_path = r'{}/{}/distrib'.format(path, self.build)
            if os.path.isdir(dir_path):
                files = os.listdir(dir_path)
                for fl in files:
                    if fl.startswith('CI') and fl.endswith('.txt'):
                        with open('{}/{}'.format(dir_path, fl), 'r') as f:
                            self.patch_detail = f.read().replace('\n', '</br>')
                        self.artifacts_path = dir_path
                        LOGGER.debug(self.patch_detail)
                        break
            if self.patch_detail:
                break
    elif self.test_type in ['FULL']:
        ''' Needs implementation: how to handle the PV branch? '''
        pass
    try:
        lines = self.patch_detail.split('</br>')
        for line in lines:
            if 'Owner' in line:
                self.owner = line.replace('Owner:', '').strip()
                self.email = self.owner
            if 'Subject' in line:
                self.subject = line.replace('Subject:', '').strip()
    except:
        LOGGER.error(traceback.format_exc())
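# A minimal sketch of the Owner/Subject extraction above, runnable on its own.
# The sample text is hypothetical; only the 'Owner:'/'Subject:' prefixes and the
# '</br>' separator come from the parser itself:
#
#     sample = 'Owner: jane.doe@sample.com</br>Subject: Fix display flicker</br>'
#     owner = subject = ''
#     for line in sample.split('</br>'):
#         if 'Owner' in line:
#             owner = line.replace('Owner:', '').strip()
#         if 'Subject' in line:
#             subject = line.replace('Subject:', '').strip()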
def run(self):
    while True:
        sock, address = self.sock.accept()
        LOGGER.debug('Connection setup!')
        th = threading.Thread(target=self.launchConnection, args=(sock, ))
        th.setDaemon(True)
        th.start()
def moveToTop(config, queue, task_id):
    import threading
    # NOTE: this lock is local to the call; it does not serialize concurrent callers
    lock = threading.Lock()
    lock.acquire()
    rc = 0
    try:
        with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
                g_username, g_password, config["rabbitMQ_address"],
                config["rabbitMQ_port"])) as conn:
            with conn.channel() as channel:
                # dedicated queue would be consumed first
                q = rabbitpy.Queue(channel, queue)
                q.durable = True
                q.declare()
                channel.enable_publisher_confirms()
                _t = []
                _r = ''
                for i in range(0, len(q)):
                    msg = q.get()
                    msg.ack()
                    task = json.loads(msg.body)
                    if task["task_id"] == task_id:
                        _r = msg.body
                    else:
                        _t.append(msg.body)
                if _r:
                    _t.insert(0, _r)
                for i in _t:
                    msg = rabbitpy.Message(channel, i)
                    # Publish the message; with publisher confirms enabled the
                    # return value is a bool True/False
                    if msg.publish("", queue, mandatory=True):
                        LOGGER.debug('Message {} publish confirmed by RabbitMQ'.format(msg.body))
                    else:
                        LOGGER.error('Message {} publish not confirmed by RabbitMQ'.format(msg.body))
                        rc = -1
    except:
        LOGGER.error(traceback.format_exc())
        rc = -1
    finally:
        lock.release()
    return rc


#print queryTask({"rabbitMQ_address":'127.0.0.1', 'rabbitMQ_port':5672}, '127.0.0.1', 'APL', ['PIT', 'CIT'], ['OTM', 'PV'])
#print queryTask1({"rabbitMQ_address":'10.239.111.152', 'rabbitMQ_port':5672},"10.239.132.227", "APL",["CIT", "PIT"], ["OTM", "PV"])
#a = getDedicatedTaskQueue({"rabbitMQ_address":'10.239.153.126', 'rabbitMQ_port':5672},"10.239.132.227")
#b = getDedicatedTaskQueue_pika({"rabbitMQ_address":'10.239.153.126', 'rabbitMQ_port':5672},"APL_OTM_CIT")
#print(type(b[0]), b)
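# A minimal usage sketch for moveToTop. The broker address and task id below
# are illustrative only; 'APL_OTM_CIT' mirrors the queue name used in the
# commented examples above:
#
#     config = {"rabbitMQ_address": "127.0.0.1", "rabbitMQ_port": 5672}
#     if moveToTop(config, "APL_OTM_CIT", "task-0001") == 0:
#         LOGGER.info("task moved to the head of the queue")
#     else:
#         LOGGER.error("failed to reorder the queue")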
def send_report(report_str):
    """Send contents of report_str to server"""
    report_error = None
    reportws = "ws://mbt.tm.sample.com/stream/testruns"
    print "online reporting %s kB to %s..." % (len(report_str) / 1024, reportws)
    try:
        ws = websocket.create_connection(
            reportws,
            header=["Sec-WebSocket-Protocol:logmongo"],
            timeout=10)
        if json.loads(ws.recv())["retval"] != 0:
            raise Exception("Nonzero retval from %s." % (reportws, ))
        LOGGER.debug("online reporting connected")
    except Exception, e:
        report_error = "Connection error: (%s) %s" % (type(e), e)
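# A minimal usage sketch: fill one result record (mirroring the json_template
# defined further below) and hand the serialized string to send_report. All
# field values here are placeholders:
#
#     report = json.loads(json.dumps(json_template))   # cheap deep copy of the template
#     report["run_id"] = "run-0001"
#     report["application"]["name"] = "sample_case"
#     report["action_verdicts"]["final"] = "PASS"
#     send_report(json.dumps(report))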
def parse(self, os='windows'):
    """
    Note: this log-analysis parser needs to be adapted by each user
    to their own log structure!
    """
    # step 1: get the default test log
    if os == 'linux':
        try:
            cmd_line = '~/sw_val/scripts/tools/print_err_msg.sh {}'.format(self.case_id)
            res, log = self.dut.staf_handle.trigger_cmdline(self.dut.dut_ip, cmd_line)
            if res:
                self.error_log = log.resultContext.getRootObject()['fileList'][0]['data'].split('\n')[:-1]
            else:
                LOGGER.info("cannot get testid.log, returning directly")
                self.error_log = []
                return -1
        except:
            LOGGER.error(traceback.format_exc())
            return -1
    else:
        try:
            log = self.dut.staf_handle.getFile(self.dut.dut_ip,
                                               '{}/{}.log'.format(self.log_base_path, self.case_id))
            LOGGER.debug(log)
            if log:
                log_list = log.split('\r')
                for i in range(0, len(log_list)):
                    if 'TestSet:' in log_list[i]:
                        try:
                            pt = re.compile(r'TestSet:(.*)]')
                            res = re.search(pt, log_list[i])
                            self.additional_info = res.group(1)
                        except Exception, e:
                            LOGGER.error(str(e))
                    if 'Log file:' in log_list[i]:
                        try:
                            pt = re.compile(r'Log file:(.*)]')
                            res = re.search(pt, log_list[i])
                            self.extra_check_log = res.group(1)
                        except Exception, e:
                            LOGGER.error(str(e))
                    if '[ERROR]' in str(log_list[i]):
                        self.error_log.append(log_list[i].split('[ERROR]')[1])
            else:
jt["duration"] = float(case.end) - float(case.start) except: LOGGER.warning(traceback.format_exc()) jt["application"]["name"] = case.name jt["action_verdicts"]["final"] = case.pretty_result try: err_log = '' for run in case.runs(): err_log += '||'.join(['|'.join(_) for _ in run.error_log]) jt["notes"] = [err_log] except: LOGGER.error(traceback.format_exc()) LOGGER.debug(jt) send_report(json.dumps(jt)) json_template = { "hw_serial": "placeholder", "hw_type": "placeholder", "sw_version": "placeholder", "run_id": "placeholder", "timestamp": 0, "duration": 0, "application": { "name": "placeholder" }, "action_verdicts": { "final": "placeholder"
                log = self.dut.staf_handle.getFile(self.dut.dut_ip,
                                                   '{}/{}._AppLog.txt'.format(self.log_base_path, self.case_id))
                if log:
                    log_list = log.split('\r')
                    last_line = ''
                    for i in range(0, len(log_list)):
                        if '[ERROR]' in str(log_list[i]):
                            self.error_log.append(log_list[i].split('[ERROR]')[1])
                        if len(log_list[i]) > 2:
                            last_line = log_list[i]
                    # flag the last non-trivial segment of the last line as a potential failure
                    for j in range(1, len(last_line.split(']')) + 1):
                        if len(last_line.split(']')[-j]) > 2:
                            self.error_log.append('[Potential FAILED]' + last_line.split(']')[-j])
                            break
                else:
                    LOGGER.debug("Not metro case...")
        except Exception, e:
            LOGGER.error(traceback.format_exc())
        """ check additional log files if needed """
        try:
            log = self.dut.staf_handle.getFile(self.dut.dut_ip, self.extra_check_log)
            if log:
                log_list = log.split('\r')
                for i in range(0, len(log_list)):
                    if '[ERROR]' in str(log_list[i]):
                        self.error_log.append(log_list[i].split('[ERROR]')[1])
                """ Always record the last step
def sendEmailReport(test_result, base_path, html_path, mail_list=None, excel=None, std_att=None):
    try:
        html_path = '{}/html/{}'.format(base_path, html_path)
        base_path += '/template'
        msg = MIMEMultipart()
        with open(html_path) as f:
            html = f.read()
        html_part = MIMEText(html, 'html')
        html_part.set_charset('utf-8')
        msg.attach(html_part)

        # Inline images referenced by Content-ID from the HTML body
        inline_images = [
            ('header.png', 'header'),
            ('footer.png', 'footer'),
            ('device_icon.png', 'dut'),
            ('driver_icon.png', 'driver'),
            ('link_icon.png', 'link'),
            ('log_icon.png', 'log'),
            ('warning_icon.png', 'warning'),
            ('user_icon.png', 'user'),
            ('wiki_icon.png', 'wiki'),
            ('patch_icon.png', 'patch'),
        ]
        for img_name, cid in inline_images:
            with open('{}/imgs/{}'.format(base_path, img_name), 'rb') as fp:
                msgImage = MIMEImage(fp.read())
            msgImage.add_header('Content-ID', '<{}>'.format(cid))
            msg.attach(msgImage)

        if excel:
            att1 = MIMEText(open(excel, 'rb').read(), 'base64', 'gb2312')
            att1["Content-Type"] = 'application/octet-stream'
            att1["Content-Disposition"] = 'attachment; filename="result.xls"'
            msg.attach(att1)
        if std_att:
            msg.attach(std_att)
        mailto = mail_list if mail_list else ["*****@*****.**"]
        msg['to'] = ','.join(mailto)
        msg['from'] = '*****@*****.**'
        msg['subject'] = '{}_{}_{}_{}'.format(test_result.build, test_result.platform,
                                              test_result.scenario_name, test_result.pass_rate)
        try:
            server = smtplib.SMTP('mail.sample.com')
            mail_results = server.sendmail(msg['from'], mailto, msg.as_string())
            server.quit()
            LOGGER.debug(str(mail_results))
        except Exception, e:
            LOGGER.error(str(traceback.format_exc()))
    except:
        LOGGER.error(traceback.format_exc())
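# A minimal usage sketch (paths and recipient are illustrative; the function
# itself prepends '<base_path>/html/' to html_path and loads images from
# '<base_path>/template/imgs/'):
#
#     sendEmailReport(test_result,
#                     base_path='/opt/report',
#                     html_path='result_{}.html'.format(test_result.build),
#                     mail_list=['team@sample.com'])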
                    test_result.patch,
                    'error_log': {
                        'fail_type': 'True',
                        'driver_log': '',  # optional
                        'fw_log': '',  # optional
                        'crash_log': '',  # optional
                        'error_log': test_result.cases[i].error_log,  # optional
                        'exec_time': 0.0,
                        'rerun_time': '0.0'
                    }
                }
                case_detail.append(case_item)
            else:
                LOGGER.debug("Bypass {} env case".format(
                    test_result.cases[i].name))
    else:
        LOGGER.error("Failed to query case details from dashboard!")
        return
    data.update({"guid": guid, "case_detail": case_detail})
    data.update(csrf_token)
    LOGGER.debug(data)
    LOGGER.info('data: {0}'.format(data))
    response = session.post(URL, json=data)
    if response.status_code != 200 or 'success' not in str(response.content).lower():
        response = session.put(URL, json=data)
    LOGGER.info('upload results: {} {}'.format(response.status_code, response.content))
def launchConnection(self, sock):
    """Handle one control connection: read a JSON request and dispatch on its 'option' field."""
    try:
        sock.settimeout(2)
        buf = ''
        while True:
            try:
                buf_frag = sock.recv(1024)
                buf += buf_frag
                #LOGGER.debug(buf)
                if len(buf_frag) == 0:
                    break
            except Exception, e:
                LOGGER.debug(str(traceback.format_exc()))
                break
        if buf:
            LOGGER.debug(buf)
            try:
                data = json.loads(buf)
                if data["option"] == 'QUERY':
                    LOGGER.debug("Action: Query Status")
                    ''' {"option" : "QUERY", "duts":[], "status":[]} '''
                    status = []
                    for dut in data["duts"]:
                        bFound = False
                        for g in self.server.groups():
                            d = g.getDutbyName(dut)
                            if d:
                                d.refreshStatus()
                                status.append(d.status)
                                break
                        else:
                            status.append(DUT.STATUS_UNKNOWN)
                    data["status"] = status
                    sock.send(json.dumps(data))
                if data["option"] == 'QUEUE':
                    LOGGER.debug("Action: Query Queue")
                    ''' {"option" : "QUEUE", "duts":[], "pools":[], "queue":[]} '''
                    queue = []
                    for dut in data.get('duts', []):
                        tmp = []
                        for g in self.server.groups():
                            d = g.getDutbyName(dut)
                            if d and d.task_runner:
                                _t = d.task_runner.getCurrentTask()
                                tmp.append(_t)
                                tmp.append(d.getDedicatedTaskQueue())
                                break
                        queue.append(tmp)
                    for q in data.get('pools', []):
                        queue.append(rabbitMQ.getDedicatedTaskQueue_pika(self.server.server_config, q))
                    data["queue"] = queue
                    sock.send(json.dumps(data))
                if data["option"] == 'REORDER':
                    LOGGER.debug("Action: REORDER Queue")
                    ''' {"option" : "REORDER", "queue":"", "task_id":""} '''
                    data["rc"] = rabbitMQ.moveToTop_pika(self.server.server_config, data["queue"], data["task_id"])
                    sock.send(json.dumps(data))
                if data["option"] == 'LATE_LOCK':
                    LOGGER.debug("Action: LATE_LOCK DUT triggered")
                    # {"option": "LATE_LOCK", "lock": True, "dut" : "APL_TEST"}
                    if 'dut' not in data:
                        data['rc'] = -2
                        data['message'] = "key word dut missing!"
                        sock.send(json.dumps(data))
                    else:
                        for g in self.server.groups():
                            d = g.getDutbyName(data["dut"])
                            if d:
                                d.task_runner.scenario_level_pause = data.get('lock', False)
                                data['rc'] = 0
                                data['message'] = "Action Triggered!"
                                sock.send(json.dumps(data))
                                break
                        else:
                            data['rc'] = -1
                            data['message'] = "Selected DUT not found!"
                            sock.send(json.dumps(data))
                if data["option"] == 'CANCEL':
                    LOGGER.debug("Action: CANCEL TASK")
                    ''' {"option" : "CANCEL", "dut": "", "queue":"", "task_id":""} '''
                    if data.has_key("dut"):
                        # 1. the task might be already triggered
                        d = None
                        for g in self.server.groups():
                            d = g.getDutbyName(data["dut"])
                            if d:
                                break
                        if d and d.task_variable:
                            _cur_task = d.task_variable
                            if _cur_task["task_id"] == data["task_id"]:
                                LOGGER.info("Cancelling running task with id {} on DUT {}".format(
                                    data["task_id"], data["dut"]))
                                d.task_runner.b_clear = True
                                d.task_variable["killer"] = data.get("killer", "NA")
                                data["rc"] = 0
                                sock.send(json.dumps(data))
                            else:
                                data["rc"] = -1
                                data["message"] = "No such task!"
                                sock.send(json.dumps(data))
                        else:
                            data["rc"] = -2
                            data["message"] = "No such DUT!"
                            sock.send(json.dumps(data))
                    else:
                        # 2. the task is still in queue
                        #data["rc"] = rabbitMQ.cancelTask(self.server.server_config, data["queue"], data["task_id"])
                        data["rc"] = rabbitMQ.cancelTask_pika(self.server.server_config, data["queue"], data["task_id"])
                        sock.send(json.dumps(data))
                if data["option"] == "DETAIL":
                    LOGGER.debug("Action: QUERY DETAIL")
                    try:
                        if data.has_key("dut"):
                            d = None
                            for g in self.server.groups():
                                d = g.getDutbyName(data["dut"])
                                if d:
                                    break
                            else:
                                data["rc"] = -1
                                data["message"] = "No matched dut!"
                                sock.send(json.dumps(data))
                            if d.task_runner:
                                _t = d.task_runner.getCurrentTaskDetails(data.get("case_name", None))
                                data["result"] = _t
                                data["rc"] = 0
                                data["message"] = "query successful"
                                sock.send(json.dumps(data))
                        else:
                            data["rc"] = -2
                            data["message"] = "key dut missing!"
                            sock.send(json.dumps(data))
                    except:
                        data["rc"] = -3
                        data["message"] = traceback.format_exc()
                        sock.send(json.dumps(data))
                if data["option"] == "LOCK":
                    LOGGER.debug("Action: LOCK/UNLOCK DUT INFO")
                    # {"option": "LOCK", "lock": True, "dut" : "APL_TEST"}
                    if 'dut' not in data:
                        data['rc'] = -2
                        data['message'] = "key word dut missing!"
                        sock.send(json.dumps(data))
                    else:
                        for g in self.server.groups():
                            d = g.getDutbyName(data["dut"])
                            if d:
                                if data.get('lock', False):
                                    d.pauseRunner()
                                else:
                                    d.startRunner()
                                data['rc'] = 0
                                data['message'] = "Action Triggered!"
                                sock.send(json.dumps(data))
                                break
                        else:
                            data['rc'] = -1
                            data['message'] = "Selected DUT not found!"
                            sock.send(json.dumps(data))
                if data["option"] == "UPDATE":
                    LOGGER.debug("Action: Update DUT INFO")
                    # {"option": "UPDATE", "dut": "APL_TEST", "dut_name":"", "dut_ip":"",
                    #  "test_type" : ["PIT"], "platform":"","subversion":"","project_code":""}
                    if 'dut' not in data:
                        data['rc'] = -2
                        data['message'] = "key word dut missing!"
                        sock.send(json.dumps(data))
                    else:
                        for g in self.server.groups():
                            d = g.getDutbyName(data["dut"])
                            if d:
                                d.dut_name = data.get("dut_name", d.dut_name)
                                d.dut_ip = data.get("dut_ip", d.dut_ip)
                                d.test_type = data.get("test_type", d.test_type)
                                d.branch = data.get("branch", d.branch)
                                d.project_code = data.get("project_code", d.project_code)
                                d.platform = data.get("platform", d.platform)
                                d.sub_platform = data.get("sub_platform", d.sub_platform)
                                data['rc'] = 0
                                sock.send(json.dumps(data))
                                break
                        else:
                            data['rc'] = -1
                            data['message'] = "Selected DUT not found!"
                            sock.send(json.dumps(data))
                if data["option"] == "PAAS":
                    LOGGER.debug("Action: PAAS REQUIREMENT")
                    try:
                        if data.has_key("dut"):
                            d = None
                            for g in self.server.groups():
                                d = g.getDutbyName(data["dut"])
                                if d:
                                    break
                            else:
                                data["rc"] = -1
                                data["message"] = "No matched dut!"
                                sock.send(json.dumps(data))
                            scenario = Scenario(d.dut_id)
                            if scenario.load(data):
                                d.addTestScenarioObject(scenario)
                            else:
                                data["rc"] = -4
                                data["message"] = "failed to load cases from database"
                                sock.send(json.dumps(data))
                            data["rc"] = 0
                            data["message"] = "PaaS scenario generated successfully"
                            sock.send(json.dumps(data))
                        else:
                            data["rc"] = -2
                            data["message"] = "key dut missing!"
                            sock.send(json.dumps(data))
                    except:
                        data["rc"] = -3
                        data["message"] = traceback.format_exc()
                        sock.send(json.dumps(data))
            except:
                sock.send(traceback.format_exc())
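# A minimal client sketch for the JSON control protocol handled above. The
# server host/port are illustrative assumptions; the request and response
# shape follow the QUERY handler ({"option": "QUERY", "duts": [...]} is
# answered with a populated "status" list):
#
#     import socket, json
#     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#     s.connect(('127.0.0.1', 9999))      # control-server address is an assumption
#     s.send(json.dumps({"option": "QUERY", "duts": ["APL_TEST"]}))
#     s.shutdown(socket.SHUT_WR)          # launchConnection reads until recv() returns ''
#     print json.loads(s.recv(4096))["status"]
#     s.close()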