def process_results(self, *args):
    #set test status completed
    #call stop monitors
    #send prepare results command to exec
    #set test status collating
    #copy results files from exec
    #copy files from each mon
    #set test status finished
    #remove test from running Q
    t = args[1]
    status = args[2]
    serialize_str = t.serialize()
    t2 = testobj.testDefn()
    t2.deserialize(serialize_str)
    try:
        if (t.testobj.TestInputData.testid != t2.testobj.TestInputData.testid):
            lctx.error("testobj not same")
            raise Exception("Test objects do not match : ",
                            t2.testobj.TestInputData.testid)

        ip = t.testobj.TestInputData.exechostname
        lctx.debug(status)
        if status == "completed":
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct("DAYTONA_PREPARE_RESULTS", serialize_str))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error(retsend)
                raise Exception(
                    "Daytona command DAYTONA_PREPARE_RESULTS failed : ",
                    t2.testobj.TestInputData.testid)

            #get statistics hosts
            for s in t.testobj.TestInputData.stathostname.split(','):
                #stop stats monitors on req hosts
                #any host that blocks stop monitor blocks the scheduling for the FW
                p = 0
                if s.strip() == self.HOST:
                    p = self.PORT
                else:
                    p = self.CPORT
                retsend = self.cl.send(
                    s.strip(), p,
                    self.ev.construct("DAYTONA_STOP_MONITOR", serialize_str))
                lctx.debug(retsend)
                if retsend.split(",")[1] != "SUCCESS":
                    lctx.error(retsend)
                    raise Exception(
                        "Daytona command DAYTONA_STOP_MONITOR failed : ",
                        t2.testobj.TestInputData.testid)

            t.updateStatus("completed", "collating")

            #todo : avoid send client its own ip
            lctx.debug("SENDING results.tgz download to : " + ip + ":" +
                       str(self.CPORT))
            results_file = cfg.daytona_agent_root + "/" + \
                t.testobj.TestInputData.frameworkname + "/" + \
                str(t.testobj.TestInputData.testid) + "/results/" + "results.tgz"
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct(
                    "DAYTONA_FILE_DOWNLOAD",
                    str(self.HOST) + "," + str(self.PORT) + "," +
                    results_file + "," + serialize_str + "," + "RESULTS" +
                    "," + ip))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error(retsend)
                raise Exception(
                    "Daytona command DAYTONA_FILE_DOWNLOAD failed :",
                    t2.testobj.TestInputData.testid)

            results_file = cfg.daytona_agent_root + "/" + \
                t.testobj.TestInputData.frameworkname + "/" + \
                str(t.testobj.TestInputData.testid) + "/results/" + "results_stats.tgz"
            for s in t.testobj.TestInputData.stathostname.split(','):
                lctx.info("Downloading stats from STATS self.HOSTS : " + s)
                lctx.info(s)
                #stop stats monitors on req hosts
                #any host that blocks stop monitor blocks the scheduling for the FW
                p = 0
                if s.strip() == self.HOST:
                    p = self.PORT
                else:
                    p = self.CPORT
                lctx.info("Sending DOWNLOAD file to :" + s.strip() + ":" +
                          str(p) + "File :" + results_file +
                          "(upload to this host port:)" + str(self.HOST) +
                          "," + str(self.PORT))
                retsend = self.cl.send(
                    s.strip(), p,
                    self.ev.construct(
                        "DAYTONA_FILE_DOWNLOAD",
                        str(self.HOST) + "," + str(self.PORT) + "," +
                        results_file + "," + serialize_str + "," + "STATS" +
                        "," + s.strip()))
                lctx.debug(retsend)
                if retsend.split(",")[1] != "SUCCESS":
                    lctx.error("Error downloading " + results_file +
                               " From " + s.strip() + ":" + retsend)
                    raise Exception(
                        "Daytona command DAYTONA_FILE_DOWNLOAD failed :",
                        t2.testobj.TestInputData.testid)

            try:
                lctx.debug(t2.testobj.TestInputData.exec_results_path +
                           "results.tgz")
                lctx.debug(t2.testobj.TestInputData.exec_results_path +
                           "/../")
                common.untarfile(
                    t2.testobj.TestInputData.exec_results_path +
                    "/results.tgz",
                    t2.testobj.TestInputData.exec_results_path + "/../")
                for s in t2.testobj.TestInputData.stats_results_path:
                    lctx.debug(
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "results_stats.tgz")
                    lctx.debug(
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/../")
                    common.untarfile(
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/results_stats.tgz",
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/../")
            except Exception as e:
                lctx.error("Error in untar results")
                lctx.error(e)
                raise Exception("test result processing error",
                                t2.testobj.TestInputData.testid)

            #todo : invoke other scripts to transform results and update DB
    except Exception as e:
        lctx.error("Error in processing results")
        lctx.error(e)
        t.updateStatus("collating", "failed")

    try:
        retsend = self.cl.send(
            ip, self.CPORT,
            self.ev.construct("DAYTONA_CLEANUP_TEST", serialize_str))
        lctx.debug("DAYTONA_CLEANUP_TEST:" + str(retsend))
        retsend = self.cl.send(
            ip, self.CPORT,
            self.ev.construct("DAYTONA_FINISH_TEST", serialize_str))
        lctx.debug(retsend)
        for s in t.testobj.TestInputData.stathostname.split(','):
            p = 0
            if s.strip() == self.HOST:
                p = self.PORT
            else:
                p = self.CPORT
            lctx.debug("self.HOST : " + s.strip())
            lctx.debug("PORT to send CLEANUP & FINISH : " + str(p))
            retsend = self.cl.send(
                s.strip(), p,
                self.ev.construct("DAYTONA_CLEANUP_TEST", serialize_str))
            lctx.debug(retsend)
            retsend = self.cl.send(
                s.strip(), p,
                self.ev.construct("DAYTONA_FINISH_TEST", serialize_str))
            lctx.debug(retsend)
    except Exception as e:
        lctx.error("Error in processing results")
        t.updateStatus("collating", "failed")

    t.updateStatus("collating", "finished clean")
    now = time.time()
    tstr = str(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now)))
    t.updateEndTime(tstr)

    f = open(t2.testobj.TestInputData.exec_results_path + "/results.csv")
    to = t.testobj.TestInputData.email
    reader = csv.reader(f)
    htmlfile = ""
    rownum = 0
    htmlfile = '<table cellpadding="10">'
    for row in reader:
        if rownum == 0:
            htmlfile = htmlfile + '<tr>'
            for column in row:
                htmlfile = htmlfile + '<th width="70%">' + column + '</th>'
            htmlfile = htmlfile + '</tr>'
        else:
            htmlfile = htmlfile + '<tr>'
            for column in row:
                htmlfile = htmlfile + '<td width="70%">' + column + '</td>'
            htmlfile = htmlfile + '</tr>'
        rownum += 1
    htmlfile = htmlfile + '</table>'
    f.close()

    subject = "Test {} completed successfully".format(
        t.testobj.TestInputData.testid)
    mail_content = "<BR> Test id : {} \
    <BR> Framework : {} \
    <BR> Title : {} <BR>".format(t.testobj.TestInputData.testid,
                                 t.testobj.TestInputData.frameworkname,
                                 t.testobj.TestInputData.title)
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>Purpose : {} <BR> \
    <BR> Creation time : {} \
    <BR>Start time : {} \
    <BR>End time : {} <BR>".format(t.testobj.TestInputData.purpose,
                                   t.testobj.TestInputData.creation_time,
                                   t.testobj.TestInputData.start_time,
                                   t.testobj.TestInputData.end_time)
    mail_content = mail_content + "<BR>Your test executed successfully. \
    <BR>Results (Contents of results.csv)<BR>"
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>" + htmlfile + "<BR>"
    try:
        common.send_email(subject, to, mail_content, "", lctx, cfg.email_user,
                          cfg.email_server, cfg.smtp_server, cfg.smtp_port)
    except:
        lctx.error("Mail send error")
    return "SUCCESS"
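# --- Illustrative sketch (not part of the original module) -----------------
# Every agent reply handled above appears to be a comma-separated string whose
# second field signals success, which is why the code repeatedly checks
# retsend.split(",")[1] != "SUCCESS". A minimal helper capturing that assumed
# convention could look like the following; the name `reply_succeeded` is
# hypothetical and does not exist in the Daytona code base.
def reply_succeeded(retsend):
    """Return True if an agent reply of the assumed form '<cmd>,SUCCESS,...' indicates success."""
    parts = retsend.split(",")
    return len(parts) > 1 and parts[1] == "SUCCESS"
# Example (hypothetical): if not reply_succeeded(retsend): raise Exception(...)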
def process_results(self, *args):
    # set test status completed
    # call stop monitors
    # send prepare results command to exec
    # set test status collating
    # copy results files from exec
    # copy files from each mon
    # set test status finished
    # remove test from running Q
    t = args[1]
    status = args[2]
    serialize_str = t.serialize()
    t2 = testobj.testDefn()
    t2.deserialize(serialize_str)
    try:
        if t.testobj.TestInputData.testid != t2.testobj.TestInputData.testid:
            lctx.error("testobj not same")
            raise Exception("Test objects do not match : ",
                            t2.testobj.TestInputData.testid)

        ip = t.testobj.TestInputData.exechostname
        lctx.debug(status)
        if status in ["completed", "timeout"]:
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct("DAYTONA_STOP_MONITOR",
                                  str(t2.testobj.TestInputData.testid)))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error(retsend)
                raise Exception(
                    "Daytona command DAYTONA_STOP_MONITOR failed : ",
                    t2.testobj.TestInputData.testid)

            # get statistics hosts
            for s in t.testobj.TestInputData.stathostname.split(','):
                # stop stats monitors on req hosts
                # any host that blocks stop monitor blocks the scheduling for the FW
                if len(s.strip()) == 0:
                    break
                p = self.CPORT
                try:
                    retsend = self.cl.send(
                        s.strip(), p,
                        self.ev.construct(
                            "DAYTONA_STOP_MONITOR",
                            str(t2.testobj.TestInputData.testid)))
                except:
                    continue
                lctx.debug(retsend)
                if retsend.split(",")[1] != "SUCCESS":
                    lctx.error(retsend)
                    raise Exception(
                        "Daytona command DAYTONA_STOP_MONITOR failed : ",
                        t2.testobj.TestInputData.testid)

            if t.testobj.TestInputData.timeout_flag:
                t.updateStatus("timeout", "collating")
            else:
                t.updateStatus("completed", "collating")

            ptop = process_top.ProcessTop(LOG.getLogger("processTop", "DH"))

            # todo : avoid send client its own ip
            lctx.info("SENDING results.tgz download to : " + ip + ":" +
                      str(self.CPORT))
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct("DAYTONA_FILE_DOWNLOAD",
                                  str(t2.testobj.TestInputData.testid)))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error(retsend)
                raise Exception(
                    "Daytona command DAYTONA_FILE_DOWNLOAD failed :",
                    t2.testobj.TestInputData.testid)

            try:
                lctx.debug("Untar file : " +
                           t2.testobj.TestInputData.exec_results_path +
                           "results.tgz to location : " +
                           t2.testobj.TestInputData.exec_results_path +
                           "/../")
                common.untarfile(
                    t2.testobj.TestInputData.exec_results_path +
                    "/results.tgz",
                    t2.testobj.TestInputData.exec_results_path + "/../")
            except Exception as e:
                lctx.error("Error in untar EXEC host results")
                lctx.error(e)
                raise Exception("test result processing error",
                                t2.testobj.TestInputData.testid)

            ptop_ret = ptop.process_top_output(
                t2.testobj.TestInputData.stats_results_path[ip] + "sar/")
            lctx.debug(ptop_ret + " : " +
                       t2.testobj.TestInputData.stats_results_path[ip])

            for s in t.testobj.TestInputData.stathostname.split(','):
                if len(s.strip()) == 0:
                    break
                lctx.info("Downloading stats from STATS self.HOSTS : " + s)
                lctx.info(s)
                # stop stats monitors on req hosts
                # any host that blocks stop monitor blocks the scheduling for the FW
                p = self.CPORT
                lctx.info("Sending results.tgz download to :" + s.strip() +
                          ":" + str(p))
                try:
                    retsend = self.cl.send(
                        s.strip(), p,
                        self.ev.construct(
                            "DAYTONA_FILE_DOWNLOAD",
                            str(t2.testobj.TestInputData.testid)))
                except:
                    continue
                lctx.debug(retsend)
                if retsend.split(",")[1] != "SUCCESS":
                    lctx.error("Error downloading STATS from " + s.strip() +
                               ":" + retsend)
                    raise Exception(
                        "Daytona command DAYTONA_FILE_DOWNLOAD failed :",
                        t2.testobj.TestInputData.testid)

                try:
                    lctx.debug(
                        "Untar file : " +
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "results.tgz to location : " +
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/../")
                    common.untarfile(
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/results.tgz",
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/../")
                except Exception as e:
                    lctx.error("Error in untar STAT host " + s + " results")
                    lctx.error(e)
                    raise Exception("test result processing error",
                                    t2.testobj.TestInputData.testid)

                ptop_ret = ptop.process_top_output(
                    t2.testobj.TestInputData.stats_results_path[s] + "sar/")
                lctx.debug(ptop_ret + " : " +
                           t2.testobj.TestInputData.stats_results_path[s])

            # todo : invoke other scripts to transform results and update DB
    except Exception as e:
        lctx.error("Error in processing results")
        lctx.error(e)
        t.updateStatus("collating", "failed")

    try:
        retsend = self.cl.send(
            ip, self.CPORT,
            self.ev.construct("DAYTONA_FINISH_TEST",
                              str(t2.testobj.TestInputData.testid)))
        lctx.debug(retsend)
        for s in t.testobj.TestInputData.stathostname.split(','):
            if len(s.strip()) == 0:
                break
            p = self.CPORT
            lctx.debug("self.HOST : " + s.strip())
            lctx.debug("PORT to send CLEANUP & FINISH : " + str(p))
            try:
                retsend = self.cl.send(
                    s.strip(), p,
                    self.ev.construct("DAYTONA_FINISH_TEST",
                                      str(t2.testobj.TestInputData.testid)))
            except:
                pass
            lctx.debug(retsend)
    except Exception as e:
        lctx.error("Error in processing results")
        t.updateStatus("collating", "failed")

    if t.testobj.TestInputData.timeout_flag:
        t.updateStatus("collating", "timeout clean")
    else:
        t.updateStatus("collating", "finished clean")

    now = time.time()
    tstr = str(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now)))
    t.updateEndTime(tstr)

    f = None
    try:
        f = open(t2.testobj.TestInputData.exec_results_path + "/results.csv")
    except IOError as e:
        lctx.debug("File results.csv not found")
        pass

    to = t.testobj.TestInputData.email
    htmlfile = '<table>'
    if f:
        reader = csv.reader(f)
        rownum = 0
        for row in reader:
            if rownum == 0:
                htmlfile += '<tr>'
                for column in row:
                    htmlfile += '<th style="text-align: left;" width="70%">' + column + '</th>'
                htmlfile += '</tr>'
            else:
                htmlfile += '<tr>'
                for column in row:
                    htmlfile += '<td style="text-align: left;" width="70%">' + column + '</td>'
                htmlfile += '</tr>'
            rownum += 1
        f.close()
    htmlfile += '</table>'

    host_ip = "http://" + common.get_local_ip() + \
        "/test_info.php?testid=" + str(t.testobj.TestInputData.testid)
    subject = "Test {} completed successfully".format(
        t.testobj.TestInputData.testid)
    mail_content = "<BR> Test id : {} \
    <BR> Framework : {} \
    <BR> Title : {} <BR>".format(t.testobj.TestInputData.testid,
                                 t.testobj.TestInputData.frameworkname,
                                 t.testobj.TestInputData.title)
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>Purpose : {} <BR> \
    <BR> Creation time : {} \
    <BR>Start time : {} \
    <BR>End time : {} <BR>".format(t.testobj.TestInputData.purpose,
                                   t.testobj.TestInputData.creation_time,
                                   t.testobj.TestInputData.start_time,
                                   t.testobj.TestInputData.end_time)
    mail_content = mail_content + "<BR>Your test executed successfully. \
    <BR>Results (Contents of results.csv)<BR>"
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>" + htmlfile + "<BR>"
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "Link:"
    mail_content = mail_content + '<BR><a href="' + host_ip + '">' + host_ip + '</a>'
    try:
        common.send_email(subject, to, mail_content, "", lctx, cfg.email_user,
                          cfg.email_server, cfg.smtp_server, cfg.smtp_port)
    except:
        lctx.error("Mail send error")
    return "SUCCESS"
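# --- Illustrative sketch (not part of the original module) -----------------
# The timeout-aware version above drives a test through one of two status
# chains depending on TestInputData.timeout_flag:
#   timeout   -> collating -> "timeout clean"
#   completed -> collating -> "finished clean"
# A minimal sketch of that transition table, using only the status strings
# that appear above; the helper name `next_status` and the phase labels are
# hypothetical.
def next_status(timeout_flag, phase):
    """Return the (current, new) status pair passed to updateStatus for a phase."""
    if phase == "start_collating":
        return ("timeout", "collating") if timeout_flag else ("completed", "collating")
    if phase == "end_collating":
        return ("collating", "timeout clean") if timeout_flag else ("collating", "finished clean")
    raise ValueError("unknown phase: " + phase)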
def process_results(self, *args):
    """
    This procedure is called by testmon as a separate thread when test execution
    ends or a test timeout occurs on the agent.
    """
    t = args[1]
    status = args[2]
    serialize_str = t.serialize()
    t2 = testobj.testDefn()
    t2.deserialize(serialize_str)

    # Setting up test logger for capturing test life cycle on scheduler
    test_logger = LOG.gettestlogger(t2, "EXEC")
    test_logger.info("Test execution completed, processing test results")
    try:
        if t.testobj.TestInputData.testid != t2.testobj.TestInputData.testid:
            lctx.error("testobj not same")
            raise Exception("Test objects do not match : ",
                            t2.testobj.TestInputData.testid)

        # set test status to collating
        if t.testobj.TestInputData.timeout_flag:
            t.updateStatus("timeout", "collating")
        else:
            t.updateStatus("completed", "collating")

        ip = t.testobj.TestInputData.exechostname
        lctx.debug(status)
        if status in ["completed", "timeout"]:
            # Initiate instance of ProcessOutputFiles for docker and top output file processing
            ptop = process_files.ProcessOutputFiles(
                LOG.getLogger("processTop", "DH"))

            lctx.info("SENDING results.tgz download to : " + ip + ":" +
                      str(self.CPORT))

            # send file download command to exec host (no need to send stop test as this procedure is invoked due
            # to test end on exec host)
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct("DAYTONA_FILE_DOWNLOAD",
                                  str(t2.testobj.TestInputData.testid)))
            lctx.debug(retsend)
            if retsend.split(",")[1] != "SUCCESS":
                lctx.error("Error downloading LOGS from " + ip + " : " +
                           retsend)
                test_logger.error("Error downloading LOGS from " + ip +
                                  " : " + retsend)
            else:
                test_logger.info("Logs download successful from exec host " +
                                 ip)

            # copy results files from exec to daytona file system and untar results
            try:
                lctx.debug("Untar file : " +
                           t2.testobj.TestInputData.exec_results_path +
                           "results.tgz to location : " +
                           t2.testobj.TestInputData.exec_results_path +
                           "/../")
                common.untarfile(
                    t2.testobj.TestInputData.exec_results_path +
                    "/results.tgz",
                    t2.testobj.TestInputData.exec_results_path + "/../")
            except Exception as e:
                lctx.error("Error in untar EXEC host results")
                test_logger.error("Error in untar EXEC host results")
                lctx.error(e)

            # process top and docker stat files downloaded from exec host
            ptop_ret = ptop.process_output_files(
                t2.testobj.TestInputData.stats_results_path[ip] + "sar/")
            lctx.debug(ptop_ret + " : " +
                       t2.testobj.TestInputData.stats_results_path[ip])
            test_logger.info("Exec host logs extracted and processed successfully")

            # send DAYTONA_FINISH_TEST to exec host for finishing and test cleanup
            retsend = self.cl.send(
                ip, self.CPORT,
                self.ev.construct("DAYTONA_FINISH_TEST",
                                  str(t2.testobj.TestInputData.testid)))
            lctx.debug(retsend)
            test_logger.info("Test END successful on exec host " + ip)

            for s in t.testobj.TestInputData.stathostname.split(','):
                if len(s.strip()) == 0:
                    break
                # stop stats monitors on req hosts
                # any host that blocks stop monitor blocks the scheduling for the FW
                p = self.CPORT
                try:
                    # Send DAYTONA_STOP_TEST on all agent hosts to stop SAR data collection after test finish on
                    # exec host. This message is required to tell stat hosts that test execution is finished on
                    # exec host. Upon receiving this message on stat host, agent will change test state to TESTEND
                    # and then other SAR data collection thread will stop writing log files for this test.
                    lctx.info("Stopping test on stat host : " + s)
                    retsend = self.cl.send(
                        s.strip(), p,
                        self.ev.construct(
                            "DAYTONA_STOP_TEST",
                            str(t2.testobj.TestInputData.testid)))
                    lctx.debug(retsend)
                    if retsend.split(",")[1] != "SUCCESS":
                        lctx.error("Failed to stop test on stat host " + s +
                                   " : " + retsend)
                        test_logger.error("Failed to stop test on stat host " +
                                          s + " : " + retsend)
                    else:
                        test_logger.info("Test stopped on stat host " + s)

                    # send file download command to stat host
                    lctx.info("Sending results.tgz download to :" +
                              s.strip() + ":" + str(p))
                    retsend = self.cl.send(
                        s.strip(), p,
                        self.ev.construct(
                            "DAYTONA_FILE_DOWNLOAD",
                            str(t2.testobj.TestInputData.testid)))
                    lctx.debug(retsend)
                    if retsend.split(",")[1] != "SUCCESS":
                        lctx.error("Error downloading STATS from " +
                                   s.strip() + ":" + retsend)
                        test_logger.error("Error downloading STATS from " +
                                          s.strip() + ":" + retsend)
                    else:
                        test_logger.info("Logs downloaded from stat host " + s)

                    # copy results files from stat host to daytona file system and untar results
                    lctx.debug(
                        "Untar file : " +
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "results.tgz to location : " +
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/../")
                    common.untarfile(
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/results.tgz",
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "/../")

                    # process top and docker stat files downloaded from stat host
                    ptop_ret = ptop.process_output_files(
                        t2.testobj.TestInputData.stats_results_path[s] +
                        "sar/")
                    lctx.debug(ptop_ret + " : " +
                               t2.testobj.TestInputData.stats_results_path[s])
                    test_logger.info("Stat host " + s +
                                     " logs extracted and processed successfully")

                    # send DAYTONA_FINISH_TEST to stat host for finishing and test cleanup
                    retsend = self.cl.send(
                        s.strip(), p,
                        self.ev.construct(
                            "DAYTONA_FINISH_TEST",
                            str(t2.testobj.TestInputData.testid)))
                    lctx.debug(retsend)
                    test_logger.info("Test END successful on stat host " + s)
                except Exception as e:
                    # Just continue with other stat hosts if any exception occurs while working on any particular
                    # host (Continue only when something goes wrong with stat host, because we still want to
                    # download logs from other stat hosts)
                    lctx.error(e)
                    test_logger.error(e)
                    continue
    except Exception as e:
        # Throw an error if anything goes wrong with finishing test on exec host and set test state to failed
        lctx.error("Error in processing results")
        lctx.error(e)
        test_logger.error("Error in processing results")
        test_logger.error(e)
        t.updateStatus("collating", "failed")

    # update test state to "timeout clean" if the test terminated due to timeout, else set it to "finished clean"
    if t.testobj.TestInputData.timeout_flag:
        t.updateStatus("collating", "timeout clean")
    else:
        t.updateStatus("collating", "finished clean")

    now = time.time()
    tstr = str(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now)))
    # update test end time in database
    t.updateEndTime(tstr)

    f = None
    # Format an email with results.csv details and send it to the CC list if the user has provided one in the
    # test details (the admin needs to configure an SMTP server for this functionality to work; SMTP server
    # details must be specified in config.sh)
    try:
        f = open(t2.testobj.TestInputData.exec_results_path + "/results.csv")
    except IOError as e:
        lctx.debug("File results.csv not found")
        pass

    to = t.testobj.TestInputData.email
    htmlfile = '<table>'
    if f:
        reader = csv.reader(f)
        rownum = 0
        for row in reader:
            if rownum == 0:
                htmlfile += '<tr>'
                for column in row:
                    htmlfile += '<th style="text-align: left;" width="70%">' + column + '</th>'
                htmlfile += '</tr>'
            else:
                htmlfile += '<tr>'
                for column in row:
                    htmlfile += '<td style="text-align: left;" width="70%">' + column + '</td>'
                htmlfile += '</tr>'
            rownum += 1
        f.close()
    htmlfile += '</table>'

    host_ip = "http://" + common.get_local_ip() + \
        "/test_info.php?testid=" + str(t.testobj.TestInputData.testid)
    subject = "Test {} completed successfully".format(
        t.testobj.TestInputData.testid)
    mail_content = "<BR> Test id : {} <BR> Framework : {} <BR> Title : {} <BR>".format(
        t.testobj.TestInputData.testid,
        t.testobj.TestInputData.frameworkname,
        t.testobj.TestInputData.title)
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>Purpose : {} <BR><BR> Creation time : {} <BR>Start time : {} <BR>End " \
                                  "time : {} <BR>".format(t.testobj.TestInputData.purpose,
                                                          t.testobj.TestInputData.creation_time,
                                                          t.testobj.TestInputData.start_time,
                                                          t.testobj.TestInputData.end_time)
    mail_content = mail_content + "<BR>Your test executed successfully. <BR>Results (Contents of results.csv)<BR>"
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "<BR>" + htmlfile + "<BR>"
    mail_content = mail_content + "<BR>==========================================================<BR>"
    mail_content = mail_content + "Link:"
    mail_content = mail_content + '<BR><a href="' + host_ip + '">' + host_ip + '</a>'
    try:
        common.send_email(subject, to, mail_content, "", lctx, cfg.email_user,
                          cfg.email_server, cfg.smtp_server, cfg.smtp_port)
    except Exception as e:
        lctx.error("Mail send error")
    LOG.removeLogger(t)
    return "SUCCESS"
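# --- Illustrative sketch (not part of the original module) -----------------
# The email body above is built by walking results.csv and emitting one HTML
# table row per CSV row, with the first row rendered as headers. A standalone
# version of that conversion using only the standard csv module is sketched
# below; the helper name `csv_to_html_table` is hypothetical.
import csv  # presumably already imported at module top; repeated so the sketch stands alone

def csv_to_html_table(path):
    """Render a CSV file as a simple HTML table; return an empty table if the file is missing."""
    html = '<table>'
    try:
        with open(path) as f:
            for rownum, row in enumerate(csv.reader(f)):
                tag = 'th' if rownum == 0 else 'td'
                html += '<tr>'
                for column in row:
                    html += '<' + tag + ' style="text-align: left;" width="70%">' + column + '</' + tag + '>'
                html += '</tr>'
    except IOError:
        pass
    return html + '</table>'
# Example (hypothetical): htmlfile = csv_to_html_table(exec_results_path + "/results.csv")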