Example #1
def queryTask(config, device_id, project_code, test_type, branch):
    try:
        with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
                g_username, g_password, config["rabbitMQ_address"],
                config["rabbitMQ_port"])) as conn:
            with conn.channel() as channel:
                # the dedicated queue is consumed first
                q = rabbitpy.Queue(channel, device_id)
                q.durable = True
                q.declare()
                if len(q) > 0:
                    msg = q.get()
                    msg.ack()
                    return json.loads(msg.body)

                # the common queues are then consumed in priority order
                for t in test_type:
                    for b in branch:
                        q = rabbitpy.Queue(
                            channel, '{}_{}_{}'.format(project_code, b, t))
                        q.durable = True
                        q.declare()
                        if len(q) > 0:
                            msg = q.get()
                            msg.ack()
                            return json.loads(msg.body)
    except:
        LOGGER.critical(traceback.format_exc())
        return None
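
queryTask drains JSON-encoded tasks from a durable per-device queue and from '{project_code}_{branch}_{test_type}' pool queues. For context, a minimal producer sketch follows; it is not part of the original module, it reuses the module-level g_username/g_password globals seen in these snippets, and it assumes the task dict carries at least a task_id, as the cancel/reorder helpers further down expect.

import json

import rabbitpy


def publishTask(config, queue_name, task):
    # Publish one JSON task onto a durable queue that queryTask will later drain.
    with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
            g_username, g_password, config["rabbitMQ_address"],
            config["rabbitMQ_port"])) as conn:
        with conn.channel() as channel:
            q = rabbitpy.Queue(channel, queue_name)
            q.durable = True
            q.declare()
            channel.enable_publisher_confirms()
            msg = rabbitpy.Message(channel, json.dumps(task))
            # With publisher confirms enabled, publish() returns True/False,
            # mirroring the pattern used in moveToTop below.
            return msg.publish('', queue_name, mandatory=True)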
Example #2
def moveToTop_pika(config, queue_id, task_id):
    task_to_requeue = []
    try:
        credentials = pika.PlainCredentials(g_username, g_password)
        params = pika.ConnectionParameters(config["rabbitMQ_address"],
                                           config["rabbitMQ_port"], '/',
                                           credentials)
        connection = pika.BlockingConnection(params)
        channel = connection.channel()
        queue = channel.queue_declare(queue=queue_id, durable=True)
        rc = -1
        _target = ''
        for m in range(queue.method.message_count):
            method_frame, header_frame, body = channel.basic_get(queue_id)
            if not method_frame:
                # the queue was drained by another consumer, nothing left to reorder
                break
            data = json.loads(body)
            if data.get('task_id', 'not_a_valid_id') == task_id:
                _target = body
            else:
                task_to_requeue.append(body)

            channel.basic_ack(method_frame.delivery_tag)

        if _target:
            rc = 0
            task_to_requeue.insert(0, _target)

        for t in task_to_requeue:
            channel.basic_publish(body=t, exchange='', routing_key=queue_id)
    except:
        LOGGER.error(traceback.format_exc())
        rc = -2

    return rc
Example #3
    def fetchPatchInfo(self):
        if self.test_type in ['PIT_LITE', 'CIT']:
            # fetch build info
            for path in WIN_CI_BUILD_PATH_LIST:
                dir_path = r'{}/{}/distrib'.format(path, self.build)
                if os.path.isdir(dir_path):
                    files = os.listdir(dir_path)
                    for fl in files:
                        if fl.startswith('CI') and fl.endswith('.txt'):
                            with open('{}/{}'.format(dir_path, fl), 'r') as f:
                                self.patch_detail = f.read().replace(
                                    '\n', '</br>')
                                self.artifacts_path = dir_path
                                LOGGER.debug(self.patch_detail)
                                break
                    if self.patch_detail:
                        break
        elif self.test_type in ['FULL']:
            '''
            Need implementation here... how to handle the PV branch?
            '''
            pass

        try:
            lines = self.patch_detail.split('</br>')
            for line in lines:
                if 'Owner' in line:
                    self.owner = line.replace('Owner:', '').strip()
                    self.email = self.owner
                if 'Subject' in line:
                    self.subject = line.replace('Subject:', '').strip()
        except:
            LOGGER.error(traceback.format_exc())
Example #4
def cancelTask(config, queue, task_id):
    import threading
    lock = threading.Lock()
    lock.acquire()
    rc = 0
    try:
        with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
                g_username, g_password, config["rabbitMQ_address"],
                config["rabbitMQ_port"])) as conn:
            with conn.channel() as channel:
                # walk the queue looking for the task to cancel; acking it removes it
                q = rabbitpy.Queue(channel, queue)
                q.durable = True
                q.declare()
                channel.enable_publisher_confirms()
                for i in range(len(q)):
                    msg = q.get()
                    task = json.loads(msg.body)
                    if task["task_id"] == task_id:
                        msg.ack()
                        break
                else:
                    rc = -1
    except:
        LOGGER.error(traceback.format_exc())
        rc = -1
    finally:
        lock.release()
    return rc
Example #5
def getDedicatedTaskQueue_pika(config, device_id):
    # check the dedicated queue first
    tasks = []
    task_to_nack = []
    try:
        credentials = pika.PlainCredentials(g_username, g_password)
        params = pika.ConnectionParameters(config["rabbitMQ_address"],
                                           config["rabbitMQ_port"], '/',
                                           credentials)
        connection = pika.BlockingConnection(params)
        channel = connection.channel()
        queue = channel.queue_declare(queue=device_id, durable=True)

        for m in range(queue.method.message_count):
            method_frame, header_frame, body = channel.basic_get(device_id)
            if method_frame:
                tasks.append(json.loads(body))
                task_to_nack.append(method_frame.delivery_tag)

        for t in task_to_nack:
            channel.basic_nack(t)
    except:
        for t in task_to_nack:
            channel.basic_nack(t)
        LOGGER.error(traceback.format_exc())

    return tasks
Example #6
 def run(self):
     while True:
         sock, address = self.sock.accept()
         LOGGER.debug('Connection setup!')
         th = threading.Thread(target=self.launchConnection,
                               args=(sock, ))
         th.setDaemon(True)
         th.start()
Example #7
 def __check_port_occupied(cls, port):
     _ = os.popen('netstat -ano | findstr "{}"'.format(port))
     tmp_output = _.readlines()
     LOGGER.info(tmp_output)
     if not tmp_output:
         return False
     else:
         return True
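
__check_port_occupied parses the output of the Windows-only netstat | findstr pipeline. A portable alternative, sketched below and not part of the original class, is to try binding the port and treat a failed bind as "occupied":

import socket


def is_port_occupied(port, host=''):
    # Try to bind the port: a successful bind means nothing is listening there.
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.bind((host, port))
        return False
    except socket.error:
        return True
    finally:
        s.close()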
Example #8
 def is_new_bug(self, work, run):
     try:
         # one run could contain multiple run results, due to the fail-rerun functionality
         log_cur = self.log_structure(work, self.error_log)
         b_new, pretty_result = self._is_new_bug(log_cur, work, run)
         LOGGER.info('{} is {}'.format(work.name, pretty_result))
         return b_new
     except:
         LOGGER.critical(traceback.format_exc())
         return True
Example #9
def isMergeSlot(subject):
    try:
        pt = re.compile('Merge "(.*)"')
        rc = re.search(pt, subject)
        if rc and rc.group(0):
            #print rc.group(1)
            return True
        else:
            return False
    except:
        LOGGER.error(traceback.format_exc())
        return False
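
For illustration, isMergeSlot only returns True for Gerrit-style merge subjects of the form Merge "..."; the subject strings below are invented:

assert isMergeSlot('Merge "Enable camera driver for APL"') is True
assert isMergeSlot('Enable camera driver for APL') is False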
Example #10
 def __get_ip_address():
     ip_address = None
     get_ip_process = subprocess.Popen('ipconfig -all',
                                       stdout=subprocess.PIPE)
     tmp_output = get_ip_process.communicate()[0]
     search_result = re.search(
          r'IPv4 Address(\. )*: (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})',
         tmp_output)
     if search_result is not None:
         ip_address = search_result.group(2)
     else:
         ip_address = 'localhost'
         LOGGER.critical(
             'ERROR! Cannot find host IP!!!, using LOCALHOST')
     return ip_address
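
__get_ip_address shells out to ipconfig -all and scrapes the English-locale Windows output. A portable sketch (not the project's implementation; the probe address is an arbitrary routable IP) asks the OS which local address it would use for an outbound connection:

import socket


def get_ip_address(probe_addr=('8.8.8.8', 80)):
    # connect() on a UDP socket sends no packet but selects a local interface,
    # whose address getsockname() then reveals.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(probe_addr)
        return s.getsockname()[0]
    except socket.error:
        return 'localhost'  # same fallback as the original helper
    finally:
        s.close()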
Example #11
def send_report(report_str):
    """Send contents of report_str to server"""
    report_error = None
    reportws = "ws://mbt.tm.sample.com/stream/testruns"

    print "online reporting %s kB to %s..." % (len(report_str) / 1024,
                                               reportws)
    try:
        ws = websocket.create_connection(
            reportws, header=["Sec-WebSocket-Protocol:logmongo"], timeout=10)
        if json.loads(ws.recv())["retval"] != 0:
            raise Exception("Nonzero retval from %s." % (reportws, ))
        LOGGER.debug("online reporting connected")
    except Exception, e:
        report_error = "Connection error: (%s) %s" % (type(e), e)
Example #12
 def init(self):
     global semaphore
     semaphore.acquire()
     try:
         self.conn = MySQLdb.connect(
             host=self.host,
             port=self.port,
             user=self.username,
             passwd=self.pwd,
             db=self.schema,
         )
         self.cur = self.conn.cursor()
         return True
     except:
         LOGGER.error(traceback.format_exc())
         return False
     finally:
         semaphore.release()
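
init() takes a module-level semaphore that is not shown in this excerpt; presumably it is created once to cap concurrent MySQL connections, roughly like the sketch below (the limit is hypothetical):

import threading

MAX_DB_CONNECTIONS = 5  # hypothetical cap on concurrent connections
semaphore = threading.Semaphore(MAX_DB_CONNECTIONS)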
Example #13
    def parse(self, os='windows'):
        """
        Note: this log-analysis parser needs to be customized by each user according to the log structure.
        """
        # step 1 get default test log
        if os == 'linux':
            try:
                cmd_line = '~/sw_val/scripts/tools/print_err_msg.sh {}'.format(self.case_id)
                res, log = self.dut.staf_handle.trigger_cmdline(self.dut.dut_ip, cmd_line)
                if res:
                    self.error_log = log.resultContext.getRootObject()['fileList'][0]['data'].split('\n')[:-1]
                else:
                    LOGGER.info("can not get testid.log, so directly return")
                    self.error_log = []
                    return -1
            except:
                LOGGER.error(traceback.format_exc())
                return -1
        else:
            try:
                log = self.dut.staf_handle.getFile(self.dut.dut_ip, '{}/{}.log'.format(self.log_base_path, self.case_id))
                LOGGER.debug(log)
                if log:
                    log_list = log.split('\r')
                    for i in range(0, len(log_list)):
                        if 'TestSet:' in log_list[i]:
                            try:
                                pt = re.compile(r'TestSet:(.*)]')
                                res = re.search(pt, log_list[i])
                                self.additional_info = res.group(1)
                            except Exception, e:
                                LOGGER.error(str(e))
                        if 'Log file:' in log_list[i]:
                            try:
                                pt = re.compile(r'Log file:(.*)]')
                                res = re.search(pt, log_list[i])
                                self.extra_check_log = res.group(1)
                            except Exception, e:
                                LOGGER.error(str(e))

                        if '[ERROR]' in str(log_list[i]):
                            self.error_log.append(log_list[i].split('[ERROR]')[1])
                else:
Example #14
def getDedicatedTaskQueue(config, device_id):
    tasks = []
    try:
        with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
                g_username, g_password, config["rabbitMQ_address"],
                config["rabbitMQ_port"])) as conn:
            with conn.channel() as channel:
                # list the tasks currently sitting in the dedicated queue
                q = rabbitpy.Queue(channel, device_id)
                q.durable = True
                q.declare()
                if len(q) == 0:
                    return []
                else:
                    for i in range(len(q)):
                        msg = q.get()
                        tasks.append(msg.body)
    except:
        LOGGER.error(traceback.format_exc())
    return tasks
Example #15
def getMergeSlotInfo(change_id, bundle_id_list):
    LOGGER.critical("change id: {}".format(change_id))
    _d = dict()
    _d["change_id"] = change_id
    if '/' in change_id:
        change_id = change_id.split('/')[0]

    query_url = "https://sample.com/a/changes/%s/detail/?o=CURRENT_REVISION&o=CURRENT_COMMIT" % change_id
    rc = gerritRequest(query_url)
    _d["subject"] = rc['subject']
    if 'Merge' in rc['subject']:
        _ls = rc["revisions"][rc["revisions"].keys()[0]]["commit"]["parents"]
        for _i in _ls:
            print _i["subject"], _i["commit"]
            if isMergeSlot(_i["subject"]):
                getParentInfo(str(_i["commit"]), bundle_id_list, _d, True)
            else:
                getParentInfo(str(_i["commit"]), bundle_id_list, _d, False)

        bundle_id_list.append(_d)
    return
Example #16
def queryTask1(config, device_id, project_code, test_type, branch):
    try:
        # check the dedicated queue first
        credentials = pika.PlainCredentials(g_username, g_password)
        params = pika.ConnectionParameters(config["rabbitMQ_address"],
                                           config["rabbitMQ_port"], '/',
                                           credentials)
        connection = pika.BlockingConnection(params)
        channel = connection.channel()
        queue = channel.queue_declare(queue=device_id, durable=True)
        if queue.method.message_count > 0:
            method_frame, header_frame, body = channel.basic_get(device_id)
            if method_frame:
                channel.basic_ack(method_frame.delivery_tag)
                return json.loads(body)
            else:
                LOGGER.warning('No message returned')

        for t in test_type:
            for b in branch:
                q_name = '{}_{}_{}'.format(project_code, b, t)
                queue = channel.queue_declare(queue=q_name, durable=True)
                if queue.method.message_count > 0:
                    method_frame, header_frame, body = channel.basic_get(
                        q_name)
                    if method_frame:
                        channel.basic_ack(method_frame.delivery_tag)
                        return json.loads(body)
                    else:
                        LOGGER.warning('No message returned')
    except:
        print traceback.format_exc()
        LOGGER.error(traceback.format_exc())
    return None
Example #17
def send_to_server(data, dut_name, platform, run_id, build):
    for case in data:
        jt = json_template.copy()
        jt["hw_serial"] = dut_name
        jt["hw_type"] = platform
        jt["run_id"] = run_id
        jt["sw_version"] = build

        try:
            jt["timestamp"] = float(case.start)
            jt["duration"] = float(case.end) - float(case.start)
        except:
            LOGGER.warning(traceback.format_exc())

        jt["application"]["name"] = case.name
        jt["action_verdicts"]["final"] = case.pretty_result

        try:
            err_log = ''
            for run in case.runs():
                err_log += '||'.join(['|'.join(_) for _ in run.error_log])
            jt["notes"] = [err_log]
        except:
Example #18
def query_error_log_from_dashboard(platform, subversion, test_type):
    para_query_test_result = [{
        u'platform': u'IPU4-APL-WIN',
        u'date': u'2017/29/8',
        u'type': 'weekly',
        u'daily_type': 'pit_lite',
        u'sub_platform': '',
        u'password': login_password,
        u'username': login_usr,
    }]

    date = get_current_intel_calendar()

    pp = para_query_test_result[0]
    pp['platform'] = u'{0}'.format(platform)
    pp['sub_platform'] = u'{0}'.format(subversion)
    pp['date'] = u'{0}'.format(date)
    pp['type'] = u'{0}'.format(test_type)

    try:
        # data = {'test_case_id_list': json.dumps(pp)}
        data = pp
        # print(data)
        QUERY_URL = 'http://sample.com/rest/query_error_log/'
        response = requests.post(url=QUERY_URL, data=data)
        if response.status_code == 200:
            query_result = response.content
            return json.loads(query_result)["data"]["result"]
        else:
            return None
    except:
        print(traceback.format_exc())
        LOGGER.error(traceback.format_exc())
        return None


# print query_error_log_from_dashboard('TestPlatform', '', 'weekly')
Example #19
def moveToTop(config, queue, task_id):
    import threading
    lock = threading.Lock()
    lock.acquire()
    rc = 0
    try:
        with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
                g_username, g_password, config["rabbitMQ_address"],
                config["rabbitMQ_port"])) as conn:
            with conn.channel() as channel:
                # drain the queue, then re-publish with the target task at the front
                q = rabbitpy.Queue(channel, queue)
                q.durable = True
                q.declare()
                channel.enable_publisher_confirms()
                _t = []
                _r = ''
                for i in range(len(q)):
                    msg = q.get()
                    msg.ack()
                    task = json.loads(msg.body)
                    if task["task_id"] == task_id:
                        _r = msg.body
                    else:
                        _t.append(msg.body)
                # only put the target back at the front if it was actually found,
                # otherwise an empty message would be published
                if _r:
                    _t.insert(0, _r)

                for i in _t:
                    msg = rabbitpy.Message(channel, i)
                    # Publish the message, looking for the return value to be a bool True/False
                    if msg.publish("", queue, mandatory=True):
                        LOGGER.debug(
                            'Message {} publish confirmed by RabbitMQ'.format(
                                msg.body))
                    else:
                        LOGGER.error(
                            'Message {} publish not confirmed by RabbitMQ'.
                            format(msg.body))
                        rc = -1
    except:
        LOGGER.error(traceback.format_exc())
        rc = -1
    finally:
        lock.release()
    return rc


#print queryTask({"rabbitMQ_address":'127.0.0.1', 'rabbitMQ_port':5672}, '127.0.0.1', 'APL', ['PIT', 'CIT'], ['OTM', 'PV'])
#print queryTask1({"rabbitMQ_address":'10.239.111.152', 'rabbitMQ_port':5672},"10.239.132.227", "APL",["CIT", "PIT"], ["OTM", "PV"])
#a = getDedicatedTaskQueue({"rabbitMQ_address":'10.239.153.126', 'rabbitMQ_port':5672},"10.239.132.227")
#b = getDedicatedTaskQueue_pika({"rabbitMQ_address":'10.239.153.126', 'rabbitMQ_port':5672},"APL_OTM_CIT")
#print(type(b[0]), b)
Example #20
def cancelTask_pika(config, queue_id, task_id):
    task_to_nack = []
    try:
        credentials = pika.PlainCredentials(g_username, g_password)
        params = pika.ConnectionParameters(config["rabbitMQ_address"],
                                           config["rabbitMQ_port"], '/',
                                           credentials)
        connection = pika.BlockingConnection(params)
        channel = connection.channel()
        queue = channel.queue_declare(queue=queue_id, durable=True)
        rc = -1
        _target_delivery_tag = ''
        for m in range(queue.method.message_count):
            method_frame, header_frame, body = channel.basic_get(queue_id)
            if method_frame:
                task = json.loads(body)
                if task.get('task_id', 'not_a_valid_id') == task_id:
                    # channel.basic_ack(method_frame.delivery_tag)
                    _target_delivery_tag = method_frame.delivery_tag
                    rc = 0
                    LOGGER.warning("cancellation op will ack: {}".format(task))
                    break
                else:
                    task_to_nack.append(method_frame.delivery_tag)
                    LOGGER.warning(
                        "cancellation op will not ack: {}".format(task))
                    #channel.basic_nack(method_frame.delivery_tag)
        else:
            rc = -1

        # task_to_nack = (_ for _ in task_to_nack)
        for t in task_to_nack:
            channel.basic_nack(t)

        # only ack (i.e. remove) the target when it was actually found
        if _target_delivery_tag:
            channel.basic_ack(_target_delivery_tag)

    except:
        for t in task_to_nack:
            channel.basic_nack(t)
        LOGGER.error(traceback.format_exc())
        rc = -2

    return rc
Example #21
 def init(self):
     self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     ip = self.__get_ip_address()
     try:
         if not self.__check_port_occupied(
                 self.server.server_config["socket_server_port"]):
             self.sock.bind(
                 (ip, self.server.server_config["socket_server_port"]))
             self.sock.listen(
                 self.server.server_config["max_connection"])
             LOGGER.info(
                 "Connect to server {}:{} with max connection {}".
                 format(ip,
                        self.server.server_config["socket_server_port"],
                        self.server.server_config["max_connection"]))
             self.launched = True
             return True
         else:
             LOGGER.warning("{} has been occupied!".format(
                 self.server.server_config["socket_server_port"]))
             return False
     except:
         LOGGER.error(traceback.format_exc())
         return False
Example #22
        try:
            jt["timestamp"] = float(case.start)
            jt["duration"] = float(case.end) - float(case.start)
        except:
            LOGGER.warning(traceback.format_exc())

        jt["application"]["name"] = case.name
        jt["action_verdicts"]["final"] = case.pretty_result

        try:
            err_log = ''
            for run in case.runs():
                err_log += '||'.join(['|'.join(_) for _ in run.error_log])
            jt["notes"] = [err_log]
        except:
            LOGGER.error(traceback.format_exc())

        LOGGER.debug(jt)
        send_report(json.dumps(jt))


json_template = {
    "hw_serial": "placeholder",
    "hw_type": "placeholder",
    "sw_version": "placeholder",
    "run_id": "placeholder",
    "timestamp": 0,
    "duration": 0,
    "application": {
        "name": "placeholder"
    },
Example #23
def sendEmailReport(test_result,
                    base_path,
                    html_path,
                    mail_list=None,
                    excel=None,
                    std_att=None):
    try:

        html_path = '{}/html/{}'.format(base_path, html_path)

        base_path += '/template'

        msg = MIMEMultipart()
        html = open(html_path).read()
        html_part = MIMEText(html, 'html')
        html_part.set_charset('utf-8')
        msg.attach(html_part)

        # attach every inline image referenced by the HTML template via its
        # Content-ID (cid), in the same order as before
        inline_images = [('header.png', 'header'), ('footer.png', 'footer'),
                         ('device_icon.png', 'dut'), ('driver_icon.png', 'driver'),
                         ('link_icon.png', 'link'), ('log_icon.png', 'log'),
                         ('warning_icon.png', 'warning'), ('user_icon.png', 'user'),
                         ('wiki_icon.png', 'wiki'), ('patch_icon.png', 'patch')]
        for img_name, cid in inline_images:
            with open('{}/imgs/{}'.format(base_path, img_name), 'rb') as fp:
                msgImage = MIMEImage(fp.read())
            msgImage.add_header('Content-ID', '<{}>'.format(cid))
            msg.attach(msgImage)

        if excel:
            att1 = MIMEText(open(excel, 'rb').read(), 'base64', 'gb2312')
            att1["Content-Type"] = 'application/octet-stream'
            att1["Content-Disposition"] = 'attachment; filename="result.xls"'
            msg.attach(att1)

        if std_att:
            msg.attach(std_att)

        mailto = mail_list if mail_list else ["*****@*****.**"]
        msg['to'] = ','.join(mailto)
        msg['from'] = '*****@*****.**'
        msg['subject'] = '{}_{}_{}_{}'.format(test_result.build,
                                              test_result.platform,
                                              test_result.scenario_name,
                                              test_result.pass_rate)

        try:
            server = smtplib.SMTP('mail.sample.com')
            mail_results = server.sendmail(msg['from'], mailto,
                                           msg.as_string())
            server.quit()
            LOGGER.debug(str(mail_results))
        except Exception, e:
            LOGGER.error(str(traceback.format_exc()))
    except:
        LOGGER.error(traceback.format_exc())
Example #24
    def __init__(self, scenario, dut):
        self.scenario_name = scenario.scenario_name
        self.project_code = dut.project_code
        # self.platform = dut.platform  # or should the scenario's platform be used?
        self.platform = scenario.platform
        self.sub_platform = dut.sub_platform
        self.subversion = scenario.subversion
        self.build = dut.task_variable.get("build", "NA")
        self.component = dut.task_variable.get("component", "NA")
        if self.build == 'NA':
            self.build = dut.task_variable.get("Build_No_Variable", "NA")

        self.patch = dut.task_variable.get("patchset", "NA")
        self.manifest_id = dut.task_variable.get("manifest_id", "NA")

        if self.manifest_id != 'NA':
            self.gerrit_link = 'https://icggerrit.corp.sample.com/#/x/hydra-ci/iset/manifestvotes&manifest_id={}'.format(
                self.manifest_id)
        elif self.patch != 'NA':
            self.gerrit_link = 'https://icggerrit.corp.sample.com/#/c/{}/'.format(
                self.patch)
        else:
            self.gerrit_link = 'NA'

        self.owner = dut.task_variable.get("owner", "NA")
        self.author_email = dut.task_variable.get("author_email", "")
        if self.author_email:
            scenario.mail_list.append(self.author_email)

        if dut.task_variable.get("email", ""):
            scenario.mail_list.append(dut.task_variable["email"])

        self.os = dut.task_variable.get("os", "NA")
        self.test_type = scenario.test_type
        self.report_ww = dut.task_variable.get(
            "report_ww", "NA")  # [year, ww, stackIndex, 2018WW2_0]
        self.fail_case_list = []
        self.bCancelled = False
        self.bEnv_failure = False
        self.cases = []
        self.server_config = dut.server_config
        self.dut_name = dut.dut_name
        self.dut_ip = dut.dut_ip
        self.total_count = 0
        self.non_gating_count = 0
        self.fail_count = 0
        self.pass_count = 0
        self.not_run_count = 0
        self.repeat_pass_count = 0
        self.start_time = time.ctime(
            dut.task_variable.get("start", time.time()))
        self.finish_time = time.ctime(
            dut.task_variable.get("finish", time.time()))
        self.patch_detail = 'NA'
        self.subject = 'NA'
        self.email = 'NA'
        self.artifacts_path = 'NA'
        self.dut_info = ''
        self.log_path = scenario.base_log_path if scenario.base_log_path else WIN_LOG_ROOT_PATH
        self.merge_slot_info = []
        self.merge_slot_info_str = ''
        self.user = dut.task_variable.get(
            "username", "NA") + ';' + dut.task_variable.get("killer", "")
        self.run_id = str(uuid.uuid1()).split('-')[0]

        html_path = '{}_{}_{}_{}.html'.format(self.build, self.dut_name,
                                              self.scenario_name,
                                              dut.task_variable['UUID'])
        self.html_full_path = os.path.join(
            dut.server_config["base_share_path"], 'html', html_path)

        try:
            self.critical_issues = repr(
                scenario.bsod_dict) if scenario.bsod_dict else ''
        except:
            LOGGER.critical(traceback.format_exc())

        try:
            _t = dut.staf_handle.get_dut_info(dut)
            if _t:
                for key, value in _t.items():
                    self.dut_info += "{} : {} </br>".format(key, value)
        except:
            LOGGER.error(traceback.format_exc())

        try:
            self.fetchPatchInfo()
        except:
            LOGGER.error(traceback.format_exc())

        _last_cmd = ''

        if dut.task_variable.get("Interval_Cancel", False):
            self.bCancelled = True

        for case in scenario.cases():
            self.total_count += 1
            # self.cases.append(case)
            case.pretty_result = case.result()

            if case.pretty_result == 'running':
                case.pretty_result = 'fail'

            if case.pretty_result == 'fail' and case.gating:
                self.fail_count += 1
                self.fail_case_list.append(case.name)
Example #25
def generateExcelResults(test_result):
    try:
        base_path = r'{}/excel/{}'.format(
            test_result.server_config["base_share_path"], test_result.dut_name)
        if not os.path.isdir(base_path):
            os.mkdir(base_path)
        filename = r'{}/{}_{}_{}.xls'.format(base_path,
                                             test_result.scenario_name,
                                             test_result.build,
                                             "%.3f" % time.time())

        w = Workbook()
        ws = w.add_sheet('case_result')
        # -------------------------------------------------------------------------- xls settings
        # -----------------------------------title
        pattern0 = Pattern()
        pattern0.pattern = pattern0.SOLID_PATTERN
        pattern0.pattern_fore_colour = 0x5
        pattern0.pattern_back_colour = 0x5
        style0 = XFStyle()
        style0.pattern = pattern0
        # ---------------------------------- blank
        pattern5 = Pattern()
        pattern5.pattern = pattern0.SOLID_PATTERN
        pattern5.pattern_fore_colour = 0x09
        pattern5.pattern_back_colour = 0x09
        style5 = XFStyle()
        style5.pattern = pattern5
        # -----------------------------------pass
        pattern1 = Pattern()
        pattern1.pattern = pattern0.SOLID_PATTERN
        pattern1.pattern_fore_colour = 0x11
        pattern1.pattern_back_colour = 0x11
        style1 = XFStyle()
        style1.pattern = pattern1
        # ----------------------------------fail
        pattern2 = Pattern()
        pattern2.pattern = pattern0.SOLID_PATTERN
        pattern2.pattern_fore_colour = 0x0A
        pattern2.pattern_back_colour = 0x0A
        style2 = XFStyle()
        style2.pattern = pattern2
        # ----------------------------------cancelled
        pattern3 = Pattern()
        pattern3.pattern = pattern0.SOLID_PATTERN
        pattern3.pattern_fore_colour = 0x17
        pattern3.pattern_back_colour = 0x17
        style3 = XFStyle()
        style3.pattern = pattern3
        # -----------------------------------repeat pass
        pattern4 = Pattern()
        pattern4.pattern = pattern0.SOLID_PATTERN
        pattern4.pattern_fore_colour = 0x35
        pattern4.pattern_back_colour = 0x35
        style4 = XFStyle()
        style4.pattern = pattern4
        # -------------------------------------set info
        ws.write(0, 0, 'CASE_NAME', style0)
        ws.write(0, 1, 'RESULT', style0)
        ws.write(0, 2, 'BEGIN TIME', style0)
        ws.write(0, 3, 'END TIME', style0)
        ws.write(0, 4, 'HSD ID', style0)
        ws.write(0, 5, 'ERROR LOG', style0)
        ws.write(0, 6, 'RERUN CMD', style0)
        ws.write(0, 7, 'ADDITIONAL INFO', style0)
        ws.write(0, 8, 'STDOUT/ERR', style0)

        i = 0
        std_logs_list = []
        for case in test_result.cases:
            try:
                ws.write(i + 1, 0, case.name)
                res = case.pretty_result
                style = style0
                if res == 'not-run':
                    style = style3
                elif res == 'pass':
                    style = style1
                elif res == 'fail':
                    style = style2
                elif res == 'repeat pass':
                    style = style4
                ws.write(i + 1, 1, res, style)
                ws.write(i + 1, 2, case.start)
                ws.write(i + 1, 3, case.end)
                err_log = ''
                std_log = str(i) + ' ' + case.name + ':\r\n'
                #print case.error_log
                """
                for run in case.runs():
                    err_log += '||'.join(['|'.join(_) for _ in run.error_log])
                    std_log += run.std_out_err + '\r\n'
                """
                ws.write(i + 1, 5, case.error_log)
                ws.write(i + 1, 6, case.cmd)
                ws.write(i + 1, 7, '')
                ws.write(i + 1, 8, '')
                std_logs_list.append(case.std_log)

            except:
                LOGGER.error(traceback.format_exc())
            i += 1

        w.save(filename)
        att = None

        try:
            std_logs_all = ''.join(std_logs_list)
            att = MIMEText(std_logs_all, 'plain', 'gb2312')
            att["Content-Type"] = 'text/plain'
            att["Content-Disposition"] = 'attachment; filename="case_std_details.log"'
        except Exception:
            LOGGER.error(traceback.format_exc())

        return filename, att

    except IOError:
        LOGGER.error(traceback.format_exc())

    return None, None
Example #26
        def launchConnection(self, sock):
            try:
                sock.settimeout(2)
                buf = ''
                while True:
                    try:
                        buf_frag = sock.recv(1024)
                        buf += buf_frag
                        #LOGGER.debug(buf)
                        if len(buf_frag) == 0:
                            break
                    except Exception, e:
                        LOGGER.debug(str(traceback.format_exc()))
                        break
                if buf:
                    LOGGER.debug(buf)
                    try:
                        data = json.loads(buf)
                        if data["option"] == 'QUERY':
                            LOGGER.debug("Action: Query Status")
                            '''
                            {"option" : "QUERY", "duts":[], "status":[]}
                            '''
                            status = []
                            for dut in data["duts"]:
                                bFound = False
                                for g in self.server.groups():
                                    d = g.getDutbyName(dut)
                                    if d:
                                        d.refreshStatus()
                                        status.append(d.status)
                                        break
                                else:
                                    status.append(DUT.STATUS_UNKNOWN)
                            data["status"] = status
                            sock.send(json.dumps(data))
                        if data["option"] == 'QUEUE':
                            LOGGER.debug("Action: Query Queue")
                            '''
                            {"option" : "QUEUE", "duts":[], "pools":[], queue":[]}
                            '''
                            queue = []

                            for dut in data.get('duts', []):
                                tmp = []
                                for g in self.server.groups():
                                    d = g.getDutbyName(dut)
                                    if d and d.task_runner:
                                        _t = d.task_runner.getCurrentTask()
                                        tmp.append(_t)
                                        tmp.append(d.getDedicatedTaskQueue())
                                        break

                                queue.append(tmp)

                            for q in data.get('pools', []):
                                queue.append(
                                    rabbitMQ.getDedicatedTaskQueue_pika(
                                        self.server.server_config, q))

                            data["queue"] = queue
                            sock.send(json.dumps(data))
                        if data["option"] == 'REORDER':
                            LOGGER.debug("Action: REORDER Queue")
                            '''
                            {"option" : "REORDER", "queue":"", "task_id":""}
                            '''
                            data["rc"] = rabbitMQ.moveToTop_pika(
                                self.server.server_config, data["queue"],
                                data["task_id"])
                            sock.send(json.dumps(data))
                        if data["option"] == 'LATE_LOCK':
                            LOGGER.debug("Action: LATE_LOCK DUT triggered")
                            # {"option": "LATE_LOCK", "lock": True, "dut" : "APL_TEST"}
                            if 'dut' not in data:
                                data['rc'] = -2
                                data['message'] = "key word dut missing!"
                                sock.send(json.dumps(data))
                            else:
                                for g in self.server.groups():
                                    d = g.getDutbyName(data["dut"])
                                    if d:
                                        d.task_runner.scenario_level_pause = data.get(
                                            'lock', False)
                                        data['rc'] = 0
                                        data['message'] = "Action Triggered!"
                                        sock.send(json.dumps(data))
                                        break
                                else:
                                    data['rc'] = -1
                                    data['message'] = "Selected DUT not found!"
                                    sock.send(json.dumps(data))

                        if data["option"] == 'CANCEL':
                            LOGGER.debug("Action: CANCEL TASK")
                            '''
                            {"option" : "CANCEL", "dut": "", "queue":"", "task_id":""}
                            '''
                            if data.has_key("dut"):
                                # 1. the task might be already triggered
                                d = None
                                for g in self.server.groups():
                                    d = g.getDutbyName(data["dut"])
                                    if d:
                                        break
                                if d and d.task_variable:
                                    _cur_task = d.task_variable
                                    if _cur_task["task_id"] == data["task_id"]:
                                        LOGGER.info(
                                            "Cancel a task in running with id {} on DUT {}"
                                            .format(data["task_id"],
                                                    data["dut"]))
                                        d.task_runner.b_clear = True
                                        d.task_variable["killer"] = data.get(
                                            "killer", "NA")
                                        data["rc"] = 0
                                        sock.send(json.dumps(data))
                                    else:
                                        data["rc"] = -1
                                        data["message"] = "No such task!"
                                        sock.send(json.dumps(data))
                                else:
                                    data["rc"] = -2
                                    data["message"] = "No such DUT!"
                                    sock.send(json.dumps(data))
                            else:
                                # 2. the task is still in queue
                                #data["rc"] = rabbitMQ.cancelTask(self.server.server_config, data["queue"], data["task_id"])
                                data["rc"] = rabbitMQ.cancelTask_pika(
                                    self.server.server_config, data["queue"],
                                    data["task_id"])
                                sock.send(json.dumps(data))

                        if data["option"] == "DETAIL":
                            LOGGER.debug("Action: QUERY DETAIL")
                            try:
                                if data.has_key("dut"):
                                    d = None
                                    for g in self.server.groups():
                                        d = g.getDutbyName(data["dut"])
                                        if d:
                                            break
                                    else:
                                        data["rc"] = -1
                                        data["message"] = "No matched dut!"
                                        sock.send(json.dumps(data))
                                    if d and d.task_runner:
                                        _t = d.task_runner.getCurrentTaskDetails(
                                            data.get("case_name", None))
                                        data["result"] = _t
                                        data["rc"] = 0
                                        data["message"] = "query successfully"
                                        sock.send(json.dumps(data))
                                else:
                                    data["rc"] = -2
                                    data["message"] = "key dut missing!"
                                    sock.send(json.dumps(data))
                            except:
                                data["rc"] = -3
                                data["message"] = traceback.format_exc()
                                sock.send(json.dumps(data))
                        if data["option"] == "LOCK":
                            LOGGER.debug("Action: LOCK/UNLOCK DUT INFO")
                            # {"option": "LOCK", "lock": True, "dut" : "APL_TEST"}
                            if 'dut' not in data:
                                data['rc'] = -2
                                data['message'] = "key word dut missing!"
                                sock.send(json.dumps(data))
                            else:
                                for g in self.server.groups():
                                    d = g.getDutbyName(data["dut"])
                                    if d:
                                        if data.get('lock', False):
                                            d.pauseRunner()
                                        else:
                                            d.startRunner()
                                        data['rc'] = 0
                                        data['message'] = "Action Triggered!"
                                        sock.send(json.dumps(data))
                                        break
                                else:
                                    data['rc'] = -1
                                    data['message'] = "Selected DUT not found!"
                                    sock.send(json.dumps(data))

                        if data["option"] == "UPDATE":
                            LOGGER.debug("Action: Update DUT INFO")
                            # {"option": "UPDATE", "dut": "APL_TEST", "dut_name":"", "dut_ip":"",
                            # "test_type" : ["PIT"], "platform":"","subversion":"","project_code":""}
                            if 'dut' not in data:
                                data['rc'] = -2
                                data['message'] = "key word dut missing!"

                            for g in self.server.groups():
                                d = g.getDutbyName(data["dut"])
                                if d:
                                    d.dut_name = data.get(
                                        "dut_name", d.dut_name)
                                    d.dut_ip = data.get("dut_ip", d.dut_ip)
                                    d.test_type = data.get(
                                        "test_type", d.test_type)
                                    d.branch = data.get("branch", d.branch)
                                    d.project_code = data.get(
                                        "project_code", d.project_code)
                                    d.platform = data.get(
                                        "platform", d.platform)
                                    d.sub_platform = data.get(
                                        "sub_platform", d.sub_platform)
                                    data['rc'] = 0
                                    sock.send(json.dumps(data))
                                    break
                            else:
                                data['rc'] = -1
                                data['message'] = "Selected DUT not found!"
                                sock.send(json.dumps(data))

                        if data["option"] == "PAAS":
                            LOGGER.debug("Action: PASS REQUIREMENT")
                            try:
                                if data.has_key("dut"):
                                    d = None
                                    for g in self.server.groups():
                                        d = g.getDutbyName(data["dut"])
                                        if d:
                                            break
                                    else:
                                        data["rc"] = -1
                                        data["message"] = "No matched dut!"
                                        sock.send(json.dumps(data))

                                    scenario = Scenario(d.dut_id)
                                    if scenario.load(data):
                                        d.addTestScenarioObject(scenario)
                                    else:
                                        data["rc"] = -4
                                        data[
                                            "message"] = "failed to load cases from database"
                                        sock.send(json.dumps(data))

                                    data["rc"] = 0
                                    data[
                                        "message"] = "PaaS scenario generated successfully"
                                    sock.send(json.dumps(data))

                                else:
                                    data["rc"] = -2
                                    data["message"] = "key dut missing!"
                                    sock.send(json.dumps(data))
                            except:
                                data["rc"] = -3
                                data["message"] = traceback.format_exc()
                                sock.send(json.dumps(data))
                    except:
                        sock.send(traceback.format_exc())
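
launchConnection reads one JSON request per connection and answers on the same socket. The sketch below is a minimal client for the QUERY option, built from the request shape documented in the handler's comments; the server address, the buffer size, and the write-side shutdown (which ends the server's recv loop without waiting for its 2-second timeout) are assumptions:

import json
import socket


def query_dut_status(server_ip, server_port, duts):
    request = {"option": "QUERY", "duts": duts, "status": []}
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((server_ip, server_port))
        sock.send(json.dumps(request))
        sock.shutdown(socket.SHUT_WR)  # signal end-of-request to the server
        reply = sock.recv(4096)
        return json.loads(reply)["status"]
    finally:
        sock.close()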
Example #27
        self.pass_rate = '%.3f' % (float(
            (1 - float(self.fail_count + self.not_run_count) /
             self.total_count) * 100.0)) + r'%'

        try:
            if _last_cmd:
                params = _last_cmd.split(' ')
                for param in params:
                    _p = param.strip('"').strip('{').strip('}')
                    if _p in dut.task_variable.keys():
                        self.log_path = os.path.join(self.log_path,
                                                     dut.task_variable[_p])
                    if not os.path.exists(self.log_path):
                        os.makedirs(self.log_path)

                LOGGER.info(self.log_path)
        except:
            LOGGER.error(traceback.format_exc())

        try:
            if self.log_path:
                result_js_file = '{0}/../result-{1}.js'.format(
                    self.log_path, dut.task_variable.get("UUID", 'NA'))
                result_js = {
                    'cancelled':
                    self.bCancelled,
                    'env_failed':
                    self.bEnv_failure,
                    'pass':
                    self.pass_count,
                    'fail':
Example #28
                                res = re.search(pt, log_list[i])
                                self.additional_info = res.group(1)
                            except Exception, e:
                                LOGGER.error(str(e))
                        if 'Log file:' in log_list[i]:
                            try:
                                pt = re.compile(r'Log file:(.*)]')
                                res = re.search(pt, log_list[i])
                                self.extra_check_log = res.group(1)
                            except Exception, e:
                                LOGGER.error(str(e))

                        if '[ERROR]' in str(log_list[i]):
                            self.error_log.append(log_list[i].split('[ERROR]')[1])
                else:
                    LOGGER.info("can not get testid.log, so directly return")
                    self.error_log = []
                    return -1
            except Exception, e:
                LOGGER.error(traceback.format_exc())
                return -1

        if 'RunStoreAppAutoTest' in self.case_cmd:
            try:
                """
                for Metro cases we need to fetch the app log
                """
                log = self.dut.staf_handle.getFile(self.dut.dut_ip, '{}/{}._AppLog.txt'.format(self.log_base_path, self.case_id))
                if log:
                    log_list = log.split('\r')
                    for i in range(0, len(log_list)):
Example #29
                        test_result.patch,
                        'error_log': {
                            'fail_type': 'True',
                            'driver_log': '',  # optional
                            'fw_log': '',  # optional
                            'crash_log': '',  # optional
                            'error_log':
                            test_result.cases[i].error_log,  # optional
                            'exec_time': 0.0,
                            'rerun_time': '0.0'
                        }
                    }

                    case_detail.append(case_item)
                else:
                    LOGGER.debug("Bypass {} env case".format(
                        test_result.cases[i].name))
        else:
            LOGGER.error("Failed to query case details from dashboard!")
            return

        data.update({"guid": guid, "case_detail": case_detail})
        data.update(csrf_token)
        LOGGER.debug(data)
        LOGGER.info('data: {0}'.format(data))
        response = session.post(URL, json=data)
        if response.status_code != 200 or 'success' not in str(
                response.content).lower():
            response = session.put(URL, json=data)
        LOGGER.info('upload results: {} {}'.format(response.status_code,
                                                   response.content))
Example #30
    def _is_new_bug(self, log_cur, work, run):
        if self.query_result:
            for i in range(0, len(self.query_result)):
                if 'case_id' in self.query_result[i].keys():
                    if self.query_result[i]['case_id'] == log_cur.case_id:

                        if (work.test_type.lower() == 'weekly' and self.query_result[i]['guid__day'] == 0) or \
                            (work.test_type.lower() != 'weekly' and self.query_result[i]['guid__day'] != 0):
                            LOGGER.info("matched one case: {0} from DB".format(log_cur.case_id))
                            '''
                            to identify history and current error log
                            '''
                            rst_error_log = self.query_result[i].get('error_log', '').replace('[}{]', '')

                            # print rst_error_log
                            log_prev = self.log_structure(work, rst_error_log)

                            LOGGER.info("prev error_log {0}".format(log_prev.log_set))
                            LOGGER.info("cur error_log {0}".format(log_cur.log_set))

                            '''
                            get category
                            '''
                            if 'second_category' in self.query_result[i].keys():
                                work.category = self.query_result[i]['second_category']
                            else:
                                work.category = ''
                            if len(log_prev.log_set) >= len(log_cur.log_set):
                                run.fail_type = 'known'
                                """
                                API changed, just keep today's error log
                                """
                                # run.error_log
                                # += '[}{]' + self.query_result[i]['error_log']
                                run.hsd_id = self.query_result[i]['hsd']
                                run.hsd_url = self.query_result[i]['hsd_url']
                                
                                LOGGER.info('run.hsd_id: %s' % run.hsd_id)
                                if run.hsd_id == 'new' or run.hsd_id == '':
                                    run.hsd_id = 'known-not-filed'
                                return False, run.hsd_id
                            # elif self.test_type == work.test_type and self.sub_version == work.sub_version:
                            else:
                                """
                                API changed, just keep today's error log
                                """
                                LOGGER.info("a potenial new bug:{0}".format(log_cur.case_id))
                                # run.bug_id += self.query_result[i]['HSD']
                                '''
                                [}{]to identify history and current error log
                                '''
                                # run.error_log += '[}{]' + self.query_result[i]['error_log']
            try:
                """
                check in same sub category cases
                """
                if len(work.category) > 1:
                    for i in range(0, len(self.query_result)):
                        if self.query_result[i]['second_category'] == work.category:
                            '''
                            to identify history and current error log
                            '''
                            rst_error_log = self.query_result[i]['error_log'].replace('[}{]', '')
                            log_prev = self.log_structure(work, rst_error_log)
                            if len(log_prev.log_set) >= len(log_cur.log_set):
                                run.fail_type = 'known'
                                """
                                API changed, just keep today's error log
                                """
                                run.hsd_id = self.query_result[i]['hsd']
                                run.hsd_url = self.query_result[i]['hsd_url']
                                if run.hsd_id == 'new' or run.hsd_id == '':
                                    run.hsd_id = 'known-not-filed'
                                return False, run.hsd_id
                            else:
                                """
                                API changed, just keep today's error log
                                """
                                pass
            except Exception, e:
                LOGGER.error(str(e))