Esempio n. 1
0
class PTLTestData(Plugin):
    """
    Save post analysis data on test cases failure or error
    """
    name = 'PTLTestData'
    score = sys.maxsize - 6
    logger = logging.getLogger(__name__)

    def __init__(self):
        Plugin.__init__(self)
        # destination root under which per-test data dirs are created
        self.post_data_dir = None
        # max number of saves allowed per test suite (0 means unlimited)
        self.max_postdata_threshold = None
        # number of saves done for the current test suite
        self.__save_data_count = 0
        # suite name seen on the previous save (to detect suite change)
        self.__priv_sn = ''
        self.du = DshUtils()

    def options(self, parser, env):
        """
        Register command line options
        """
        pass

    def set_data(self, post_data_dir, max_postdata_threshold):
        """
        Set the plugin configuration.

        :param post_data_dir: directory under which post analysis data
                              is saved
        :param max_postdata_threshold: maximum number of saves allowed
                                       per test suite (0 = unlimited)
        """
        self.post_data_dir = post_data_dir
        self.max_postdata_threshold = max_postdata_threshold

    def configure(self, options, config):
        """
        Configure the plugin and system, based on selected options
        """
        self.config = config
        # the plugin is only useful when a destination dir was provided
        if self.post_data_dir is not None:
            self.enabled = True
        else:
            self.enabled = False

    def __save_home(self, test, status, err=None):
        """
        Capture post-mortem analysis data (via pbs_snapshot) for the
        given test into ``<post_data_dir>/<suite>/<testname>``.

        :param test: nose test (or context) object being reported
        :param status: result status string ('PASS', 'FAIL', 'ERROR', ...)
        :param err: optional (class, instance, traceback) tuple from nose
        """
        if hasattr(test, 'test'):
            _test = test.test
            sn = _test.__class__.__name__
        elif hasattr(test, 'context'):
            _test = test.context
            sn = _test.__name__
        else:
            # test does not have any PBS Objects, so just return
            return
        if self.__priv_sn != sn:
            # entering a new test suite: reset the per-suite save counter
            self.__save_data_count = 0
            self.__priv_sn = sn
        # Saving home might take time so disable timeout
        # handler set by runner
        tn = getattr(_test, '_testMethodName', 'unknown')
        testlogs = getattr(test, 'captured_logs', '')
        datadir = os.path.join(self.post_data_dir, sn, tn)
        if os.path.exists(datadir):
            _msg = 'Old post analysis data exists at %s' % datadir
            _msg += ', skipping saving data for this test case'
            self.logger.warning(_msg)
            _msg = 'Please remove old directory or'
            _msg += ' provide different directory'
            self.logger.warning(_msg)
            return
        if getattr(test, 'old_sigalrm_handler', None) is not None:
            # restore the runner's SIGALRM handler and cancel any pending
            # alarm so the (potentially slow) snapshot is not interrupted
            _h = getattr(test, 'old_sigalrm_handler')
            signal.signal(signal.SIGALRM, _h)
            signal.alarm(0)
        self.logger.log(logging.DEBUG2, 'Saving post analysis data...')
        current_host = socket.gethostname().split('.')[0]
        self.du.mkdir(current_host,
                      path=datadir,
                      mode=0o755,
                      parents=True,
                      logerr=False,
                      level=logging.DEBUG2)
        if err is not None:
            if isclass(err[0]) and issubclass(err[0], SkipTest):
                status = 'SKIP'
                status_data = 'Reason = %s' % (err[1])
            else:
                if isclass(err[0]) and issubclass(err[0], TimeOut):
                    status = 'TIMEDOUT'
                status_data = getattr(test, 'err_in_string', '')
        else:
            status_data = ''
        logfile = os.path.join(datadir, 'logfile_' + status)
        # context manager guarantees the log file is closed even if one
        # of the remote commands below raises
        with open(logfile, 'w+') as f:
            f.write(testlogs + '\n')
            f.write(status_data + '\n')
            f.write('test duration: %s\n' %
                    str(getattr(test, 'duration', '0')))
            if status in ('PASS', 'SKIP'):
                # Test case passed or skipped, no need to save post
                # analysis data
                return
            if ((self.max_postdata_threshold != 0)
                    and (self.__save_data_count >=
                         self.max_postdata_threshold)):
                _msg = 'Total number of saved post analysis data for this'
                _msg += ' testsuite is exceeded max postdata threshold'
                _msg += ' (%d)' % self.max_postdata_threshold
                f.write(_msg + '\n')
                self.logger.error(_msg)
                return

            servers = getattr(_test, 'servers', None)
            if servers is not None:
                # NOTE(review): assumes servers.values() is indexable
                # (a PTL wrapper type, not a plain py3 dict view) -- confirm
                server_host = servers.values()[0].shortname
            else:
                _msg = 'Could not find Server Object in given test object'
                _msg += ', skipping saving post analysis data'
                f.write(_msg + '\n')
                self.logger.warning(_msg)
                return
            moms = getattr(_test, 'moms', None)
            comms = getattr(_test, 'comms', None)
            client = getattr(_test.servers.values()[0], 'client', None)
            server = servers.values()[0]
            # collect every other host involved in the test so the
            # snapshot can gather their data as well
            add_hosts = []
            if len(servers) > 1:
                for param in servers.values()[1:]:
                    add_hosts.append(param.shortname)
            if moms is not None:
                for param in moms.values():
                    add_hosts.append(param.shortname)
            if comms is not None:
                for param in comms.values():
                    add_hosts.append(param.shortname)
            if client is not None:
                add_hosts.append(client.split('.')[0])

            # de-duplicate and drop the server host itself
            add_hosts = list(set(add_hosts) - set([server_host]))

            pbs_snapshot_path = os.path.join(server.pbs_conf["PBS_EXEC"],
                                             "sbin", "pbs_snapshot")
            cur_user = self.du.get_current_user()
            cur_user_dir = pwd.getpwnam(cur_user).pw_dir
            cmd = [
                pbs_snapshot_path, '-H', server_host, '--daemon-logs', '2',
                '--accounting-logs', '2', '--with-sudo'
            ]
            if len(add_hosts) > 0:
                cmd += ['--additional-hosts=' + ','.join(add_hosts)]
            cmd += ['-o', cur_user_dir]
            ret = self.du.run_cmd(current_host,
                                  cmd,
                                  level=logging.DEBUG2,
                                  logerr=False)
            if ret['rc'] != 0:
                _msg = 'Failed to get analysis information '
                _msg += 'on %s:' % server_host
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                return
            if len(ret['out']) == 0:
                # no stdout: we cannot locate the snapshot tarball path
                self.logger.error('Snapshot command failed')
                return

            # pbs_snapshot prints "<label>: <path-to-tarball>" on stdout
            snap_out = ret['out'][0]
            snap_out_dest = (snap_out.split(":")[1]).strip()

            dest = os.path.join(datadir, 'PBS_' + server_host + '.tar.gz')
            # NOTE(review): copy result is intentionally unchecked here
            # (best-effort save) -- confirm
            self.du.run_copy(current_host,
                             snap_out_dest,
                             dest,
                             sudo=True,
                             level=logging.DEBUG2)
            self.du.rm(current_host,
                       path=snap_out_dest,
                       recursive=True,
                       force=True,
                       level=logging.DEBUG2)

        self.__save_data_count += 1
        _msg = 'Saved post analysis data'
        self.logger.info(_msg)

    def addError(self, test, err):
        """nose hook: save analysis data for a test that errored."""
        self.__save_home(test, 'ERROR', err)

    def addFailure(self, test, err):
        """nose hook: save analysis data for a test that failed."""
        self.__save_home(test, 'FAIL', err)

    def addSuccess(self, test):
        """nose hook: record a passing test (no snapshot is taken)."""
        self.__save_home(test, 'PASS')
Esempio n. 2
0
class PTLJsonData(object):
    """
    The intent of the class is to generate json format of PTL test data
    """

    # repeat count whose per-run summary was last initialised; compared
    # with PtlTextTestRunner.cur_repeat_count to detect a new repeat
    cur_repeat_count = 1

    def __init__(self, command):
        self.__du = DshUtils()
        self.__cmd = command

    def get_json(self, data, prev_data=None):
        """
        Method to generate test data in accordance to json schema

        :param data: dictionary of a test case details
        :type data: dict
        :param prev_data: dictionary of test run details that ran before
                          the current test
        :type prev_data: dict

        :returns a formatted dictionary of the data
        """
        FMT = '%H:%M:%S.%f'
        run_count = str(PtlTextTestRunner.cur_repeat_count)

        def _new_summary():
            # fresh per-repeat result-summary skeleton
            return {
                'result_summary': {
                    'run': 0,
                    'succeeded': 0,
                    'failed': 0,
                    'errors': 0,
                    'skipped': 0,
                    'timedout': 0
                },
                'test_start_time': str(data['start_time']),
                'tests_with_failures': [],
                'test_suites_with_failures': []
            }

        if not prev_data:
            # very first test of the run: build the top-level skeleton
            PTLJsonData.cur_repeat_count = 1
            data_json = {
                'command': self.__cmd,
                'user': self.__du.get_current_user(),
                'product_version': data['pbs_version'],
                'run_id': data['start_time'].strftime('%s'),
                'test_conf': {},
                'machine_info': data['machinfo'],
                'testsuites': {},
                'additional_data': {},
                'test_summary': {},
                'avg_measurements': {},
                'result': {
                    'tests_with_failures': [],
                    'test_suites_with_failures': [],
                    'start': str(data['start_time'])
                }

            }
            data_json['test_summary'][run_count] = _new_summary()
            if data['testparam']:
                # "key=value" entries become pairs, bare names become
                # boolean flags
                for param in data['testparam'].split(','):
                    if '=' in param:
                        par = param.split('=', 1)
                        data_json['test_conf'][par[0]] = par[1]
                    else:
                        data_json['test_conf'][param] = True
        else:
            data_json = prev_data
        if PTLJsonData.cur_repeat_count != PtlTextTestRunner.cur_repeat_count:
            # a new repeat of the whole run started: open a new summary
            data_json['test_summary'][run_count] = _new_summary()
            PTLJsonData.cur_repeat_count = PtlTextTestRunner.cur_repeat_count
        tsname = data['suite']
        tcname = data['testcase']
        jdata = {
            'status': data['status'],
            'status_data': str(data['status_data']),
            'duration': str(data['duration']),
            'start_time': str(data['start_time']),
            'end_time': str(data['end_time']),
            'measurements': []
        }
        if 'measurements' in data:
            jdata['measurements'] = data['measurements']
        if PtlTextTestRunner.cur_repeat_count == 1:
            # static, per-test (not per-run) information is recorded on
            # the first repeat only
            if tsname not in data_json['testsuites']:
                data_json['testsuites'][tsname] = {
                    'module': data['module'],
                    'file': data['file'],
                    'testcases': {}
                }
            tsdoc = []
            if data['suitedoc']:
                # collapse internal whitespace of the docstring
                tsdoc = (re.sub(r"[\t\n ]+", " ", data['suitedoc'])).strip()
            data_json['testsuites'][tsname]['docstring'] = tsdoc
            tcdoc = []
            if data['testdoc']:
                tcdoc = (re.sub(r"[\t\n ]+", " ", data['testdoc'])).strip()
            data_json['testsuites'][tsname]['testcases'][tcname] = {
                'docstring': tcdoc,
                'requirements': data['requirements'],
                'results': {}
            }
            if data['testdoc']:
                jdata_tests = data_json['testsuites'][tsname]['testcases']
                jdata_tests[tcname]['tags'] = data['tags']
        jdata_tests = data_json['testsuites'][tsname]['testcases']
        jdata_tests[tcname]['results'][run_count] = jdata
        if 'additional_data' in data:
            data_json['additional_data'] = data['additional_data']
        data_json['test_summary'][run_count]['test_end_time'] = str(
            data['end_time'])
        run_summary = data_json['test_summary'][run_count]
        start = run_summary['test_start_time'].split()[1]
        end = str(data['end_time']).split()[1]
        # durations are computed from time-of-day only (FMT has no date)
        dur = str(datetime.datetime.strptime(end, FMT) -
                  datetime.datetime.strptime(start, FMT))
        data_json['test_summary'][run_count]['tests_duration'] = dur
        data_json['test_summary'][run_count]['result_summary']['run'] += 1
        d_ts = data_json['test_summary'][run_count]
        if data['status'] == 'PASS':
            d_ts['result_summary']['succeeded'] += 1
        elif data['status'] == 'SKIP':
            d_ts['result_summary']['skipped'] += 1
        elif data['status'] == 'TIMEDOUT':
            d_ts['result_summary']['timedout'] += 1
            d_ts['tests_with_failures'].append(data['testcase'])
            if data['suite'] not in d_ts['test_suites_with_failures']:
                d_ts['test_suites_with_failures'].append(data['suite'])
        elif data['status'] == 'ERROR':
            d_ts['result_summary']['errors'] += 1
            d_ts['tests_with_failures'].append(data['testcase'])
            if data['suite'] not in d_ts['test_suites_with_failures']:
                d_ts['test_suites_with_failures'].append(data['suite'])
        elif data['status'] == 'FAIL':
            d_ts['result_summary']['failed'] += 1
            d_ts['tests_with_failures'].append(data['testcase'])
            if data['suite'] not in d_ts['test_suites_with_failures']:
                d_ts['test_suites_with_failures'].append(data['suite'])
        m_avg = {
            'testsuites': {}
        }

        # recompute averaged measurements over all repeats seen so far
        for tsname in data_json['testsuites']:
            m_avg['testsuites'][tsname] = {
                'testcases': {}
            }
            for tcname in data_json['testsuites'][tsname]['testcases']:
                test_status = "PASS"
                m_avg['testsuites'][tsname]['testcases'][tcname] = []
                t_sum = []
                count = 0
                j_data = data_json['testsuites'][tsname]['testcases'][tcname]
                measurements_data = []
                # NOTE(review): assumes result keys are exactly '1'..'N'
                # in insertion order -- confirm
                for _ in j_data['results']:
                    count += 1
                    r_count = str(count)
                    m_case = data_json['testsuites'][tsname]['testcases']
                    m = m_case[tcname]['results'][r_count]['measurements']
                    # fixed: was `is not "PASS"` (identity comparison on
                    # a string literal); equality is what is meant here
                    if j_data['results'][r_count]['status'] != "PASS":
                        test_status = "FAIL"
                    m_sum = []
                    for i in range(len(m)):
                        sum_mean = 0
                        sum_std = 0
                        sum_min = 0
                        sum_max = 0
                        record = []
                        if "test_measure" in m[i].keys():
                            if len(t_sum) > i:
                                # accumulate onto the running totals
                                sum_mean = m[i]["test_data"]['mean'] + \
                                    t_sum[i][0]
                                sum_std = m[i]["test_data"]['std_dev'] + \
                                    t_sum[i][1]
                                sum_min = m[i]["test_data"]['minimum'] + \
                                    t_sum[i][2]
                                sum_max = m[i]["test_data"]['maximum'] + \
                                    t_sum[i][3]
                            else:
                                # first time this measure is seen
                                measurements_data.append(m[i])
                                sum_mean = m[i]["test_data"]['mean']
                                sum_std = m[i]["test_data"]['std_dev']
                                sum_min = m[i]["test_data"]['minimum']
                                sum_max = m[i]["test_data"]['maximum']
                            record = [sum_mean, sum_std, sum_min, sum_max]
                        else:
                            if len(measurements_data) <= i:
                                measurements_data.append(m[i])
                            record = [sum_mean, sum_std, sum_min, sum_max]
                        m_sum.append(record)
                    if len(t_sum) > len(m_sum):
                        for v in range(len(m_sum)):
                            t_sum[v] = m_sum[v]
                    else:
                        t_sum = m_sum
                m_list = []
                if test_status == "PASS":
                    # only average when every repeat of the case passed
                    for i in range(len(measurements_data)):
                        m_data = {}
                        if "test_measure" in measurements_data[i].keys():
                            measure = measurements_data[i]['test_measure']
                            m_data['test_measure'] = measure
                            m_data['unit'] = measurements_data[i]['unit']
                            m_data['test_data'] = {}
                            div = count
                            m_data['test_data']['mean'] = t_sum[i][0] / div
                            m_data['test_data']['std_dev'] = t_sum[i][1] / div
                            m_data['test_data']['minimum'] = t_sum[i][2] / div
                            m_data['test_data']['maximum'] = t_sum[i][3] / div
                        m_list.append(m_data)
                    m_avg['testsuites'][tsname]['testcases'][tcname] = m_list
        data_json["avg_measurements"] = m_avg

        # overall run duration and de-duplicated failure lists across
        # every repeat performed so far
        data_json['result']['end'] = str(data['end_time'])
        start = data_json['result']['start'].split()[1]
        end = data_json['result']['end'].split()[1]
        dur = str(datetime.datetime.strptime(end, FMT) -
                  datetime.datetime.strptime(start, FMT))
        fail_tests = []
        fail_ts = []
        for count in range(PtlTextTestRunner.cur_repeat_count):
            r_count = str(count + 1)
            fail_tests.extend(
                data_json['test_summary'][r_count]['tests_with_failures'])
            fail_ts.extend(data_json['test_summary']
                           [r_count]['test_suites_with_failures'])
        data_json['result']['duration'] = dur
        data_json['result']['tests_with_failures'] = list(set(fail_tests))
        data_json['result']['test_suites_with_failures'] = list(set(fail_ts))
        return data_json
Esempio n. 3
0
class PTLTestData(Plugin):

    """
    Save post analysis data on test cases failure or error
    """
    name = 'PTLTestData'
    score = sys.maxint - 6
    logger = logging.getLogger(__name__)

    def __init__(self):
        Plugin.__init__(self)
        self.post_data_dir = None
        self.max_postdata_threshold = None
        self.__save_data_count = 0
        self.__priv_sn = ''
        self.du = DshUtils()

    def options(self, parser, env):
        """
        Register command line options
        """
        pass

    def set_data(self, post_data_dir, max_postdata_threshold):
        self.post_data_dir = post_data_dir
        self.max_postdata_threshold = max_postdata_threshold

    def configure(self, options, config):
        """
        Configure the plugin and system, based on selected options
        """
        self.config = config
        if self.post_data_dir is not None:
            self.enabled = True
        else:
            self.enabled = False

    def __save_home(self, test, status, err=None):
        if hasattr(test, 'test'):
            _test = test.test
            sn = _test.__class__.__name__
        elif hasattr(test, 'context'):
            _test = test.context
            sn = _test.__name__
        else:
            # test does not have any PBS Objects, so just return
            return
        if self.__priv_sn != sn:
            self.__save_data_count = 0
            self.__priv_sn = sn
        # Saving home might take time so disable timeout
        # handler set by runner
        tn = getattr(_test, '_testMethodName', 'unknown')
        testlogs = getattr(test, 'captured_logs', '')
        datadir = os.path.join(self.post_data_dir, sn, tn)
        if os.path.exists(datadir):
            _msg = 'Old post analysis data exists at %s' % datadir
            _msg += ', skipping saving data for this test case'
            self.logger.warn(_msg)
            _msg = 'Please remove old directory or'
            _msg += ' provide different directory'
            self.logger.warn(_msg)
            return
        if getattr(test, 'old_sigalrm_handler', None) is not None:
            _h = getattr(test, 'old_sigalrm_handler')
            signal.signal(signal.SIGALRM, _h)
            signal.alarm(0)
        self.logger.log(logging.DEBUG2, 'Saving post analysis data...')
        current_host = socket.gethostname().split('.')[0]
        self.du.mkdir(current_host, path=datadir, mode=0755,
                      parents=True, logerr=False, level=logging.DEBUG2)
        if err is not None:
            if isclass(err[0]) and issubclass(err[0], SkipTest):
                status = 'SKIP'
                status_data = 'Reason = %s' % (err[1])
            else:
                if isclass(err[0]) and issubclass(err[0], TimeOut):
                    status = 'TIMEDOUT'
                status_data = getattr(test, 'err_in_string', '')
        else:
            status_data = ''
        logfile = os.path.join(datadir, 'logfile_' + status)
        f = open(logfile, 'w+')
        f.write(testlogs + '\n')
        f.write(status_data + '\n')
        f.write('test duration: %s\n' % str(getattr(test, 'duration', '0')))
        if status in ('PASS', 'SKIP'):
            # Test case passed or skipped, no need to save post analysis data
            f.close()
            return
        if ((self.max_postdata_threshold != 0) and
                (self.__save_data_count >= self.max_postdata_threshold)):
            _msg = 'Total number of saved post analysis data for this'
            _msg += ' testsuite is exceeded max postdata threshold'
            _msg += ' (%d)' % self.max_postdata_threshold
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        svr = getattr(_test, 'server', None)
        if svr is not None:
            svr_host = svr.hostname
        else:
            _msg = 'Could not find Server Object in given test object'
            _msg += ', skipping saving post analysis data'
            f.write(_msg + '\n')
            self.logger.warning(_msg)
            f.close()
            return
        pbs_diag = os.path.join(svr.pbs_conf['PBS_EXEC'],
                                'unsupported', 'pbs_diag')
        cur_user = self.du.get_current_user()
        cmd = [pbs_diag, '-f', '-d', '2']
        cmd += ['-u', cur_user]
        cmd += ['-o', pwd.getpwnam(cur_user).pw_dir]
        if len(svr.jobs) > 0:
            cmd += ['-j', ','.join(svr.jobs.keys())]
        ret = self.du.run_cmd(svr_host, cmd, sudo=True, level=logging.DEBUG2)
        if ret['rc'] != 0:
            _msg = 'Failed to get diag information for '
            _msg += 'on %s:' % svr_host
            _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        else:
            diag_re = r"(?P<path>\/.*\/pbs_diag_[\d]+_[\d]+\.tar\.gz).*"
            m = re.search(diag_re, '\n'.join(ret['out']))
            if m is not None:
                diag_out = m.group('path')
            else:
                _msg = 'Failed to find generated diag path in below output:'
                _msg += '\n\n' + '-' * 80 + '\n'
                _msg += '\n'.join(ret['out']) + '\n'
                _msg += '-' * 80 + '\n\n'
                f.write(_msg)
                self.logger.error(_msg)
                f.close()
                return
        diag_out_dest = os.path.join(datadir, os.path.basename(diag_out))
        if not self.du.is_localhost(svr_host):
            diag_out_r = svr_host + ':' + diag_out
        else:
            diag_out_r = diag_out
        ret = self.du.run_copy(current_host, diag_out_r, diag_out_dest,
                               sudo=True, level=logging.DEBUG2)
        if ret['rc'] != 0:
            _msg = 'Failed to copy generated diag from'
            _msg += ' %s to %s' % (diag_out_r, diag_out_dest)
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        else:
            self.du.rm(svr_host, path=diag_out, sudo=True, force=True,
                       level=logging.DEBUG2)
        cores = []
        dir_list = ['server_priv', 'sched_priv', 'mom_priv']
        for d in dir_list:
            path = os.path.join(svr.pbs_conf['PBS_HOME'], d)
            files = self.du.listdir(hostname=svr_host, path=path, sudo=True,
                                    level=logging.DEBUG2)
            for _f in files:
                if os.path.basename(_f).startswith('core'):
                    cores.append(_f)
        cores = list(set(cores))
        if len(cores) > 0:
            cmd = ['gunzip', diag_out_dest]
            ret = self.du.run_cmd(current_host, cmd, sudo=True,
                                  level=logging.DEBUG2)
            if ret['rc'] != 0:
                _msg = 'Failed unzip generated diag at %s:' % diag_out_dest
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                f.close()
                return
            diag_out_dest = diag_out_dest.rstrip('.gz')
            cmd = ['tar', '-xf', diag_out_dest, '-C', datadir]
            ret = self.du.run_cmd(current_host, cmd, sudo=True,
                                  level=logging.DEBUG2)
            if ret['rc'] != 0:
                _msg = 'Failed extract generated diag %s' % diag_out_dest
                _msg += ' to %s:' % datadir
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                f.close()
                return
            self.du.rm(hostname=current_host, path=diag_out_dest,
                       force=True, sudo=True, level=logging.DEBUG2)
            diag_out_dest = diag_out_dest.rstrip('.tar')
            for c in cores:
                cmd = [pbs_diag, '-g', c]
                ret = self.du.run_cmd(svr_host, cmd, sudo=True,
                                      level=logging.DEBUG2)
                if ret['rc'] != 0:
                    _msg = 'Failed to get core file information for '
                    _msg += '%s on %s:' % (c, svr_host)
                    _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                    f.write(_msg + '\n')
                    self.logger.error(_msg)
                else:
                    of = os.path.join(diag_out_dest,
                                      os.path.basename(c) + '.out')
                    _f = open(of, 'w+')
                    _f.write('\n'.join(ret['out']) + '\n')
                    _f.close()
                    self.du.rm(hostname=svr_host, path=c, force=True,
                               sudo=True, level=logging.DEBUG2)
            cmd = ['tar', '-cf', diag_out_dest + '.tar']
            cmd += [os.path.basename(diag_out_dest)]
            ret = self.du.run_cmd(current_host, cmd, sudo=True, cwd=datadir,
                                  level=logging.DEBUG2)
            if ret['rc'] != 0:
                _msg = 'Failed generate tarball of diag directory'
                _msg += ' %s' % diag_out_dest
                _msg += ' after adding core(s) information in it:'
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                f.close()
                return
            cmd = ['gzip', diag_out_dest + '.tar']
            ret = self.du.run_cmd(current_host, cmd, sudo=True,
                                  level=logging.DEBUG2)
            if ret['rc'] != 0:
                _msg = 'Failed compress tarball of diag %s' % diag_out_dest
                _msg += '.tar after adding core(s) information in it:'
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                f.close()
                return
            self.du.rm(current_host, diag_out_dest, sudo=True,
                       recursive=True, force=True, level=logging.DEBUG2)
        else:
            diag_out_dest = diag_out_dest.rstrip('.tar.gz')
        dest = os.path.join(datadir,
                            'PBS_' + current_host.split('.')[0] + '.tar.gz')
        ret = self.du.run_copy(current_host, diag_out_dest + '.tar.gz',
                               dest, sudo=True, level=logging.DEBUG2)
        if ret['rc'] != 0:
            _msg = 'Failed rename tarball of diag from %s' % diag_out_dest
            _msg += '.tar.gz to %s:' % dest
            _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        self.du.rm(current_host, path=diag_out_dest + '.tar.gz',
                   force=True, sudo=True, level=logging.DEBUG2)
        f.close()
        self.__save_data_count += 1
        _msg = 'Successfully saved post analysis data'
        self.logger.log(logging.DEBUG2, _msg)

    def addError(self, test, err):
        self.__save_home(test, 'ERROR', err)

    def addFailure(self, test, err):
        self.__save_home(test, 'FAIL', err)

    def addSuccess(self, test):
        self.__save_home(test, 'PASS')
Esempio n. 4
0
class PTLTestData(Plugin):

    """
    Save post analysis data on test cases failure or error.

    On FAIL/ERROR/TIMEDOUT the plugin runs ``pbs_diag`` on the server
    host, pulls the resulting tarball back to ``post_data_dir``, and, if
    core files are present under PBS_HOME, augments the tarball with the
    decoded core information.
    """
    name = 'PTLTestData'
    # Keep this plugin late in nose's plugin ordering.
    # sys.maxsize replaces the Python-2-only sys.maxint.
    score = sys.maxsize - 6
    logger = logging.getLogger(__name__)

    def __init__(self):
        Plugin.__init__(self)
        self.post_data_dir = None
        self.max_postdata_threshold = None
        # per-testsuite count of saved data sets (reset on suite change)
        self.__save_data_count = 0
        # name of the suite the counter currently applies to
        self.__priv_sn = ''
        self.du = DshUtils()

    def options(self, parser, env):
        """
        Register command line options
        """
        pass

    def set_data(self, post_data_dir, max_postdata_threshold):
        """
        Set the destination directory for post analysis data and the
        per-testsuite limit on how many test cases may save data.

        :param post_data_dir: destination directory for post analysis data
        :type post_data_dir: str
        :param max_postdata_threshold: maximum saves per test suite
                                       (0 means unlimited)
        :type max_postdata_threshold: int
        """
        self.post_data_dir = post_data_dir
        self.max_postdata_threshold = max_postdata_threshold

    def configure(self, options, config):
        """
        Configure the plugin and system, based on selected options
        """
        self.config = config
        # the plugin is active only when a post data dir was provided
        self.enabled = self.post_data_dir is not None

    @staticmethod
    def __strip_suffix(name, suffix):
        """
        Return ``name`` with ``suffix`` removed from its end (if present).

        ``str.rstrip`` must NOT be used for this: it strips any run of the
        given *characters*, so e.g. ``'x_star.tar'.rstrip('.tar')`` would
        also eat trailing letters of the basename.
        """
        if name.endswith(suffix):
            return name[:-len(suffix)]
        return name

    def __save_home(self, test, status, err=None):
        """
        Collect and save diagnostic data for a finished test case.

        :param test: the nose test (or context) wrapper object
        :param status: one of PASS/SKIP/FAIL/ERROR/TIMEDOUT
        :type status: str
        :param err: optional ``sys.exc_info()``-style tuple for failures
        """
        if hasattr(test, 'test'):
            _test = test.test
            sn = _test.__class__.__name__
        elif hasattr(test, 'context'):
            _test = test.context
            sn = _test.__name__
        else:
            # test does not have any PBS Objects, so just return
            return
        if self.__priv_sn != sn:
            # entering a new test suite: reset the per-suite save counter
            self.__save_data_count = 0
            self.__priv_sn = sn
        # Saving home might take time so disable timeout
        # handler set by runner
        tn = getattr(_test, '_testMethodName', 'unknown')
        testlogs = getattr(test, 'captured_logs', '')
        datadir = os.path.join(self.post_data_dir, sn, tn)
        if os.path.exists(datadir):
            _msg = 'Old post analysis data exists at %s' % datadir
            _msg += ', skipping saving data for this test case'
            self.logger.warning(_msg)
            _msg = 'Please remove old directory or'
            _msg += ' provide different directory'
            self.logger.warning(_msg)
            return
        if getattr(test, 'old_sigalrm_handler', None) is not None:
            # restore the runner's SIGALRM handler and cancel any alarm
            _h = getattr(test, 'old_sigalrm_handler')
            signal.signal(signal.SIGALRM, _h)
            signal.alarm(0)
        self.logger.log(logging.DEBUG2, 'Saving post analysis data...')
        current_host = socket.gethostname().split('.')[0]
        # 0o755 is the octal literal form valid on both Python 2.6+ and 3
        self.du.mkdir(current_host, path=datadir, mode=0o755, parents=True,
                      logerr=False, level=logging.DEBUG2)
        if err is not None:
            if isclass(err[0]) and issubclass(err[0], SkipTest):
                status = 'SKIP'
                status_data = 'Reason = %s' % (err[1])
            else:
                if isclass(err[0]) and issubclass(err[0], TimeOut):
                    status = 'TIMEDOUT'
                status_data = getattr(test, 'err_in_string', '')
        else:
            status_data = ''
        logfile = os.path.join(datadir, 'logfile_' + status)
        f = open(logfile, 'w+')
        f.write(testlogs + '\n')
        f.write(status_data + '\n')
        f.write('test duration: %s\n' % str(getattr(test, 'duration', '0')))
        if status in ('PASS', 'SKIP'):
            # Test case passed or skipped, no need to save post analysis data
            f.close()
            return
        if ((self.max_postdata_threshold != 0) and
                (self.__save_data_count >= self.max_postdata_threshold)):
            _msg = 'Total number of saved post analysis data for this'
            _msg += ' testsuite is exceeded max postdata threshold'
            _msg += ' (%d)' % self.max_postdata_threshold
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        svr = getattr(_test, 'server', None)
        if svr is not None:
            svr_host = svr.hostname
        else:
            _msg = 'Could not find Server Object in given test object'
            _msg += ', skipping saving post analysis data'
            f.write(_msg + '\n')
            self.logger.warning(_msg)
            f.close()
            return
        pbs_diag = os.path.join(svr.pbs_conf['PBS_EXEC'], 'unsupported',
                                'pbs_diag')
        cmd = [pbs_diag, '-f', '-d', '2']
        cmd += ['-u', self.du.get_current_user()]
        if len(svr.jobs) > 0:
            cmd += ['-j', ','.join(svr.jobs.keys())]
        ret = self.du.run_cmd(svr_host, cmd, sudo=True, level=logging.DEBUG2)
        if ret['rc'] != 0:
            _msg = 'Failed to get diag information for '
            _msg += 'on %s:' % svr_host
            _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        else:
            # pbs_diag prints the path of the tarball it generated
            diag_re = r"(?P<path>\/.*\/pbs_diag_[\d]+_[\d]+\.tar\.gz).*"
            m = re.search(diag_re, '\n'.join(ret['out']))
            if m is not None:
                diag_out = m.group('path')
            else:
                _msg = 'Failed to find generated diag path in below output:'
                _msg += '\n\n' + '-' * 80 + '\n'
                _msg += '\n'.join(ret['out']) + '\n'
                _msg += '-' * 80 + '\n\n'
                f.write(_msg)
                self.logger.error(_msg)
                f.close()
                return
        diag_out_dest = os.path.join(datadir, os.path.basename(diag_out))
        if not self.du.is_localhost(svr_host):
            # remote source path in scp-style host:path form
            diag_out_r = svr_host + ':' + diag_out
        else:
            diag_out_r = diag_out
        ret = self.du.run_copy(current_host, diag_out_r, diag_out_dest,
                               sudo=True, level=logging.DEBUG2)
        if ret['rc'] != 0:
            _msg = 'Failed to copy generated diag from'
            _msg += ' %s to %s' % (diag_out_r, diag_out_dest)
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        else:
            self.du.rm(svr_host, path=diag_out, sudo=True, force=True,
                       level=logging.DEBUG2)
        # look for core files under the PBS daemon private directories
        cores = []
        dir_list = ['server_priv', 'sched_priv', 'mom_priv']
        for d in dir_list:
            path = os.path.join(svr.pbs_conf['PBS_HOME'], d)
            files = self.du.listdir(hostname=svr_host, path=path, sudo=True,
                                    level=logging.DEBUG2)
            # 'or []' guards against listdir reporting failure as a
            # non-list (presumably None) -- TODO confirm against DshUtils
            for _f in files or []:
                if os.path.basename(_f).startswith('core'):
                    cores.append(_f)
        cores = list(set(cores))
        if len(cores) > 0:
            # unpack the diag tarball so core analysis can be added to it
            cmd = ['gunzip', diag_out_dest]
            ret = self.du.run_cmd(current_host, cmd, sudo=True,
                                  level=logging.DEBUG2)
            if ret['rc'] != 0:
                _msg = 'Failed unzip generated diag at %s:' % diag_out_dest
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                f.close()
                return
            diag_out_dest = self.__strip_suffix(diag_out_dest, '.gz')
            cmd = ['tar', '-xf', diag_out_dest, '-C', datadir]
            ret = self.du.run_cmd(current_host, cmd, sudo=True,
                                  level=logging.DEBUG2)
            if ret['rc'] != 0:
                _msg = 'Failed extract generated diag %s' % diag_out_dest
                _msg += ' to %s:' % datadir
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                f.close()
                return
            self.du.rm(hostname=current_host, path=diag_out_dest, force=True,
                       sudo=True, level=logging.DEBUG2)
            diag_out_dest = self.__strip_suffix(diag_out_dest, '.tar')
            for c in cores:
                # 'pbs_diag -g <core>' decodes one core file
                cmd = [pbs_diag, '-g', c]
                ret = self.du.run_cmd(svr_host, cmd, sudo=True,
                                      level=logging.DEBUG2)
                if ret['rc'] != 0:
                    _msg = 'Failed to get core file information for '
                    _msg += '%s on %s:' % (c, svr_host)
                    _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                    f.write(_msg + '\n')
                    self.logger.error(_msg)
                else:
                    of = os.path.join(diag_out_dest,
                                      os.path.basename(c) + '.out')
                    _f = open(of, 'w+')
                    _f.write('\n'.join(ret['out']) + '\n')
                    _f.close()
                    self.du.rm(hostname=svr_host, path=c, force=True,
                               sudo=True, level=logging.DEBUG2)
            # re-create and re-compress the tarball with the core reports
            cmd = ['tar', '-cf', diag_out_dest + '.tar']
            cmd += [os.path.basename(diag_out_dest)]
            ret = self.du.run_cmd(current_host, cmd, sudo=True, cwd=datadir,
                                  level=logging.DEBUG2)
            if ret['rc'] != 0:
                _msg = 'Failed generate tarball of diag directory'
                _msg += ' %s' % diag_out_dest
                _msg += ' after adding core(s) information in it:'
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                f.close()
                return
            cmd = ['gzip', diag_out_dest + '.tar']
            ret = self.du.run_cmd(current_host, cmd, sudo=True,
                                  level=logging.DEBUG2)
            if ret['rc'] != 0:
                _msg = 'Failed compress tarball of diag %s' % diag_out_dest
                _msg += '.tar after adding core(s) information in it:'
                _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
                f.write(_msg + '\n')
                self.logger.error(_msg)
                f.close()
                return
            self.du.rm(current_host, diag_out_dest, sudo=True, recursive=True,
                       force=True, level=logging.DEBUG2)
        else:
            diag_out_dest = self.__strip_suffix(diag_out_dest, '.tar.gz')
        dest = os.path.join(datadir,
                            'PBS_' + current_host.split('.')[0] + '.tar.gz')
        ret = self.du.run_copy(current_host, diag_out_dest + '.tar.gz', dest,
                               sudo=True, level=logging.DEBUG2)
        if ret['rc'] != 0:
            _msg = 'Failed rename tarball of diag from %s' % diag_out_dest
            _msg += '.tar.gz to %s:' % dest
            _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        self.du.rm(current_host, path=diag_out_dest + '.tar.gz', force=True,
                   sudo=True, level=logging.DEBUG2)
        f.close()
        self.__save_data_count += 1
        _msg = 'Successfully saved post analysis data'
        self.logger.log(logging.DEBUG2, _msg)

    def addError(self, test, err):
        """Nose hook: save diagnostic data for an errored test."""
        self.__save_home(test, 'ERROR', err)

    def addFailure(self, test, err):
        """Nose hook: save diagnostic data for a failed test."""
        self.__save_home(test, 'FAIL', err)

    def addSuccess(self, test):
        """Nose hook: record a passing test (no diag data is collected)."""
        self.__save_home(test, 'PASS')
# Esempio n. 5 (Italian: "Example no. 5" -- scraped-example separator)
# 0
class PTLTestData(Plugin):

    """
    Save post analysis data on test cases failure or error.

    This variant collects data with ``pbs_snapshot`` (run against the
    first server host, plus any additional server/mom/comm/client hosts)
    and copies the resulting tarball into ``post_data_dir``.
    """
    name = 'PTLTestData'
    # keep this plugin late in nose's plugin ordering
    score = sys.maxsize - 6
    logger = logging.getLogger(__name__)

    def __init__(self):
        Plugin.__init__(self)
        self.post_data_dir = None
        self.max_postdata_threshold = None
        # per-testsuite count of saved data sets (reset on suite change)
        self.__save_data_count = 0
        # name of the suite the counter currently applies to
        self.__priv_sn = ''
        self.du = DshUtils()

    def options(self, parser, env):
        """
        Register command line options
        """
        pass

    def set_data(self, post_data_dir, max_postdata_threshold):
        """
        Set the destination directory for post analysis data and the
        per-testsuite limit on how many test cases may save data.

        :param post_data_dir: destination directory for post analysis data
        :type post_data_dir: str
        :param max_postdata_threshold: maximum saves per test suite
                                       (0 means unlimited)
        :type max_postdata_threshold: int
        """
        self.post_data_dir = post_data_dir
        self.max_postdata_threshold = max_postdata_threshold

    def configure(self, options, config):
        """
        Configure the plugin and system, based on selected options
        """
        self.config = config
        # the plugin is active only when a post data dir was provided
        self.enabled = self.post_data_dir is not None

    def __save_home(self, test, status, err=None):
        """
        Collect a pbs_snapshot for a finished test case and save it
        under the post data directory.

        :param test: the nose test (or context) wrapper object
        :param status: one of PASS/SKIP/FAIL/ERROR/TIMEDOUT
        :type status: str
        :param err: optional ``sys.exc_info()``-style tuple for failures
        """
        if hasattr(test, 'test'):
            _test = test.test
            sn = _test.__class__.__name__
        elif hasattr(test, 'context'):
            _test = test.context
            sn = _test.__name__
        else:
            # test does not have any PBS Objects, so just return
            return
        if self.__priv_sn != sn:
            # entering a new test suite: reset the per-suite save counter
            self.__save_data_count = 0
            self.__priv_sn = sn
        # Saving home might take time so disable timeout
        # handler set by runner
        tn = getattr(_test, '_testMethodName', 'unknown')
        testlogs = getattr(test, 'captured_logs', '')
        datadir = os.path.join(self.post_data_dir, sn, tn)
        if os.path.exists(datadir):
            _msg = 'Old post analysis data exists at %s' % datadir
            _msg += ', skipping saving data for this test case'
            self.logger.warning(_msg)
            _msg = 'Please remove old directory or'
            _msg += ' provide different directory'
            self.logger.warning(_msg)
            return
        if getattr(test, 'old_sigalrm_handler', None) is not None:
            # restore the runner's SIGALRM handler and cancel any alarm
            _h = getattr(test, 'old_sigalrm_handler')
            signal.signal(signal.SIGALRM, _h)
            signal.alarm(0)
        self.logger.log(logging.DEBUG2, 'Saving post analysis data...')
        current_host = socket.gethostname().split('.')[0]
        self.du.mkdir(current_host, path=datadir, mode=0o755,
                      parents=True, logerr=False, level=logging.DEBUG2)
        if err is not None:
            if isclass(err[0]) and issubclass(err[0], SkipTest):
                status = 'SKIP'
                status_data = 'Reason = %s' % (err[1])
            else:
                if isclass(err[0]) and issubclass(err[0], TimeOut):
                    status = 'TIMEDOUT'
                status_data = getattr(test, 'err_in_string', '')
        else:
            status_data = ''
        logfile = os.path.join(datadir, 'logfile_' + status)
        f = open(logfile, 'w+')
        f.write(testlogs + '\n')
        f.write(status_data + '\n')
        f.write('test duration: %s\n' % str(getattr(test, 'duration', '0')))
        if status in ('PASS', 'SKIP'):
            # Test case passed or skipped, no need to save post analysis data
            f.close()
            return
        if ((self.max_postdata_threshold != 0) and
                (self.__save_data_count >= self.max_postdata_threshold)):
            _msg = 'Total number of saved post analysis data for this'
            _msg += ' testsuite is exceeded max postdata threshold'
            _msg += ' (%d)' % self.max_postdata_threshold
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return

        servers = getattr(_test, 'servers', None)
        if servers is not None:
            # dict views are not indexable in Python 3; materialize once
            server_list = list(servers.values())
            server_host = server_list[0].shortname
        else:
            _msg = 'Could not find Server Object in given test object'
            _msg += ', skipping saving post analysis data'
            f.write(_msg + '\n')
            self.logger.warning(_msg)
            f.close()
            return
        moms = getattr(_test, 'moms', None)
        comms = getattr(_test, 'comms', None)
        server = server_list[0]
        client = getattr(server, 'client', None)
        add_hosts = []
        if len(servers) > 1:
            for param in server_list[1:]:
                add_hosts.append(param.shortname)
        if moms is not None:
            for param in moms.values():
                add_hosts.append(param.shortname)
        if comms is not None:
            for param in comms.values():
                add_hosts.append(param.shortname)
        if client is not None:
            add_hosts.append(client.split('.')[0])

        # drop duplicates and the main server host itself
        add_hosts = list(set(add_hosts) - set([server_host]))

        pbs_snapshot_path = os.path.join(
            server.pbs_conf["PBS_EXEC"], "sbin", "pbs_snapshot")
        cur_user = self.du.get_current_user()
        cur_user_dir = pwd.getpwnam(cur_user).pw_dir
        cmd = [
            pbs_snapshot_path,
            '-H', server_host,
            '--daemon-logs', '2',
            '--accounting-logs', '2',
            '--with-sudo'
        ]
        if len(add_hosts) > 0:
            cmd += ['--additional-hosts=' + ','.join(add_hosts)]
        cmd += ['-o', cur_user_dir]
        ret = self.du.run_cmd(current_host, cmd, level=logging.DEBUG2,
                              logerr=False)
        if ret['rc'] != 0:
            _msg = 'Failed to get analysis information for '
            # bug fix: this path referenced the undefined name
            # 'servers_host' and raised NameError instead of logging
            _msg += 'on %s:' % server_host
            _msg += '\n\n' + '\n'.join(ret['err']) + '\n\n'
            f.write(_msg + '\n')
            self.logger.error(_msg)
            f.close()
            return
        else:
            if len(ret['out']) == 0:
                self.logger.error('Snapshot command failed')
                f.close()
                return

        # first output line looks like 'Snapshot available at: <path>'
        snap_out = ret['out'][0]
        snap_out_dest = (snap_out.split(":")[1]).strip()

        dest = os.path.join(datadir,
                            'PBS_' + server_host + '.tar.gz')
        ret = self.du.run_copy(current_host, snap_out_dest,
                               dest, sudo=True, level=logging.DEBUG2)
        if ret['rc'] != 0:
            # best-effort: report the failure but still clean up below
            self.logger.error('Failed to copy snapshot %s to %s'
                              % (snap_out_dest, dest))
        self.du.rm(current_host, path=snap_out_dest,
                   recursive=True, force=True, level=logging.DEBUG2)

        f.close()
        self.__save_data_count += 1
        _msg = 'Saved post analysis data'
        self.logger.info(_msg)

    def addError(self, test, err):
        """Nose hook: save snapshot data for an errored test."""
        self.__save_home(test, 'ERROR', err)

    def addFailure(self, test, err):
        """Nose hook: save snapshot data for a failed test."""
        self.__save_home(test, 'FAIL', err)

    def addSuccess(self, test):
        """Nose hook: record a passing test (no snapshot is collected)."""
        self.__save_home(test, 'PASS')
# Esempio n. 6 (Italian: "Example no. 6" -- scraped-example separator)
# 0
class PTLJsonData(object):
    """
    The intent of the class is to generate json format of PTL test data
    """

    def __init__(self, command):
        """
        :param command: the ptl command line that produced this run; it
                        is recorded verbatim in the generated JSON
        :type command: str
        """
        # DshUtils provides utility helpers (e.g. current-user lookup)
        self.__du = DshUtils()
        self.__cmd = command

    def get_json(self, data, prev_data=None):
        """
        Method to generate test data in accordance to json schema

        :param data: dictionary of a test case details
        :type data: dict
        :param prev_data: dictionary of test run details that ran before
                          the current test
        :type prev_data: dict

        :returns a formatted dictionary of the data
        """
        data_json = None
        if not prev_data:
            data_json = {
                'command': self.__cmd,
                'user': self.__du.get_current_user(),
                'product_version': data['pbs_version'],
                'run_id': data['start_time'].strftime('%s'),
                'test_conf': {},
                'machine_info': data['machinfo'],
                'testsuites': {},
                'additional_data': {},
                'test_summary': {
                    'result_summary': {
                        'run': 0,
                        'succeeded': 0,
                        'failed': 0,
                        'errors': 0,
                        'skipped': 0,
                        'timedout': 0
                    },
                    'test_start_time': str(data['start_time']),
                    'tests_with_failures': [],
                    'test_suites_with_failures': []
                }
            }
            if data['testparam']:
                for param in data['testparam'].split(','):
                    par = param.split('=', 1)
                    data_json['test_conf'][par[0]] = par[1]
        else:
            data_json = prev_data
        tsname = data['suite']
        tcname = data['testcase']
        if tsname not in data_json['testsuites']:
            data_json['testsuites'][tsname] = {
                'module': data['module'],
                'file': data['file'],
                'testcases': {}
            }
        tsdoc = []
        if data['suitedoc']:
            tsdoc = (re.sub(r"[\t\n ]+", " ", data['suitedoc'])).strip()
        data_json['testsuites'][tsname]['docstring'] = tsdoc
        tcshort = {}
        tcdoc = []
        if data['testdoc']:
            tcdoc = (re.sub(r"[\t\n ]+", " ", data['testdoc'])).strip()
        tcshort['docstring'] = tcdoc
        if data['tags']:
            tcshort['tags'] = data['tags']
        tcshort['results'] = {
            'status': data['status'],
            'status_data': str(data['status_data']),
            'duration': str(data['duration']),
            'start_time': str(data['start_time']),
            'end_time': str(data['end_time']),
            'measurements': []
        }
        tcshort['requirements'] = {}
        if 'measurements' in data:
            tcshort['results']['measurements'] = data['measurements']
        data_json['testsuites'][tsname]['testcases'][tcname] = tcshort
        if 'additional_data' in data:
            data_json['additional_data'] = data['additional_data']
        data_json['test_summary']['test_end_time'] = str(data['end_time'])
        data_json['test_summary']['result_summary']['run'] += 1
        d_ts = data_json['test_summary']
        if data['status'] == 'PASS':
            d_ts['result_summary']['succeeded'] += 1
        elif data['status'] == 'SKIP':
            d_ts['result_summary']['skipped'] += 1
        elif data['status'] == 'TIMEDOUT':
            d_ts['result_summary']['timedout'] += 1
            d_ts['tests_with_failures'].append(data['testcase'])
            if data['suite'] not in d_ts['test_suites_with_failures']:
                d_ts['test_suites_with_failures'].append(data['suite'])
        elif data['status'] == 'ERROR':
            d_ts['result_summary']['errors'] += 1
            d_ts['tests_with_failures'].append(data['testcase'])
            if data['suite'] not in d_ts['test_suites_with_failures']:
                d_ts['test_suites_with_failures'].append(data['suite'])
        elif data['status'] == 'FAIL':
            d_ts['result_summary']['failed'] += 1
            d_ts['tests_with_failures'].append(data['testcase'])
            if data['suite'] not in d_ts['test_suites_with_failures']:
                d_ts['test_suites_with_failures'].append(data['suite'])
        return data_json