def load_tast_test_aux_results(job, test_name):
    """Loads auxiliary results of a Tast test.

    @param job: A job object.
    @param test_name: The name of the test.
    @return (attributes, perf_values) where
        attributes: A str-to-str dict of attribute keyvals
        perf_values: A dict loaded from a chromeperf JSON
    """
    assert is_tast_test(test_name)

    test_dir = os.path.join(job.dir, _TAST_AUTOTEST_NAME)

    case_name = test_name[len(_TAST_TEST_NAME_PREFIX):]
    case_dir = os.path.join(test_dir, 'results', 'tests', case_name)

    # Load attribute keyvals.
    attributes_path = os.path.join(test_dir, 'keyval')
    if os.path.exists(attributes_path):
        attributes = utils.read_keyval(attributes_path)
    else:
        attributes = {}

    # Load a chromeperf JSON.
    perf_values_path = os.path.join(case_dir, 'results-chart.json')
    if os.path.exists(perf_values_path):
        with open(perf_values_path) as fp:
            perf_values = json.load(fp)
    else:
        perf_values = {}

    return attributes, perf_values
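
A minimal usage sketch for the helper above. It assumes load_tast_test_aux_results() is in scope and that job is an Autotest job object carrying a dir attribute; the reporting via logging is illustrative only.

import logging

def report_tast_aux_results(job, test_name):
    # Sketch only: dump the attribute keyvals and list any perf chart names.
    attributes, perf_values = load_tast_test_aux_results(job, test_name)
    for key, value in sorted(attributes.items()):
        logging.info('Tast attribute %s = %s', key, value)
    if perf_values:
        logging.info('Perf charts present: %s', ', '.join(sorted(perf_values)))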
Example #2
    def read_keyval(dir):
        dir = os.path.normpath(dir)
        top_dir = tko_utils.find_toplevel_job_dir(dir)
        if not top_dir:
            top_dir = dir
        assert(dir.startswith(top_dir))

        # pull in and merge all the keyval files, with higher-level
        # overriding values in the lower-level ones
        keyval = {}
        while True:
            try:
                upper_keyval = utils.read_keyval(dir)
                # HACK: exclude hostname from the override - this is a special
                # case where we want lower to override higher
                if "hostname" in upper_keyval and "hostname" in keyval:
                    del upper_keyval["hostname"]
                keyval.update(upper_keyval)
            except IOError:
                pass  # if the keyval can't be read just move on to the next
            if dir == top_dir:
                break
            else:
                assert(dir != "/")
                dir = os.path.dirname(dir)
        return keyval
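
The walk above merges keyval files from the given directory up to the top-level job directory, with higher-level values overriding lower-level ones except for hostname. A self-contained sketch of just that override policy (plain dicts, no Autotest imports; the example values are made up):

def merge_keyvals(lower, upper):
    # Higher-level (upper) values override lower-level ones, except
    # 'hostname', which the more specific lower directory keeps.
    upper = dict(upper)
    if 'hostname' in upper and 'hostname' in lower:
        del upper['hostname']
    merged = dict(lower)
    merged.update(upper)
    return merged

# merge_keyvals({'hostname': 'host-b'},
#               {'hostname': 'host-a', 'label': 'nightly'})
# -> {'hostname': 'host-b', 'label': 'nightly'}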
def _symbolicate_minidump_with_devserver(minidump_path, resultdir,
                                         crashserver_name):
    """
    Generates a stack trace for the specified minidump by consulting devserver.

    This function assumes the debug symbols have been staged on the devserver.

    @param minidump_path: absolute path to the minidump to be symbolicated.
    @param resultdir: server job's result directory.
    @param crashserver_name: Name of crashserver to attempt to symbolicate with.
    @raise DevServerException upon failure, HTTP or otherwise.
    """
    # First, look up what build we tested.  If we can't find this, we can't
    # get the right debug symbols, so we might as well give up right now.
    keyvals = client_utils.read_keyval(resultdir)
    if JOB_BUILD_KEY not in keyvals:
        raise dev_server.DevServerException(
            'Cannot determine build being tested.')

    devserver = dev_server.CrashServer(crashserver_name)

    with metrics.SecondsTimer(
            'chromeos/autotest/crashcollect/symbolicate_duration',
            fields={'crash_server': crashserver_name}):
        trace_text = devserver.symbolicate_dump(minidump_path,
                                                keyvals[JOB_BUILD_KEY])

    if not trace_text:
        raise dev_server.DevServerException('Unknown error!!')
    with open(minidump_path + '.txt', 'w') as trace_file:
        trace_file.write(trace_text)
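
A hedged driver sketch for the helper above. It assumes minidumps can be discovered by walking the result directory for .dmp files (a layout assumption, not taken from the source) and that dev_server is imported as in the surrounding module.

import logging
import os

def symbolicate_all_minidumps(resultdir, crashserver_name):
    # Sketch only: symbolicate every .dmp file under resultdir, logging
    # (rather than propagating) per-file failures.
    for root, _dirs, files in os.walk(resultdir):
        for name in files:
            if not name.endswith('.dmp'):
                continue
            minidump_path = os.path.join(root, name)
            try:
                _symbolicate_minidump_with_devserver(
                        minidump_path, resultdir, crashserver_name)
            except dev_server.DevServerException as e:
                logging.warning('Could not symbolicate %s: %s',
                                minidump_path, e)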
Example #4
    def read_keyval(dir):
        dir = os.path.normpath(dir)
        top_dir = tko_utils.find_toplevel_job_dir(dir)
        if not top_dir:
            top_dir = dir
        assert (dir.startswith(top_dir))

        # pull in and merge all the keyval files, with higher-level
        # overriding values in the lower-level ones
        keyval = {}
        while True:
            try:
                upper_keyval = utils.read_keyval(dir)
                # HACK: exclude hostname from the override - this is a special
                # case where we want lower to override higher
                if "hostname" in upper_keyval and "hostname" in keyval:
                    del upper_keyval["hostname"]
                keyval.update(upper_keyval)
            except IOError:
                pass  # if the keyval can't be read just move on to the next
            if dir == top_dir:
                break
            else:
                assert (dir != "/")
                dir = os.path.dirname(dir)
        return keyval
    def read_keyval(dir):
        """
        Read job keyval files.

        @param dir: String name of directory containing job keyval files.

        @return A dictionary containing job keyvals.

        """
        dir = os.path.normpath(dir)
        top_dir = tko_utils.find_toplevel_job_dir(dir)
        if not top_dir:
            top_dir = dir
        assert (dir.startswith(top_dir))

        # Pull in and merge all the keyval files, with higher-level
        # overriding values in the lower-level ones.
        keyval = {}
        while True:
            try:
                upper_keyval = utils.read_keyval(dir)
                # HACK: exclude hostname from the override - this is a special
                # case where we want lower to override higher.
                if 'hostname' in upper_keyval and 'hostname' in keyval:
                    del upper_keyval['hostname']
                keyval.update(upper_keyval)
            except IOError:
                pass  # If the keyval can't be read just move on to the next.
            if dir == top_dir:
                break
            else:
                assert (dir != '/')
                dir = os.path.dirname(dir)
        return keyval
Example #6
def new_parser_harness(results_dirpath):
    """Ensure sane environment and create new parser with wrapper.

    Args:
      results_dirpath: str; Path to job results directory

    Returns:
      ParserHarness;

    Raises:
      BadResultsDirectoryError; If results dir does not exist or is malformed.
    """
    if not path.exists(results_dirpath):
        raise BadResultsDirectoryError

    keyval_path = path.join(results_dirpath, KEYVAL)
    job_keyval = utils.read_keyval(keyval_path)
    status_version = job_keyval[STATUS_VERSION]
    parser = status_lib.parser(status_version)
    job = parser.make_job(results_dirpath)
    status_log_filepath = path.join(results_dirpath, 'status.log')
    if not path.exists(status_log_filepath):
        raise BadResultsDirectoryError

    return ParserHarness(parser, job, job_keyval, status_version,
                         status_log_filepath)
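
A small hedged usage sketch. It only demonstrates catching the documented BadResultsDirectoryError; what a caller does with the returned ParserHarness is left open, since its interface is not shown here.

import logging

def try_new_parser_harness(results_dirpath):
    # Sketch only: build a harness for a results directory, returning None
    # instead of raising if the directory is missing or malformed.
    try:
        return new_parser_harness(results_dirpath)
    except BadResultsDirectoryError:
        logging.error('Not a parseable results directory: %s', results_dirpath)
        return None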
def symbolicate_minidump_with_devserver(minidump_path, resultdir):
    """
    Generates a stack trace for the specified minidump by consulting devserver.

    This function assumes the debug symbols have been staged on the devserver.

    @param minidump_path: absolute path to the minidump to be symbolicated.
    @param resultdir: server job's result directory.
    @raise DevServerException upon failure, HTTP or otherwise.
    """
    # First, look up what build we tested.  If we can't find this, we can't
    # get the right debug symbols, so we might as well give up right now.
    keyvals = client_utils.read_keyval(resultdir)
    if JOB_BUILD_KEY not in keyvals:
        raise dev_server.DevServerException(
            'Cannot determine build being tested.')

    crashserver_name = dev_server.get_least_loaded_devserver(
        devserver_type=dev_server.CrashServer)
    if not crashserver_name:
        autotest_stats.Counter(CRASH_SERVER_OVERLOAD).increment()
        raise dev_server.DevServerException(
            'No crash server has the capacity to symbolicate the dump.')
    else:
        autotest_stats.Counter(CRASH_SERVER_FOUND).increment()
    devserver = dev_server.CrashServer(crashserver_name)
    trace_text = devserver.symbolicate_dump(minidump_path,
                                            keyvals[JOB_BUILD_KEY])
    if not trace_text:
        raise dev_server.DevServerException('Unknown error!!')
    with open(minidump_path + '.txt', 'w') as trace_file:
        trace_file.write(trace_text)
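
Per the last two lines above, the symbolicated trace is written next to the minidump with a .txt suffix. A small hedged helper for reading it back (the function name is illustrative):

import os

def read_stack_trace(minidump_path):
    # Sketch only: return the symbolicated trace written next to the
    # minidump, or None if symbolication has not produced one yet.
    trace_path = minidump_path + '.txt'
    if not os.path.exists(trace_path):
        return None
    with open(trace_path) as trace_file:
        return trace_file.read()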
Example #8
def new_parser_harness(results_dirpath):
    """Ensure sane environment and create new parser with wrapper.

    Args:
      results_dirpath: str; Path to job results directory

    Returns:
      ParserHarness;

    Raises:
      BadResultsDirectoryError; If results dir does not exist or is malformed.
    """
    if not path.exists(results_dirpath):
        raise BadResultsDirectoryError

    keyval_path = path.join(results_dirpath, KEYVAL)
    job_keyval = utils.read_keyval(keyval_path)
    status_version = job_keyval[STATUS_VERSION]
    parser = status_lib.parser(status_version)
    job = parser.make_job(results_dirpath)
    status_log_filepath = path.join(results_dirpath, "status.log")
    if not path.exists(status_log_filepath):
        raise BadResultsDirectoryError

    return ParserHarness(parser, job, job_keyval, status_version, status_log_filepath)
Example #9
    def _load_job_keyvals(self):
        """Loads job keyvals.

        @return Keyvals as a str-to-str dict, or None if keyval file is missing.
        """
        if not os.path.exists(os.path.join(self._job.resultdir, 'keyval')):
            return None
        return utils.read_keyval(self._job.resultdir)
Example #10
    def parse_host_keyval(job_dir, hostname):
        # the "real" job dir may be higher up in the directory tree
        job_dir = tko_utils.find_toplevel_job_dir(job_dir)
        if not job_dir:
            return {} # we can't find a top-level job dir with host keyvals

        # the keyval is <job_dir>/host_keyvals/<hostname> if it exists
        keyval_path = os.path.join(job_dir, "host_keyvals", hostname)
        if os.path.isfile(keyval_path):
            return utils.read_keyval(keyval_path)
        else:
            return {}
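
A hedged usage sketch; the directory path, hostname, and the 'labels' key are illustrative placeholders, not values from the source:

def describe_host(job_dir, hostname):
    # Sketch only: parse_host_keyval falls back to an empty dict when the
    # host keyval file (or the top-level job dir) cannot be found.
    host_keyvals = parse_host_keyval(job_dir, hostname)
    return '%s: %s' % (hostname, host_keyvals.get('labels', '<no labels>'))

# describe_host('/results/123-example-job', 'test-host-1')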
Example #11
    def parse_host_keyval(job_dir, hostname):
        # the "real" job dir may be higher up in the directory tree
        job_dir = tko_utils.find_toplevel_job_dir(job_dir)
        if not job_dir:
            return {}  # we can't find a top-level job dir with host keyvals

        # the keyval is <job_dir>/host_keyvals/<hostname> if it exists
        keyval_path = os.path.join(job_dir, "host_keyvals", hostname)
        if os.path.isfile(keyval_path):
            return utils.read_keyval(keyval_path)
        else:
            return {}
Example #12
    def _pull_sysinfo_keyval(self, host, outputdir, mytest):
        """Pulls sysinfo and keyval data from the client.
        """
        # pull the sysinfo data back on to the server
        host.get_file(os.path.join(outputdir, "sysinfo"), mytest.outputdir)

        # pull the keyval data back into the local one
        fd, path = tempfile.mkstemp(dir=self.job.tmpdir)
        os.close(fd)
        host.get_file(os.path.join(outputdir, "keyval"), path)
        keyval = utils.read_keyval(path)
        os.remove(path)
        mytest.write_test_keyval(keyval)
Example #13
    def _pull_sysinfo_keyval(self, host, outputdir, mytest):
        """Pulls sysinfo and keyval data from the client.
        """
        # pull the sysinfo data back on to the server
        host.get_file(os.path.join(outputdir, "sysinfo"), mytest.outputdir)

        # pull the keyval data back into the local one
        fd, path = tempfile.mkstemp(dir=self.job.tmpdir)
        os.close(fd)
        host.get_file(os.path.join(outputdir, "keyval"), path)
        keyval = utils.read_keyval(path)
        os.remove(path)
        mytest.write_test_keyval(keyval)
    def load_attributes(keyval_path):
        """
        Load test attributes from a test keyval path.

        Load the test attributes into a dictionary from a test
        keyval path. Does not assume that the path actually exists.

        @param keyval_path: The string path to a keyval file.

        @return A dictionary representing the test keyvals.

        """
        if not os.path.exists(keyval_path):
            return {}
        return utils.read_keyval(keyval_path)
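
A hedged usage sketch for load_attributes; the helper name and the idea of printing the attributes are illustrative:

import os

def print_test_attributes(test_results_dir):
    # Sketch only: read per-test attributes, tolerating a missing keyval file.
    attributes = load_attributes(os.path.join(test_results_dir, 'keyval'))
    for key, value in sorted(attributes.items()):
        print('%s=%s' % (key, value))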
Example #15
    def parse_host_keyval(job_dir, hostname):
        """
        Parse host keyvals.

        @param job_dir: The string directory name of the associated job.
        @param hostname: The string hostname.

        @return A dictionary representing the host keyvals.

        """
        # The "real" job dir may be higher up in the directory tree.
        job_dir = tko_utils.find_toplevel_job_dir(job_dir)
        if not job_dir:
            return {}  # We can't find a top-level job dir with host keyvals.

        # The keyval is <job_dir>/host_keyvals/<hostname> if it exists.
        keyval_path = os.path.join(job_dir, 'host_keyvals', hostname)
        if os.path.isfile(keyval_path):
            return utils.read_keyval(keyval_path)
        else:
            return {}
    def _parse_keyval(job_dir, sub_keyval_path):
        """
        Parse a file of keyvals.

        @param job_dir: The string directory name of the associated job.
        @param sub_keyval_path: Path to a keyval file relative to job_dir.

        @return A dictionary representing the keyvals.

        """
        # The "real" job dir may be higher up in the directory tree.
        job_dir = tko_utils.find_toplevel_job_dir(job_dir)
        if not job_dir:
            return {}  # We can't find a top-level job dir with job keyvals.

        # The keyval is <job_dir>/`sub_keyval_path` if it exists.
        keyval_path = os.path.join(job_dir, sub_keyval_path)
        if os.path.isfile(keyval_path):
            return utils.read_keyval(keyval_path)
        else:
            return {}
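
For comparison, parse_host_keyval above could be expressed through this more general helper. A hedged sketch, purely illustrative; the source keeps the two functions separate:

import os

def parse_host_keyval_via_helper(job_dir, hostname):
    # Sketch only: equivalent to parse_host_keyval, routed through
    # _parse_keyval with the host_keyvals/<hostname> sub-path.
    return _parse_keyval(job_dir, os.path.join('host_keyvals', hostname))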
    def test_returns_empty_when_file_doesnt_exist(self):
        os.path.isdir.expect_call("file").and_return(False)
        os.path.exists.expect_call("file").and_return(False)
        self.assertEqual({}, utils.read_keyval("file"))
        self.god.check_playback()
    def read_keyval(self, contents):
        os.path.isdir.expect_call("file").and_return(False)
        self.create_test_file("file", contents)
        keyval = utils.read_keyval("file")
        self.god.check_playback()
        return keyval
Example #19
    def __init__(self,
                 control,
                 args,
                 resultdir,
                 label,
                 user,
                 machines,
                 client=False,
                 parse_job='',
                 ssh_user='******',
                 ssh_port=22,
                 ssh_pass='',
                 group_name='',
                 tag='',
                 control_filename=SERVER_CONTROL_FILENAME):
        """
        Create a server side job object.

        @param control: The pathname of the control file.
        @param args: Passed to the control file.
        @param resultdir: Where to throw the results.
        @param label: Description of the job.
        @param user: Username for the job (email address).
        @param machines: A list of hostnames of the machines the job will
                run on.
        @param client: True if this is a client-side control file.
        @param parse_job: string, if supplied it is the job execution tag that
                the results will be passed through to the TKO parser with.
        @param ssh_user: The SSH username.  [root]
        @param ssh_port: The SSH port number.  [22]
        @param ssh_pass: The SSH passphrase, if needed.
        @param group_name: If supplied, this will be written out as
                host_group_name in the keyvals file for the parser.
        @param tag: The job execution tag from the scheduler.  [optional]
        @param control_filename: The filename where the server control file
                should be written in the results directory.
        """
        super(base_server_job, self).__init__(resultdir=resultdir)

        path = os.path.dirname(__file__)
        self.control = control
        self._uncollected_log_file = os.path.join(self.resultdir,
                                                  'uncollected_logs')
        debugdir = os.path.join(self.resultdir, 'debug')
        if not os.path.exists(debugdir):
            os.mkdir(debugdir)

        if user:
            self.user = user
        else:
            self.user = getpass.getuser()

        self.args = args
        self.machines = machines
        self._client = client
        self.warning_loggers = set()
        self.warning_manager = warning_manager()
        self._ssh_user = ssh_user
        self._ssh_port = ssh_port
        self._ssh_pass = ssh_pass
        self.tag = tag
        self.last_boot_tag = None
        self.hosts = set()
        self.drop_caches = False
        self.drop_caches_between_iterations = False
        self._control_filename = control_filename

        self.logging = logging_manager.get_logging_manager(
            manage_stdout_and_stderr=True, redirect_fds=True)
        subcommand.logging_manager_object = self.logging

        self.sysinfo = sysinfo.sysinfo(self.resultdir)
        self.profilers = profilers.profilers(self)

        job_data = {
            'label': label,
            'user': user,
            'hostname': ','.join(machines),
            'drone': platform.node(),
            'status_version': str(self._STATUS_VERSION),
            'job_started': str(int(time.time()))
        }
        if group_name:
            job_data['host_group_name'] = group_name

        # only write these keyvals out on the first job in a resultdir
        if 'job_started' not in utils.read_keyval(self.resultdir):
            job_data.update(get_site_job_data(self))
            utils.write_keyval(self.resultdir, job_data)

        self._parse_job = parse_job
        self._using_parser = (self._parse_job and len(machines) <= 1)
        self.pkgmgr = packages.PackageManager(
            self.autodir, run_function_dargs={'timeout': 600})
        self.num_tests_run = 0
        self.num_tests_failed = 0

        self._register_subcommand_hooks()

        # these components aren't usable on the server
        self.bootloader = None
        self.harness = None

        # set up the status logger
        self._indenter = status_indenter()
        self._logger = base_job.status_logger(
            self,
            self._indenter,
            'status.log',
            'status.log',
            record_hook=server_job_record_hook(self))
Example #20
    def load_attributes(keyval_path):
        """Load the test attributes into a dictionary from a test
        keyval path. Does not assume that the path actually exists."""
        if not os.path.exists(keyval_path):
            return {}
        return utils.read_keyval(keyval_path)
Example #21
            except os.error as e:
                # Thrown if the directory already exists, which it may.
                # If the problem was something other than the directory
                # already existing, this chmod should throw an exception
                # as well.
                os.chmod(self.tmpdir, stat.S_IRWXU)

        job_data = {'label' : label, 'user' : user,
                    'hostname' : ','.join(machines),
                    'status_version' : str(self.STATUS_VERSION),
                    'job_started' : str(int(time.time()))}
        if group_name:
            job_data['host_group_name'] = group_name
        if self.resultdir:
            # only write these keyvals out on the first job in a resultdir
            if 'job_started' not in utils.read_keyval(self.resultdir):
                job_data.update(get_site_job_data(self))
                utils.write_keyval(self.resultdir, job_data)

        self.parse_job = parse_job
        if self.parse_job and len(machines) == 1:
            self.using_parser = True
            self.init_parser(resultdir)
        else:
            self.using_parser = False
        self.pkgmgr = packages.PackageManager(self.autodir,
                                             run_function_dargs={'timeout':600})
        self.pkgdir = os.path.join(self.autodir, 'packages')

        self.num_tests_run = 0
        self.num_tests_failed = 0
Example #22
    def load_attributes(keyval_path):
        """Load the test attributes into a dictionary from a test
        keyval path. Does not assume that the path actually exists."""
        if not os.path.exists(keyval_path):
            return {}
        return utils.read_keyval(keyval_path)
    def test_accesses_directories_through_keyval_file(self):
        os.path.isdir.expect_call("dir").and_return(True)
        self.create_test_file("dir/keyval", "")
        utils.read_keyval("dir")
        self.god.check_playback()
    def test_accesses_files_directly(self):
        os.path.isdir.expect_call("file").and_return(False)
        self.create_test_file("file", "")
        utils.read_keyval("file")
        self.god.check_playback()
    def test_returns_empty_when_file_doesnt_exist(self):
        os.path.isdir.expect_call("file").and_return(False)
        os.path.exists.expect_call("file").and_return(False)
        self.assertEqual({}, utils.read_keyval("file"))
        self.god.check_playback()
    def read_keyval(self, contents):
        os.path.isdir.expect_call("file").and_return(False)
        self.create_test_file("file", contents)
        keyval = utils.read_keyval("file")
        self.god.check_playback()
        return keyval
    def test_accesses_files_directly(self):
        os.path.isdir.expect_call("file").and_return(False)
        self.create_test_file("file", "")
        utils.read_keyval("file")
        self.god.check_playback()
    def test_accesses_directories_through_keyval_file(self):
        os.path.isdir.expect_call("dir").and_return(True)
        self.create_test_file("dir/keyval", "")
        utils.read_keyval("dir")
        self.god.check_playback()
Example #29
    def __init__(
        self,
        control,
        args,
        resultdir,
        label,
        user,
        machines,
        client=False,
        parse_job="",
        ssh_user="******",
        ssh_port=22,
        ssh_pass="",
        group_name="",
        tag="",
        control_filename=SERVER_CONTROL_FILENAME,
    ):
        """
        Create a server side job object.

        @param control: The pathname of the control file.
        @param args: Passed to the control file.
        @param resultdir: Where to throw the results.
        @param label: Description of the job.
        @param user: Username for the job (email address).
        @param machines: A list of hostnames of the machines the job will
                run on.
        @param client: True if this is a client-side control file.
        @param parse_job: string, if supplied it is the job execution tag that
                the results will be passed through to the TKO parser with.
        @param ssh_user: The SSH username.  [root]
        @param ssh_port: The SSH port number.  [22]
        @param ssh_pass: The SSH passphrase, if needed.
        @param group_name: If supplied, this will be written out as
                host_group_name in the keyvals file for the parser.
        @param tag: The job execution tag from the scheduler.  [optional]
        @param control_filename: The filename where the server control file
                should be written in the results directory.
        """
        super(base_server_job, self).__init__(resultdir=resultdir)

        path = os.path.dirname(__file__)
        self.control = control
        self._uncollected_log_file = os.path.join(self.resultdir, "uncollected_logs")
        debugdir = os.path.join(self.resultdir, "debug")
        if not os.path.exists(debugdir):
            os.mkdir(debugdir)

        if user:
            self.user = user
        else:
            self.user = getpass.getuser()

        self.args = args
        self.machines = machines
        self._client = client
        self.warning_loggers = set()
        self.warning_manager = warning_manager()
        self._ssh_user = ssh_user
        self._ssh_port = ssh_port
        self._ssh_pass = ssh_pass
        self.tag = tag
        self.last_boot_tag = None
        self.hosts = set()
        self.drop_caches = False
        self.drop_caches_between_iterations = False
        self._control_filename = control_filename

        self.logging = logging_manager.get_logging_manager(manage_stdout_and_stderr=True, redirect_fds=True)
        subcommand.logging_manager_object = self.logging

        self.sysinfo = sysinfo.sysinfo(self.resultdir)
        self.profilers = profilers.profilers(self)

        job_data = {
            "label": label,
            "user": user,
            "hostname": ",".join(machines),
            "drone": platform.node(),
            "status_version": str(self._STATUS_VERSION),
            "job_started": str(int(time.time())),
        }
        if group_name:
            job_data["host_group_name"] = group_name

        # only write these keyvals out on the first job in a resultdir
        if "job_started" not in utils.read_keyval(self.resultdir):
            job_data.update(get_site_job_data(self))
            utils.write_keyval(self.resultdir, job_data)

        self._parse_job = parse_job
        self._using_parser = self._parse_job and len(machines) <= 1
        self.pkgmgr = packages.PackageManager(self.autodir, run_function_dargs={"timeout": 600})
        self.num_tests_run = 0
        self.num_tests_failed = 0

        self._register_subcommand_hooks()

        # these components aren't usable on the server
        self.bootloader = None
        self.harness = None

        # set up the status logger
        self._indenter = status_indenter()
        self._logger = base_job.status_logger(
            self, self._indenter, "status.log", "status.log", record_hook=server_job_record_hook(self)
        )