def main(argv):
    parser = GetParser()
    options = parser.parse_args(argv)

    afe = None
    if options.afe:
        afe = frontend.AFE(server=options.afe)
    tko = frontend.TKO()

    special_tasks = []
    builds = []

    # Handle a JSON file being specified.
    if options.input:
        with open(options.input) as f:
            data = json.load(f)
            for build in data.get('builds', []):
                # For each build, amend it to include the list of
                # special tasks for its suite's jobs.
                build.setdefault('special_tasks', {})
                for suite_job_id in build['suite_ids']:
                    suite_tasks = FindSpecialTasks(
                        suite_job_id,
                        name_filter=options.name_filter,
                        status_filter=options.status_filter,
                        afe=afe,
                        tko=tko)
                    special_tasks.extend(suite_tasks)
                    build['special_tasks'][suite_job_id] = suite_tasks
                logging.debug(build)
                builds.append(build)

    # Handle any specifically specified suite IDs.
    for suite_job_id in options.suite_ids:
        special_tasks.extend(
            FindSpecialTasks(suite_job_id,
                             name_filter=options.name_filter,
                             status_filter=options.status_filter,
                             afe=afe,
                             tko=tko))

    # Output the resulting JSON.
    output = {
        'special_tasks': special_tasks,
        'name_filter': options.name_filter,
        'status_filter': options.status_filter,
    }
    if builds:
        output['builds'] = builds
    # Write to the named output file if one was given; otherwise write to
    # stdout directly, since a with-block would close sys.stdout on exit.
    if options.output:
        with open(options.output, 'w') as f:
            json.dump(output, f)
    else:
        json.dump(output, sys.stdout)
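
A minimal entry-point sketch for running this script directly; the guard below is an assumption, since the excerpt does not show how the module is invoked:

if __name__ == '__main__':
    # Drop the program name; main() expects only the arguments.
    main(sys.argv[1:])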
Example #2
class ExecutionEngine(object):
    """
    Provides the Test Planner execution engine
    """

    _planner_rpc = frontend.Planner()
    _tko_rpc = frontend.TKO()

    def __init__(self, plan_id, server, label_name, owner):
        self._plan_id = plan_id
        self._server = server
        self._label_name = label_name
        self._owner = owner
        self._afe_rest = rest_client.Resource.load(
            'http://%s/afe/server/resources' % server)

    def start(self):
        """
        Starts the execution engine.

        The calling thread remains in this method until the execution
        engine completes.
        """
        while True:
            try:
                self._initialize_plan()

                while not self._tick():
                    time.sleep(TICK_INTERVAL_SECS)

                self._cleanup()
                break
            except Exception as e:
                logging.error(
                    'Execution engine caught exception, restarting:'
                    '\n%s', e)
                time.sleep(PAUSE_BEFORE_RESTARTING_SECS)
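
A minimal driving sketch; the plan id, server, label, and owner below are hypothetical example values, and start() blocks the calling thread until the plan completes:

# All four constructor arguments are hypothetical example values.
engine = ExecutionEngine(plan_id=42, server='cautotest.example.com',
                         label_name='test_plan_42_label', owner='debug_user')
engine.start()  # loops, restarting on errors, until the plan finishes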
Example #3
def generate_suite_report(suite_job_id, afe=None, tko=None):
    """Generate a list of events corresonding to a single suite job.

    @param suite_job_id: The AFE id of the suite job.
    @param afe: AFE database handle.
    @param tko: TKO database handle.

    @return A list of entries suitable for dumping via JSON.
    """
    if afe is None:
        afe = frontend.AFE()
    if tko is None:
        tko = frontend.TKO()

    # Retrieve the main suite job.
    suite_job = afe.get_jobs(id=suite_job_id)[0]

    suite_entry = make_job_entry(tko, suite_job, suite_job=True)
    entries = [suite_entry]

    # Retrieve the child jobs and cache all of their statuses.
    logging.debug('Fetching child jobs...')
    child_jobs = afe.get_jobs(parent_job_id=suite_job_id)
    logging.debug('... fetched %d child jobs.', len(child_jobs))
    job_statuses = {}
    job_entries = {}
    for j in child_jobs:
        job_entry = make_job_entry(tko,
                                   j,
                                   suite_entry['id'],
                                   job_entries=job_entries)
        entries.append(job_entry)
        job_statuses[j.id] = job_entry['status']
        job_entries[j.id] = job_entry

    # Retrieve the HQEs from all the child jobs, recording each HQE's
    # status from its parent job's status.
    child_job_ids = {j.id for j in child_jobs}
    logging.debug('Fetching HQEs...')
    hqes = afe.get_host_queue_entries(job_id__in=list(child_job_ids))
    logging.debug('... fetched %d HQEs.', len(hqes))
    hqe_statuses = {h.id: job_statuses.get(h.job.id) for h in hqes}

    # Generate list of hosts.
    hostnames = {h.host.hostname for h in hqes if h.host}
    logging.debug('%d distinct hosts participated in the suite.',
                  len(hostnames))

    # Retrieve histories for the time of the suite for all associated hosts.
    # TODO: Include all hosts in the pool.
    if suite_entry['start_time'] and suite_entry['finish_time']:
        histories = [
            HostJobHistory.get_host_history(afe, hostname,
                                            suite_entry['start_time'],
                                            suite_entry['finish_time'])
            for hostname in sorted(hostnames)
        ]

        for history in histories:
            entries.extend(
                make_hqe_entry(history.hostname, h, hqe_statuses,
                               suite_entry['id']) for h in history)

    return entries
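
Since the returned entries are plain dicts, they serialize directly; a usage sketch, assuming default AFE/TKO connections and a hypothetical suite job id:

import json

entries = generate_suite_report(9876543)  # hypothetical suite job id
with open('suite_report.json', 'w') as f:
    json.dump(entries, f)
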
def GetSuiteHQEs(suite_job_id, look_past_seconds, afe=None, tko=None):
    """Get the host queue entries for active DUTs during a suite job.

    @param suite_job_id: Suite's AFE job id.
    @param look_past_seconds: Number of seconds past the end of the suite
                              job in which to look for subsequent HQEs.
    @param afe: AFE database handle.
    @param tko: TKO database handle.

    @returns A dictionary keyed on hostname to a list of host queue entry
             dictionaries.  HQE dictionary contains the following keys:
             name, hostname, job_status, job_url, gs_url, start_time, end_time
    """
    if afe is None:
        afe = frontend.AFE()
    if tko is None:
        tko = frontend.TKO()

    # Find the suite job and when it ran.
    statuses = tko.get_job_test_statuses_from_db(suite_job_id)
    if statuses:
        for s in statuses:
            if s.test_started_time == 'None' or s.test_finished_time == 'None':
                logging.error(
                    'TKO entry missing time: %s %s %s %s %s %s %s %s %s',
                    s.id, s.test_name, s.status, s.reason,
                    s.test_started_time, s.test_finished_time, s.job_owner,
                    s.hostname, s.job_tag)
        start_time = min(
            int(time_utils.to_epoch_time(s.test_started_time))
            for s in statuses if s.test_started_time != 'None')
        finish_time = max(
            int(time_utils.to_epoch_time(s.test_finished_time))
            for s in statuses if s.test_finished_time != 'None')
    else:
        start_time = None
        finish_time = None

    # Without both a start time and a finish time, we cannot look up HQEs.
    if start_time is None or finish_time is None:
        return {}

    # Find all the HQE entries.
    child_jobs = afe.get_jobs(parent_job_id=suite_job_id)
    child_job_ids = {j.id for j in child_jobs}
    hqes = afe.get_host_queue_entries(job_id__in=list(child_job_ids))
    hostnames = {h.host.hostname for h in hqes if h.host}
    host_hqes = {}
    for hostname in hostnames:
        history = HostJobHistory.get_host_history(
            afe, hostname, start_time, finish_time + look_past_seconds)
        for h in history:
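            # Rewrite the logs-viewer URL into the matching Google Storage
            # path where Autotest archives test results.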
            gs_url = re.sub(r'http://.*/tko/retrieve_logs.cgi\?job=/results',
                            r'gs://chromeos-autotest-results', h.job_url)
            entry = {
                'name': h.name,
                'hostname': history.hostname,
                'job_status': h.job_status,
                'job_url': h.job_url,
                'gs_url': gs_url,
                'start_time': h.start_time,
                'end_time': h.end_time,
            }
            host_hqes.setdefault(history.hostname, []).append(entry)

    return host_hqes
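
A consumption sketch, assuming a hypothetical suite job id and a ten-minute look-past window:

host_hqes = GetSuiteHQEs(9876543, look_past_seconds=600)
for hostname, hqe_list in sorted(host_hqes.items()):
    for hqe in hqe_list:
        # Each entry carries the keys documented in the docstring above.
        print('%s: %s [%s] %s to %s' % (hostname, hqe['name'],
                                        hqe['job_status'],
                                        hqe['start_time'], hqe['end_time']))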