def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    Get hosts with labels, platform, atomic group, ACLs and attributes
    attached to each host dictionary.

    @param multiple_labels: match hosts in all of the labels given.  Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    @param valid_only: when True, restrict the query to valid hosts.
    @param filter_data: extra model filters applied to the host query.
    @return: serialized sequence of host dictionaries.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    # Batch-fetch each related table once for all hosts, instead of issuing
    # one query per host.
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (
            rpc_utils.find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute in
                                       host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """
    Retrieves all the information needed to clone a job.

    @param id: id of the job to clone.
    @param preserve_metahosts: whether to keep metahost entries as metahosts
            rather than resolving them to concrete hosts.
    @param queue_entry_filter_data: optional extra filters for the job's
            queue entries.
    @return: serialized dict with job info, host dicts and metahost counts.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job, preserve_metahosts,
                                      queue_entry_filter_data)
    host_dicts = []
    for host, profile in zip(job_info["hosts"], job_info["profiles"]):
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict["labels"]
        # The platform label is reported separately, so drop it from the
        # generic label list.
        if host_dict["platform"]:
            other_labels.remove(host_dict["platform"])
        host_dict["other_labels"] = ", ".join(other_labels)
        host_dict["profile"] = profile
        host_dicts.append(host_dict)
    for host in job_info["one_time_hosts"]:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform="(one-time host)",
                         locked_text="")
        host_dicts.append(host_dict)
    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict((meta_host.name, count)
                            for meta_host, count
                            in job_info["meta_host_counts"].iteritems())
    info = dict(job=job.get_object_dict(),
                meta_host_counts=meta_host_counts,
                hosts=host_dicts)
    info["job"]["dependencies"] = job_info["dependencies"]
    if job_info["atomic_group"]:
        info["atomic_group_name"] = (job_info["atomic_group"]).name
    else:
        info["atomic_group_name"] = None
    info["hostless"] = job_info["hostless"]
    info["drone_set"] = job.drone_set and job.drone_set.name
    return rpc_utils.prepare_for_serialization(info)
def get_acl_groups(**filter_data):
    """Return serialized ACL groups, each with user logins and hostnames."""
    groups = models.AclGroup.list_objects(filter_data)
    for group in groups:
        group_obj = models.AclGroup.objects.get(id=group["id"])
        group["users"] = [member.login for member in group_obj.users.all()]
        group["hosts"] = [machine.hostname
                          for machine in group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(groups)
def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """
    Get jobs, with 'dependencies' and 'keyvals' attached to each one.

    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have start running but for which not
    all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
    aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running,
                                                            finished)
    job_list = list(models.Job.query_objects(filter_data))
    # One query per relationship for the whole batch, not one per job.
    models.Job.objects.populate_relationships(job_list, models.Label,
                                              'dependencies')
    models.Job.objects.populate_relationships(job_list, models.JobKeyval,
                                              'keyvals')
    serialized = []
    for job in job_list:
        entry = job.get_object_dict()
        entry['dependencies'] = ','.join(dep.name for dep in job.dependencies)
        entry['keyvals'] = dict((kv.key, kv.value) for kv in job.keyvals)
        serialized.append(entry)
    return rpc_utils.prepare_for_serialization(serialized)
def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """
    Get jobs, with 'dependencies' and 'keyvals' fields added to each.

    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have start running but for which not
    all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
    aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(
        not_yet_run, running, finished)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    # Batch-fetch relationships to avoid per-job queries.
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    models.Job.objects.populate_relationships(jobs, models.JobKeyval,
                                              'keyvals')
    for job in jobs:
        job_dict = job.get_object_dict()
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dict['keyvals'] = dict(
            (keyval.key, keyval.value) for keyval in job.keyvals)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)
def get_profiles():
    """
    Get profiles.

    :return: Sequence of profiles.
    """
    profile_dicts = []
    have_profiles = False
    profiles = get_install_server_profiles()
    if profiles is not None:
        if len(profiles) < 1:
            logging.getLogger('rpc_logger').info(
                'No profiles defined on install server')
        else:
            have_profiles = True
            # not sorted
            profiles.sort()
            profile_dicts.append(dict(name="Do_not_install"))
            profile_dicts.extend(dict(name=profile) for profile in profiles)
    if not have_profiles:
        # No install server, or no profiles on it: report a placeholder.
        profile_dicts.append(dict(name="N/A"))
    return rpc_utils.prepare_for_serialization(profile_dicts)
def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    Get an interleaved history of queue entries and special tasks for a host.

    :param hostname: name of the host to query.
    :param query_start: index of the first entry to return (optional).
    :param query_limit: maximum number of entries to return (optional).
    :return: an interleaved list of HostQueueEntries and SpecialTasks,
             in approximate run order.  each dict contains keys for type,
             host, job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        # query_start may legitimately be None (no offset); treat it as 0
        # here, otherwise None + int raises TypeError.
        total_limit = (query_start or 0) + query_limit
    filter_data = {
        'host__hostname': hostname,
        'query_limit': total_limit,
        'sort_by': ['-id']
    }
    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))
    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                       special_tasks)
    # Apply the requested window after interleaving the two lists.
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)
def get_detailed_test_views(**filter_data):
    """
    Get test views with attributes, iterations, labels and job keyvals
    attached to each row.

    :param filter_data: filters applied to the TestView query.
    :return: serialized sequence of test-view dictionaries.
    """
    test_views = models.TestView.list_objects(filter_data)
    # Load all referenced Test rows in one query, keyed by test_idx.
    tests_by_id = models.Test.objects.in_bulk([test_view['test_idx']
                                               for test_view in test_views])
    tests = tests_by_id.values()
    models.Test.objects.populate_relationships(tests, models.TestAttribute,
                                               'attributes')
    models.Test.objects.populate_relationships(tests,
                                               models.IterationAttribute,
                                               'iteration_attributes')
    models.Test.objects.populate_relationships(tests, models.IterationResult,
                                               'iteration_results')
    models.Test.objects.populate_relationships(tests, models.TestLabel,
                                               'labels')
    # Same batching for the referenced Job rows.
    jobs_by_id = models.Job.objects.in_bulk([test_view['job_idx']
                                             for test_view in test_views])
    jobs = jobs_by_id.values()
    models.Job.objects.populate_relationships(jobs, models.JobKeyval,
                                              'keyvals')
    for test_view in test_views:
        test = tests_by_id[test_view['test_idx']]
        test_view['attributes'] = _attributes_to_dict(test.attributes)
        test_view['iterations'] = _format_iteration_keyvals(test)
        test_view['labels'] = [label.name for label in test.labels]
        job = jobs_by_id[test_view['job_idx']]
        test_view['job_keyvals'] = _job_keyvals_to_dict(job.keyvals)
    return rpc_utils.prepare_for_serialization(test_views)
def get_tests(**filter_data):
    """
    Get tests.

    :param filter_data: Filters out which tests to get.
    :return: Sequence of tests.
    """
    tests = models.Test.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(tests)
def get_acl_groups(**filter_data):
    """Return serialized ACL groups annotated with users and hosts."""
    group_dicts = models.AclGroup.list_objects(filter_data)
    for group_dict in group_dicts:
        group = models.AclGroup.objects.get(id=group_dict['id'])
        group_dict['users'] = [u.login for u in group.users.all()]
        group_dict['hosts'] = [h.hostname for h in group.hosts.all()]
    return rpc_utils.prepare_for_serialization(group_dicts)
def get_atomic_groups(**filter_data):
    """
    Get atomic groups.

    :param filter_data: Filters out which atomic groups to get.
    :return: Sequence of atomic groups.
    """
    atomic_groups = models.AtomicGroup.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(atomic_groups)
def get_users(**filter_data):
    """
    Get users.

    :param filter_data: Filters out which users to get.
    :return: Sequence of users.
    """
    users = models.User.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(users)
def get_jobs_summary(**filter_data):
    """
    Like get_jobs(), but adds a 'status_counts' field, which is a dictionary
    mapping status strings to the number of hosts currently with that
    status, i.e. {'Queued' : 4, 'Running' : 2}.
    """
    job_dicts = get_jobs(**filter_data)
    job_ids = [job_dict['id'] for job_dict in job_dicts]
    # Fetch the counts for every job at once.
    counts_by_job = models.Job.objects.get_status_counts(job_ids)
    for job_dict in job_dicts:
        job_dict['status_counts'] = counts_by_job[job_dict['id']]
    return rpc_utils.prepare_for_serialization(job_dicts)
def get_latest_tests(group_by, header_groups=None, fixed_headers=None,
                     extra_info=None, **filter_data):
    """
    Similar to get_status_counts, but return only the latest test result
    per group.  It still returns the same information (i.e. with pass count
    etc.) for compatibility.  It includes an additional field "test_idx"
    with each group.

    :param group_by: list of field names to group by.
    :param header_groups: tuples of group_by fields used to build unique
            header combinations (optional).
    :param fixed_headers: mapping of header fields to fixed value lists
            (optional).
    :param extra_info: a list containing the field names that should be
            returned with each cell.  The fields are returned in the
            extra_info field of the return dictionary.
    """
    # Avoid mutable default arguments ([]/{}); normalize None to the
    # previous defaults so callers see identical behavior.
    if header_groups is None:
        header_groups = []
    if fixed_headers is None:
        fixed_headers = {}
    if extra_info is None:
        extra_info = []
    # find latest test per group
    initial_query = models.TestView.objects.get_query_set_with_joins(
        filter_data)
    query = models.TestView.query_objects(filter_data,
                                          initial_query=initial_query,
                                          apply_presentation=False)
    query = query.exclude(status__in=tko_rpc_utils._INVALID_STATUSES)
    query = query.extra(
        select={
            'latest_test_idx': 'MAX(%s)' %
            models.TestView.objects.get_key_on_this_table('test_idx')
        })
    query = models.TestView.apply_presentation(query, filter_data)
    group_processor = tko_rpc_utils.GroupDataProcessor(query, group_by,
                                                       header_groups,
                                                       fixed_headers)
    group_processor.process_group_dicts()
    info = group_processor.get_info_dict()
    # fetch full info for these tests so we can access their statuses
    all_test_ids = [group['latest_test_idx'] for group in info['groups']]
    test_views = initial_query.in_bulk(all_test_ids)
    for group_dict in info['groups']:
        test_idx = group_dict.pop('latest_test_idx')
        group_dict['test_idx'] = test_idx
        test_view = test_views[test_idx]
        tko_rpc_utils.add_status_counts(group_dict, test_view.status)
        group_dict['extra_info'] = []
        for field in extra_info:
            group_dict['extra_info'].append(getattr(test_view, field))
    return rpc_utils.prepare_for_serialization(info)
def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """
    Retrieves all the information needed to clone a job.

    :param id: id of the job to clone.
    :param preserve_metahosts: whether to keep metahost entries as metahosts
            rather than resolving them to concrete hosts.
    :param queue_entry_filter_data: optional extra filters for the job's
            queue entries.
    :return: serialized dict with job info, host dicts and metahost dicts.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job, preserve_metahosts,
                                      queue_entry_filter_data)
    host_dicts = []
    for host, profile in zip(job_info['hosts'], job_info['profiles']):
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict['labels']
        # The platform label is reported separately; drop it from the
        # generic label list.
        if host_dict['platform']:
            other_labels.remove(host_dict['platform'])
        host_dict['other_labels'] = ', '.join(other_labels)
        host_dict['profile'] = profile
        host_dicts.append(host_dict)
    for host in job_info['one_time_hosts']:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform='(one-time host)',
                         locked_text='')
        host_dicts.append(host_dict)
    meta_host_dicts = []
    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict(
        (meta_host.name, count)
        for meta_host, count in job_info['meta_host_counts'].iteritems())
    for meta_host, meta_host_profile in zip(job_info['meta_hosts'],
                                            job_info['meta_host_profiles']):
        meta_host_dict = dict(name=meta_host.name,
                              count=meta_host_counts[meta_host.name],
                              profile=meta_host_profile)
        meta_host_dicts.append(meta_host_dict)
    info = dict(job=job.get_object_dict(),
                meta_hosts=meta_host_dicts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    if job_info['atomic_group']:
        info['atomic_group_name'] = (job_info['atomic_group']).name
    else:
        info['atomic_group_name'] = None
    info['hostless'] = job_info['hostless']
    info['drone_set'] = job.drone_set and job.drone_set.name
    return rpc_utils.prepare_for_serialization(info)
def get_acl_groups(**filter_data):
    """
    Get ACL groups.

    :param filter_data: Filters out which ACL groups to get.
    :return: Sequence of ACL groups.
    """
    acl_group_dicts = models.AclGroup.list_objects(filter_data)
    for acl_dict in acl_group_dicts:
        # Look up the model object to enumerate its members.
        acl_obj = models.AclGroup.objects.get(id=acl_dict['id'])
        acl_dict['users'] = [user.login for user in acl_obj.users.all()]
        acl_dict['hosts'] = [host.hostname for host in acl_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_group_dicts)
def get_acl_groups(**filter_data):
    """
    Get ACL groups.

    :param filter_data: Filters out which ACL groups to get.
    :return: Sequence of ACL groups.
    """
    acl_groups = models.AclGroup.list_objects(filter_data)
    for acl_group in acl_groups:
        # Fetch the model object so related users/hosts can be enumerated.
        acl_group_obj = models.AclGroup.objects.get(id=acl_group['id'])
        acl_group['users'] = [user.login
                              for user in acl_group_obj.users.all()]
        acl_group['hosts'] = [
            host.hostname for host in acl_group_obj.hosts.all()
        ]
    return rpc_utils.prepare_for_serialization(acl_groups)
def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """
    Retrieves all the information needed to clone a job.

    :param id: id of the job to clone.
    :param preserve_metahosts: whether to keep metahost entries as metahosts
            rather than resolving them to concrete hosts.
    :param queue_entry_filter_data: optional extra filters for the job's
            queue entries.
    :return: serialized dict with job info, host dicts and metahost dicts.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job, preserve_metahosts,
                                      queue_entry_filter_data)
    host_dicts = []
    for host, profile in zip(job_info['hosts'], job_info['profiles']):
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict['labels']
        # Platform is reported separately from the generic label list.
        if host_dict['platform']:
            other_labels.remove(host_dict['platform'])
        host_dict['other_labels'] = ', '.join(other_labels)
        host_dict['profile'] = profile
        host_dicts.append(host_dict)
    for host in job_info['one_time_hosts']:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform='(one-time host)',
                         locked_text='')
        host_dicts.append(host_dict)
    meta_host_dicts = []
    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict((meta_host.name, count)
                            for meta_host, count
                            in job_info['meta_host_counts'].iteritems())
    for meta_host, meta_host_profile in zip(job_info['meta_hosts'],
                                            job_info['meta_host_profiles']):
        meta_host_dict = dict(name=meta_host.name,
                              count=meta_host_counts[meta_host.name],
                              profile=meta_host_profile)
        meta_host_dicts.append(meta_host_dict)
    info = dict(job=job.get_object_dict(),
                meta_hosts=meta_host_dicts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    if job_info['atomic_group']:
        info['atomic_group_name'] = (job_info['atomic_group']).name
    else:
        info['atomic_group_name'] = None
    info['hostless'] = job_info['hostless']
    info['drone_set'] = job.drone_set and job.drone_set.name
    return rpc_utils.prepare_for_serialization(info)
def get_group_counts(group_by, header_groups=None, fixed_headers=None,
                     extra_select_fields=None, **filter_data):
    """
    Queries against TestView grouping by the specified fields and computings
    counts for each group.

    * group_by should be a list of field names.
    * extra_select_fields can be used to specify additional fields to select
      (usually for aggregate functions).
    * header_groups can be used to get lists of unique combinations of group
      fields.  It should be a list of tuples of fields from group_by.  It's
      primarily for use by the spreadsheet view.
    * fixed_headers can map header fields to lists of values.  the header
      will guaranteed to return exactly those value.  this does not work
      together with header_groups.

    Returns a dictionary with two keys:
    * header_values contains a list of lists, one for each header group in
      header_groups.  Each list contains all the values for the corresponding
      header group as tuples.
    * groups contains a list of dicts, one for each row.  Each dict contains
      keys for each of the group_by fields, plus a 'group_count' key for the
      total count in the group, plus keys for each of the
      extra_select_fields.  The keys for the extra_select_fields are
      determined by the "AS" alias of the field.
    """
    query = models.TestView.objects.get_query_set_with_joins(filter_data)
    # don't apply presentation yet, since we have extra selects to apply
    query = models.TestView.query_objects(filter_data, initial_query=query,
                                          apply_presentation=False)
    count_alias, count_sql = models.TestView.objects.get_count_sql(query)
    query = query.extra(select={count_alias: count_sql})
    if extra_select_fields:
        query = query.extra(select=extra_select_fields)
    query = models.TestView.apply_presentation(query, filter_data)
    group_processor = tko_rpc_utils.GroupDataProcessor(query, group_by,
                                                       header_groups or [],
                                                       fixed_headers or {})
    group_processor.process_group_dicts()
    return rpc_utils.prepare_for_serialization(group_processor.get_info_dict())
def get_latest_tests(group_by, header_groups=None, fixed_headers=None,
                     extra_info=None, **filter_data):
    """
    Similar to get_status_counts, but return only the latest test result
    per group.  It still returns the same information (i.e. with pass count
    etc.) for compatibility.  It includes an additional field "test_idx"
    with each group.

    @param group_by: list of field names to group by.
    @param header_groups: tuples of group_by fields used to build unique
           header combinations (optional).
    @param fixed_headers: mapping of header fields to fixed value lists
           (optional).
    @param extra_info a list containing the field names that should be
           returned with each cell. The fields are returned in the extra_info
           field of the return dictionary.
    """
    # Replace mutable default arguments ([]/{}) with None sentinels; the
    # observable behavior for callers is unchanged.
    if header_groups is None:
        header_groups = []
    if fixed_headers is None:
        fixed_headers = {}
    if extra_info is None:
        extra_info = []
    # find latest test per group
    initial_query = models.TestView.objects.get_query_set_with_joins(
        filter_data)
    query = models.TestView.query_objects(filter_data,
                                          initial_query=initial_query,
                                          apply_presentation=False)
    query = query.exclude(status__in=tko_rpc_utils._INVALID_STATUSES)
    query = query.extra(
        select={'latest_test_idx': 'MAX(%s)' %
                models.TestView.objects.get_key_on_this_table('test_idx')})
    query = models.TestView.apply_presentation(query, filter_data)
    group_processor = tko_rpc_utils.GroupDataProcessor(query, group_by,
                                                       header_groups,
                                                       fixed_headers)
    group_processor.process_group_dicts()
    info = group_processor.get_info_dict()
    # fetch full info for these tests so we can access their statuses
    all_test_ids = [group['latest_test_idx'] for group in info['groups']]
    test_views = initial_query.in_bulk(all_test_ids)
    for group_dict in info['groups']:
        test_idx = group_dict.pop('latest_test_idx')
        group_dict['test_idx'] = test_idx
        test_view = test_views[test_idx]
        tko_rpc_utils.add_status_counts(group_dict, test_view.status)
        group_dict['extra_info'] = []
        for field in extra_info:
            group_dict['extra_info'].append(getattr(test_view, field))
    return rpc_utils.prepare_for_serialization(info)
def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    Get an interleaved history of queue entries and special tasks for a host.

    :param hostname: name of the host to query.
    :param query_start: index of the first entry to return (optional).
    :param query_limit: maximum number of entries to return (optional).
    :return: an interleaved list of HostQueueEntries and SpecialTasks,
             in approximate run order.  each dict contains keys for type,
             host, job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        # query_start may be None (no offset); default it to 0 so we don't
        # raise TypeError on None + int.
        total_limit = (query_start or 0) + query_limit
    filter_data = {"host__hostname": hostname,
                   "query_limit": total_limit,
                   "sort_by": ["-id"]}
    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))
    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                       special_tasks)
    # Window the combined, ordered history.
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)
def get_hosts_and_tests():
    """\
    Gets every host that has had a benchmark run on it. Additionally, also
    gets a dictionary mapping the host names to the benchmarks.
    """
    # Build one ORed filter matching any of the benchmark test prefixes.
    benchmark_filter = dbmodels.Q(test_name__startswith='kernbench')
    for prefix in ('dbench', 'tbench', 'unixbench', 'iozone'):
        benchmark_filter |= dbmodels.Q(test_name__startswith=prefix)
    rows = models.TestView.objects.filter(benchmark_filter).values(
        'test_name', 'hostname', 'machine_idx').distinct()
    host_info = {}
    for row in rows:
        entry = host_info.setdefault(row['hostname'], {})
        entry.setdefault('tests', []).append(row['test_name'])
        entry['id'] = row['machine_idx']
    return rpc_utils.prepare_for_serialization(host_info)
def get_hosts_and_tests():
    """
    Gets every host that has had a benchmark run on it. Additionally, also
    gets a dictionary mapping the host names to the benchmarks.
    """
    host_info = {}
    # Match any of the known benchmark tests by test-name prefix.
    q = (dbmodels.Q(test_name__startswith='kernbench') |
         dbmodels.Q(test_name__startswith='dbench') |
         dbmodels.Q(test_name__startswith='tbench') |
         dbmodels.Q(test_name__startswith='unixbench') |
         dbmodels.Q(test_name__startswith='iozone'))
    test_query = models.TestView.objects.filter(q).values(
        'test_name', 'hostname', 'machine_idx').distinct()
    for result_dict in test_query:
        hostname = result_dict['hostname']
        test = result_dict['test_name']
        machine_idx = result_dict['machine_idx']
        # Accumulate the benchmark names per host; remember the machine id.
        host_info.setdefault(hostname, {})
        host_info[hostname].setdefault('tests', [])
        host_info[hostname]['tests'].append(test)
        host_info[hostname]['id'] = machine_idx
    return rpc_utils.prepare_for_serialization(host_info)
def get_hosts_and_tests():
    """\
    Gets every host that has had a benchmark run on it. Additionally, also
    gets a dictionary mapping the host names to the benchmarks.
    """
    # One ORed filter covering every benchmark test-name prefix.
    benchmark_q = dbmodels.Q(test_name__startswith="kernbench")
    for name in ("dbench", "tbench", "unixbench", "iozone"):
        benchmark_q |= dbmodels.Q(test_name__startswith=name)
    test_rows = (models.TestView.objects.filter(benchmark_q)
                 .values("test_name", "hostname", "machine_idx").distinct())
    host_info = {}
    for row in test_rows:
        per_host = host_info.setdefault(row["hostname"], {})
        per_host.setdefault("tests", []).append(row["test_name"])
        per_host["id"] = row["machine_idx"]
    return rpc_utils.prepare_for_serialization(host_info)
def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    Get hosts with labels, platform, atomic group, ACLs and attributes
    attached to each host dictionary.

    @param multiple_labels: match hosts in all of the labels given.  Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    @param valid_only: when True, restrict the query to valid hosts.
    @param filter_data: extra model filters applied to the host query.
    @return: serialized sequence of host dictionaries.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    # Batch-fetch each relationship once for all hosts rather than per-host.
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (
            rpc_utils.find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict(
            (attribute.attribute, attribute.value)
            for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
def get_job_keyvals(**filter_data):
    """Return serialized job keyvals matching filter_data."""
    keyvals = models.JobKeyval.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(keyvals)
def get_statuses(**filter_data):
    """Return serialized statuses matching filter_data."""
    statuses = models.Status.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(statuses)
def get_patches(**filter_data):
    """Return serialized patches matching filter_data."""
    patches = models.Patch.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(patches)
def get_kernels(**filter_data):
    """Return serialized kernels matching filter_data."""
    kernels = models.Kernel.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(kernels)
def get_iteration_results(**filter_data):
    """Return serialized iteration results matching filter_data."""
    results = models.IterationResult.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(results)
def get_users(**filter_data):
    """Return serialized users matching filter_data."""
    users = models.User.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(users)
def get_test_labels_for_tests(**test_filter_data):
    """Return the test labels attached to tests matching test_filter_data."""
    label_ids = models.TestView.objects.query_test_label_ids(test_filter_data)
    matching_labels = models.TestLabel.list_objects({'id__in': label_ids})
    return rpc_utils.prepare_for_serialization(matching_labels)
def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    Get hosts.

    :param multiple_labels: match hosts in all of the labels given
            (optional).  Should be a list of label names.
    :param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied (optional).
    :param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    :param valid_only: Filter valid hosts (optional).
    :param filter_data: Filters out which hosts to get.
    :return: Sequence of hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    # Batch-fetch relationships once for all hosts.
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    # Connect to the install server only when it is a cobbler server with a
    # configured XML-RPC endpoint; otherwise profile info is unavailable.
    install_server = None
    install_server_info = get_install_server_info()
    install_server_type = install_server_info.get('type', None)
    install_server_url = install_server_info.get('xmlrpc_url', None)
    if install_server_type == 'cobbler' and install_server_url:
        install_server = xmlrpclib.ServerProxy(install_server_url)
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (
            rpc_utils.find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict(
            (attribute.attribute, attribute.value)
            for attribute in host_obj.attribute_list)
        # Assume profile lookup fails until a unique system with a platform
        # is found on the install server.
        error_encountered = True
        if install_server is not None:
            system_params = {"name": host_dict['hostname']}
            system_list = install_server.find_system(system_params, True)
            if len(system_list) < 1:
                msg = 'System "%s" not found on install server'
                rpc_logger = logging.getLogger('rpc_logger')
                rpc_logger.info(msg, host_dict['hostname'])
            elif len(system_list) > 1:
                msg = 'Found multiple systems on install server named %s'
                if install_server_type == 'cobbler':
                    msg = '%s. This should never happen on cobbler' % msg
                rpc_logger = logging.getLogger('rpc_logger')
                rpc_logger.error(msg, host_dict['hostname'])
            else:
                system = system_list[0]
                if host_dict['platform']:
                    error_encountered = False
                    profiles = sorted(install_server.get_item_names(
                        'profile'))
                    host_dict['profiles'] = profiles
                    host_dict['profiles'].insert(0, 'Do_not_install')
                    use_current_profile = settings.get_value(
                        'INSTALL_SERVER', 'use_current_profile', type=bool,
                        default=True)
                    if use_current_profile:
                        host_dict['current_profile'] = system['profile']
                    else:
                        host_dict['current_profile'] = 'Do_not_install'
        if error_encountered:
            # No install server, ambiguous/missing system, or no platform:
            # report placeholder profile values.
            host_dict['profiles'] = ['N/A']
            host_dict['current_profile'] = 'N/A'
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
def get_tests(**filter_data):
    """Return serialized tests matching filter_data."""
    tests = models.Test.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(tests)
def get_iteration_attributes(**filter_data):
    """Return serialized iteration attributes matching filter_data."""
    attributes = models.IterationAttribute.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(attributes)
def get_static_data():
    """
    Returns a dictionary containing a bunch of data that shouldn't change
    often and is otherwise inaccessible.  This includes:

    priorities: List of job priority choices.
    default_priority: Default priority value for new jobs.
    users: Sorted list of all users.
    labels: Sorted list of all labels.
    atomic_groups: Sorted list of all atomic groups.
    tests: Sorted list of all tests.
    profilers: Sorted list of all profilers.
    current_user: Logged-in username.
    host_statuses: Sorted list of possible Host statuses.
    job_statuses: Sorted list of possible HostQueueEntry statuses.
    job_timeout_default: The default job timeout length in hours.
    parse_failed_repair_default: Default value for the parse_failed_repair
            job option.
    reboot_before_options: A list of valid RebootBefore string enums.
    reboot_after_options: A list of valid RebootAfter string enums.
    motd: Server's message of the day.
    status_dictionary: A mapping from one word job status names to a more
            informative description.
    """
    job_fields = models.Job.get_field_dict()
    # The default drone set is listed first, the rest sorted by name.
    default_drone_set_name = models.DroneSet.default_drone_set_name()
    drone_sets = ([default_drone_set_name] +
                  sorted(drone_set.name for drone_set in
                         models.DroneSet.objects.exclude(
                             name=default_drone_set_name)))
    result = {}
    result['priorities'] = models.Job.Priority.choices()
    default_priority = job_fields['priority'].default
    default_string = models.Job.Priority.get_string(default_priority)
    result['default_priority'] = default_string
    result['users'] = get_users(sort_by=['login'])
    result['labels'] = get_labels(sort_by=['-platform', 'name'])
    result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
    result['tests'] = get_tests(sort_by=['name'])
    result['profilers'] = get_profilers(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        models.User.current_user().get_object_dict())
    result['host_statuses'] = sorted(models.Host.Status.names)
    result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
    result['job_timeout_default'] = models.Job.DEFAULT_TIMEOUT
    result['job_max_runtime_hrs_default'] = models.Job.DEFAULT_MAX_RUNTIME_HRS
    result['parse_failed_repair_default'] = bool(
        models.Job.DEFAULT_PARSE_FAILED_REPAIR)
    result['reboot_before_options'] = model_attributes.RebootBefore.names
    result['reboot_after_options'] = model_attributes.RebootAfter.names
    result['motd'] = rpc_utils.get_motd()
    result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
    result['drone_sets'] = drone_sets
    result['parameterized_jobs'] = models.Job.parameterized_jobs_enabled()
    # Human-readable descriptions for the one-word queue entry statuses.
    result['status_dictionary'] = {
        "Aborted": "Aborted",
        "Verifying": "Verifying Host",
        "Pending": "Waiting on other hosts",
        "Running": "Running autoserv",
        "Completed": "Autoserv completed",
        "Failed": "Failed to complete",
        "Queued": "Queued",
        "Starting": "Next in host's queue",
        "Stopped": "Other host(s) failed verify",
        "Parsing": "Awaiting parse of final results",
        "Gathering": "Gathering log files",
        "Template": "Template job for recurring run",
        "Waiting": "Waiting for scheduler action",
        "Archiving": "Archiving results"
    }
    return result
def get_saved_queries(**filter_data):
    """Return serialized saved queries matching filter_data."""
    saved_queries = models.SavedQuery.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(saved_queries)
def get_static_data():
    """
    Return rarely-changing TKO frontend metadata: groupable/displayable
    field lists, benchmark performance keys, the perf-view column layout,
    test labels, preconfigs, the current user and the MOTD.
    """
    def _display_name(field):
        # Extra (SQL-computed) fields carry their own display string;
        # regular model fields use the Django verbose name.
        if field in models.TestView.extra_fields:
            return models.TestView.extra_fields[field]
        return models.TestView.get_field_dict()[field].verbose_name

    group_fields = [(_display_name(field).capitalize(), field)
                    for field in models.TestView.group_fields]
    model_fields = [(field.verbose_name.capitalize(), field.column)
                    for field in models.TestView._meta.fields]
    extra_fields = [(field_name.capitalize(), field_sql)
                    for field_sql, field_name
                    in models.TestView.extra_fields.iteritems()]

    # The keyval considered "the" performance number for each benchmark.
    benchmark_key = {
        'kernbench': 'elapsed',
        'dbench': 'throughput',
        'tbench': 'throughput',
        'unixbench': 'score',
        'iozone': '32768-4096-fwrite',
    }

    # [display name, column] pairs defining the perf view layout.
    tko_perf_view = [
        ['Test Index', 'test_idx'],
        ['Job Index', 'job_idx'],
        ['Test Name', 'test_name'],
        ['Subdirectory', 'subdir'],
        ['Kernel Index', 'kernel_idx'],
        ['Status Index', 'status_idx'],
        ['Reason', 'reason'],
        ['Host Index', 'machine_idx'],
        ['Test Started Time', 'test_started_time'],
        ['Test Finished Time', 'test_finished_time'],
        ['Job Tag', 'job_tag'],
        ['Job Name', 'job_name'],
        ['Owner', 'job_owner'],
        ['Job Queued Time', 'job_queued_time'],
        ['Job Started Time', 'job_started_time'],
        ['Job Finished Time', 'job_finished_time'],
        ['Hostname', 'hostname'],
        ['Platform', 'platform'],
        ['Machine Owner', 'machine_owner'],
        ['Kernel Hash', 'kernel_hash'],
        ['Kernel Base', 'kernel_base'],
        ['Kernel', 'kernel'],
        ['Status', 'status'],
        ['Iteration Number', 'iteration'],
        ['Performance Keyval (Key)', 'iteration_key'],
        ['Performance Keyval (Value)', 'iteration_value'],
    ]

    result = {
        'group_fields': sorted(group_fields),
        'all_fields': sorted(model_fields + extra_fields),
        'test_labels': get_test_labels(sort_by=['name']),
        'current_user': rpc_utils.prepare_for_serialization(
            afe_models.User.current_user().get_object_dict()),
        'benchmark_key': benchmark_key,
        'tko_perf_view': tko_perf_view,
        'tko_test_view': model_fields,
        'preconfigs': preconfigs.manager.all_preconfigs(),
        'motd': rpc_utils.get_motd(),
    }
    return result
def get_machines(**filter_data):
    """Look up Machine objects matching *filter_data* and return them in
    serializable form."""
    machines = models.Machine.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(machines)
def get_static_data():
    """
    Returns a dictionary containing a bunch of data that shouldn't change
    often and is otherwise inaccessible.  This includes:

    priorities: List of job priority choices.
    default_priority: Default priority value for new jobs.
    users: Sorted list of all users.
    labels: Sorted list of all labels.
    atomic_groups: Sorted list of all atomic groups.
    tests: Sorted list of all tests.
    profilers: Sorted list of all profilers.
    current_user: Logged-in username.
    host_statuses: Sorted list of possible Host statuses.
    job_statuses: Sorted list of possible HostQueueEntry statuses.
    job_timeout_default: The default job timeout length in hours.
    parse_failed_repair_default: Default value for the parse_failed_repair
                                 job option.
    reboot_before_options: A list of valid RebootBefore string enums.
    reboot_after_options: A list of valid RebootAfter string enums.
    motd: Server's message of the day.
    status_dictionary: A mapping from one word job status names to a more
                       informative description.
    """
    job_fields = models.Job.get_field_dict()
    default_drone_set_name = models.DroneSet.default_drone_set_name()
    # Default drone set first, then all remaining sets alphabetically.
    drone_sets = ([default_drone_set_name] +
                  sorted(drone_set.name for drone_set in
                         models.DroneSet.objects.exclude(
                             name=default_drone_set_name)))

    result = {}
    result['priorities'] = models.Job.Priority.choices()
    # Translate the numeric default priority into its display string.
    default_priority = job_fields['priority'].default
    default_string = models.Job.Priority.get_string(default_priority)
    result['default_priority'] = default_string
    result['users'] = get_users(sort_by=['login'])
    result['labels'] = get_labels(sort_by=['-platform', 'name'])
    result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
    result['tests'] = get_tests(sort_by=['name'])
    result['profilers'] = get_profilers(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        models.User.current_user().get_object_dict())
    result['host_statuses'] = sorted(models.Host.Status.names)
    result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
    result['job_timeout_default'] = models.Job.DEFAULT_TIMEOUT
    result['job_max_runtime_hrs_default'] = models.Job.DEFAULT_MAX_RUNTIME_HRS
    result['parse_failed_repair_default'] = bool(
        models.Job.DEFAULT_PARSE_FAILED_REPAIR)
    result['reboot_before_options'] = model_attributes.RebootBefore.names
    result['reboot_after_options'] = model_attributes.RebootAfter.names
    result['motd'] = rpc_utils.get_motd()
    result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
    result['drone_sets'] = drone_sets
    result['parameterized_jobs'] = models.Job.parameterized_jobs_enabled()
    # One-word status -> human-readable description, for UI display.
    result['status_dictionary'] = {"Aborted": "Aborted",
                                   "Verifying": "Verifying Host",
                                   "Pending": "Waiting on other hosts",
                                   "Running": "Running autoserv",
                                   "Completed": "Autoserv completed",
                                   "Failed": "Failed to complete",
                                   "Queued": "Queued",
                                   "Starting": "Next in host's queue",
                                   "Stopped": "Other host(s) failed verify",
                                   "Parsing": "Awaiting parse of final results",
                                   "Gathering": "Gathering log files",
                                   "Template": "Template job for recurring run",
                                   "Waiting": "Waiting for scheduler action",
                                   "Archiving": "Archiving results"}
    return result
def get_test_labels_for_tests(**test_filter_data):
    """Return, in serializable form, the TestLabels attached to the tests
    matching *test_filter_data*."""
    matching_label_ids = models.TestView.objects.query_test_label_ids(
        test_filter_data)
    matching_labels = models.TestLabel.list_objects(
        {'id__in': matching_label_ids})
    return rpc_utils.prepare_for_serialization(matching_labels)
def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    Get hosts, annotated with their labels, ACLs, attributes and — when a
    cobbler install server is configured — the install profiles available
    to them.

    :param multiple_labels: match hosts in all of the labels given (optional).
            Should be a list of label names.
    :param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied (optional).
    :param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    :param valid_only: Filter valid hosts (optional).
    :param filter_data: Filters out which hosts to get.
    :return: Sequence of hosts.
    """
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     exclude_atomic_group_hosts,
                                     valid_only, filter_data)
    hosts = list(hosts)
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')

    # Loop-invariant setup, hoisted out of the per-host loop: the logger
    # lookup, the install-server configuration and the config-file read were
    # previously repeated for every host.
    rpc_logger = logging.getLogger('rpc_logger')
    install_server = None
    use_current_profile = True
    install_server_info = get_install_server_info()
    install_server_type = install_server_info.get('type', None)
    install_server_url = install_server_info.get('xmlrpc_url', None)
    # Only cobbler servers exposing an XML-RPC endpoint are supported.
    if install_server_type == 'cobbler' and install_server_url:
        install_server = xmlrpclib.ServerProxy(install_server_url)
        use_current_profile = settings.get_value('INSTALL_SERVER',
                                                 'use_current_profile',
                                                 type=bool, default=True)
    # The profile list is the same for every host; fetch the (remote)
    # get_item_names RPC result lazily and cache it instead of issuing one
    # XML-RPC call per host.
    all_profiles = None

    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (
                rpc_utils.find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict((attribute.attribute, attribute.value)
                                       for attribute
                                       in host_obj.attribute_list)

        # Assume failure until we successfully resolve profiles below.
        error_encountered = True
        if install_server is not None:
            system_params = {"name": host_dict['hostname']}
            system_list = install_server.find_system(system_params, True)

            if len(system_list) < 1:
                msg = 'System "%s" not found on install server'
                rpc_logger.info(msg, host_dict['hostname'])
            elif len(system_list) > 1:
                msg = 'Found multiple systems on install server named %s'
                if install_server_type == 'cobbler':
                    msg = '%s. This should never happen on cobbler' % msg
                rpc_logger.error(msg, host_dict['hostname'])
            else:
                system = system_list[0]
                # Only hosts with a platform label get install profiles.
                if host_dict['platform']:
                    error_encountered = False
                    if all_profiles is None:
                        all_profiles = sorted(
                                install_server.get_item_names('profile'))
                    # Fresh list per host so the cache is never mutated;
                    # 'Do_not_install' is always the first choice.
                    host_dict['profiles'] = ['Do_not_install'] + all_profiles
                    if use_current_profile:
                        host_dict['current_profile'] = system['profile']
                    else:
                        host_dict['current_profile'] = 'Do_not_install'

        if error_encountered:
            host_dict['profiles'] = ['N/A']
            host_dict['current_profile'] = 'N/A'

        host_dicts.append(host_dict)

    return rpc_utils.prepare_for_serialization(host_dicts)
def get_atomic_groups(**filter_data):
    """Look up AtomicGroup objects matching *filter_data* and return them in
    serializable form."""
    atomic_groups = models.AtomicGroup.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(atomic_groups)
def get_static_data():
    """
    Return a dictionary of rarely-changing TKO frontend data: groupable and
    displayable field lists, benchmark performance keys, the perf-view
    column layout, test labels, preconfigs, the current user and the MOTD.
    """
    result = {}
    group_fields = []
    # Build (display name, field) pairs for every groupable field.
    for field in models.TestView.group_fields:
        if field in models.TestView.extra_fields:
            # Extra (SQL-computed) fields carry their own display string.
            name = models.TestView.extra_fields[field]
        else:
            name = models.TestView.get_field_dict()[field].verbose_name
        group_fields.append((name.capitalize(), field))
    model_fields = [(field.verbose_name.capitalize(), field.column)
                    for field in models.TestView._meta.fields]
    extra_fields = [(field_name.capitalize(), field_sql)
                    for field_sql, field_name
                    in models.TestView.extra_fields.iteritems()]

    # The keyval considered "the" performance number for each benchmark.
    benchmark_key = {
        'kernbench': 'elapsed',
        'dbench': 'throughput',
        'tbench': 'throughput',
        'unixbench': 'score',
        'iozone': '32768-4096-fwrite'
    }

    # [display name, column] pairs defining the perf-view layout.
    tko_perf_view = [
        ['Test Index', 'test_idx'],
        ['Job Index', 'job_idx'],
        ['Test Name', 'test_name'],
        ['Subdirectory', 'subdir'],
        ['Kernel Index', 'kernel_idx'],
        ['Status Index', 'status_idx'],
        ['Reason', 'reason'],
        ['Host Index', 'machine_idx'],
        ['Test Started Time', 'test_started_time'],
        ['Test Finished Time', 'test_finished_time'],
        ['Job Tag', 'job_tag'],
        ['Job Name', 'job_name'],
        ['Owner', 'job_owner'],
        ['Job Queued Time', 'job_queued_time'],
        ['Job Started Time', 'job_started_time'],
        ['Job Finished Time', 'job_finished_time'],
        ['Hostname', 'hostname'],
        ['Platform', 'platform'],
        ['Machine Owner', 'machine_owner'],
        ['Kernel Hash', 'kernel_hash'],
        ['Kernel Base', 'kernel_base'],
        ['Kernel', 'kernel'],
        ['Status', 'status'],
        ['Iteration Number', 'iteration'],
        ['Performance Keyval (Key)', 'iteration_key'],
        ['Performance Keyval (Value)', 'iteration_value'],
    ]

    result['group_fields'] = sorted(group_fields)
    result['all_fields'] = sorted(model_fields + extra_fields)
    result['test_labels'] = get_test_labels(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        afe_models.User.current_user().get_object_dict())
    result['benchmark_key'] = benchmark_key
    result['tko_perf_view'] = tko_perf_view
    result['tko_test_view'] = model_fields
    result['preconfigs'] = preconfigs.manager.all_preconfigs()
    result['motd'] = rpc_utils.get_motd()
    return result
def get_test_views(**filter_data):
    """Look up TestView objects matching *filter_data* and return them in
    serializable form."""
    views = models.TestView.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(views)