def testGetCloudStorageInfo(self):
    """Ensure the cloud storage info is properly converted to a dict."""
    self.setIsMoblab(True)
    # Replace the module-level config with a mock and script the exact
    # sequence of reads get_cloud_storage_info() is expected to perform.
    config_mock = self.mox.CreateMockAnything()
    moblab_rpc_interface._CONFIG = config_mock
    config_mock.get_config_value(
        'CROS', 'image_storage_server').AndReturn('gs://bucket1')
    config_mock.get_config_value('CROS', 'results_storage_server',
                                 default=None).AndReturn('gs://bucket2')
    # The boto config reuses the same mock: sections/options/get return
    # canned credential values.
    self.mox.StubOutWithMock(moblab_rpc_interface, '_get_boto_config')
    moblab_rpc_interface._get_boto_config().AndReturn(config_mock)
    config_mock.sections().AndReturn(['Credentials', 'b'])
    config_mock.options('Credentials').AndReturn(
        ['gs_access_key_id', 'gs_secret_access_key'])
    config_mock.get('Credentials', 'gs_access_key_id').AndReturn('key')
    config_mock.get('Credentials', 'gs_secret_access_key').AndReturn('secret')
    # Expect serialization of exactly this dict.
    # NOTE(review): prepare_for_serialization is not stubbed here --
    # presumably it is stubbed in setUp(); confirm.
    rpc_utils.prepare_for_serialization({
        'gs_access_key_id': 'key',
        'gs_secret_access_key': 'secret',
        'use_existing_boto_file': True,
        'image_storage_server': 'gs://bucket1',
        'results_storage_server': 'gs://bucket2'
    })
    self.mox.ReplayAll()
    moblab_rpc_interface.get_cloud_storage_info()
    self.mox.VerifyAll()
def testGetNetworkInfoWithNoIp(self):
    """Queries network info with no public IP address."""
    self.setIsMoblab(True)
    # Helper reports no IP and no connectivity.
    self.mox.StubOutWithMock(moblab_rpc_interface, '_get_network_info')
    moblab_rpc_interface._get_network_info().AndReturn((None, False))
    # With no IP, the serialized dict must omit 'server_ips' entirely.
    self.mox.StubOutWithMock(rpc_utils, 'prepare_for_serialization')
    rpc_utils.prepare_for_serialization({'is_connected': False})
    self.mox.ReplayAll()
    moblab_rpc_interface.get_network_info()
    self.mox.VerifyAll()
def testGetNetworkInfo(self):
    """Ensure the network info is properly converted to a dict."""
    self.setIsMoblab(True)
    # Helper reports an IP with connectivity.
    self.mox.StubOutWithMock(moblab_rpc_interface, '_get_network_info')
    moblab_rpc_interface._get_network_info().AndReturn(('10.0.0.1', True))
    # The IP is wrapped in a single-element 'server_ips' list.
    self.mox.StubOutWithMock(rpc_utils, 'prepare_for_serialization')
    rpc_utils.prepare_for_serialization(
        {'is_connected': True, 'server_ips': ['10.0.0.1']})
    self.mox.ReplayAll()
    moblab_rpc_interface.get_network_info()
    self.mox.VerifyAll()
def testGetNetworkInfoWithNoConnectivity(self):
    """Queries network info with public IP address but no connectivity."""
    self.setIsMoblab(True)
    # Helper reports an IP but no connectivity.
    self.mox.StubOutWithMock(moblab_rpc_interface, '_get_network_info')
    moblab_rpc_interface._get_network_info().AndReturn(('10.0.0.1', False))
    # The IP is still reported even though the connection is down.
    self.mox.StubOutWithMock(rpc_utils, 'prepare_for_serialization')
    rpc_utils.prepare_for_serialization(
        {'is_connected': False, 'server_ips': ['10.0.0.1']})
    self.mox.ReplayAll()
    moblab_rpc_interface.get_network_info()
    self.mox.VerifyAll()
def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """\
    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
            all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
            aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(
            not_yet_run, running, finished)
    jobs = list(models.Job.query_objects(filter_data))
    # Bulk-load related rows so the loop below issues no extra queries.
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    models.Job.objects.populate_relationships(jobs, models.JobKeyval,
                                              'keyvals')
    job_dicts = []
    for job in jobs:
        info = job.get_object_dict()
        info['dependencies'] = ','.join(dep.name for dep in job.dependencies)
        info['keyvals'] = dict((kv.key, kv.value) for kv in job.keyvals)
        job_dicts.append(info)
    return rpc_utils.prepare_for_serialization(job_dicts)
def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    @param hostname: hostname whose queue entries and tasks are fetched.
    @param query_start: index of the first interleaved entry to return,
            or None to start at the beginning.
    @param query_limit: maximum number of interleaved entries to return,
            or None for no limit.

    @returns an interleaved list of HostQueueEntries and SpecialTasks,
            in approximate run order.  each dict contains keys for type,
            host, job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        # Bug fix: query_start may legitimately be None when only a limit
        # is supplied; the old code raised TypeError on None + int.
        total_limit = (query_start or 0) + query_limit
    filter_data = {'host__hostname': hostname,
                   'query_limit': total_limit,
                   'sort_by': ['-id']}
    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))
    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                      special_tasks)
    # Slice after interleaving so pagination applies to the merged list.
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)
def get_connected_dut_info():
    """
    RPC handler returning information about the DUTs connected to the moblab.

    @return: A serialized JSON RPC object.
    """
    # DUTs visible on the network, according to the DHCP leases.
    leases = _get_dhcp_dut_leases()
    connected_duts = _test_all_dut_connections(leases)

    # DUTs registered in the AFE, described by labels plus host attributes.
    hosts = list(rpc_utils.get_host_query((), False, True, {}))
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    configured_duts = {}
    for host in hosts:
        descriptions = sorted(label.name for label in host.label_list)
        for attribute in host.hostattribute_set.all():
            descriptions.append("ATTR:(%s=%s)" % (attribute.attribute,
                                                  attribute.value))
        configured_duts[host.hostname] = ', '.join(descriptions)

    return rpc_utils.prepare_for_serialization({
        'configured_duts': configured_duts,
        'connected_duts': connected_duts
    })
def get_detailed_test_views(**filter_data):
    # Fetch matching test views and bulk-load every related table needed to
    # flesh them out, avoiding one query per test.
    test_views = models.TestView.list_objects(filter_data)
    tests_by_id = models.Test.objects.in_bulk([test_view['test_idx']
                                               for test_view in test_views])
    tests = tests_by_id.values()
    models.Test.objects.populate_relationships(tests, models.TestAttribute,
                                               'attributes')
    models.Test.objects.populate_relationships(tests,
                                               models.IterationAttribute,
                                               'iteration_attributes')
    models.Test.objects.populate_relationships(tests, models.IterationResult,
                                               'iteration_results')
    models.Test.objects.populate_relationships(tests, models.TestLabel,
                                               'labels')
    jobs_by_id = models.Job.objects.in_bulk([test_view['job_idx']
                                             for test_view in test_views])
    jobs = jobs_by_id.values()
    models.Job.objects.populate_relationships(jobs, models.JobKeyval,
                                              'keyvals')
    # Denormalize attributes, iterations, labels and job keyvals onto each
    # test view dict before serializing.
    for test_view in test_views:
        test = tests_by_id[test_view['test_idx']]
        test_view['attributes'] = _attributes_to_dict(test.attributes)
        test_view['iterations'] = _format_iteration_keyvals(test)
        test_view['labels'] = [label.name for label in test.labels]
        job = jobs_by_id[test_view['job_idx']]
        test_view['job_keyvals'] = _job_keyvals_to_dict(job.keyvals)
    return rpc_utils.prepare_for_serialization(test_views)
def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              **filter_data):
    """\
    multiple_labels: match hosts in all of the labels given.  Should be a
    list of label names.
    exclude_only_if_needed_labels: exclude hosts with at least one
    "only_if_needed" label applied.
    """
    # Bug fix: the default was a shared mutable list ([]); an immutable
    # tuple matches the other get_hosts variant in this file and cannot be
    # accidentally mutated across calls.
    hosts = rpc_utils.get_host_query(multiple_labels,
                                     exclude_only_if_needed_labels,
                                     filter_data)
    hosts = list(hosts)
    # Bulk-load labels, ACLs and attributes to avoid per-host queries.
    models.Host.objects.populate_relationships(hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(hosts, models.AclGroup,
                                               'acl_list')
    models.Host.objects.populate_relationships(hosts, models.HostAttribute,
                                               'attribute_list')
    host_dicts = []
    for host_obj in hosts:
        host_dict = host_obj.get_object_dict()
        host_dict['labels'] = [label.name for label in host_obj.label_list]
        host_dict['platform'], host_dict['atomic_group'] = (
            rpc_utils.find_platform_and_atomic_group(host_obj))
        host_dict['acls'] = [acl.name for acl in host_obj.acl_list]
        host_dict['attributes'] = dict(
            (attribute.attribute, attribute.value)
            for attribute in host_obj.attribute_list)
        host_dicts.append(host_dict)
    return rpc_utils.prepare_for_serialization(host_dicts)
def get_detailed_test_views(**filter_data):
    """Return test view dicts enriched with attributes, iterations, labels
    and job keyvals."""
    views = models.TestView.list_objects(filter_data)

    tests_by_id = models.Test.objects.in_bulk(
        [view['test_idx'] for view in views])
    tests = tests_by_id.values()
    # Bulk-load every related table in one pass each.
    for related_model, attr_name in (
            (models.TestAttribute, 'attributes'),
            (models.IterationAttribute, 'iteration_attributes'),
            (models.IterationResult, 'iteration_results'),
            (models.TestLabel, 'labels')):
        models.Test.objects.populate_relationships(tests, related_model,
                                                   attr_name)

    jobs_by_id = models.Job.objects.in_bulk(
        [view['job_idx'] for view in views])
    models.Job.objects.populate_relationships(jobs_by_id.values(),
                                              models.JobKeyval, 'keyvals')

    for view in views:
        test = tests_by_id[view['test_idx']]
        view['attributes'] = _attributes_to_dict(test.attributes)
        view['iterations'] = _format_iteration_keyvals(test)
        view['labels'] = [label.name for label in test.labels]
        job = jobs_by_id[view['job_idx']]
        view['job_keyvals'] = _job_keyvals_to_dict(job.keyvals)
    return rpc_utils.prepare_for_serialization(views)
def get_host_history(start_time, end_time, hosts=None, board=None, pool=None):
    """Get history of a list of host.

    The return is a JSON string of host history for each host, for example,
    {'172.22.33.51': [{'status': 'Resetting'
                       'start_time': '2014-08-07 10:02:16',
                       'end_time': '2014-08-07 10:03:16',
                       'log_url': 'http://autotest/reset-546546/debug',
                       'dbg_str': 'Task: Special Task 19441991 (host ...)'},
                      {'status': 'Running'
                       'start_time': '2014-08-07 10:03:18',
                       'end_time': '2014-08-07 10:13:00',
                       'log_url': 'http://autotest/reset-546546/debug',
                       'dbg_str': 'HQE: 15305005, for job: 14995562'}
                     ]
    }
    @param start_time: start time to search for history, can be string value
                       or epoch time.
    @param end_time: end time to search for history, can be string value or
                     epoch time.
    @param hosts: A list of hosts to search for history. Default is None.
    @param board: board type of hosts. Default is None.
    @param pool: pool type of hosts. Default is None.
    @returns: JSON string of the host history.
    """
    # process_pool_size=4: history lookup fans out over a small worker pool.
    details = host_history.get_history_details(start_time=start_time,
                                               end_time=end_time,
                                               hosts=hosts,
                                               board=board,
                                               pool=pool,
                                               process_pool_size=4)
    return rpc_utils.prepare_for_serialization(details)
def get_cloud_storage_info():
    """RPC handler to get the cloud storage access information.

    @return: A serialized JSON RPC dict containing the configured storage
             server URLs, whether an existing boto file is present, and the
             boto credentials when available.
    """
    cloud_storage_info = {}
    value = _CONFIG.get_config_value('CROS', _IMAGE_STORAGE_SERVER)
    if value is not None:
        cloud_storage_info[_IMAGE_STORAGE_SERVER] = value
    value = _CONFIG.get_config_value('CROS', _RESULT_STORAGE_SERVER,
                                     default=None)
    if value is not None:
        cloud_storage_info[_RESULT_STORAGE_SERVER] = value

    boto_config = _get_boto_config()
    sections = boto_config.sections()

    # Idiom fix: replaced the if/else True/False assignment -- a non-empty
    # section list means a usable boto file already exists.
    cloud_storage_info[_USE_EXISTING_BOTO_FILE] = bool(sections)

    if 'Credentials' in sections:
        options = boto_config.options('Credentials')
        if _GS_ACCESS_KEY_ID in options:
            value = boto_config.get('Credentials', _GS_ACCESS_KEY_ID)
            cloud_storage_info[_GS_ACCESS_KEY_ID] = value
        if _GS_SECRET_ACCESS_KEY in options:
            value = boto_config.get('Credentials', _GS_SECRET_ACCESS_KEY)
            cloud_storage_info[_GS_SECRET_ACCESS_KEY] = value

    return rpc_utils.prepare_for_serialization(cloud_storage_info)
def get_jobs(not_yet_run=False, running=False, finished=False, **filter_data):
    """\
    Extra filter args for get_jobs:
    -not_yet_run: Include only jobs that have not yet started running.
    -running: Include only jobs that have started running but for which not
            all hosts have completed.
    -finished: Include only jobs for which all hosts have completed (or
            aborted).
    At most one of these three fields should be specified.
    """
    filter_data['extra_args'] = rpc_utils.extra_job_filters(not_yet_run,
                                                            running, finished)
    job_dicts = []
    jobs = list(models.Job.query_objects(filter_data))
    # Bulk-load dependencies and keyvals so the loop below issues no extra
    # queries per job.
    models.Job.objects.populate_relationships(jobs, models.Label,
                                              'dependencies')
    models.Job.objects.populate_relationships(jobs, models.JobKeyval,
                                              'keyvals')
    for job in jobs:
        job_dict = job.get_object_dict()
        # Dependencies are flattened to a comma-separated label-name string.
        job_dict['dependencies'] = ','.join(label.name
                                            for label in job.dependencies)
        job_dict['keyvals'] = dict((keyval.key, keyval.value)
                                   for keyval in job.keyvals)
        job_dicts.append(job_dict)
    return rpc_utils.prepare_for_serialization(job_dicts)
def get_hosts(multiple_labels=(), exclude_only_if_needed_labels=False,
              exclude_atomic_group_hosts=False, valid_only=True,
              **filter_data):
    """
    @param multiple_labels: match hosts in all of the labels given.  Should
            be a list of label names.
    @param exclude_only_if_needed_labels: Exclude hosts with at least one
            "only_if_needed" label applied.
    @param exclude_atomic_group_hosts: Exclude hosts that have one or more
            atomic group labels associated with them.
    """
    matching_hosts = list(rpc_utils.get_host_query(
            multiple_labels, exclude_only_if_needed_labels,
            exclude_atomic_group_hosts, valid_only, filter_data))
    # Bulk-load labels, ACLs and attributes so the loop issues no queries.
    models.Host.objects.populate_relationships(matching_hosts, models.Label,
                                               'label_list')
    models.Host.objects.populate_relationships(matching_hosts,
                                               models.AclGroup, 'acl_list')
    models.Host.objects.populate_relationships(matching_hosts,
                                               models.HostAttribute,
                                               'attribute_list')
    host_dicts = []
    for host in matching_hosts:
        info = host.get_object_dict()
        info['labels'] = [label.name for label in host.label_list]
        platform, atomic_group = rpc_utils.find_platform_and_atomic_group(
                host)
        info['platform'] = platform
        info['atomic_group'] = atomic_group
        info['acls'] = [acl.name for acl in host.acl_list]
        info['attributes'] = dict((attr.attribute, attr.value)
                                  for attr in host.attribute_list)
        host_dicts.append(info)
    return rpc_utils.prepare_for_serialization(host_dicts)
def testValidateCloudStorageInfo(self):
    """Ensure the cloud storage info validation flow."""
    self.setIsMoblab(True)
    # Candidate settings a user might submit from the UI.
    cloud_storage_info = {
        'use_existing_boto_file': False,
        'gs_access_key_id': 'key',
        'gs_secret_access_key': 'secret',
        'image_storage_server': 'gs://bucket1',
        'results_storage_server': 'gs://bucket2'}
    # The bucket performance test is stubbed to report success.
    self.mox.StubOutWithMock(moblab_rpc_interface,
                             '_run_bucket_performance_test')
    moblab_rpc_interface._run_bucket_performance_test(
        'key', 'secret', 'gs://bucket1').AndReturn((True, None))
    # NOTE(review): prepare_for_serialization is not stubbed here --
    # presumably it is stubbed in setUp(); confirm.
    rpc_utils.prepare_for_serialization({'status_ok': True })
    self.mox.ReplayAll()
    moblab_rpc_interface.validate_cloud_storage_info(cloud_storage_info)
    self.mox.VerifyAll()
def get_acl_groups(**filter_data):
    """List ACL groups, each annotated with its member users and hosts."""
    acl_groups = models.AclGroup.list_objects(filter_data)
    for group in acl_groups:
        # Re-fetch the model object to reach the m2m relations.
        group_obj = models.AclGroup.objects.get(id=group['id'])
        group['users'] = [member.login for member in group_obj.users.all()]
        group['hosts'] = [member.hostname
                          for member in group_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(acl_groups)
def get_acl_groups(**filter_data):
    """Return ACL group dicts with 'users' and 'hosts' membership lists."""
    groups = models.AclGroup.list_objects(filter_data)
    for entry in groups:
        # The list_objects() dicts lack relations; load the object for them.
        acl_obj = models.AclGroup.objects.get(id=entry['id'])
        entry['users'] = [u.login for u in acl_obj.users.all()]
        entry['hosts'] = [h.hostname for h in acl_obj.hosts.all()]
    return rpc_utils.prepare_for_serialization(groups)
def testGetConfigValues(self):
    """Ensure that the config object is properly converted to a dict."""
    self.setIsMoblab(True)
    # Replace the module-level config with a mock exposing two sections.
    config_mock = self.mox.CreateMockAnything()
    moblab_rpc_interface._CONFIG = config_mock
    config_mock.get_sections().AndReturn(['section1', 'section2'])
    config_mock.config = self.mox.CreateMockAnything()
    config_mock.config.items('section1').AndReturn([('item1', 'value1'),
                                                    ('item2', 'value2')])
    config_mock.config.items('section2').AndReturn([('item3', 'value3'),
                                                    ('item4', 'value4')])
    # NOTE(review): prepare_for_serialization is not stubbed here --
    # presumably it is stubbed in setUp(); confirm.
    rpc_utils.prepare_for_serialization({
        'section1': [('item1', 'value1'), ('item2', 'value2')],
        'section2': [('item3', 'value3'), ('item4', 'value4')]
    })
    self.mox.ReplayAll()
    moblab_rpc_interface.get_config_values()
    # Bug fix: the recorded expectations were never verified; every sibling
    # test ends with VerifyAll(), so a missed call went undetected here.
    self.mox.VerifyAll()
def get_config_values():
    """Returns all config values parsed from global and shadow configs.

    Config values are grouped by sections, and each section is composed of
    a list of name value pairs.
    """
    config_values = dict(
            (section, _CONFIG.config.items(section))
            for section in _CONFIG.get_sections())
    return rpc_utils.prepare_for_serialization(config_values)
def get_dut_wifi_info():
    """RPC handler to get the dut wifi AP information.

    Only keys that are actually configured appear in the result.
    """
    dut_wifi_info = {}
    for key in (_WIFI_AP_NAME, _WIFI_AP_PASS):
        value = _CONFIG.get_config_value('MOBLAB', key, default=None)
        if value is not None:
            dut_wifi_info[key] = value
    return rpc_utils.prepare_for_serialization(dut_wifi_info)
def get_jobs_summary(**filter_data):
    """\
    Like get_jobs(), but adds a 'status_counts' field, which is a dictionary
    mapping status strings to the number of hosts currently with that
    status, i.e. {'Queued' : 4, 'Running' : 2}.
    """
    job_dicts = get_jobs(**filter_data)
    status_counts = models.Job.objects.get_status_counts(
            [entry['id'] for entry in job_dicts])
    for entry in job_dicts:
        entry['status_counts'] = status_counts[entry['id']]
    return rpc_utils.prepare_for_serialization(job_dicts)
def get_job_history(**filter_data):
    """Get history of the job, including its special tasks.

    @param filter_data: filter for the call, should at least include
                        {'job_id': [job id]}
    @returns: JSON string of the job's history, including information such
              as the hosts that ran the job and the special tasks executed
              before and after the job.
    """
    job_info = job_history.get_job_info(filter_data['job_id'])
    return rpc_utils.prepare_for_serialization(job_info.get_history())
def _create_operation_status_response(is_ok, details):
    """Helper method to create an operation status response.

    @param is_ok: Boolean for if the operation is ok.
    @param details: A detailed string, included only when non-empty.

    @return: A serialized JSON RPC object.
    """
    response = {'status_ok': is_ok}
    if details:
        response['status_details'] = details
    return rpc_utils.prepare_for_serialization(response)
def get_network_info():
    """Returns the server ip addresses, and if the server connectivity.

    The server ip addresses as an array of strings, and the connectivity as
    a flag.
    """
    ip_address, is_connected = _get_network_info()
    network_info = {'is_connected': is_connected}
    # Omit 'server_ips' entirely when no address was detected.
    if ip_address is not None:
        network_info['server_ips'] = [ip_address]
    return rpc_utils.prepare_for_serialization(network_info)
def get_hosts(plan_id):
    """
    Gets the hostnames of all the hosts in this test plan.

    Resolves host labels in the plan.
    """
    plan = models.Plan.smart_get(plan_id)
    # Start with hosts listed directly, then add hosts from each label.
    hostnames = set(plan.hosts.all().values_list('hostname', flat=True))
    for label in plan.host_labels.all():
        hostnames.update(
                label.host_set.all().values_list('hostname', flat=True))
    return afe_rpc_utils.prepare_for_serialization(hostnames)
def get_latest_tests(group_by, header_groups=None, fixed_headers=None,
                     extra_info=None, **filter_data):
    """
    Similar to get_status_counts, but return only the latest test result
    per group.  It still returns the same information (i.e. with pass count
    etc.) for compatibility.  It includes an additional field "test_idx"
    with each group.

    @param extra_info a list containing the field names that should be
            returned with each cell. The fields are returned in the
            extra_info field of the return dictionary.
    """
    # Bug fix: the previous [] / {} defaults were shared mutable objects;
    # use None sentinels (as get_group_counts does) and normalize locally.
    header_groups = header_groups or []
    fixed_headers = fixed_headers or {}
    extra_info = extra_info or []
    # find latest test per group
    initial_query = models.TestView.objects.get_query_set_with_joins(
        filter_data)
    query = models.TestView.query_objects(filter_data,
                                          initial_query=initial_query,
                                          apply_presentation=False)
    query = query.exclude(status__in=tko_rpc_utils._INVALID_STATUSES)
    query = query.extra(
        select={
            'latest_test_idx': 'MAX(%s)' %
            models.TestView.objects.get_key_on_this_table('test_idx')
        })
    query = models.TestView.apply_presentation(query, filter_data)

    group_processor = tko_rpc_utils.GroupDataProcessor(query, group_by,
                                                       header_groups,
                                                       fixed_headers)
    group_processor.process_group_dicts()
    info = group_processor.get_info_dict()

    # fetch full info for these tests so we can access their statuses
    all_test_ids = [group['latest_test_idx'] for group in info['groups']]
    test_views = initial_query.in_bulk(all_test_ids)

    for group_dict in info['groups']:
        test_idx = group_dict.pop('latest_test_idx')
        group_dict['test_idx'] = test_idx
        test_view = test_views[test_idx]
        tko_rpc_utils.add_status_counts(group_dict, test_view.status)
        group_dict['extra_info'] = [getattr(test_view, field)
                                    for field in extra_info]
    return rpc_utils.prepare_for_serialization(info)
def get_version_info():
    """
    RPC handler to get information about the version of the moblab.

    @return: A serialized JSON RPC object.
    """
    # Bug fix: use a context manager so the file handle is always closed
    # (the old open(...).readlines() leaked the descriptor).
    with open(_ETC_LSB_RELEASE) as lsb_release:
        lines = lsb_release.readlines()
    # NOTE(review): split('=')[1] keeps the trailing newline and drops text
    # after any second '=' -- preserved as-is to avoid a behavior change.
    version_response = {
        x.split('=')[0]: x.split('=')[1] for x in lines if '=' in x
    }
    version_response['MOBLAB_ID'] = utils.get_moblab_id()
    version_response['MOBLAB_MAC_ADDRESS'] = (
        utils.get_default_interface_mac_address())
    return rpc_utils.prepare_for_serialization(version_response)
def get_group_counts(group_by, header_groups=None, fixed_headers=None,
                     machine_label_headers=None, extra_select_fields=None,
                     **filter_data):
    """
    Queries against TestView grouping by the specified fields and computings
    counts for each group.
    * group_by should be a list of field names.
    * extra_select_fields can be used to specify additional fields to select
      (usually for aggregate functions).
    * header_groups can be used to get lists of unique combinations of group
      fields.  It should be a list of tuples of fields from group_by.  It's
      primarily for use by the spreadsheet view.
    * fixed_headers can map header fields to lists of values.  the header
      will guaranteed to return exactly those value.  this does not work
      together with header_groups.
    * machine_label_headers can specify special headers to be constructed
      from machine labels.  It should map arbitrary names to lists of
      machine labels.  a field will be created with the given name
      containing a comma-separated list indicating which of the given
      machine labels are on each test.  this field can then be grouped on.

    Returns a dictionary with two keys:
    * header_values contains a list of lists, one for each header group in
      header_groups.  Each list contains all the values for the
      corresponding header group as tuples.
    * groups contains a list of dicts, one for each row.  Each dict contains
      keys for each of the group_by fields, plus a 'group_count' key for the
      total count in the group, plus keys for each of the
      extra_select_fields.  The keys for the extra_select_fields are
      determined by the "AS" alias of the field.
    """
    # Bug fix: the previous defaults ([], {}, {}, {}) were shared mutable
    # objects; use None sentinels (matching the other get_group_counts in
    # this file) and normalize locally.
    header_groups = header_groups or []
    fixed_headers = fixed_headers or {}
    machine_label_headers = machine_label_headers or {}
    extra_select_fields = dict(extra_select_fields or {})
    query = models.TestView.objects.get_query_set_with_joins(
        filter_data, include_host_labels=bool(machine_label_headers))
    query = models.TestView.query_objects(filter_data, initial_query=query)

    count_alias, count_sql = models.TestView.objects.get_count_sql(query)
    extra_select_fields[count_alias] = count_sql
    if 'test_idx' not in group_by:
        extra_select_fields['test_idx'] = 'test_idx'
    tko_rpc_utils.add_machine_label_headers(machine_label_headers,
                                            extra_select_fields)

    group_processor = tko_rpc_utils.GroupDataProcessor(query, group_by,
                                                       header_groups,
                                                       fixed_headers,
                                                       extra_select_fields)
    group_processor.process_group_dicts()
    return rpc_utils.prepare_for_serialization(
        group_processor.get_info_dict())
def get_group_counts(group_by, header_groups=None, fixed_headers=None,
                     extra_select_fields=None, **filter_data):
    """
    Queries against TestView grouping by the specified fields and computings
    counts for each group.
    * group_by should be a list of field names.
    * extra_select_fields can be used to specify additional fields to select
      (usually for aggregate functions).
    * header_groups can be used to get lists of unique combinations of group
      fields.  It should be a list of tuples of fields from group_by.  It's
      primarily for use by the spreadsheet view.
    * fixed_headers can map header fields to lists of values.  the header
      will guaranteed to return exactly those value.  this does not work
      together with header_groups.

    Returns a dictionary with two keys:
    * header_values contains a list of lists, one for each header group in
      header_groups.  Each list contains all the values for the
      corresponding header group as tuples.
    * groups contains a list of dicts, one for each row.  Each dict contains
      keys for each of the group_by fields, plus a 'group_count' key for the
      total count in the group, plus keys for each of the
      extra_select_fields.  The keys for the extra_select_fields are
      determined by the "AS" alias of the field.
    """
    base_query = models.TestView.objects.get_query_set_with_joins(
            filter_data)
    # Presentation is deferred until after the extra selects are attached.
    grouped_query = models.TestView.query_objects(filter_data,
                                                  initial_query=base_query,
                                                  apply_presentation=False)
    count_alias, count_sql = models.TestView.objects.get_count_sql(
            grouped_query)
    grouped_query = grouped_query.extra(select={count_alias: count_sql})
    if extra_select_fields:
        grouped_query = grouped_query.extra(select=extra_select_fields)
    grouped_query = models.TestView.apply_presentation(grouped_query,
                                                       filter_data)

    processor = tko_rpc_utils.GroupDataProcessor(grouped_query, group_by,
                                                 header_groups or [],
                                                 fixed_headers or {})
    processor.process_group_dicts()
    return rpc_utils.prepare_for_serialization(processor.get_info_dict())
def get_latest_tests(group_by, header_groups=None, fixed_headers=None,
                     extra_info=None, **filter_data):
    """
    Similar to get_status_counts, but return only the latest test result
    per group.  It still returns the same information (i.e. with pass count
    etc.) for compatibility.  It includes an additional field "test_idx"
    with each group.

    @param extra_info a list containing the field names that should be
            returned with each cell. The fields are returned in the
            extra_info field of the return dictionary.
    """
    # Bug fix: the previous [] / {} defaults were shared mutable objects;
    # use None sentinels (as get_group_counts does) and normalize locally.
    header_groups = header_groups or []
    fixed_headers = fixed_headers or {}
    extra_info = extra_info or []
    # find latest test per group
    initial_query = models.TestView.objects.get_query_set_with_joins(
        filter_data)
    query = models.TestView.query_objects(filter_data,
                                          initial_query=initial_query,
                                          apply_presentation=False)
    query = query.exclude(status__in=tko_rpc_utils._INVALID_STATUSES)
    query = query.extra(
        select={'latest_test_idx' : 'MAX(%s)' % (
            models.TestView.objects.get_key_on_this_table('test_idx'))})
    query = models.TestView.apply_presentation(query, filter_data)

    group_processor = tko_rpc_utils.GroupDataProcessor(query, group_by,
                                                       header_groups,
                                                       fixed_headers)
    group_processor.process_group_dicts()
    info = group_processor.get_info_dict()

    # fetch full info for these tests so we can access their statuses
    all_test_ids = [group['latest_test_idx'] for group in info['groups']]
    test_views = initial_query.in_bulk(all_test_ids)

    for group_dict in info['groups']:
        test_idx = group_dict.pop('latest_test_idx')
        group_dict['test_idx'] = test_idx
        test_view = test_views[test_idx]
        tko_rpc_utils.add_status_counts(group_dict, test_view.status)
        group_dict['extra_info'] = [getattr(test_view, field)
                                    for field in extra_info]
    return rpc_utils.prepare_for_serialization(info)
def get_version_info():
    """
    RPC handler to get information about the version of the moblab.

    @return: A serialized JSON RPC object.
    """
    # Bug fix: use a context manager so the file handle is always closed
    # (the old open(...).readlines() leaked the descriptor).
    with open(_ETC_LSB_RELEASE) as lsb_release:
        lines = lsb_release.readlines()
    # NOTE(review): split('=')[1] keeps the trailing newline and drops text
    # after any second '=' -- preserved as-is to avoid a behavior change.
    version_response = {
        x.split('=')[0]: x.split('=')[1] for x in lines if '=' in x
    }
    version_response['MOBLAB_ID'] = utils.get_moblab_id()
    version_response['MOBLAB_SERIAL_NUMBER'] = (
        utils.get_moblab_serial_number())
    # Kick off an update check, then report the current updater state.
    _check_for_system_update()
    update_status = _get_system_update_status()
    version_response['MOBLAB_UPDATE_VERSION'] = update_status['NEW_VERSION']
    version_response['MOBLAB_UPDATE_STATUS'] = update_status['CURRENT_OP']
    version_response['MOBLAB_UPDATE_PROGRESS'] = update_status['PROGRESS']
    return rpc_utils.prepare_for_serialization(version_response)
def handle_file_upload(request):
    """Handler for uploading files.

    Saves the files to /tmp and returns the resulting paths on disk.

    @param request: request containing the file data.

    @returns HttpResponse: with the paths of the saved files.
    """
    if request.method != 'POST':
        # Non-POST requests get the default empty response (None).
        return None
    # NOTE(review): files land in a predictable /tmp path built partly from
    # client-supplied names; confirm callers are trusted.
    upload_dir = '/tmp/'
    saved_paths = []
    for field_name, uploaded in request.FILES.iteritems():
        destination_path = os.path.join(
                upload_dir, '_'.join([field_name, uploaded.name]))
        with open(destination_path, 'wb+') as output:
            for chunk in uploaded.chunks():
                output.write(chunk)
        saved_paths.append(destination_path)
    return HttpResponse(rpc_utils.prepare_for_serialization(saved_paths))
def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """\
    Retrieves all the information needed to clone a job.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job, preserve_metahosts,
                                      queue_entry_filter_data)
    # Build per-host dicts; the platform label is separated out so the clone
    # UI can show "other" labels distinctly.
    host_dicts = []
    for host in job_info['hosts']:
        host_dict = get_hosts(id=host.id)[0]
        other_labels = host_dict['labels']
        if host_dict['platform']:
            other_labels.remove(host_dict['platform'])
        host_dict['other_labels'] = ', '.join(other_labels)
        host_dicts.append(host_dict)
    # One-time hosts get a synthetic placeholder entry.
    for host in job_info['one_time_hosts']:
        host_dict = dict(hostname=host.hostname,
                         id=host.id,
                         platform='(one-time host)',
                         locked_text='')
        host_dicts.append(host_dict)
    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict((meta_host.name, count)
                            for meta_host, count
                            in job_info['meta_host_counts'].iteritems())
    info = dict(job=job.get_object_dict(),
                meta_host_counts=meta_host_counts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    if job_info['atomic_group']:
        info['atomic_group_name'] = (job_info['atomic_group']).name
    else:
        info['atomic_group_name'] = None
    info['hostless'] = job_info['hostless']
    # drone_set may be None; `and` short-circuits to None in that case.
    info['drone_set'] = job.drone_set and job.drone_set.name
    return rpc_utils.prepare_for_serialization(info)
def get_info_for_clone(id, preserve_metahosts, queue_entry_filter_data=None):
    """\
    Retrieves all the information needed to clone a job.
    """
    job = models.Job.objects.get(id=id)
    job_info = rpc_utils.get_job_info(job, preserve_metahosts,
                                      queue_entry_filter_data)

    host_dicts = []
    for host in job_info['hosts']:
        entry = get_hosts(id=host.id)[0]
        # Separate the platform label so only "other" labels remain listed.
        remaining_labels = entry['labels']
        if entry['platform']:
            remaining_labels.remove(entry['platform'])
        entry['other_labels'] = ', '.join(remaining_labels)
        host_dicts.append(entry)
    for host in job_info['one_time_hosts']:
        host_dicts.append(dict(hostname=host.hostname,
                               id=host.id,
                               platform='(one-time host)',
                               locked_text=''))

    # convert keys from Label objects to strings (names of labels)
    meta_host_counts = dict(
            (meta_host.name, count)
            for meta_host, count in job_info['meta_host_counts'].iteritems())

    info = dict(job=job.get_object_dict(),
                meta_host_counts=meta_host_counts,
                hosts=host_dicts)
    info['job']['dependencies'] = job_info['dependencies']
    atomic_group = job_info['atomic_group']
    info['atomic_group_name'] = atomic_group.name if atomic_group else None
    info['hostless'] = job_info['hostless']
    info['drone_set'] = job.drone_set and job.drone_set.name

    return rpc_utils.prepare_for_serialization(info)
def get_iteration_views(result_keys, **test_filter_data):
    """
    Similar to get_test_views, but returns a dict for each iteration rather
    than for each test.  Accepts the same filter data as get_test_views.

    @param result_keys: list of iteration result keys to include.  Only
            iterations contains all these keys will be included.

    @returns a list of dicts, one for each iteration.  Each dict contains:
            * all the same information as get_test_views()
            * all the keys specified in result_keys
            * an additional key 'iteration_index'
    """
    iteration_query = tko_rpc_utils.get_iteration_view_query(
            result_keys, test_filter_data)
    presentation_params = tko_rpc_utils.extract_presentation_params(
            test_filter_data)
    presentation_params['no_distinct'] = True
    # The returned dicts carry every TestView field plus the requested
    # result keys and the iteration index.
    wanted_fields = (models.TestView.get_field_dict().keys() + result_keys
                     + ['iteration_index'])
    iteration_dicts = models.TestView.list_objects(
            presentation_params, initial_query=iteration_query,
            fields=wanted_fields)
    return rpc_utils.prepare_for_serialization(iteration_dicts)
def get_hosts_and_tests():
    """\
    Gets every host that has had a benchmark run on it.  Additionally, also
    gets a dictionary mapping the host names to the benchmarks.
    """
    benchmark_prefixes = ('kernbench', 'dbench', 'tbench', 'unixbench',
                          'iozone')
    benchmark_filter = dbmodels.Q(
            test_name__startswith=benchmark_prefixes[0])
    for prefix in benchmark_prefixes[1:]:
        benchmark_filter |= dbmodels.Q(test_name__startswith=prefix)
    rows = models.TestView.objects.filter(benchmark_filter).values(
            'test_name', 'hostname', 'machine_idx').distinct()

    host_info = {}
    for row in rows:
        entry = host_info.setdefault(row['hostname'], {})
        entry.setdefault('tests', []).append(row['test_name'])
        entry['id'] = row['machine_idx']
    return rpc_utils.prepare_for_serialization(host_info)
def get_host_queue_entries_and_special_tasks(hostname, query_start=None,
                                             query_limit=None):
    """
    @param hostname: hostname whose entries and tasks to fetch.
    @param query_start: optional offset into the interleaved list.
    @param query_limit: optional maximum number of entries to return.

    @returns an interleaved list of HostQueueEntries and SpecialTasks,
            in approximate run order.  each dict contains keys for type, host,
            job, status, started_on, execution_path, and ID.
    """
    total_limit = None
    if query_limit is not None:
        # Fetch enough rows from each table to cover the requested window.
        # Treat a missing start offset as 0; the original expression
        # (query_start + query_limit) raised TypeError when query_limit was
        # supplied without query_start.
        total_limit = (query_start or 0) + query_limit
    filter_data = {'host__hostname': hostname,
                   'query_limit': total_limit,
                   'sort_by': ['-id']}

    queue_entries = list(models.HostQueueEntry.query_objects(filter_data))
    special_tasks = list(models.SpecialTask.query_objects(filter_data))

    interleaved_entries = rpc_utils.interleave_entries(queue_entries,
                                                       special_tasks)
    # Apply the requested window after interleaving, since the two sources
    # are merged into a single ordered list.
    if query_start is not None:
        interleaved_entries = interleaved_entries[query_start:]
    if query_limit is not None:
        interleaved_entries = interleaved_entries[:query_limit]
    return rpc_utils.prepare_for_serialization(interleaved_entries)
def get_static_data():
    """Return static display metadata used by the TKO frontend."""
    result = {}

    # Build (display name, field) pairs for every groupable field, taking the
    # display name from extra_fields when present, else from the model field.
    group_fields = []
    for field in models.TestView.group_fields:
        if field in models.TestView.extra_fields:
            display = models.TestView.extra_fields[field]
        else:
            display = models.TestView.get_field_dict()[field].verbose_name
        group_fields.append((display.capitalize(), field))

    model_fields = [(field.verbose_name.capitalize(), field.column)
                    for field in models.TestView._meta.fields]
    extra_fields = [(display.capitalize(), sql)
                    for sql, display
                    in models.TestView.extra_fields.iteritems()]

    # Perf keyval of interest for each recognized benchmark.
    benchmark_key = {
        'kernbench': 'elapsed',
        'dbench': 'throughput',
        'tbench': 'throughput',
        'unixbench': 'score',
        'iozone': '32768-4096-fwrite',
    }

    # (Display name, column) pairs for the perf view, in display order.
    tko_perf_view = [
        ['Test Index', 'test_idx'],
        ['Job Index', 'job_idx'],
        ['Test Name', 'test_name'],
        ['Subdirectory', 'subdir'],
        ['Kernel Index', 'kernel_idx'],
        ['Status Index', 'status_idx'],
        ['Reason', 'reason'],
        ['Host Index', 'machine_idx'],
        ['Test Started Time', 'test_started_time'],
        ['Test Finished Time', 'test_finished_time'],
        ['Job Tag', 'job_tag'],
        ['Job Name', 'job_name'],
        ['Owner', 'job_owner'],
        ['Job Queued Time', 'job_queued_time'],
        ['Job Started Time', 'job_started_time'],
        ['Job Finished Time', 'job_finished_time'],
        ['Hostname', 'hostname'],
        ['Platform', 'platform'],
        ['Machine Owner', 'machine_owner'],
        ['Kernel Hash', 'kernel_hash'],
        ['Kernel Base', 'kernel_base'],
        ['Kernel', 'kernel'],
        ['Status', 'status'],
        ['Iteration Number', 'iteration'],
        ['Performance Keyval (Key)', 'iteration_key'],
        ['Performance Keyval (Value)', 'iteration_value'],
    ]

    result['group_fields'] = sorted(group_fields)
    result['all_fields'] = sorted(model_fields + extra_fields)
    result['test_labels'] = get_test_labels(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        afe_models.User.current_user().get_object_dict())
    result['benchmark_key'] = benchmark_key
    result['tko_perf_view'] = tko_perf_view
    result['tko_test_view'] = model_fields
    result['preconfigs'] = preconfigs.manager.all_preconfigs()
    result['motd'] = rpc_utils.get_motd()
    return result
def get_saved_queries(**filter_data):
    """Return serialized SavedQuery rows matching the given filters."""
    saved_queries = models.SavedQuery.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(saved_queries)
def get_tests_by_build(build):
    """Get the tests that are available for the specified build.

    @param build: unique name by which to refer to the image.

    @return: A sorted list of all tests that are in the build specified.

    @raises ValueError: if the build cannot be resolved to a dev server.
    @raises error.StageControlFileFailure: if staging the test_suites
            artifact on the dev server fails.
    """
    # Stage the test artifacts.
    try:
        ds = dev_server.ImageServer.resolve(build)
        # Translate the build name into the dev server's canonical form.
        build = ds.translate(build)
    except dev_server.DevServerException as e:
        raise ValueError('Could not resolve build %s: %s' % (build, e))

    try:
        ds.stage_artifacts(build, ['test_suites'])
    except dev_server.DevServerException as e:
        raise error.StageControlFileFailure('Failed to stage %s: %s' %
                                            (build, e))

    # Collect the control files specified in this build
    cfile_getter = control_file_getter.DevServerGetter.create(build, ds)
    control_file_list = cfile_getter.get_control_file_list()

    test_objects = []
    _id = 0
    for control_file_path in control_file_list:
        # Read and parse the control file
        control_file = cfile_getter.get_control_file_contents(
            control_file_path)
        control_obj = control_data.parse_control_string(control_file)

        # Extract the values needed for the AFE from the control_obj.
        # The keys list represents attributes in the control_obj that
        # are required by the AFE
        keys = ['author', 'doc', 'name', 'time', 'test_type', 'experimental',
                'test_category', 'test_class', 'dependencies', 'run_verify',
                'sync_count', 'job_retries', 'retries', 'path']

        # Missing attributes default to the empty string so every test dict
        # carries the full key set the AFE expects.
        test_object = {}
        for key in keys:
            test_object[key] = getattr(control_obj, key) if hasattr(
                control_obj, key) else ''

        # Unfortunately, the AFE expects different key-names for certain
        # values, these must be corrected to avoid the risk of tests
        # being omitted by the AFE.
        # The 'id' is an additional value used in the AFE.
        # The control_data parsing does not reference 'run_reset', but it
        # is also used in the AFE and defaults to True.
        test_object['id'] = _id
        test_object['run_reset'] = True
        test_object['description'] = test_object.get('doc', '')
        test_object['test_time'] = test_object.get('time', 0)
        test_object['test_retry'] = test_object.get('retries', 0)

        # Fix the test name to be consistent with the current presentation
        # of test names in the AFE: the directory name of the control file,
        # optionally suffixed with the control file's dotted sub-name
        # (e.g. 'control.subtest' -> 'dirname:subtest').
        testpath, subname = os.path.split(control_file_path)
        testname = os.path.basename(testpath)
        subname = subname.split('.')[1:]
        if subname:
            testname = '%s:%s' % (testname, ':'.join(subname))
        test_object['name'] = testname

        # Correct the test path as parse_control_string sets an empty string.
        test_object['path'] = control_file_path

        _id += 1
        test_objects.append(test_object)

    test_objects = sorted(test_objects, key=lambda x: x.get('name'))
    return rpc_utils.prepare_for_serialization(test_objects)
def get_test_labels_for_tests(**test_filter_data):
    """Return serialized TestLabels attached to the matching tests."""
    matching_ids = models.TestView.objects.query_test_label_ids(
        test_filter_data)
    matching_labels = models.TestLabel.list_objects({'id__in': matching_ids})
    return rpc_utils.prepare_for_serialization(matching_labels)
def get_static_data():
    """Return static display metadata (fields, labels, views) for the UI."""
    result = {}

    # (Display name, field) pairs for each groupable field; the display name
    # comes from extra_fields when defined there, else from the model field.
    group_fields = []
    for field in models.TestView.group_fields:
        if field in models.TestView.extra_fields:
            display = models.TestView.extra_fields[field]
        else:
            display = models.TestView.get_field_dict()[field].verbose_name
        group_fields.append((display.capitalize(), field))

    model_fields = [(field.verbose_name.capitalize(), field.column)
                    for field in models.TestView._meta.fields]
    extra_fields = [(display.capitalize(), sql)
                    for sql, display
                    in models.TestView.extra_fields.iteritems()]

    # Perf keyval of interest for each recognized benchmark.
    benchmark_key = {
        'kernbench': 'elapsed',
        'dbench': 'throughput',
        'tbench': 'throughput',
        'unixbench': 'score',
        'iozone': '32768-4096-fwrite',
    }

    # (Display name, column) pairs for the perf view, in display order.
    tko_perf_view = [
        ['Test Index', 'test_idx'],
        ['Job Index', 'job_idx'],
        ['Test Name', 'test_name'],
        ['Subdirectory', 'subdir'],
        ['Kernel Index', 'kernel_idx'],
        ['Status Index', 'status_idx'],
        ['Reason', 'reason'],
        ['Host Index', 'machine_idx'],
        ['Test Started Time', 'test_started_time'],
        ['Test Finished Time', 'test_finished_time'],
        ['Job Tag', 'job_tag'],
        ['Job Name', 'job_name'],
        ['Owner', 'job_owner'],
        ['Job Queued Time', 'job_queued_time'],
        ['Job Started Time', 'job_started_time'],
        ['Job Finished Time', 'job_finished_time'],
        ['Hostname', 'hostname'],
        ['Platform', 'platform'],
        ['Machine Owner', 'machine_owner'],
        ['Kernel Hash', 'kernel_hash'],
        ['Kernel Base', 'kernel_base'],
        ['Kernel', 'kernel'],
        ['Status', 'status'],
        ['Iteration Number', 'iteration'],
        ['Performance Keyval (Key)', 'iteration_key'],
        ['Performance Keyval (Value)', 'iteration_value'],
    ]

    result['priorities'] = priorities.Priority.choices()
    result['group_fields'] = sorted(group_fields)
    result['all_fields'] = sorted(model_fields + extra_fields)
    result['test_labels'] = get_test_labels(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        afe_models.User.current_user().get_object_dict())
    result['benchmark_key'] = benchmark_key
    result['tko_perf_view'] = tko_perf_view
    result['tko_test_view'] = model_fields
    result['preconfigs'] = preconfigs.manager.all_preconfigs()
    result['motd'] = rpc_utils.get_motd()
    return result
def get_users(**filter_data):
    """Return serialized User rows matching the given filters."""
    users = models.User.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(users)
def get_test_views(**filter_data):
    """Return serialized TestView rows matching the given filters."""
    views = models.TestView.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(views)
def get_atomic_groups(**filter_data):
    """Return serialized AtomicGroup rows matching the given filters."""
    atomic_groups = models.AtomicGroup.list_objects(filter_data)
    return rpc_utils.prepare_for_serialization(atomic_groups)
def get_plan(id):
    """Return the serialized Plan identified by id (name or numeric id)."""
    plan = models.Plan.smart_get(id)
    return afe_rpc_utils.prepare_for_serialization(plan.get_object_dict())
def get_static_data():
    """\
    Return a dictionary of data that rarely changes and is otherwise
    inaccessible.  The dictionary contains:

    priorities: List of job priority choices.
    default_priority: Default priority value for new jobs.
    users: Sorted list of all users.
    labels: Sorted list of all labels.
    atomic_groups: Sorted list of all atomic groups.
    tests: Sorted list of all tests.
    profilers: Sorted list of all profilers.
    current_user: Logged-in username.
    host_statuses: Sorted list of possible Host statuses.
    job_statuses: Sorted list of possible HostQueueEntry statuses.
    job_timeout_default: The default job timeout length in hours.
    parse_failed_repair_default: Default value for the parse_failed_repair job
            option.
    reboot_before_options: A list of valid RebootBefore string enums.
    reboot_after_options: A list of valid RebootAfter string enums.
    motd: Server's message of the day.
    status_dictionary: A mapping from one word job status names to a more
            informative description.
    """
    job_fields = models.Job.get_field_dict()

    result = {}
    result['priorities'] = models.Job.Priority.choices()
    # Translate the numeric default priority into its display string.
    result['default_priority'] = models.Job.Priority.get_string(
        job_fields['priority'].default)
    result['users'] = get_users(sort_by=['login'])
    result['labels'] = get_labels(sort_by=['-platform', 'name'])
    result['atomic_groups'] = get_atomic_groups(sort_by=['name'])
    result['tests'] = get_tests(sort_by=['name'])
    result['profilers'] = get_profilers(sort_by=['name'])
    result['current_user'] = rpc_utils.prepare_for_serialization(
        thread_local.get_user().get_object_dict())
    result['host_statuses'] = sorted(models.Host.Status.names)
    result['job_statuses'] = sorted(models.HostQueueEntry.Status.names)
    result['job_timeout_default'] = models.Job.DEFAULT_TIMEOUT
    result['job_max_runtime_hrs_default'] = models.Job.DEFAULT_MAX_RUNTIME_HRS
    result['parse_failed_repair_default'] = bool(
        models.Job.DEFAULT_PARSE_FAILED_REPAIR)
    result['reboot_before_options'] = models.RebootBefore.names
    result['reboot_after_options'] = models.RebootAfter.names
    result['motd'] = rpc_utils.get_motd()

    # Map terse HostQueueEntry status names to user-friendly descriptions.
    result['status_dictionary'] = {
        'Aborted': 'Aborted',
        'Verifying': 'Verifying Host',
        'Pending': 'Waiting on other hosts',
        'Running': 'Running autoserv',
        'Completed': 'Autoserv completed',
        'Failed': 'Failed to complete',
        'Queued': 'Queued',
        'Starting': "Next in host's queue",
        'Stopped': 'Other host(s) failed verify',
        'Parsing': 'Awaiting parse of final results',
        'Gathering': 'Gathering log files',
        'Template': 'Template job for recurring run',
    }
    return result