def _load_details(provider_name, run_id, test_name):
    """Return test details for a run, loading and caching them on demand.

    When `test_name` is None the full test-name -> details mapping for the
    run is returned; otherwise only that single test's details.

    Raises NoRunDataException, ProviderNotFoundException,
    RunNotFoundException, or TestNotFoundException as appropriate.
    """
    key = (provider_name, run_id)
    if key not in _cached_details:
        providers = get_providers()
        if not providers:
            raise NoRunDataException("No test providers could be loaded")
        if provider_name not in providers:
            raise ProviderNotFoundException("Requested subunit provider could "
                                            "not be found: " + provider_name)

        provider = providers[provider_name]
        try:
            stream = provider.get_stream(run_id)
            converted_run = convert_stream(stream, strip_details=False)
            # remap to a dict so details can be fetched directly by test name
            _cached_details[key] = {
                entry['name']: entry['details'] for entry in converted_run
            }
        except (KeyError, IndexError):
            raise RunNotFoundException("Requested test run could not be found")

    details_map = _cached_details[key]
    if test_name is None:
        return details_map
    if test_name not in details_map:
        raise TestNotFoundException(
            "Requested test could not be found in run")
    return details_map[test_name]
def _load_run(provider_name, run_id):
    """Return the converted (details-stripped) run, caching per provider/run.

    Raises NoRunDataException, ProviderNotFoundException, or
    RunNotFoundException when the run cannot be resolved.
    """
    key = (provider_name, run_id)
    if key in _cached_run:
        return _cached_run[key]

    providers = get_providers()
    if not providers:
        raise NoRunDataException("No test providers could be loaded")
    if provider_name not in providers:
        raise ProviderNotFoundException("Requested subunit provider could not "
                                        "be found")

    provider = providers[provider_name]
    try:
        # assume first repo for now
        stream = provider.get_stream(run_id)
        # strip details for now
        # TODO(provide method for getting details on demand)
        # (preferably for individual tests to avoid bloat)
        converted = convert_stream(stream, strip_details=True)
    except KeyError:
        raise RunNotFoundException("Requested test run could not be found")

    _cached_run[key] = converted
    return converted
def export_tempest_details(stream, output_stream):
    """Export the per-test details of a subunit stream as JSON.

    Writes a test-name -> details mapping to `output_stream` and closes it.

    :param stream: the subunit stream to convert
    :param output_stream: writable file-like object receiving the JSON
    """
    # BUG FIX: this previously converted with strip_details=True, which
    # discards every entry's details before they are exported — the map
    # would contain only stripped/empty payloads. The artifact-based
    # details exporter in this file correctly uses strip_details=False;
    # match it here since details are the whole point of this export.
    converted = tempest_subunit.convert_stream(stream, strip_details=False)
    output = {entry['name']: entry['details'] for entry in converted}
    json.dump(output, output_stream, default=json_date_handler)
    output_stream.close()
def export_tempest_raw(name, subunit, output_dir, prefix, compress):
    """Dump the converted run verbatim to '<prefix>-raw.json'.

    Returns a (converted_run, artifact) pair so callers can reuse the
    converted data for further exports.
    """
    converted = tempest_subunit.convert_stream(subunit, strip_details=True)
    out_file, out_path = open_compressed(output_dir, prefix + '-raw.json',
                                         compress)
    json.dump(converted, out_file, default=json_date_handler)
    out_file.close()
    artifact = build_artifact(out_path, name, 'subunit',
                              'application/json', True, compress)
    return converted, artifact
def export_tempest_details(name, subunit, output_dir, prefix, compress):
    """Dump a test-name -> details map to '<prefix>-details.json'.

    Converts with strip_details=False so the per-test details survive,
    then returns the built artifact record.
    """
    converted = tempest_subunit.convert_stream(subunit, strip_details=False)
    details = {entry['name']: entry['details'] for entry in converted}
    out_file, out_path = open_compressed(output_dir,
                                         prefix + '-details.json',
                                         compress)
    json.dump(details, out_file, default=json_date_handler)
    out_file.close()
    return build_artifact(out_path, name, 'subunit-details',
                          'application/json', False, compress)
def export_tempest_raw(stream, output_stream):
    """Write the converted (details-stripped) run as JSON and close the sink."""
    run = tempest_subunit.convert_stream(stream, strip_details=True)
    json.dump(run, output_stream, default=json_date_handler)
    output_stream.close()
def export_tempest_tree(stream, output_stream):
    """Write the reorganized (tree-shaped) run as JSON and close the sink."""
    run = tempest_subunit.convert_stream(stream, strip_details=True)
    json.dump(tempest_subunit.reorganize(run), output_stream,
              default=json_date_handler)
    output_stream.close()