def generate_data(args: argparse.Namespace) -> None: def prepare_dir(path: Text) -> None: if os.path.exists(path): shutil.rmtree(path) os.makedirs(path) cases = model_test.collect_testcases() if args.op_type is None: # include all of the testcases cases += node_test.collect_testcases() else: # only include those testcases including the given operator cases += node_test.collect_testcases_by_operator(args.op_type) for case in cases: output_dir = os.path.join(args.output, case.kind, case.name) prepare_dir(output_dir) if case.kind == 'real': with open(os.path.join(output_dir, 'data.json'), 'w') as fi: json.dump( { 'url': case.url, 'model_name': case.model_name, 'rtol': case.rtol, 'atol': case.atol, }, fi, sort_keys=True) else: assert case.model with open(os.path.join(output_dir, 'model.onnx'), 'wb') as f: f.write(case.model.SerializeToString()) assert case.data_sets for i, (inputs, outputs) in enumerate(case.data_sets): data_set_dir = os.path.join(output_dir, 'test_data_set_{}'.format(i)) prepare_dir(data_set_dir) for j, input in enumerate(inputs): with open( os.path.join(data_set_dir, 'input_{}.pb'.format(j)), 'wb') as f: if case.model.graph.input[j].type.HasField('map_type'): f.write( numpy_helper.from_dict( input, case.model.graph.input[j].name). SerializeToString()) elif case.model.graph.input[j].type.HasField( 'sequence_type'): f.write( numpy_helper.from_list( input, case.model.graph.input[j].name). SerializeToString()) elif case.model.graph.input[j].type.HasField( 'optional_type'): f.write( numpy_helper.from_optional( input, case.model.graph.input[j].name). SerializeToString()) else:
def generate_data(args):  # type: (argparse.Namespace) -> None
    """Serialize every collected test case under ``args.output``.

    Each case gets a fresh directory ``<args.output>/<kind>/<name>``.
    Cases of kind ``'real'`` are stored as a ``data.json`` descriptor
    (url, model name, tolerances); every other case is stored as a
    ``model.onnx`` protobuf plus one ``test_data_set_<i>`` directory per
    data set containing serialized input/output tensor protos.
    """
    def prepare_dir(path):  # type: (Text) -> None
        # Recreate from scratch so stale files from a previous run never survive.
        if os.path.exists(path):
            shutil.rmtree(path)
        os.makedirs(path)

    all_cases = model_test.collect_testcases() + node_test.collect_testcases()
    for case in all_cases:
        case_dir = os.path.join(args.output, case.kind, case.name)
        prepare_dir(case_dir)

        if case.kind == 'real':
            # Real models are fetched at test time; persist metadata only.
            with open(os.path.join(case_dir, 'data.json'), 'w') as fi:
                json.dump({
                    'url': case.url,
                    'model_name': case.model_name,
                    'rtol': case.rtol,
                    'atol': case.atol,
                }, fi, sort_keys=True)
            continue

        with open(os.path.join(case_dir, 'model.onnx'), 'wb') as f:
            f.write(case.model.SerializeToString())

        for set_idx, (inputs, outputs) in enumerate(case.data_sets):
            set_dir = os.path.join(case_dir, 'test_data_set_{}'.format(set_idx))
            prepare_dir(set_dir)

            # Tensor protos are named after the corresponding graph value.
            for tensor_idx, arr in enumerate(inputs):
                proto = numpy_helper.from_array(
                    arr, case.model.graph.input[tensor_idx].name)
                dest = os.path.join(set_dir, 'input_{}.pb'.format(tensor_idx))
                with open(dest, 'wb') as f:
                    f.write(proto.SerializeToString())

            for tensor_idx, arr in enumerate(outputs):
                proto = numpy_helper.from_array(
                    arr, case.model.graph.output[tensor_idx].name)
                dest = os.path.join(set_dir, 'output_{}.pb'.format(tensor_idx))
                with open(dest, 'wb') as f:
                    f.write(proto.SerializeToString())
def generate_data(args):  # type: (argparse.Namespace) -> None
    """Serialize every collected test case under ``args.output``.

    Each case gets a fresh directory ``<args.output>/<kind>/<name>``.
    Cases of kind ``'real'`` are stored as a ``data.json`` descriptor;
    every other case is stored as a ``model.onnx`` protobuf plus one
    ``test_data_set_<i>`` directory per data set containing serialized
    input/output tensor protos named after the graph's value names.
    """
    def prepare_dir(path):  # type: (Text) -> None
        # Recreate from scratch so stale files from a previous run never survive.
        if os.path.exists(path):
            shutil.rmtree(path)
        os.makedirs(path)

    cases = model_test.collect_testcases() + node_test.collect_testcases()
    for case in cases:
        output_dir = os.path.join(
            args.output, case.kind, case.name)
        prepare_dir(output_dir)
        if case.kind == 'real':
            # Real models are fetched at test time; persist metadata only.
            with open(os.path.join(output_dir, 'data.json'), 'w') as fi:
                json.dump({
                    'url': case.url,
                    'model_name': case.model_name,
                    # Fix: rtol/atol were omitted here, so downstream
                    # consumers of data.json lost the per-model numeric
                    # comparison tolerances that the rest of this file
                    # reads from case.rtol / case.atol.
                    'rtol': case.rtol,
                    'atol': case.atol,
                }, fi, sort_keys=True)
        else:
            with open(os.path.join(output_dir, 'model.onnx'), 'wb') as f:
                f.write(case.model.SerializeToString())
            for i, (inputs, outputs) in enumerate(case.data_sets):
                data_set_dir = os.path.join(
                    output_dir, 'test_data_set_{}'.format(i))
                prepare_dir(data_set_dir)
                for j, input_np in enumerate(inputs):
                    tensor = numpy_helper.from_array(
                        input_np, case.model.graph.input[j].name)
                    with open(os.path.join(
                            data_set_dir, 'input_{}.pb'.format(j)), 'wb') as f:
                        f.write(tensor.SerializeToString())
                for j, output_np in enumerate(outputs):
                    tensor = numpy_helper.from_array(
                        output_np, case.model.graph.output[j].name)
                    with open(os.path.join(
                            data_set_dir, 'output_{}.pb'.format(j)), 'wb') as f:
                        f.write(tensor.SerializeToString())
for case in node_cases: output_dir = os.path.join(args.output, 'node', case.name) prepare_dir(output_dir) with open(os.path.join(output_dir, 'node.pb'), 'wb') as f: f.write(case.node.SerializeToString()) for i, input_np in enumerate(case.inputs): tensor = numpy_helper.from_array(input_np, case.node.input[i]) with open(os.path.join(output_dir, 'input_{}.pb'.format(i)), 'wb') as f: f.write(tensor.SerializeToString()) for i, output_np in enumerate(case.outputs): tensor = numpy_helper.from_array(output_np, case.node.output[i]) with open(os.path.join(output_dir, 'output_{}.pb'.format(i)), 'wb') as f: f.write(tensor.SerializeToString()) # model tests model_cases = model_test.collect_testcases() for case in model_cases: output_dir = os.path.join(args.output, 'model', case.name) prepare_dir(output_dir) with open(os.path.join(output_dir, 'data.json'), 'w') as f: json.dump({ 'url': case.url, 'model_name': case.model_name, }, f, sort_keys=True) def parse_args(): parser = argparse.ArgumentParser('backend-test-tools') subparsers = parser.add_subparsers() subparser = subparsers.add_parser('generate-data', help='convert testcases to test data')