def _write_tensor(array, filename):
    directory = os.path.dirname(filename)
    if directory and not os.path.exists(directory):
        os.makedirs(directory)
    with open(filename, "wb") as file:
        nnef.write_tensor(file=file, tensor=array)
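# A minimal round-trip sketch for _write_tensor above (hypothetical path and
# shape); nnef.read_tensor is the reading counterpart in the official 'nnef'
# package, matching the write_tensor calls used throughout this section.
import os
import numpy as np
import nnef

array = np.random.rand(1, 3, 4, 4).astype(np.float32)
_write_tensor(array, os.path.join("out", "example.dat"))  # creates 'out' if missing
with open(os.path.join("out", "example.dat"), "rb") as file:
    restored = nnef.read_tensor(file)
assert np.array_equal(array, restored)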
def write_nnef_tensor(filename, value):
    with open(filename, 'wb') as file:
        dtype = _numpy_dtype_remap.get(value.dtype.type)
        if dtype is not None:
            value = value.astype(dtype)
        nnef.write_tensor(file, value)
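# write_nnef_tensor above references a module-level _numpy_dtype_remap defined
# elsewhere; the table below is a plausible sketch (an assumption, not the
# actual definition) that narrows dtypes the NNEF binary format does not store
# natively down to ones it does. Its keys are numpy scalar types, matching the
# value.dtype.type lookup above.
import numpy as np

_numpy_dtype_remap = {
    np.float64: np.float32,  # assumed: scalars serialized as float32
    np.int64: np.int32,      # assumed: integers serialized as int32
}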
def _write_nnef_tensor(filename, tensor):
    # guard against an empty dirname when filename has no directory component
    directory = os.path.dirname(filename)
    if directory and not os.path.exists(directory):
        os.makedirs(directory)
    with open(filename, "wb") as file:
        nnef.write_tensor(file, tensor, version=(1, 0))
def main():
    try:
        args = get_args(sys.argv)

        if not args.output:
            if sys.stdout.isatty():
                raise utils.NNEFToolsException("No output provided.")
            utils.set_stdout_to_binary()

        if args.dtype is None:
            distribution = args.params[0]
            if distribution == 'binomial':
                args.dtype = "int32"
            elif distribution == 'bernoulli':
                args.dtype = "bool"
            else:
                args.dtype = "float32"

        args.params[1:] = [float(param) for param in args.params[1:]]

        if args.seed != -1:
            np.random.seed(args.seed)

        random_input = RandomInput(*args.params)
        arr = create_input(random_input, np_dtype=np.dtype(args.dtype), shape=args.shape)

        if args.output:
            write_nnef_tensor(args.output, arr)
        else:
            nnef.write_tensor(sys.stdout, arr)
    except Exception as e:
        print('Error: {}'.format(e), file=sys.stderr)
        exit(1)
def main():
    try:
        args = get_args(sys.argv)

        if not args.output:
            if sys.stdout.isatty():
                raise utils.NNEFToolsException("No output provided.")
            utils.set_stdout_to_binary()

        image_input = ImageInput([os.path.join(path, '*') if os.path.isdir(path) else path
                                  for path in args.input],
                                 color_format=args.color,
                                 data_format=args.format,
                                 range=args.range,
                                 norm=[args.mean, args.std])

        shape = None
        if args.size is not None:
            shape = [1, 3, args.size[1], args.size[0]] if args.format == 'NCHW' \
                else [1, args.size[1], args.size[0], 3]

        arr = create_input(image_input, np_dtype=np.dtype(args.dtype), shape=shape, allow_bigger_batch=True)

        if args.output:
            write_nnef_tensor(args.output, arr)
        else:
            nnef.write_tensor(sys.stdout, arr)
    except Exception as e:
        print('Error: {}'.format(e), file=sys.stderr)
        exit(1)
def main(args):
    if args.seed is not None:
        np.random.seed(args.seed)

    distributions = {
        'scalar': uniform(0.0, 1.0),
        'integer': integers(0, 100),
        'logical': bernoulli(0.5),
    }

    try:
        random = eval(args.random)
        if isinstance(random, dict):
            distributions.update({key: _ensure_lambda(value) for key, value in random.items()})
        else:
            random = _ensure_lambda(random)
            if args.random.startswith('integers'):
                distributions['integer'] = random
            elif args.random.startswith('bernoulli'):
                distributions['logical'] = random
            else:
                distributions['scalar'] = random
    except Exception as e:
        print("Could not evaluate distribution: " + str(e), file=sys.stderr)
        return -1

    graph = nnef.parse_file(os.path.join(args.model, 'graph.nnef'))

    for op in graph.operations:
        if args.weights and op.name == 'variable':
            label = op.attribs['label']
            shape = op.attribs['shape']
            data = distributions[op.dtype](shape)
            filename = os.path.join(args.model, label + '.dat')
            os.makedirs(os.path.split(filename)[0], exist_ok=True)
            with open(filename, 'wb') as file:
                nnef.write_tensor(file, data)
            if args.verbose:
                print("Generated weight '{}'".format(filename))
        if args.inputs and op.name == 'external':
            name = op.outputs['output']
            shape = op.attribs['shape']
            data = distributions[op.dtype](shape)
            filename = os.path.join(args.model, args.inputs, name + '.dat')
            os.makedirs(os.path.split(filename)[0], exist_ok=True)
            with open(filename, 'wb') as file:
                nnef.write_tensor(file, data)
            if args.verbose:
                print("Generated input '{}'".format(filename))
    return 0
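# uniform, integers, bernoulli and _ensure_lambda above are helpers defined
# elsewhere in the module; the sketch below is one plausible implementation
# (an assumption, not the actual code): each factory returns a shape -> ndarray
# callable keyed to the NNEF dtypes 'scalar', 'integer' and 'logical', and
# _ensure_lambda turns a bare factory reference into such a callable.
import numpy as np

def uniform(low=0.0, high=1.0):
    return lambda shape: np.random.uniform(low, high, shape).astype(np.float32)

def integers(low=0, high=100):
    return lambda shape: np.random.randint(low, high, shape).astype(np.int32)

def bernoulli(prob=0.5):
    return lambda shape: np.random.uniform(0.0, 1.0, shape) < prob

def _ensure_lambda(value):
    # mirror the pattern used elsewhere in this section: a non-lambda value is
    # a factory that yields the shape -> ndarray callable when invoked
    return value if getattr(value, '__name__', None) == '<lambda>' else value()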
def __call__(self, nnef_tensor, torch_tensor):
    # type: (NNEFTensor, torch.Tensor)->None
    if nnef_tensor.name in self.tensor_names_to_export:
        array = to_numpy_array(torch_tensor, nnef_dtype=nnef_tensor.dtype)
        if self.output_directory is not None:
            nnef_io.write_nnef_tensor(filename=os.path.join(self.output_directory,
                                                            nnef_tensor.name + ".dat"),
                                      array=array)
        else:
            nnef.write_tensor(sys.stdout, array)
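# to_numpy_array above converts a torch tensor into a numpy array with the
# dtype matching the NNEF tensor; a minimal sketch of such a helper, assuming
# the NNEF dtype names used elsewhere in this section ('scalar'/'integer'/
# 'logical') and that torch_tensor is a torch.Tensor:
import numpy as np

def to_numpy_array(torch_tensor, nnef_dtype):
    # detach from autograd and copy to host memory before converting
    array = torch_tensor.detach().cpu().numpy()
    return array.astype({'scalar': np.float32,
                         'integer': np.int32,
                         'logical': np.bool_}[nnef_dtype])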
def main(args):
    if args.seed is not None:
        np.random.seed(args.seed)

    try:
        distribution = eval(args.random)
        if not _is_lambda(distribution):
            distribution = distribution()
    except Exception as e:
        print("Could not evaluate distribution: " + str(e), file=sys.stderr)
        return -1

    graph = nnef.parse_file(os.path.join(args.model, 'graph.nnef'))

    for op in graph.operations:
        if args.weights and op.name == 'variable':
            label = op.attribs['label']
            shape = op.attribs['shape']
            data = distribution(shape).astype(_nnef_dtype_to_numpy[op.dtype])
            filename = os.path.join(args.model, label + '.dat')
            os.makedirs(os.path.split(filename)[0], exist_ok=True)
            with open(filename, 'wb') as file:
                nnef.write_tensor(file, data)
            if args.verbose:
                print("Generated weight '{}'".format(filename))
        if args.inputs and op.name == 'external':
            name = op.outputs['output']
            shape = op.attribs['shape']
            data = distribution(shape).astype(_nnef_dtype_to_numpy[op.dtype])
            filename = os.path.join(args.model, args.inputs, name + '.dat')
            os.makedirs(os.path.split(filename)[0], exist_ok=True)
            with open(filename, 'wb') as file:
                nnef.write_tensor(file, data)
            if args.verbose:
                print("Generated input '{}'".format(filename))
    return 0
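# _is_lambda and _nnef_dtype_to_numpy above are module-level helpers defined
# elsewhere; plausible sketches (assumptions, not the actual definitions):
import numpy as np

def _is_lambda(value):
    # a 'lambda shape: ...' expression takes the shape directly, whereas any
    # other evaluated expression is first called to obtain that lambda
    return callable(value) and getattr(value, '__name__', None) == '<lambda>'

_nnef_dtype_to_numpy = {
    'scalar': np.float32,
    'integer': np.int32,
    'logical': np.bool_,
}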
def main():
    try:
        args = get_args(sys.argv)

        if not args.output:
            if sys.stdout.isatty():
                raise utils.NNEFToolsException("No output provided.")
            utils.set_stdout_to_binary()

        args.params = InputSources(args.params)

        if args.seed != -1:
            np.random.seed(args.seed)

        parser_configs = NNEFParserConfig.load_configs(args.custom_operations, load_standard=True)
        reader = nnef_io.Reader(parser_configs=parser_configs, input_shape=args.shape)

        # read without weights
        graph = reader(os.path.join(args.network, 'graph.nnef')
                       if os.path.isdir(args.network) else args.network)

        inputs = tuple(args.params.create_input(name=input.name,
                                                np_dtype=input.get_numpy_dtype(),
                                                shape=input.shape,
                                                allow_bigger_batch=True)
                       for input in graph.inputs)

        if args.output:
            for tensor, array in zip(graph.inputs, inputs):
                nnef_io.write_nnef_tensor(os.path.join(args.output, tensor.name + '.dat'), array)
        else:
            for array in inputs:
                nnef.write_tensor(sys.stdout, array)
    except Exception as e:
        print('Error: {}'.format(e), file=sys.stderr)
        exit(1)
def main(args):
    if args.output is None:
        if not stdio.is_stdout_piped():
            print("Output must be piped", file=sys.stderr)
            return -1
        stdio.set_stdout_to_binary()

    images = []
    for pattern in args.images:
        filenames = sorted(glob.glob(os.path.expanduser(pattern)))
        assert filenames, "No files found for path: {}".format(pattern)
        for filename in filenames:
            img = skimage.img_as_ubyte(skimage.io.imread(filename))
            if len(img.shape) == 2:
                img = skimage.color.gray2rgb(img)
            img = transform_image(img, args.color, args.range, args.mean, args.std,
                                  args.size, np.dtype(args.dtype), args.format)
            images.append(img)

    if not all(img.shape == images[0].shape for img in images):
        print("The size of all images must be the same, or --size must be specified",
              file=sys.stderr)
        return -1

    tensor = np.stack(images)

    if args.output is not None:
        with open(args.output, 'wb') as file:
            nnef.write_tensor(file, tensor)
    else:
        nnef.write_tensor(sys.stdout, tensor)
    return 0
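# transform_image above is defined elsewhere; a condensed sketch of what such
# a helper typically does, assuming the argument order of the call above
# (color swap, resize, range remapping, mean/std normalization, layout change):
import numpy as np
import skimage.transform

def transform_image(img, color, range_, mean, std, size, dtype, data_format):
    if color == 'BGR':
        img = img[..., ::-1]                                    # RGB -> BGR
    if size is not None:
        img = skimage.img_as_ubyte(
            skimage.transform.resize(img, (size[1], size[0])))  # (width, height) -> (rows, cols)
    img = img.astype(np.float32)
    if range_ is not None:
        lo, hi = range_
        img = img / 255.0 * (hi - lo) + lo                      # map uint8 range to [lo, hi]
    img = (img - mean) / std                                    # normalize
    if data_format == 'NCHW':
        img = np.transpose(img, (2, 0, 1))                      # HWC -> CHW
    return img.astype(dtype)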
def main(args):
    if args.output is None:
        if not stdio.is_stdout_piped():
            print("Output must be piped", file=sys.stderr)
            return -1
        stdio.set_stdout_to_binary()

    try:
        distribution = eval(args.distribution)
        if not _is_lambda(distribution):
            distribution = distribution()
    except Exception as e:
        print("Could not evaluate distribution: " + str(e), file=sys.stderr)
        return -1

    tensor = distribution(args.shape).astype(np.dtype(args.dtype))

    if args.output is not None:
        with open(args.output, 'wb') as file:
            nnef.write_tensor(file, tensor)
    else:
        nnef.write_tensor(sys.stdout, tensor)
    return 0
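# Illustrative expressions the eval() above accepts for the distribution
# argument (assuming np is in scope at eval time): either a shape -> ndarray
# lambda, or a callable that returns such a lambda when invoked without
# arguments, as handled by the _is_lambda branch.
#
#   --distribution 'lambda shape: np.random.normal(0.0, 1.0, shape)'
#   --distribution 'lambda shape: np.full(shape, 0.5)'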
def save_to_file(self, save_file):
    assert isinstance(save_file, str), "Output model path is required to be of type str."
    network_dir, model_filename = os.path.split(save_file)
    assert model_filename == "graph.nnef", "The NNEF format requires writing to a file named 'graph.nnef'"

    cwd = os.getcwd()
    self.network_dir = network_dir
    if not os.path.exists(network_dir):
        os.makedirs(network_dir)
    os.chdir(network_dir)

    with open(model_filename, 'w') as f:
        f.write("\nversion %s;\n\n" % (NNEFVersion().version))

        # collect required fragments and the graph's input/output tensor names
        fragments = set()
        inputs = ''
        outputs = ''
        for node, data in self.nx_graph.nodes(data=True):
            if data['node'].op in ['gru']:
                fragments.add(data['node'].op)
            assert 'type_node' in data, "Node in graph is missing type information"
            if data['type_node'] == 'input':
                inputs += node + ', '
            elif data['type_node'] == 'output':
                outputs += node + ', '
        inputs = inputs[:-2]
        outputs = outputs[:-2]

        if fragments:
            f.write("extension KHR_enable_fragment_definitions, KHR_enable_operator_expressions;\n\n")
            for frag in fragments:
                if frag == 'gru':
                    gru = ("fragment gru(\n"
                           "\tinput: tensor<scalar>,\n"
                           "\tchannels: integer,\n"
                           "\tscope: string )\n"
                           "-> ( output: tensor<scalar> )\n"
                           "{\n"
                           "\tbatch = shape_of(input)[0];\n\n"
                           "\th = variable(shape = [batch,channels], label = scope + '/h');\n\n"
                           "\tm = concat([input, h], axis = 1);\n\n"
                           "\tz = sigmoid(linear_layer(m, channels = channels, scope = scope + '/z'));\n"
                           "\tr = sigmoid(linear_layer(m, channels = channels, scope = scope + '/r'));\n"
                           "\ts = tanh(linear_layer(concat([input, r * h], axis = 1), channels = channels, scope = scope + '/s'));\n\n"
                           "\toutput = update(h, z * s + (1.0 - z) * h);\n"
                           "}\n\n")
                    f.write(gru)

        f.write("graph %s( %s ) -> ( %s )\n" % (self.model_name.replace('/', '_'), inputs, outputs))
        f.write("{\n")
        for node, data in self.nx_graph.nodes(data=True):
            if 'node' in data:
                assert data['node'] is not None, "Node doesn't have an NNEF node!"
                nnef_node = data['node']
                if nnef_node.op == 'output_val':
                    continue
                f.write("\t")
                f.write(nnef_node.nnef_node_definition())
                f.write(";\n")
                if nnef_node.op == 'variable':
                    # variables also store their tensor data in a .dat file
                    # next to graph.nnef, under the directory given by label
                    loc = nnef_node.parameters['label'].rfind('/')
                    if loc != -1 and not os.path.isdir(nnef_node.parameters['label'][:loc]):
                        os.makedirs(nnef_node.parameters['label'][:loc])
                    with open(nnef_node.parameters['label'] + '.dat', 'wb') as dat_file:
                        nnef.write_tensor(dat_file, nnef_node.tensor)
            else:
                print("===> %s doesn't have a node!?" % (node))
        f.write("}")
    os.chdir(cwd)
def run_using_argv(argv):
    try:
        args = get_args(argv)
        write_outputs = args.output_names is None or args.output_names

        if args.input is None:
            if sys.stdin.isatty():
                raise utils.NNEFToolsException("No input provided!")
            utils.set_stdin_to_binary()

        if write_outputs:
            if args.output is None:
                if sys.stdout.isatty():
                    raise utils.NNEFToolsException("No output provided!")
                utils.set_stdout_to_binary()

        parent_dir_of_input_model = os.path.dirname(utils.path_without_trailing_separator(args.network))
        tmp_dir = None
        if args.network.endswith('.tgz'):
            nnef_path = tmp_dir = tempfile.mkdtemp(prefix="nnef_", dir=parent_dir_of_input_model)
            utils.tgz_extract(args.network, nnef_path)
        else:
            nnef_path = args.network

        try:
            parser_configs = NNEFParserConfig.load_configs(args.custom_operations, load_standard=True)

            # read without weights
            reader = nnef_io.Reader(parser_configs=parser_configs, infer_shapes=False)
            graph = reader(os.path.join(nnef_path, 'graph.nnef')
                           if os.path.isdir(nnef_path) else nnef_path)

            if args.input is None:
                inputs = tuple(nnef.read_tensor(sys.stdin) for _ in range(len(graph.inputs)))
            elif len(args.input) == 1 and os.path.isdir(args.input[0]):
                inputs = tuple(nnef_io.read_nnef_tensor(os.path.join(args.input[0], tensor.name + '.dat'))
                               for tensor in graph.inputs)
            else:
                inputs = tuple(nnef_io.read_nnef_tensor(path) for path in args.input)

            reader = nnef_io.Reader(parser_configs=parser_configs,
                                    input_shape=tuple(list(input.shape) for input in inputs))
            graph = reader(nnef_path)

            tensor_hooks = []

            stats_hook = None
            if args.stats:
                stats_hook = backend.StatisticsHook()
                tensor_hooks.append(stats_hook)

            if write_outputs and args.output_names is not None:
                if '*' in args.output_names:
                    tensor_hooks.append(backend.ActivationExportHook(
                        tensor_names=[t.name for t in graph.tensors
                                      if not t.is_constant and not t.is_variable],
                        output_directory=args.output))
                else:
                    tensor_hooks.append(backend.ActivationExportHook(
                        tensor_names=args.output_names,
                        output_directory=args.output))

            if args.permissive:
                backend.try_to_fix_unsupported_attributes(graph)

            outputs = backend.run(nnef_graph=graph,
                                  inputs=inputs,
                                  device=args.device,
                                  custom_operations=get_custom_runners(args.custom_operations),
                                  tensor_hooks=tensor_hooks)

            if write_outputs and args.output_names is None:
                if args.output is None:
                    for array in outputs:
                        nnef.write_tensor(sys.stdout, array)
                else:
                    for tensor, array in zip(graph.outputs, outputs):
                        nnef_io.write_nnef_tensor(os.path.join(args.output, tensor.name + '.dat'), array)

            if stats_hook:
                if args.stats.endswith('/') or args.stats.endswith('\\'):
                    stats_path = os.path.join(nnef_path, args.stats, 'graph.stats')
                else:
                    stats_path = os.path.join(nnef_path, args.stats)
                stats_hook.save_statistics(stats_path)

            if tmp_dir and (args.stats and _is_inside(nnef_path, args.stats)):
                if args.network.endswith('.tgz'):
                    print("Info: Changing input archive", file=sys.stderr)
                    shutil.move(args.network, args.network + '.nnef-tools-backup')
                    utils.tgz_compress(dir_path=nnef_path, file_path=args.network)
                    os.remove(args.network + '.nnef-tools-backup')
                else:
                    output_path = args.network.rsplit('.', 1)[0] + '.nnef.tgz'
                    backup_path = output_path + '.nnef-tools-backup'
                    if os.path.exists(output_path):
                        shutil.move(output_path, backup_path)
                    utils.tgz_compress(dir_path=nnef_path, file_path=output_path)
                    if os.path.exists(backup_path):
                        os.remove(backup_path)
        finally:
            if tmp_dir:
                shutil.rmtree(tmp_dir)
    except utils.NNEFToolsException as e:
        print("Error: " + str(e), file=sys.stderr)
        exit(1)
    except nnef.Error as e:
        print("Error: " + str(e), file=sys.stderr)
        exit(1)
def main(args):
    if args.input_path is not None:
        source = FileInputSource(args.input_path, args.io_transpose)
    elif args.random is not None:
        if args.batch_size == 0:
            print('batch-size must not be 0 when inputs are randomly generated', file=sys.stderr)
            return -1
        try:
            distribution = eval(args.random)
            if not _is_lambda(distribution):
                distribution = distribution()
            source = RandomInputSource(distribution)
        except Exception as e:
            print("Could not evaluate distribution: " + str(e), file=sys.stderr)
            return -1
    else:
        if not stdio.is_stdin_piped():
            print('Input must be piped', file=sys.stderr)
            return -1
        stdio.set_stdin_to_binary()
        source = StreamInputSource(sys.stdin, args.io_transpose)

    output_names = eval(args.output_names) \
        if args.output_names is not None and args.output_names != "*" else args.output_names
    custom_operators = get_custom_operators(args.custom_operators) \
        if args.custom_operators is not None else None

    if args.random is not None and args.seed is not None:
        np.random.seed(args.seed)

    collect_statistics = args.statistics is not None
    try:
        executor = get_executor(args.format, args.model, collect_statistics,
                                custom_operators, args.decompose)

        if isinstance(output_names, dict):
            fetch_names = output_names.keys()
        elif output_names == "*":
            tensors = executor.tensor_info()
            fetch_names = [info.name for info in tensors] if tensors is not None else None
        else:
            fetch_names = output_names

        input_info = executor.input_info()
        if args.batch_size is not None:
            input_info = batched_info(input_info, args.batch_size)
        output_info = executor.output_info()

        inputs = {info.name: source(info.name, info.shape, info.dtype) for info in input_info}

        batch_size = args.batch_size
        if batch_size == 0:
            # batch-size 0 means: take the batch-size from the inputs themselves
            batch_size = next(iter(six.itervalues(inputs))).shape[0]
            if not all(input.shape[0] == batch_size for input in six.itervalues(inputs)):
                print('All inputs must have the same batch-size', file=sys.stderr)
                return -1

        if batch_size is not None and batch_size != 1:
            # execute the model one sample at a time and concatenate the results
            slices = {name: [] for name in fetch_names} if fetch_names is not None else \
                     {info.name: [] for info in output_info}
            stats = None
            for k in range(batch_size):
                slice_inputs = {name: np.expand_dims(data[k], axis=0)
                                for name, data in six.iteritems(inputs)}
                slice_outputs, slice_stats = executor(slice_inputs, fetch_names, collect_statistics)
                for name, data in six.iteritems(slice_outputs):
                    slices[name].append(data)
                if collect_statistics:
                    stats = accumulate_statistics(stats, slice_stats)
            outputs = {name: np.concatenate(items, axis=0) for name, items in six.iteritems(slices)}
        else:
            outputs, stats = executor(inputs, fetch_names, collect_statistics)
    except ValueError as e:
        print(e, file=sys.stderr)
        return -1

    for name, value in six.iteritems(outputs):
        if needs_transpose(args.io_transpose, name):
            outputs[name] = transpose_channels_last_to_first(value)

    if isinstance(output_names, dict):
        outputs = {output_names[name]: value for name, value in six.iteritems(outputs)}

    if args.tensor_mapping is not None:
        with open(args.tensor_mapping) as file:
            tensor_mapping = json.load(file)
        if stats is not None:
            stats = {tensor_mapping.get(key, key): value for key, value in six.iteritems(stats)}

    if stats is not None:
        write_statistics(args.statistics, stats)
        print('Written {}'.format(args.statistics))

    if args.output_path is not None:
        for name, value in six.iteritems(outputs):
            filename = os.path.join(args.output_path, name + ".dat")
            write_nnef_tensor(filename, value)
            print('Written {}'.format(filename))
    else:
        if not stdio.is_stdout_piped():
            if collect_statistics:
                return 0
            print('Output must be piped', file=sys.stderr)
            return -1
        stdio.set_stdout_to_binary()
        for name, value in six.iteritems(outputs):
            nnef.write_tensor(sys.stdout, value)
    return 0