def load_components(file_descr, graph, component_layer_map=None):
    """Read all Kaldi components from ``file_descr`` and attach them to ``graph``.

    For nnet2 models (``component_layer_map is None``) a fresh node is created per
    component; for nnet3 models the parameters are written into the nodes already
    listed in ``component_layer_map`` (name -> list of layer ids).

    :param file_descr: binary file-like object positioned before <NumComponents>
    :param graph: the Graph being populated
    :param component_layer_map: nnet3 mapping of component name to layer ids, or None for nnet2
    :return: list of ids of the loaded layers
    :raises Error: when an nnet3 component name is missing from component_layer_map
    """
    num_components = collect_until_token_and_read(file_descr, b'<NumComponents>')
    log.debug('Network contains {} components'.format(num_components))
    is_nnet3 = component_layer_map is not None

    if not is_nnet3:
        collect_until_token(file_descr, b'<Components>')

    all_components = list()
    name = ""
    for _ in range(num_components):
        if is_nnet3:
            # NOTE: np.string_ was removed in NumPy 2.0; np.bytes_ is the same
            # type under its stable name and works on all NumPy versions.
            name = collect_until_token_and_read(file_descr, b'<ComponentName>', np.bytes_)
        component_type = find_next_component(file_descr)
        if component_type == end_of_nnet_tag.lower()[1:-1]:
            break

        start_index = file_descr.tell()
        end_tag, end_index = find_end_of_component(file_descr, component_type)
        # read dim info where possible to simplify shape calculation for MemoryOffset
        # shape calculation for MemoryOffset can't be done through shape of previous layer because
        # it is separated in 2 parts to remove cycle from graph
        file_descr.seek(start_index)
        dim = 0
        try:
            collect_until_token(file_descr, b'<Dim>', size_search_zone=end_index - start_index)
            cur_index = file_descr.tell()
            # only accept a <Dim> found strictly inside this component's byte range
            if start_index < cur_index < end_index:
                dim = read_binary_integer32_token(file_descr)
            else:
                file_descr.seek(start_index)
        except Error:
            # the component has no <Dim> tag; rewind and continue without it
            file_descr.seek(start_index)

        if is_nnet3:
            if name in component_layer_map:
                layer_id = component_layer_map[name][0]
                for layer in component_layer_map[name]:
                    node = Node(graph, layer)
                    node['parameters'] = get_parameters(file_descr, start_index, end_index)
                    node['op'] = component_type
                    # Propagate dim info to MemoryOffset consumers to simplify their shape calculation
                    for o_n_name, params in node.get_outputs():
                        o_n = Node(graph, o_n_name)
                        if o_n['op'] == 'MemoryOffset' and dim != 0:
                            o_n['parameters']['element_size'] = dim
            else:
                raise Error("Something wrong with layer {}".format(name))
        else:
            layer_id = graph.unique_id(prefix=component_type)
            graph.add_node(layer_id,
                           parameters=get_parameters(file_descr, start_index, end_index),
                           op=component_type,
                           kind='op')

        all_components.append(layer_id)
        log.debug('{} (type is {}) was loaded'.format(layer_id, component_type))

    return all_components
def test_collect_until_token_and_read(self):
    """collect_until_token_and_read must skip ahead to <InputDim> and decode the 32-bit integer after it."""
    tag = b'<InputDim>'
    # A serialized NormalizeComponent: each numeric value is preceded by its byte size.
    test_file = b''.join([
        b'<ComponentName> opgru3.renorm <NormalizeComponent> <InputDim> ',
        self.pack_value(4, 'B'),
        self.pack_value(256, 'I'),
        b' <TargetRms> ',
        self.pack_value(4, 'B'),
        self.pack_value(0.5, 'f'),
        b' <AddLogStddev> F</NormalizeComponent>',
    ])
    value = collect_until_token_and_read(self.bytesio_from(test_file), tag)
    self.assertEqual(value, 256)
def extract(cls, node):
    """Extract an LstmNonlinearityComponent into an LstmNonLinearity node.

    Reads the 3 x N weight matrix after <Params> and embeds its rows as the
    i/f/o gate weights; optionally reads the <UseDropout> flag.

    :param node: graph node whose ``parameters`` stream holds the serialized component
    :return: cls.enabled
    """
    pb = node.parameters
    collect_until_token(pb, b'<Params>')
    ifo_x_weights, ifo_x_weights_shape = read_binary_matrix(pb)

    try:
        # np.bool was removed in NumPy 1.24; it was an alias for the built-in
        # bool, so passing bool preserves the original behavior.
        use_dropout = collect_until_token_and_read(pb, b'<UseDropout>', bool)
    except Error:
        # layer does not have a UseDropout attribute, so set it to False
        use_dropout = False

    mapping_rule = {'use_dropout': use_dropout}

    assert len(ifo_x_weights_shape) == 2, "Unexpected shape of weights in LSTMNonLinearityComponent"
    assert ifo_x_weights_shape[0] == 3, "Unexpected shape of weights in LSTMNonLinearityComponent"

    # one row per gate: input, forget, output
    ifo_x_weights = ifo_x_weights.reshape(ifo_x_weights_shape)
    embed_input(mapping_rule, 1, 'i_weights', ifo_x_weights[0][:])
    embed_input(mapping_rule, 2, 'f_weights', ifo_x_weights[1][:])
    embed_input(mapping_rule, 3, 'o_weights', ifo_x_weights[2][:])

    LstmNonLinearity.update_node_stat(node, mapping_rule)
    return cls.enabled
def extract(cls, node):
    """Extract a dropout component into a DropoutMask node.

    Reads <OutputDim> and <DropoutProportion> from the node's parameter stream
    and stores the keep-probability (1 - proportion) together with the size.

    :param node: graph node whose ``parameters`` stream holds the serialized component
    :return: cls.enabled
    """
    params = node.parameters
    out_dim = collect_until_token_and_read(params, b'<OutputDim>')
    collect_until_token(params, b'<DropoutProportion>')
    proportion = read_binary_float_token(params)
    attrs = {
        'dropout_proportion': 1.0 - proportion,
        'size': out_dim,
    }
    DropoutMask.update_node_stat(node, attrs)
    return cls.enabled