Example #1
    def write_weights_Flatten(self, layer):
        _, outputs = get_layer_io_names(layer)
        for i, outp in enumerate(outputs):
            inshp = layer.get_input_at(i).shape[1:]
            # Flatten preserves the element count, so a flattened zero
            # array of the input shape is the right size for the output.
            if outp not in self.model_io[1]:
                self.write_weights_array2c(
                    np.zeros(inshp).flatten(), outp + '_output')
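The flattened zero array is only a placeholder with the correct element count. A quick check with a hypothetical input shape shows why the input shape can stand in for the output:

    import numpy as np

    inshp = (4, 8, 3)  # hypothetical Flatten input shape, batch dim dropped
    placeholder = np.zeros(inshp).flatten()
    assert placeholder.shape == (4 * 8 * 3,)  # same element count, rank 1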
Example #2
    def write_layers(self, verbose=True):
        """Writes layers in the correct graph order.

        Args:
            verbose (bool): whether to print progress

        Returns:
            layers (str): C code for calling layer functions in correct order

        """
        written_io = set(self.model_inputs)
        unwritten_io = set(get_all_io_names(self.model)) - written_io
        while len(unwritten_io) > 0:
            for layer in self.model.layers:
                layer_inputs, layer_outputs = get_layer_io_names(layer)
                for i, (inp, outp) in enumerate(
                        zip(layer_inputs, layer_outputs)):
                    if (set(flatten(inp)).issubset(written_io) and
                            set(flatten(outp)).issubset(unwritten_io)) or \
                            layer_type(layer) == 'InputLayer':
                        if verbose:
                            print('Writing layer ', outp)
                        method = getattr(self,
                                         '_write_layer_' + layer_type(layer))
                        method(layer, inp, outp, i)
                        written_io |= set(flatten(inp))
                        written_io |= set(flatten(outp))
                        unwritten_io -= set(flatten(inp))
                        unwritten_io -= set(flatten(outp))
        return self.layers
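The loop keeps sweeping the layer list, emitting any layer whose inputs have already been written, until nothing is left. A minimal standalone sketch of the same dependency-ordered traversal, using a hypothetical deps mapping instead of Keras layers:

    def emit_in_order(deps, available):
        """deps: {node: set of prerequisite nodes}; available: the
        initially satisfied nodes. Yields nodes in a writable order."""
        pending = set(deps) - set(available)
        available = set(available)
        while pending:
            progressed = False
            for node in sorted(pending):
                if deps[node] <= available:
                    yield node
                    available.add(node)
                    pending.discard(node)
                    progressed = True
            if not progressed:
                raise ValueError('unresolvable dependencies: ' + repr(pending))

    # Toy graph: two parallel branches merging into one output.
    deps = {'dense1': {'input'}, 'dense2': {'input'},
            'concat': {'dense1', 'dense2'}}
    print(list(emit_in_order(deps, {'input'})))  # ['dense1', 'dense2', 'concat']

Unlike write_layers, the sketch raises when a sweep makes no progress instead of looping forever on a layer that can never become writable.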
Example #3
    def _write_weights_Bidirectional(self, layer):
        # Make sure the forward/backward sublayers have been built, so
        # their input shapes are defined.
        try:
            _ = layer.forward_layer.input_shape
            _ = layer.backward_layer.input_shape
        except AttributeError:
            temp_input = tf.keras.layers.Input(layer.input_shape[2:])
            layer.layer(temp_input)
            layer.forward_layer(temp_input)
            layer.backward_layer(temp_input)
        self._write_weights_layer(layer.backward_layer)
        self._write_weights_layer(layer.forward_layer)
        if layer.merge_mode:
            self._write_outputs(layer)
            self.stack_vars += 'size_t ' + layer.name + \
                '_num_tensors0 = 2; \n'
            if layer.merge_mode == 'concat':
                # Concatenate along the time axis when sequences are
                # returned, otherwise along the feature axis.
                ax = 1 if layer.return_sequences else 0
                self.stack_vars += 'size_t ' + layer.name + '_axis = ' + \
                    str(ax) + '; \n'
        else:
            # No merge mode: expose the forward and backward outputs as
            # two separate tensors.
            output_names = get_layer_io_names(layer)[1][0]
            subname = layer.layer.name
            self.stack_vars += 'k2c_tensor * ' + \
                output_names[0] + ' = forward_' + subname + '_output; \n'
            self.stack_vars += 'k2c_tensor * ' + \
                output_names[1] + ' = backward_' + subname + '_output; \n'
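For reference, forward_layer, backward_layer, and merge_mode are attributes of tf.keras.layers.Bidirectional itself; a small usage sketch (hypothetical layer sizes, assuming TF 2.x) shows the two cases the branches above distinguish:

    import tensorflow as tf

    # merge_mode='concat' (the default): one merged output tensor.
    bidir = tf.keras.layers.Bidirectional(
        tf.keras.layers.LSTM(8, return_sequences=True), merge_mode='concat')
    # merge_mode=None: two separate output tensors, one per direction.
    bidir_split = tf.keras.layers.Bidirectional(
        tf.keras.layers.LSTM(8), merge_mode=None)
    print(bidir.merge_mode, bidir_split.merge_mode)  # concat None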
Example #4
    def write_outputs(self, layer):
        _, outputs = get_layer_io_names(layer)
        for i, outp in enumerate(outputs):
            outshp = layer.get_output_at(i).shape[1:]
            # Allocate a zero-initialized output array unless this tensor
            # is one of the model outputs.
            if outp not in self.model_io[1]:
                self.write_weights_array2c(np.zeros(outshp), outp + '_output')
Example #5
    def _write_weights_Merge(self, layer):
        self._write_outputs(layer)
        inputs, outputs = get_layer_io_names(layer)
        for i, (inp, outp) in enumerate(zip(inputs, outputs)):
            # Record how many input tensors this merge node combines.
            num_tensors = len(inp)
            self.stack_vars += 'size_t ' + layer.name + '_num_tensors' + \
                str(i) + ' = ' + str(num_tensors) + '; \n'
        self.stack_vars += '\n\n'
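The string accumulated in stack_vars is plain C source. For a hypothetical two-input merge layer named add_1, the loop above appends a declaration like this:

    layer_name = 'add_1'   # hypothetical merge layer name
    num_tensors = 2
    decl = 'size_t ' + layer_name + '_num_tensors' + str(0) + \
        ' = ' + str(num_tensors) + '; \n'
    print(decl)  # size_t add_1_num_tensors0 = 2;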
Example #6
    def write_weights_Merge(self, layer):
        inputs, outputs = get_layer_io_names(layer)
        for i, (inp, outp) in enumerate(zip(inputs, outputs)):
            outshp = layer.get_output_at(i).shape[1:]
            num_tensors = len(inp)
            self.stack_vars += 'size_t ' + layer.name + '_num_tensors' + \
                str(i) + ' = ' + str(num_tensors) + '; \n'
            # Allocate a zero buffer for each output that is not already
            # a model output.
            if outp not in self.model_io[1]:
                self.write_weights_array2c(np.zeros(outshp), outp + '_output')
        self.stack_vars += '\n\n'
Example #7
    def write_weights_Concatenate(self, layer):
        inputs, outputs = get_layer_io_names(layer)
        for i, (inp, outp) in enumerate(zip(inputs, outputs)):
            outshp = layer.get_output_at(i).shape[1:]
            num_tensors = len(inp)
            self.stack_vars += 'size_t ' + layer.name + '_num_tensors' + \
                str(i) + ' = ' + str(num_tensors) + '; \n'
            # Wrap a negative concatenation axis, then shift down by one
            # because the generated C tensors drop the batch dimension.
            ax = layer.get_config()['axis']
            if ax < 0:
                ax += len(layer.get_input_at(i)[0].shape)
            self.stack_vars += 'size_t ' + layer.name + '_axis = ' + \
                str(ax - 1) + '; \n'
            if outp not in self.model_io[1]:
                self.write_weights_array2c(np.zeros(outshp), outp + '_output')
        self.stack_vars += '\n\n'
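The axis bookkeeping is simple arithmetic: Keras axes count the batch dimension, while the generated C tensors do not. A sketch of that conversion in isolation (hypothetical rank and axis values):

    def keras_axis_to_c(ax, keras_rank):
        """Convert a Keras concat axis (batch dim included, possibly
        negative) to the axis used by the batchless C tensors."""
        if ax < 0:
            ax += keras_rank
        return ax - 1

    print(keras_axis_to_c(-1, 4))  # 2: last axis of a rank-3 C tensor
    print(keras_axis_to_c(2, 4))   # 1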
Example #8
    def _write_outputs(self, layer):
        _, outputs = get_layer_io_names(layer)
        if len(outputs) > 1:
            for i, outp in enumerate(outputs):
                outshp = layer.get_output_at(i).shape[1:]
                if outp not in self.model_io[1]:
                    self._write_weights_array2c(np.zeros(outshp),
                                                outp + '_output')
        else:
            outshp = layer.output_shape[1:]
            if outputs[0] not in self.model_io[1]:
                self._write_weights_array2c(np.zeros(outshp),
                                            layer.name + '_output')
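_write_weights_array2c itself is not shown in these examples. As a rough illustration only, here is a minimal hypothetical stand-in that renders a numpy array as a flat C float initializer (the real keras2c emitter also produces k2c_tensor metadata):

    import numpy as np

    def write_weights_array2c_sketch(array, name):
        """Hypothetical stand-in, not the keras2c implementation."""
        flat = np.asarray(array, dtype=np.float32).flatten()
        vals = ','.join('{:+.8e}f'.format(x) for x in flat)
        return 'float ' + name + '[' + str(flat.size) + '] = {' + vals + '}; \n'

    print(write_weights_array2c_sketch(np.zeros((2, 2)), 'dense_1_output'))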
Example #9
    def write_layers(self):
        written_io = set(self.model_inputs)
        unwritten_io = set(get_all_io_names(self.model)) - written_io
        while len(unwritten_io) > 0:
            for layer in self.model.layers:
                layer_inputs, layer_outputs = get_layer_io_names(layer)
                for i, (inp, outp) in enumerate(
                        zip(layer_inputs, layer_outputs)):
                    if (set(flatten(inp)).issubset(written_io) and
                            set(flatten(outp)).issubset(unwritten_io)) or \
                            layer_type(layer) == 'InputLayer':
                        print('Writing layer ', outp)
                        method = getattr(self,
                                         'write_layer_' + layer_type(layer))
                        method(layer, inp, outp, i)
                        written_io |= set(flatten(inp))
                        written_io |= set(flatten(outp))
                        unwritten_io -= set(flatten(inp))
                        unwritten_io -= set(flatten(outp))
        return self.layers