Example #1
    def _write_layer_AdvancedActivation(self, layer, inputs, outputs, i):
        nm, _, inputs, outputs, is_model_input, is_model_output = self._format_io_names(
            layer, inputs, outputs, True)
        if is_model_input:
            inp = inputs + '->'
        else:
            inp = inputs + '.'

        if layer_type(layer) == 'LeakyReLU':
            self.layers += 'k2c_LeakyReLU(' + inp + 'array,' + \
                inp + 'numel,' + nm + '_alpha); \n'
        elif layer_type(layer) == 'PReLU':
            self.layers += 'k2c_PReLU(' + inp + 'array,' + inp + \
                'numel,' + nm + '_alpha.array); \n'
        elif layer_type(layer) == 'ELU':
            self.layers += 'k2c_ELU(' + inp + 'array,' + inp + \
                'numel,' + nm + '_alpha); \n'
        elif layer_type(layer) == 'ThresholdedReLU':
            self.layers += 'k2c_ThresholdedReLU(' + inp + 'array,' + \
                inp + 'numel,' + nm + '_theta); \n'
        elif layer_type(layer) == 'ReLU':
            self.layers += 'k2c_ReLU(' + inp + 'array,' + inp + \
                           'numel,' + nm + '_max_value, \n\t' + \
                           nm + '_negative_slope,' + nm + '_threshold); \n'
        self._write_dummy_layer(layer, inputs, outputs, i, is_model_input,
                                is_model_output)
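
Every snippet on this page dispatches on layer_type. The helper itself is not reproduced here; a minimal sketch consistent with its use above (it must return the bare class name of a layer, e.g. 'LeakyReLU' or 'Conv2D') is:

def layer_type(layer):
    # Bare class name of a Keras layer instance, e.g. 'Dense' or 'LeakyReLU'.
    return type(layer).__name__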
Example #2
def config_supported_check(model):
    valid = True
    log = ''
    for layer in model.layers:
        config = layer.get_config()
        if config.get('data_format') not in ['channels_last', None]:
            valid = False
            log += "data format '" + layer.get_config()['data_format'] +\
                   "' for layer '" + layer.name + \
                   "' is not supported at this time. \n"
        if config.get('return_state'):
            valid = False
            log += "'return_state' option for layer '" + layer.name + \
                   "' is not supported at this time. \n"
        if config.get('shared_axes'):
            valid = False
            log += "shared axes option for layer '" + layer.name + \
                   "' is not supported at this time. \n"
        if layer_type(layer) in [
                'Add', 'Subtract', 'Multiply', 'Average', 'Maximum', 'Minimum'
        ]:
            inshps = layer.input_shape
            insize = [np.prod(inp[1:]) for inp in inshps]
            if len(set(insize)) > 1:
                valid = False
                log += "broadcasting merge functions between tensors" + \
                       " of different shapes for layer '" + \
                       layer.name + "' is not currently supported. \n"
        if layer_type(layer) in ['BatchNormalizationV1', 'BatchNormalization']:
            if len(flatten(config.get('axis'))) > 1:
                valid = False
                log += 'batch normalization along multiple axes is' + \
                       ' not currently supported. \n'
    return valid, log
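
A minimal usage sketch for the checker above; the model here is illustrative only and assumes a standard tf.keras install:

from tensorflow import keras

model = keras.Sequential([
    keras.Input(shape=(4,)),
    keras.layers.Dense(8, activation='relu'),
    keras.layers.Dense(1),
])
valid, log = config_supported_check(model)
if not valid:
    print(log)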
Example #3
    def write_layers(self, verbose=True):
        """Writes layers in the correct graph order.

        Args:
            verbose (bool): whether to print progress

        Returns:
            layers (str): C code for calling layer functions in correct order

        """
        written_io = set(self.model_inputs)
        unwritten_io = set(get_all_io_names(self.model)) - written_io
        while len(unwritten_io) > 0:
            for layer in self.model.layers:
                layer_inputs, layer_outputs = get_layer_io_names(layer)
                for i, (inp, outp) in enumerate(zip(layer_inputs, layer_outputs)):
                    if (set(flatten(inp)).issubset(written_io) and
                            set(flatten(outp)).issubset(unwritten_io)) or \
                            layer_type(layer) == 'InputLayer':
                        if verbose:
                            print('Writing layer ', outp)
                        method = getattr(self,
                                         '_write_layer_' + layer_type(layer))
                        method(layer, inp, outp, i)
                        written_io |= set(flatten(inp))
                        written_io |= set(flatten(outp))
                        unwritten_io -= set(flatten(inp))
                        unwritten_io -= set(flatten(outp))
        return self.layers
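
write_layers relies on flatten to normalize layer I/O names, which may arrive either as a single string or as nested lists of strings, before the set bookkeeping. A plausible sketch of such a helper (hypothetical; keras2c's actual implementation may differ):

def flatten(x):
    # A bare string counts as one name, not as an iterable of characters.
    if isinstance(x, str):
        return [x]
    out = []
    for item in x:
        out.extend(flatten(item))
    return out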
Example #4
    def _write_layer_UpSampling(self, layer, inputs, outputs, i):
        nm, _, inputs, outputs = self._format_io_names(layer, inputs, outputs)
        if layer_type(layer)[-2:] == '1D':
            self.layers += 'k2c_upsampling1d('
        elif layer_type(layer)[-2:] == '2D':
            self.layers += 'k2c_upsampling2d('
        elif layer_type(layer)[-2:] == '3D':
            self.layers += 'k2c_upsampling3d('
        self.layers += outputs + ',' + inputs + ',' + nm + '_size); \n'
Example #5
    def _write_layer_Cropping(self, layer, inputs, outputs, i):
        nm, _, inputs, outputs = self._format_io_names(layer, inputs, outputs)
        if layer_type(layer)[-2:] == '1D':
            self.layers += 'k2c_crop1d('
        elif layer_type(layer)[-2:] == '2D':
            self.layers += 'k2c_crop2d('
        elif layer_type(layer)[-2:] == '3D':
            self.layers += 'k2c_crop3d('
        self.layers += outputs + ',' + inputs + ',' + nm + '_crop); \n'
Example #6
def layers_supported_check(model):
    valid = True
    log = ''
    for layer in model.layers:
        if not hasattr(Weights2C, 'write_weights_' + layer_type(layer)) \
           or not hasattr(Layers2C, 'write_layer_' + layer_type(layer)):
            valid = False
            log += "layer type '" + \
                layer_type(layer) + "' is not supported at this time. \n"
    return valid, log
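
A caller would typically treat a False result as fatal and surface the accumulated log, e.g. given a Keras model in scope:

valid, log = layers_supported_check(model)
if not valid:
    raise ValueError('model cannot be converted:\n' + log)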
Example #7
def check_layer(layer):
    valid = True
    log = ''
    if hasattr(layer, 'layer'):
        flag, templog = check_layer(layer.layer)
        valid = valid and flag
        log += templog
    if not hasattr(Weights2C, '_write_weights_' + layer_type(layer)) \
       or not hasattr(Layers2C, '_write_layer_' + layer_type(layer)):
        valid = False
        log += "layer type '" + layer_type(layer) + \
            "' is not supported at this time. \n"
    return valid, log
Example #8
    def _write_layer_ZeroPad(self, layer, inputs, outputs, i):
        if 'Zero' in layer_type(layer):
            nm, _, inputs, outputs = self._format_io_names(
                layer, inputs, outputs)
        else:
            nm = layer.name
        if layer_type(layer)[-2:] == '1D':
            self.layers += 'k2c_pad1d('
        elif layer_type(layer)[-2:] == '2D':
            self.layers += 'k2c_pad2d('
        elif layer_type(layer)[-2:] == '3D':
            self.layers += 'k2c_pad3d('
        self.layers += outputs + ',' + inputs + ',' + nm + \
            '_fill, \n\t' + nm + '_pad); \n'
Example #9
    def _write_layer_GlobalPooling(self, layer, inputs, outputs, i):
        _, _, inputs, outputs = self._format_io_names(layer, inputs, outputs)
        if 'Max' in layer_type(layer):
            self.layers += 'k2c_global_max_pooling('
        else:
            self.layers += 'k2c_global_avg_pooling('
        self.layers += outputs + ',' + inputs + '); \n'
Example #10
    def write_layer_Pooling(self, layer, inputs, outputs, i):
        nm, pnm, inputs, outputs = self.format_io_names(layer, inputs, outputs)
        if 'Max' in layer_type(layer):
            s = 'k2c_maxpool'
        else:
            s = 'k2c_avgpool'
        if layer_type(layer)[-2:] == '1D':
            s += '1d(' + outputs + ','
        elif layer_type(layer)[-2:] == '2D':
            s += '2d(' + outputs + ','

        if layer.get_config()['padding'] == 'valid':
            s += inputs + ','
        else:
            self.write_layer_ZeroPad(layer, inputs, pnm + '_padded_input', i)
            s += pnm + '_padded_input,'

        s += nm + '_pool_size, \n\t' + nm + '_stride); \n'
        self.layers += s
Example #11
    def write_layer_Conv(self, layer, inputs, outputs, i):
        nm, pnm, inputs, outputs = self.format_io_names(layer, inputs, outputs)
        activation = 'k2c_' + layer.get_config()['activation']
        if layer_type(layer)[-2:] == '1D':
            fname = 'k2c_conv1d('
        elif layer_type(layer)[-2:] == '2D':
            fname = 'k2c_conv2d('
        elif layer_type(layer)[-2:] == '3D':
            fname = 'k2c_conv3d('
        if layer.get_config()['padding'] == 'valid':
            self.layers += fname + outputs + ',' + inputs + ',' + \
                pnm + '_kernel, \n\t' + pnm + '_bias,' + nm + \
                '_stride,' + nm + '_dilation,' + activation + '); \n'
        else:
            self.write_layer_ZeroPad(layer, inputs, pnm + '_padded_input', i)
            self.layers += fname + outputs + ',' + pnm + \
                '_padded_input,' + pnm + '_kernel, \n\t' + \
                pnm + '_bias,' + nm + '_stride,' + nm + \
                '_dilation,' + activation + '); \n'
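
The activation argument is read straight from the layer config and prefixed with 'k2c_', so the emitted call names a C activation function. A standalone check of that mapping, assuming tf.keras:

from tensorflow import keras

conv = keras.layers.Conv1D(filters=4, kernel_size=3, activation='relu')
print('k2c_' + conv.get_config()['activation'])  # prints: k2c_relu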
Example #12
    def write_layers(self):
        written_io = set(self.model_inputs)
        unwritten_io = set(get_all_io_names(self.model)) - written_io
        while len(unwritten_io) > 0:
            for layer in self.model.layers:
                layer_inputs, layer_outputs = get_layer_io_names(layer)
                for i, (inp, outp) in enumerate(zip(layer_inputs, layer_outputs)):
                    if (set(flatten(inp)).issubset(written_io) and
                            set(flatten(outp)).issubset(unwritten_io)) or \
                            layer_type(layer) == 'InputLayer':
                        print('Writing layer ', outp)
                        method = getattr(self,
                                         'write_layer_' + layer_type(layer))
                        method(layer, inp, outp, i)
                        written_io |= set(flatten(inp))
                        written_io |= set(flatten(outp))
                        unwritten_io -= set(flatten(inp))
                        unwritten_io -= set(flatten(outp))
        return self.layers
Example #13
def check_layer(layer):
    valid = True
    log = ''
    if hasattr(layer, 'layer'):
        flag, templog = check_layer(layer.layer)
        valid = valid and flag
        log += templog
    config = layer.get_config()
    # The 'foo' default distinguishes a missing key from an explicit
    # merge_mode=None in the layer config.
    if config.get('merge_mode', 'foo') is None:
        valid = False
        log += "merge mode of 'None' for Bidirectional layers is not " +\
               "supported. Try using two separate RNNs instead. \n"
    if config.get('data_format') not in ['channels_last', None]:
        valid = False
        log += "data format '" + config['data_format'] +\
               "' for layer '" + layer.name + \
               "' is not supported at this time. \n"
    if config.get('return_state'):
        valid = False
        log += "'return_state' option for layer '" + layer.name + \
               "' is not supported at this time. \n"
    if config.get('shared_axes'):
        valid = False
        log += "shared axes option for layer '" + layer.name + \
               "' is not supported at this time. \n"
    if layer_type(layer) in [
            'Add', 'Subtract', 'Multiply', 'Average', 'Maximum', 'Minimum'
    ]:
        inshps = layer.input_shape
        insize = [np.prod(inp[1:]) for inp in inshps]
        if len(set(insize)) > 1:
            valid = False
            log += "broadcasting merge functions between tensors" + \
                   " of different shapes for layer '" + \
                   layer.name + "' is not currently supported. \n"
    if layer_type(layer) in ['BatchNormalizationV1', 'BatchNormalization']:
        if len(flatten(config.get('axis'))) > 1:
            valid = False
            log += 'batch normalization along multiple axes is' + \
                   ' not currently supported. \n'
    return valid, log
Example #14
    def _write_layer_Bidirectional(self, layer, inputs, outputs, i):
        subname = layer.layer.name
        method = getattr(self, '_write_layer_' + layer_type(layer.layer))
        method(layer.forward_layer, inputs, 'forward_' + subname, i)
        method(layer.backward_layer, inputs, 'backward_' + subname, i)
        mode = layer.merge_mode
        inputs = ['forward_' + subname, 'backward_' + subname]
        if layer.layer.return_sequences:
            self.layers += 'k2c_flip(&backward_' + subname + '_output,0); \n'
        if mode == 'sum':
            self._write_layer_Merge(layer, inputs, outputs, i, 'Add')
        elif mode == 'mul':
            self._write_layer_Merge(layer, inputs, outputs, i, 'Multiply')
        elif mode == 'ave':
            self._write_layer_Merge(layer, inputs, outputs, i, 'Average')
        elif mode == 'concat':
            self._write_layer_Concatenate(layer, inputs, outputs, i)
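
The elif ladder is a fixed mapping from Keras merge_mode strings to merge writers. A behavior-identical, table-driven sketch of the same dispatch (method shown standalone for illustration):

_MERGE_FOR_MODE = {'sum': 'Add', 'mul': 'Multiply', 'ave': 'Average'}

def _dispatch_merge(self, layer, inputs, outputs, i, mode):
    # Same dispatch as the elif chain in _write_layer_Bidirectional.
    if mode == 'concat':
        self._write_layer_Concatenate(layer, inputs, outputs, i)
    elif mode in _MERGE_FOR_MODE:
        self._write_layer_Merge(layer, inputs, outputs, i,
                                _MERGE_FOR_MODE[mode])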
Example #15
    def write_weights(self, verbose=True):
        """Parses and generates code for model weights and other parameters

        Args:
            verbose (bool): whether to print progress

        Returns:
            (tuple): tuple containing

                - **stack_vars** (*str*): code for variables allocated on the stack
                - **malloc_vars** (*dict*): dictionary of name, value pairs for arrays
                    to be allocated on the heap
                - **static_vars** (*str*): code for a C struct containing static variables
                    (e.g., states of a stateful RNN)
        """
        for layer in self.model.layers:
            method = getattr(self, '_write_weights_' + layer_type(layer))
            method(layer)
        return self.stack_vars, self.malloc_vars, self._write_static_vars()
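
A hedged usage sketch; the Weights2C constructor is not shown on this page, so the arguments below are assumptions:

w2c = Weights2C(model, 'mynet')  # hypothetical constructor arguments
stack_vars, malloc_vars, static_vars = w2c.write_weights(verbose=False)
print(sorted(malloc_vars))  # names of the arrays destined for the heap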
Example #16
    def _write_layer_TimeDistributed(self, layer, inputs, outputs, i):
        # nm, pnm, inputs, outputs = self._format_io_names(layer, inputs, outputs)
        self.layers += 'for(size_t i=0; i<' + layer.name + \
            '_timesteps; ++i) { \n'
        if inputs in self.model_inputs:
            self.layers += layer.layer.name + '_timeslice_input.array = &' + \
                inputs + '_input->array[i*' + layer.name + '_in_offset]; \n'
        else:
            self.layers += layer.layer.name + '_timeslice_input.array = &' + \
                inputs + '_output.array[i*' + layer.name + '_in_offset]; \n'
        if outputs in self.model_outputs:
            self.layers += layer.layer.name + '_timeslice_output.array = &' + \
                outputs + '_output->array[i*' + layer.name + '_out_offset]; \n'
        else:
            self.layers += layer.layer.name + '_timeslice_output.array = &' + \
                outputs + '_output.array[i*' + layer.name + '_out_offset]; \n'

        inp = '&' + layer.layer.name + '_timeslice'
        outp = '&' + layer.layer.name + '_timeslice'
        method = getattr(self, '_write_layer_' + layer_type(layer.layer))
        method(layer.layer, inp, outp, i)
        self.layers += '\n } \n'
Example #17
    def write_layer_Merge(self, layer, inputs, outputs, i):
        nm, _, inputs, outputs = self.format_io_names(layer, inputs, outputs)
        if layer_type(layer) == 'Subtract':
            self.layers += 'k2c_subtract('
        elif layer_type(layer) == 'Add':
            self.layers += 'k2c_add('
        elif layer_type(layer) == 'Multiply':
            self.layers += 'k2c_multiply('
        elif layer_type(layer) == 'Average':
            self.layers += 'k2c_average('
        elif layer_type(layer) == 'Maximum':
            self.layers += 'k2c_max('
        elif layer_type(layer) == 'Minimum':
            self.layers += 'k2c_min('
        self.layers += outputs + ',' + nm + '_num_tensors' + str(i) + ','
        c = ','.join(inputs)
        self.layers += c + '); \n'
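
Because the input names are joined with commas, an Add over k tensors emits a call with k + 2 arguments. A standalone reproduction of the string assembly, using hypothetical tensor names:

nm, outputs, i = 'add1', 'add1_output', 0
inputs = ['dense1_output', 'dense2_output']
call = 'k2c_add(' + outputs + ',' + nm + '_num_tensors' + str(i) + ',' + \
    ','.join(inputs) + '); \n'
print(call)  # k2c_add(add1_output,add1_num_tensors0,dense1_output,dense2_output);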
Example #18
    def write_weights(self):
        for layer in self.model.layers:
            method = getattr(self, 'write_weights_' + layer_type(layer))
            method(layer)
        return self.stack_vars, self.malloc_vars, self.write_static_vars()
Example #19
    def write_weights_layer(self, layer):
        method = getattr(self, 'write_weights_' + layer_type(layer))
        return method(layer)
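
This one-line indirection is what lets wrapper layers reuse the inner layer's weight writer; for example, a TimeDistributed weights writer could simply re-dispatch on the wrapped layer (hypothetical sketch):

    def write_weights_TimeDistributed(self, layer):
        # A wrapper's trainable weights live in the wrapped layer, so
        # delegate to its writer via the generic dispatcher (sketch).
        self.write_weights_layer(layer.layer)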