Example 1
  def __call__(self, inputs):
    """Generate Computation Graph"""
    with tf.variable_scope(self.scope):
      if self.data_format == 'channels_first':
        inputs = tf.transpose(inputs, [0, 3, 1, 2])

      inputs = inputs / 255  # scale pixel values to [0, 1]

      inputs, route2, route4 = darknet53(inputs, data_format=self.data_format)

      inputs, route1 = feature_pyramid_network(inputs, filters=512, data_format=self.data_format)
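      # First detection head: coarsest feature map (largest anchors)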
      detect1 = yolo_layer(inputs,
                           n_classes=self.n_classes,
                           anchors=_ANCHORS[6:],
                           img_size=self.input_size,
                           data_format=self.data_format)

      inputs = Conv2D(route1, filters=256, kernel_size=1, data_format=self.data_format)
      inputs = BatchNormalization(inputs, data_format=self.data_format)
      inputs = LeakyReLU(inputs)

      upsample_size = route2.get_shape().as_list()
      inputs = upsample(inputs, out_shape=upsample_size, data_format=self.data_format)
      axis = 1 if self.data_format == 'channels_first' else 3
      inputs = tf.concat([inputs, route2], axis=axis)

      inputs, route3 = feature_pyramid_network(inputs, filters=256, data_format=self.data_format)
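      # Second detection head: intermediate feature map (medium anchors)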
      detect2 = yolo_layer(inputs,
                           n_classes=self.n_classes,
                           anchors=_ANCHORS[3:6],
                           img_size=self.input_size,
                           data_format=self.data_format)

      inputs = Conv2D(route3, filters=128, kernel_size=1, data_format=self.data_format)
      inputs = BatchNormalization(inputs, data_format=self.data_format)
      inputs = LeakyReLU(inputs)

      upsample_size = route4.get_shape().as_list()
      inputs = upsample(inputs, out_shape=upsample_size, data_format=self.data_format)
      axis = 1 if self.data_format == 'channels_first' else 3
      inputs = tf.concat([inputs, route4], axis=axis)

      inputs, _ = feature_pyramid_network(inputs, filters=128, data_format=self.data_format)
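      # Third detection head: finest feature map (smallest anchors)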
      detect3 = yolo_layer(inputs,
                           n_classes=self.n_classes,
                           anchors=_ANCHORS[:3],
                           img_size=self.input_size,
                           data_format=self.data_format)

      inputs = tf.concat([detect1, detect2, detect3], axis=1)
      inputs = build_boxes(inputs)
      boxes_dicts = non_max_suppression(inputs,
                                        n_classes=self.n_classes,
                                        max_output_size=self.max_output_size,
                                        iou_threshold=self.iou_threshold,
                                        confidence_threshold=self.confidence_threshold)
      return boxes_dicts
Example 2
def darknet53_residual_block(inputs, filters, data_format, strides=1):
  """Creates a residual block for Darknet."""
  shortcut = inputs

  inputs = Conv2D(inputs, filters=filters, kernel_size=1, strides=strides, data_format=data_format)
  inputs = BatchNormalization(inputs, data_format=data_format)
  inputs = LeakyReLU(inputs)

  filters *= 2
  inputs = Conv2D(inputs, filters=filters, kernel_size=3, strides=strides, data_format=data_format)
  inputs = BatchNormalization(inputs, data_format=data_format)
  inputs = LeakyReLU(inputs)

  inputs += shortcut
  return inputs
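
A minimal usage sketch of the same residual pattern, rebuilt with stock tf.keras layers since Conv2D, BatchNormalization and LeakyReLU above are project-specific wrappers that are not shown here; the alpha=0.1 slope is an assumption, and the shortcut addition only works when the incoming tensor already has 2 * filters channels and strides stays at 1.

import tensorflow as tf

def residual_block_sketch(x, filters, data_format='channels_last'):
  """Approximation of darknet53_residual_block using stock tf.keras layers."""
  bn_axis = 1 if data_format == 'channels_first' else -1
  shortcut = x
  x = tf.keras.layers.Conv2D(filters, 1, padding='same', use_bias=False,
                             data_format=data_format)(x)
  x = tf.keras.layers.BatchNormalization(axis=bn_axis)(x)
  x = tf.keras.layers.LeakyReLU(alpha=0.1)(x)
  x = tf.keras.layers.Conv2D(2 * filters, 3, padding='same', use_bias=False,
                             data_format=data_format)(x)
  x = tf.keras.layers.BatchNormalization(axis=bn_axis)(x)
  x = tf.keras.layers.LeakyReLU(alpha=0.1)(x)
  return x + shortcut  # requires the input to already have 2 * filters channels
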
Example 3
def feature_pyramid_network(inputs, data_format):
    """Creates convolution operations layer used after Darknet"""
    inputs = Conv2D(inputs,
                    filters=256,
                    kernel_size=1,
                    data_format=data_format)
    inputs = BatchNormalization(inputs, data_format=data_format)
    inputs = LeakyReLU(inputs)
    route = inputs

    inputs = Conv2D(inputs,
                    filters=512,
                    kernel_size=3,
                    data_format=data_format)
    inputs = BatchNormalization(inputs, data_format=data_format)
    inputs = LeakyReLU(inputs)
    return inputs, route
Example 4
def darknet(inputs, data_format):
    """Creates Darknet model"""
    filters = 16
    for _ in range(4):
        inputs = Conv2D(inputs,
                        filters,
                        kernel_size=3,
                        data_format=data_format)
        inputs = BatchNormalization(inputs, data_format=data_format)
        inputs = LeakyReLU(inputs)
        inputs = MaxPooling2D(inputs,
                              pool_size=[2, 2],
                              strides=[2, 2],
                              data_format=data_format)
        filters *= 2

    inputs = Conv2D(inputs,
                    filters=256,
                    kernel_size=3,
                    data_format=data_format)
    inputs = BatchNormalization(inputs, data_format=data_format)
    inputs = LeakyReLU(inputs)
    route = inputs  # layer 8
    inputs = MaxPooling2D(inputs,
                          pool_size=[2, 2],
                          strides=[2, 2],
                          data_format=data_format)

    inputs = Conv2D(inputs,
                    filters=512,
                    kernel_size=3,
                    data_format=data_format)
    inputs = BatchNormalization(inputs, data_format=data_format)
    inputs = LeakyReLU(inputs)
    inputs = MaxPooling2D(inputs,
                          pool_size=[2, 2],
                          strides=[1, 1],
                          data_format=data_format)

    inputs = Conv2D(inputs,
                    filters=1024,
                    kernel_size=3,
                    data_format=data_format)
    inputs = BatchNormalization(inputs, data_format=data_format)
    inputs = LeakyReLU(inputs)
    return inputs, route
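
A hedged usage sketch for the backbone above, assuming the helper layers pad with 'same' (as tiny-YOLOv3 does) and a 416x416 channels_last input; the shapes are illustrative, not taken from the original code.

import tensorflow as tf

images = tf.ones([1, 416, 416, 3], dtype=tf.float32)  # dummy batch of one image
features, route = darknet(images, data_format='channels_last')
# Expected shapes under these assumptions:
#   route    -> (1, 26, 26, 256)   kept for the finer detection scale
#   features -> (1, 13, 13, 1024)  the last MaxPooling2D uses strides=[1, 1],
#                                  so the 13x13 resolution is preserved
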
Example 5
def convert_batch_norm(identifier, spec, previous_layer):
    return BatchNormalization(
        name=identifier,
        axis=0,  #spec.axis,
        momentum=spec.momentum,
        epsilon=0,  #spec.epsilon,
        center=0,  #spec.center,
        inbound_nodes=[previous_layer.name],
    )
Example 6
def convert_batch_norm(spec, previous_layer):
    print(dir(spec))
    return BatchNormalization(
        name=spec._get_name(),
        axis=0,  #spec.axis,
        momentum=spec.momentum,
        epsilon=0,  #spec.epsilon,
        center=0,  #spec.center,
        inbound_nodes=[previous_layer],
    )
Example 7
def darknet53(inputs, data_format):
  """Creates Darknet53 model"""
  inputs = Conv2D(inputs, filters=32, kernel_size=3, data_format=data_format)
  inputs = BatchNormalization(inputs, data_format=data_format)
  inputs = LeakyReLU(inputs)
  inputs = Conv2D(inputs, filters=64, kernel_size=3, strides=2, data_format=data_format)
  inputs = BatchNormalization(inputs, data_format=data_format)
  inputs = LeakyReLU(inputs)

  inputs = darknet53_residual_block(inputs, filters=32, data_format=data_format)

  inputs = Conv2D(inputs, filters=128, kernel_size=3, strides=2, data_format=data_format)
  inputs = BatchNormalization(inputs, data_format=data_format)
  inputs = LeakyReLU(inputs)

  for _ in range(2):
    inputs = darknet53_residual_block(inputs, filters=64, data_format=data_format)

  inputs = Conv2D(inputs, filters=256, kernel_size=3, strides=2, data_format=data_format)
  inputs = BatchNormalization(inputs, data_format=data_format)
  inputs = LeakyReLU(inputs)

  for _ in range(8):
    inputs = darknet53_residual_block(inputs, filters=128, data_format=data_format)
  route4 = inputs  # layer 36

  inputs = Conv2D(inputs, filters=512, kernel_size=3, strides=2, data_format=data_format)
  inputs = BatchNormalization(inputs, data_format=data_format)
  inputs = LeakyReLU(inputs)

  for _ in range(8):
    inputs = darknet53_residual_block(inputs, filters=256, data_format=data_format)
  route2 = inputs  # layer 61

  inputs = Conv2D(inputs, filters=1024, kernel_size=3, strides=2, data_format=data_format)
  inputs = BatchNormalization(inputs, data_format=data_format)
  inputs = LeakyReLU(inputs)

  for _ in range(4):
    inputs = darknet53_residual_block(inputs, filters=512, data_format=data_format)
  return inputs, route2, route4
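
A hedged usage sketch for darknet53 above; the shapes assume a 416x416 channels_last input and 'same' padding inside the Conv2D wrapper, which is how YOLOv3 downsamples with strided convolutions.

import tensorflow as tf

images = tf.ones([1, 416, 416, 3], dtype=tf.float32)  # dummy batch of one image
features, route2, route4 = darknet53(images, data_format='channels_last')
# Expected shapes under these assumptions:
#   route4   -> (1, 52, 52, 256)    stride-8 map, later concatenated for small objects
#   route2   -> (1, 26, 26, 512)    stride-16 map, used for the medium detection scale
#   features -> (1, 13, 13, 1024)   stride-32 map, feeds the first detection head
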
Example 8
def convert(keras_model, class_map, description="Neural Network Model"):
    """
	Convert a keras model to PMML
	@model. The keras model object
	@class_map. A map in the form {class_id: class_name}
	@description. A short description of the model
	Returns a DeepNeuralNetwork object which can be exported to PMML
	"""
    pmml = DeepNetwork(description=description, class_map=class_map)
    pmml.keras_model = keras_model
    pmml.model_name = keras_model.name
    config = keras_model.get_config()

    for layer in config['layers']:
        layer_class = layer['class_name']
        layer_config = layer['config']
        layer_inbound_nodes = layer['inbound_nodes']
        # Input
        if layer_class == "InputLayer":
            pmml._append_layer(
                InputLayer(name=layer_config['name'],
                           input_size=layer_config['batch_input_shape'][1:]))
        # Conv2D
        elif layer_class == "Conv2D":
            pmml._append_layer(
                Conv2D(
                    name=layer_config['name'],
                    channels=layer_config['filters'],
                    kernel_size=layer_config['kernel_size'],
                    dilation_rate=layer_config['dilation_rate'],
                    use_bias=layer_config['use_bias'],
                    activation=layer_config['activation'],
                    strides=layer_config['strides'],
                    padding=layer_config['padding'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        # DepthwiseConv2D
        elif layer_class == "DepthwiseConv2D":
            pmml._append_layer(
                DepthwiseConv2D(
                    name=layer_config['name'],
                    kernel_size=layer_config['kernel_size'],
                    depth_multiplier=layer_config['depth_multiplier'],
                    use_bias=layer_config['use_bias'],
                    activation=layer_config['activation'],
                    strides=layer_config['strides'],
                    padding=layer_config['padding'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        # MaxPooling
        elif layer_class == "MaxPooling2D":
            pmml._append_layer(
                MaxPooling2D(
                    name=layer_config['name'],
                    pool_size=layer_config['pool_size'],
                    strides=layer_config['strides'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        elif layer_class == "AveragePooling2D":
            pmml._append_layer(
                AveragePooling2D(
                    name=layer_config['name'],
                    pool_size=layer_config['pool_size'],
                    strides=layer_config['strides'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        elif layer_class == "GlobalAveragePooling2D":
            pmml._append_layer(
                GlobalAveragePooling2D(
                    name=layer_config['name'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        # Flatten
        elif layer_class == "Flatten":
            pmml._append_layer(
                Flatten(
                    name=layer_config['name'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        # Dense
        elif layer_class == "Dense":
            pmml._append_layer(
                Dense(
                    name=layer_config['name'],
                    channels=layer_config['units'],
                    use_bias=layer_config['use_bias'],
                    activation=layer_config['activation'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        # Zero padding layer
        elif layer_class == "ZeroPadding2D":
            pmml._append_layer(
                ZeroPadding2D(
                    name=layer_config['name'],
                    padding=layer_config['padding'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        # Reshape layer
        elif layer_class == "Reshape":
            pmml._append_layer(
                Reshape(
                    name=layer_config['name'],
                    target_shape=layer_config['target_shape'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        elif layer_class == "Dropout":
            pmml._append_layer(
                Dropout(
                    name=layer_config['name'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        # Batch Normalization
        elif layer_class == "BatchNormalization":
            pmml._append_layer(
                BatchNormalization(
                    name=layer_config['name'],
                    axis=layer_config['axis'],
                    momentum=layer_config['momentum'],
                    epsilon=layer_config['epsilon'],
                    center=layer_config['center'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        elif layer_class == "Add":
            pmml._append_layer(
                Merge(name=layer_config['name'],
                      inbound_nodes=get_inbound_nodes(layer_inbound_nodes)))
        elif layer_class == "Subtract":
            pmml._append_layer(
                Merge(name=layer_config['name'],
                      operator='subtract',
                      inbound_nodes=get_inbound_nodes(layer_inbound_nodes)))
        elif layer_class == "Dot":
            pmml._append_layer(
                Merge(name=layer_config['name'],
                      operator='dot',
                      inbound_nodes=get_inbound_nodes(layer_inbound_nodes)))
        elif layer_class == "Concatenate":
            pmml._append_layer(
                Merge(name=layer_config['name'],
                      axis=layer_config['axis'],
                      operator='concatenate',
                      inbound_nodes=get_inbound_nodes(layer_inbound_nodes)))
        elif layer_class == "Activation":
            pmml._append_layer(
                Activation(
                    name=layer_config['name'],
                    activation=layer_config['activation'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        elif layer_class == "ReLU":
            pmml._append_layer(
                Activation(
                    name=layer_config['name'],
                    activation='relu',
                    threshold=layer_config['threshold'],
                    max_value=layer_config['max_value'],
                    negative_slope=layer_config['negative_slope'],
                    inbound_nodes=get_inbound_nodes(layer_inbound_nodes),
                ))
        # Unknown layer
        else:
            raise ValueError("Unknown layer type: {}".format(layer_class))
    return pmml
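
A hedged usage sketch for convert(); MobileNet is chosen only because its layer types (Conv2D, DepthwiseConv2D, BatchNormalization, ZeroPadding2D, ReLU, GlobalAveragePooling2D, Reshape, Dropout, Activation) are all handled above, while DeepNetwork and get_inbound_nodes come from the surrounding PMML library, which is not shown in this example.

from tensorflow.keras.applications import MobileNet

keras_model = MobileNet(weights=None)                       # any supported Keras model
class_map = {i: "class_{}".format(i) for i in range(1000)}  # placeholder class names
pmml = convert(keras_model, class_map, description="MobileNet example")
# The returned DeepNetwork can then be serialized with whatever export method
# the PMML library provides; that step is outside this example.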