Example #1
from tensorflow.keras import backend as K
from tensorflow.keras.activations import sigmoid
from tensorflow.keras.layers import Concatenate, Conv2D, Reshape, Softmax


def attach_multibox_head(network, source_layer_names,
                         num_priors=4, num_classes=10, activation='softmax'):
    heads = []
    for idx, layer_name in enumerate(source_layer_names):
        source_layer = network.get_layer(layer_name).output

        # Classification
        clf = Conv2D(num_priors * num_classes, (3, 3),
                     padding='same', name=f'clf_head{idx}_logit')(source_layer)
        clf = Reshape((-1, num_classes),
                      name=f'clf_head{idx}_reshape')(clf)
        if activation == 'softmax':
            clf = Softmax(axis=-1, name=f'clf_head{idx}')(clf)
        elif activation == 'sigmoid':
            clf = sigmoid(clf)
        else:
            raise ValueError("activation must be one of {'softmax', 'sigmoid'}.")

        # Localization
        loc = Conv2D(num_priors * 4, (3, 3), padding='same',
                     name=f'loc_head{idx}')(source_layer)
        loc = Reshape((-1, 4),
                      name=f'loc_head{idx}_reshape')(loc)
        head = Concatenate(axis=-1, name=f'head{idx}')([clf, loc])
        heads.append(head)

    if len(heads) > 1:
        predictions = Concatenate(axis=1, name='predictions')(heads)
    else:
        predictions = K.identity(heads[0], name='predictions')
    return predictions
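A minimal usage sketch of how this head could be attached to a backbone; the MobileNetV2 backbone and the feature-layer names are illustrative assumptions, not from the source:

from tensorflow.keras import Model
from tensorflow.keras.applications import MobileNetV2

backbone = MobileNetV2(input_shape=(300, 300, 3), include_top=False)
predictions = attach_multibox_head(
    backbone,
    ['block_13_expand_relu', 'out_relu'],  # hypothetical source layers
    num_priors=4, num_classes=10)
ssd = Model(inputs=backbone.input, outputs=predictions)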
Example #2
    def call(self, x, sigmoid_activation=True):
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.flatten(x)
        x = self.fully_connected1(x)
        x = self.fully_connected2(x)
        # Sigmoid for probability-style outputs, ReLU otherwise.
        if sigmoid_activation:
            return sigmoid(x)
        return relu(x)
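For context, a sketch of an enclosing subclassed model this `call` could belong to; the class name, filter counts, and layer widths are assumptions, not from the source:

import tensorflow as tf
from tensorflow.keras.activations import relu, sigmoid
from tensorflow.keras.layers import Conv2D, Dense, Flatten

class SmallConvNet(tf.keras.Model):  # hypothetical class name
    def __init__(self):
        super().__init__()
        self.conv1 = Conv2D(16, 3, activation='relu')
        self.conv2 = Conv2D(32, 3, activation='relu')
        self.flatten = Flatten()
        self.fully_connected1 = Dense(64, activation='relu')
        self.fully_connected2 = Dense(1)  # logits; activated in call()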
Example #3
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.keras.activations import sigmoid
from tensorflow.keras.layers import Concatenate, Conv2D, Softmax


def attach_multibox_head(network,
                         source_layer_names,
                         num_priors=4,
                         num_classes=11,
                         activation='softmax'):  # 10 -> 11, see OpenCV note below
    heads = []
    batch_size = 64  # hard-coded because OpenCV's importer cannot handle a dynamic reshape
    for idx, layer_name in enumerate(source_layer_names):
        source_layer = network.get_layer(layer_name).output

        # OpenCV loading error:
        # "Can't create layer \"loc_head2_reshape_2/Shape\" of type \"Shape\""
        # Fix: changed the number of classes from 10 to 11

        w = source_layer.get_shape().as_list()[1]
        h = source_layer.get_shape().as_list()[2]
        print("w : ", w)
        print("h : ", h)
        print("num_priors : ", num_priors)
        # Classification
        clf = Conv2D(num_priors * num_classes, (3, 3),
                     padding='same',
                     name=f'clf_head{idx}_logit')(source_layer)
        print("clf shape입니다 : ", clf.shape)
        clf = tf.reshape(clf,
                         shape=(batch_size, w * h * num_priors, num_classes),
                         name=f'clf_head{idx}_reshape')
        # clf = Reshape((w*h*num_priors, num_classes), name=f'clf_head{idx}_reshape')(clf)  # (-1, num_classes) # w*h*num_priors
        print("clf의 reshape 후입니다 : ", clf.shape)
        if activation == 'softmax':
            clf = Softmax(axis=-1, name=f'clf_head{idx}')(clf)
        elif activation == 'sigmoid':
            clf = sigmoid(clf)
        else:
            raise ValueError("activation must be one of {'softmax', 'sigmoid'}.")

        # Localization
        loc = Conv2D(num_priors * 4, (3, 3),
                     padding='same',
                     name=f'loc_head{idx}')(source_layer)
        print("loc의 shape입니다 : ", loc.shape)
        loc = tf.reshape(loc,
                         shape=(batch_size, w * h * num_priors, 4),
                         name=f'loc_head{idx}_reshape')
        # loc = Reshape((w*h*num_priors, 4), name=f'loc_head{idx}_reshape')(loc)  #Reshape((-1, 4),
        print("loc의 reshape 후입니다 : ", loc.shape)
        head = Concatenate(axis=-1, name=f'head{idx}')([clf, loc])
        heads.append(head)

    if len(heads) > 1:
        predictions = Concatenate(axis=1, name='predictions')(heads)
    else:
        predictions = K.identity(heads[0], name='predictions')
    return predictions
Example #4
  def test_sigmoid(self):
    # Numerically stable reference sigmoid: for x >= 0 compute 1 / (1 + e^-x);
    # for x < 0 compute e^x / (1 + e^x) so that np.exp() never overflows.
    def ref_sigmoid(x):
      if x >= 0:
        return 1 / (1 + np.exp(-x))
      else:
        z = np.exp(x)
        return z / (1 + z)
    sigmoid = np.vectorize(ref_sigmoid)

    x = backend.placeholder(ndim=2)
    f = backend.function([x], [activations.sigmoid(x)])
    test_values = np.random.random((2, 5))
    result = f([test_values])[0]
    expected = sigmoid(test_values)
    self.assertAllClose(result, expected, rtol=1e-05)
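The two-branch reference implementation above exists for numerical stability; a naive sigmoid overflows np.exp() for large negative inputs. A small sketch of the difference:

import numpy as np

def naive_sigmoid(x):
    return 1 / (1 + np.exp(-x))  # np.exp(1000.0) overflows to inf

def stable_sigmoid(x):
    if x >= 0:
        return 1 / (1 + np.exp(-x))
    z = np.exp(x)  # x < 0, so exp(x) is in (0, 1) and cannot overflow
    return z / (1 + z)

naive_sigmoid(-1000.0)   # RuntimeWarning: overflow encountered in exp
stable_sigmoid(-1000.0)  # 0.0, no warning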
Example #5
from tensorflow.keras import Model
from tensorflow.keras.activations import sigmoid
from tensorflow.keras.layers import Dense, Dot, Dropout, Input
# Assumed from the source project's environment: GraphConv (e.g. spektral),
# CuDNNGRU (TF 1.x / standalone Keras), and the module-level hyperparameters
# `base` and `dropout`.


def make_generator(name, s, adj, node_f, use_gcn=True, use_gru=True):
    n = node_f.shape[0]  # number of nodes
    input_s = Input(shape=(s, n))  # sequence input: s steps over n nodes
    input_f = Input(shape=(n, node_f.shape[1]))  # node features
    input_g = Input(shape=(n, n))  # adjacency matrix
    if use_gcn:
        gcov1 = GraphConv(2 * base)([input_f, input_g])
        # gcov2 = GraphConv(base)([gcov1, input_g])
        # Matrix product along the node axis: (s, n) . (n, 2*base) -> (s, 2*base)
        input_s1 = Dot(axes=(2, 1))([input_s, gcov1])
    else:
        input_s1 = input_s
    fc1 = Dense(4 * base, activation='relu', input_shape=(n, ))(input_s1)
    fc2 = Dense(8 * base, activation='relu', input_shape=(n, ))(fc1)
    # S*D2

    if use_gru:
        rnn1 = Dropout(dropout)(CuDNNGRU(2 * base, return_sequences=True)(fc2))
    else:
        rnn1 = fc2
    fc3 = Dense(16 * base, activation='relu', input_shape=(n, ))(rnn1)
    out = sigmoid(Dense(1)(fc3))
    return Model(name=name, inputs=[input_s, input_f, input_g], outputs=out)
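A usage sketch under assumed values for the module-level `base` and `dropout` hyperparameters and small random inputs; `use_gru=False` sidesteps the GPU-only CuDNNGRU layer:

import numpy as np

base, dropout = 32, 0.2  # hypothetical hyperparameter values
n, s = 10, 8             # 10 nodes, sequence length 8
adj = np.random.randint(0, 2, size=(n, n)).astype('float32')
node_f = np.random.rand(n, 4).astype('float32')

gen = make_generator('gen', s, adj, node_f, use_gcn=True, use_gru=False)
gen.summary()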
Example #6
    def call(self, inputs):
        # Three ReLU hidden layers followed by a sigmoid output.
        x = relu(self.linear1(inputs))
        x = relu(self.linear2(x))
        x = relu(self.linear3(x))
        x = sigmoid(self.linear4(x))
        return x
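For completeness, one possible `__init__` for the layers this `call` references; the widths and class name are assumptions:

import tensorflow as tf
from tensorflow.keras.activations import relu, sigmoid
from tensorflow.keras.layers import Dense

class MLP(tf.keras.Model):  # hypothetical class name
    def __init__(self):
        super().__init__()
        self.linear1 = Dense(128)
        self.linear2 = Dense(64)
        self.linear3 = Dense(32)
        self.linear4 = Dense(1)  # single unit; sigmoid applied in call()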