Example #1
def reductionA(out, k=0, l=0, m=0, n=0):
    # Grid-size reduction block: a strided 3x3 branch (n filters), a
    # 1x1 -> 3x3 -> strided 3x3 branch (k, l, m filters), and a max-pool branch.
    out1 = conv3x3(out, n, stride=2, padding="VALID", batch_norm=use_batch_norm)

    out2 = conv1x1(out, k, batch_norm=use_batch_norm)
    out2 = conv3x3(out2, l, batch_norm=use_batch_norm)
    out2 = conv3x3(out2, m, stride=2, padding="VALID", batch_norm=use_batch_norm)

    out3 = max_pool_3x3(out, stride=2, padding="VALID")

    return concat(3, [out1, out2, out3])
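All of these snippets assume thin wrapper helpers (conv1x1, conv3x3, max_pool_3x3, concat, the use_batch_norm flag, ...) defined elsewhere in the repository. Their real implementation is not shown here; the following is a minimal TF 1.x sketch of what such wrappers might look like, written as an assumption for illustration rather than the repository's actual code:

import tensorflow as tf

use_batch_norm = True  # module-level flag referenced by the examples


def conv3x3(x, filters, stride=1, padding="SAME", batch_norm=False):
    # Hypothetical 3x3 convolution wrapper; conv1x1, conv1x7, etc. would follow
    # the same pattern with a different kernel size.
    out = tf.layers.conv2d(x, filters, kernel_size=3, strides=stride,
                           padding=padding, use_bias=not batch_norm)
    if batch_norm:
        out = tf.layers.batch_normalization(out)
    return tf.nn.relu(out)


def max_pool_3x3(x, stride=2, padding="SAME"):
    # Hypothetical 3x3 max-pooling wrapper (the default stride is a guess).
    return tf.layers.max_pooling2d(x, pool_size=3, strides=stride, padding=padding)


def concat(axis, tensors):
    # Channel-wise concatenation; axis=3 is the channel axis for NHWC tensors.
    return tf.concat(tensors, axis=axis)
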
Example #2
def inception7B(out, num_3x3, num_d3x3_red, num_d3x3_1, num_d3x3_2):
    out1 = conv3x3(out, num_3x3, stride=2, padding="VALID", batch_norm=use_batch_norm)

    out2 = conv1x1(out, num_d3x3_red, batch_norm=use_batch_norm)
    out2 = conv3x3(out2, num_d3x3_1, batch_norm=use_batch_norm)
    out2 = conv3x3(out2, num_d3x3_2, stride=2, padding="VALID", batch_norm=use_batch_norm)

    out3 = max_pool_3x3(out, stride=2, padding="VALID")

    return concat(3, [out1, out2, out3])
Example #3
def inception7D(out, num_3x3_red, num_3x3, num_d7_3x3_red, num_d7_1, num_d7_2, num_d7_3x3):
    out1 = conv1x1(out, num_3x3_red, batch_norm=use_batch_norm)
    out1 = conv3x3(out1, num_3x3, stride=2, batch_norm=use_batch_norm)

    out2 = conv1x1(out, num_d7_3x3_red, batch_norm=use_batch_norm)
    out2 = conv1x7(out2, num_d7_1, batch_norm=use_batch_norm)
    out2 = conv7x1(out2, num_d7_2, batch_norm=use_batch_norm)
    out2 = conv3x3(out2, num_d7_3x3, stride=2, batch_norm=use_batch_norm)

    out3 = max_pool_3x3(out, stride=2)

    return concat(3, [out1, out2, out3])
Example #4
def reductionB(out):
    out1 = conv1x1(out, 192, batch_norm=use_batch_norm)
    out1 = conv3x3(out1, 192, stride=2, padding="VALID", batch_norm=use_batch_norm)

    out2 = conv1x1(out, 256, batch_norm=use_batch_norm)
    out2 = conv1x7(out2, 256, batch_norm=use_batch_norm)
    out2 = conv7x1(out2, 320, batch_norm=use_batch_norm)
    out2 = conv3x3(out2, 320, stride=2, padding="VALID", batch_norm=use_batch_norm)

    out3 = max_pool_3x3(out, stride=2, padding="VALID")

    return concat(3, [out1, out2, out3])
Example #5
def inceptionA(out):
    out1 = conv1x1(out, 96, batch_norm=use_batch_norm)

    out2 = conv1x1(out, 64, batch_norm=use_batch_norm)
    out2 = conv3x3(out2, 96, batch_norm=use_batch_norm)

    out3 = conv1x1(out, 64, batch_norm=use_batch_norm)
    out3 = conv3x3(out3, 96, batch_norm=use_batch_norm)
    out3 = conv3x3(out3, 96, batch_norm=use_batch_norm)

    out4 = avg_pool_3x3(out)
    out4 = conv1x1(out4, 96, batch_norm=use_batch_norm)

    return concat(3, [out1, out2, out3, out4])
Example #6
def inception7A(out, num_1x1, num_3x3_red, num_3x3_1, num_3x3_2, num_5x5_red, num_5x5, proj):
    out1 = conv1x1(out, num_1x1, batch_norm=use_batch_norm)

    out2 = conv1x1(out, num_5x5_red, batch_norm=use_batch_norm)
    out2 = conv5x5(out2, num_5x5, batch_norm=use_batch_norm)

    out3 = conv1x1(out, num_3x3_red, batch_norm=use_batch_norm)
    out3 = conv3x3(out3, num_3x3_1, batch_norm=use_batch_norm)
    out3 = conv3x3(out3, num_3x3_2, batch_norm=use_batch_norm)

    out4 = avg_pool_3x3(out)
    out4 = conv1x1(out4, proj, batch_norm=use_batch_norm)

    return concat(3, [out1, out2, out3, out4])
Example #7
def inception7E(out, num_1x1, num_d3_red, num_d3_1, num_d3_2, num_3x3_d3_red, num_3x3, num_3x3_d3_1, num_3x3_d3_2, proj, pool_3x3):
    out1 = conv1x1(out, num_1x1, batch_norm=use_batch_norm)

    out2 = conv1x1(out, num_d3_red, batch_norm=use_batch_norm)
    out2_1 = conv1x3(out2, num_d3_1, batch_norm=use_batch_norm)
    out2_2 = conv3x1(out2, num_d3_2, batch_norm=use_batch_norm)

    out3 = conv1x1(out, num_3x3_d3_red, batch_norm=use_batch_norm)
    out3 = conv3x3(out3, num_3x3, batch_norm=use_batch_norm)
    out3_1 = conv1x3(out3, num_3x3_d3_1, batch_norm=use_batch_norm)
    out3_2 = conv3x3(out3, num_3x3_d3_2, batch_norm=use_batch_norm)

    out4 = pool_3x3(out, stride=1)
    out4 = conv1x1(out4, proj, batch_norm=use_batch_norm)

    return concat(3, [out1, out2_1, out2_2, out3_1, out3_2, out4])
Example #8
def stem(x):
    """
    Stem of the pure InceptionV4 and Inception-ResNet-V2
    """
    out = conv3x3(x, 32, stride=2, padding="VALID", batch_norm=use_batch_norm)
    out = conv3x3(out, 32, stride=1, padding="VALID", batch_norm=use_batch_norm)
    out = conv3x3(out, 64, stride=1, padding="SAME", batch_norm=use_batch_norm)

    out1 = max_pool_3x3(out, stride=2, padding="VALID")
    out2 = conv3x3(out, 96, stride=2, padding="VALID", batch_norm=use_batch_norm)

    out = concat(3, [out1, out2])

    out1 = conv1x1(out, 64, batch_norm=use_batch_norm)
    out1 = conv3x3(out1, 96, padding="VALID", batch_norm=use_batch_norm)

    out2 = conv1x1(out, 64, batch_norm=use_batch_norm)
    out2 = conv1x7(out2, 64, batch_norm=use_batch_norm)
    out2 = conv7x1(out2, 64, batch_norm=use_batch_norm)
    out2 = conv3x3(out2, 96, padding="VALID", batch_norm=use_batch_norm)

    out = concat(3, [out1, out2])

    out1 = conv3x3(out, 192, stride=2, padding="VALID", batch_norm=use_batch_norm)
    out2 = max_pool_3x3(out, stride=2, padding="VALID")

    out = concat(3, [out1, out2])

    return out
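For a 299x299x3 input this stem ends at a 35x35 grid with 384 channels, which is what the inceptionA and reductionA blocks above expect. A hedged composition sketch, assuming the helpers above and the Inception-v4 filter counts from the paper (k=192, l=224, m=256, n=384), neither of which is taken from this repository's own training script:

out = stem(x)                                        # 299x299x3 -> 35x35x384
for _ in range(4):                                   # four Inception-A blocks
    out = inceptionA(out)                            # grid stays 35x35, 384 channels
out = reductionA(out, k=192, l=224, m=256, n=384)    # 35x35 -> 17x17
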
Example #9
    def __init__(self, width=299, height=299, channels=3, classes=10):
        super(InceptionV3, self).__init__()

        self._number_of_classes = classes

        size = width * height * channels

        x = tf.placeholder(tf.float32, [None, size], name="x")
        self._inputs = x

        with tf.variable_scope("reshape1"):
            x = tf.reshape(x, [-1, width, height, channels])

        max_w = 299
        min_w = 299 // 3

        with tf.variable_scope("resize1"):
            if width < min_w:
                x = tf.image.resize_images(x, [min_w, min_w])
            elif width > 299:
                x = tf.image.resize_images(x, [max_w, max_w])

        # Stage 1
        out = conv3x3(x, 32, stride=2, padding="VALID", batch_norm=use_batch_norm)
        out = conv3x3(out, 32, padding="VALID", batch_norm=use_batch_norm)
        out = conv3x3(out, 64, batch_norm=use_batch_norm)
        out = max_pool_3x3(out, padding="VALID")

        # Stage 2
        out = conv3x3(out, 80, padding="VALID", batch_norm=use_batch_norm)
        out = conv3x3(out, 192, padding="VALID", batch_norm=use_batch_norm)
        out = max_pool_3x3(out, padding="VALID")

        # Stage 3
        out = inception7A(out, 64, 64, 96, 96, 48, 64, 32)
        out = inception7A(out, 64, 64, 96, 96, 48, 64, 64)
        out = inception7A(out, 64, 64, 96, 96, 48, 64, 64)
        out = inception7B(out, 384, 64, 96, 96)

        # Stage 4
        out = inception7C(out, 192, 128, 128, 192, 128, 128, 128, 128, 192, 192)
        out = inception7C(out, 192, 160, 160, 192, 160, 160, 160, 160, 192, 192)
        out = inception7C(out, 192, 160, 160, 192, 160, 160, 160, 160, 192, 192)
        out = inception7C(out, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192)
        out = inception7D(out, 192, 320, 192, 192, 192, 192)

        # Stage 5
        out = inception7E(out, 320, 384, 384, 384, 448, 384, 384, 384, 192, avg_pool_3x3)
        out = inception7E(out, 320, 384, 384, 384, 448, 384, 384, 384, 192, max_pool_3x3)

        out = tf.nn.avg_pool(out, ksize=[1, 8, 8, 1], strides=[1, 1, 1, 1], padding='SAME')

        dims = out.get_shape().as_list()
        flatten_size = 1
        for d in dims[1:]:
            flatten_size *= d

        with tf.variable_scope("reshape2"):
            out = tf.reshape(out, [-1, int(flatten_size)])

        with tf.variable_scope("fc1"):
            out = fc(out, classes)

        self._logits = out

        if classes > 1:
            self._predictions = tf.nn.softmax(self._logits)
        else:
            self._predictions = self._logits
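The constructor only builds the forward graph and exposes self._inputs, self._logits and self._predictions; the training objective is defined elsewhere in the repository. A minimal TF 1.x sketch of how such logits are commonly turned into a loss, given here as an assumption for illustration rather than this repository's training code:

# Hypothetical training head: one-hot targets, cross-entropy loss, Adam optimizer.
labels = tf.placeholder(tf.float32, [None, classes], name="y")
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(labels=labels, logits=self._logits))
train_op = tf.train.AdamOptimizer(learning_rate=1e-4).minimize(loss)
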
Example #10
    def __init__(self, width=28, height=28, channels=1, classes=10):
        super(VGG16, self).__init__()

        assert width == height, "width and height must be the same"

        size = width * height * channels

        x = tf.placeholder(tf.float32, [None, size], name="x")
        self._inputs = x

        with tf.variable_scope("reshape1"):
            x = tf.reshape(x, [-1, width, height, channels])

        self._number_of_classes = classes

        with tf.variable_scope("resize1"):
            if width < 224:
                x = tf.image.resize_images(x, [48, 48])
            elif width > 224:
                x = tf.image.resize_images(x, [224, 224])

        with tf.variable_scope("conv1"):
            out = conv3x3(x, 64, stride=1)
            out = max_pool_2x2(out, stride=2)

        with tf.variable_scope("conv2"):
            out = conv3x3(out, 128, stride=1)
            out = max_pool_2x2(out, stride=2)

        with tf.variable_scope("conv3"):
            out = conv3x3(out, 256, stride=1)
        with tf.variable_scope("conv4"):
            out = conv3x3(out, 256, stride=1)
            out = max_pool_2x2(out, stride=2)

        with tf.variable_scope("conv5"):
            out = conv3x3(out, 512, stride=1)
        with tf.variable_scope("conv6"):
            out = conv3x3(out, 512, stride=1)
            out = max_pool_2x2(out, stride=2)

        with tf.variable_scope("conv7"):
            out = conv3x3(out, 512, stride=1)
        with tf.variable_scope("conv8"):
            out = conv3x3(out, 512, stride=1)
            out = max_pool_2x2(out, stride=2)

        dims = out.get_shape().as_list()
        flatten_size = 1
        for d in dims[1:]:
            flatten_size *= d

        with tf.variable_scope("reshape2"):
            out = tf.reshape(out, [-1, int(flatten_size)])

        # fully connected
        with tf.variable_scope("fc1"):
            out = fc(out, 4096)
            out = tf.nn.relu(out)
        with tf.variable_scope("fc2"):
            out = fc(out, 4096)
            out = tf.nn.relu(out)
        with tf.variable_scope("fc3"):
            y = fc(out, classes)

        self._logits = y

        if classes > 1:
            self._predictions = tf.nn.softmax(y)
        else:
            self._predictions = self._logits
Example #11
    def __init__(self, width, height, channels, classes):
        super(AlexNet, self).__init__()

        size = width * height * channels

        x = tf.placeholder(tf.float32, [None, size], name="x")
        self._inputs = x

        with tf.variable_scope("reshape1"):
            x = tf.reshape(self._inputs, [-1, width, height, channels])

        self._number_of_classes = classes

        with tf.variable_scope("resize1"):
            if width < 224:
                x = tf.image.resize_images(x, [48, 48])
            else:
                x = tf.image.resize_images(x, [224, 224])

        with tf.variable_scope("conv1"):
            conv1 = conv11x11(x, 96, stride=4, padding='VALID')
            pool1 = max_pool_3x3(conv1, padding='VALID')
            lrn1 = tf.nn.lrn(pool1, 5, bias=1.0, alpha=0.0001, beta=0.75)

        with tf.variable_scope("conv2"):
            conv2 = conv5x5(lrn1, 256)
            pool2 = max_pool_3x3(conv2, padding='VALID')
            lrn2 = tf.nn.lrn(pool2, 5, bias=1.0, alpha=0.0001, beta=0.75)

        with tf.variable_scope("conv3"):
            conv3 = conv3x3(lrn2, 384)
        with tf.variable_scope("conv4"):
            conv4 = conv3x3(conv3, 384)
        with tf.variable_scope("conv5"):
            conv5 = conv3x3(conv4, 256)
            pool3 = max_pool_3x3(conv5)
            lrn3 = tf.nn.lrn(pool3, 5, bias=1.0, alpha=0.001 / 9.0, beta=0.75)

        dims = lrn3.get_shape().as_list()
        flatten_size = 1
        for d in dims[1:]:
            flatten_size *= d

        with tf.variable_scope("reshape2"):
            flatten = tf.reshape(lrn3, [-1, int(flatten_size)])

        with tf.variable_scope("fc1"):
            fc1 = fc(flatten, 4096)
            relu1 = tf.nn.relu(fc1)

        with tf.variable_scope("fc2"):
            fc2 = fc(relu1, 4096)
            relu2 = tf.nn.relu(fc2)

        with tf.variable_scope("fc3"):
            y = fc(relu2, classes)

        self._logits = y

        if classes > 1:
            self._predictions = tf.nn.softmax(y)
        else:
            self._predictions = self._logits
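All three model classes follow the same pattern: flattened images are fed into self._inputs and class probabilities come out of self._predictions, so inference is a plain TF 1.x session call. A hedged usage sketch, assuming the AlexNet class above and dummy 28x28 grayscale inputs:

import numpy as np
import tensorflow as tf

net = AlexNet(width=28, height=28, channels=1, classes=10)   # builds the graph
batch = np.random.rand(8, 28 * 28).astype(np.float32)        # dummy flattened images

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    probs = sess.run(net._predictions, feed_dict={net._inputs: batch})
    print(probs.shape)  # e.g. (8, 10)
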