Example 1
from neon.layers import Activation, Conv, MergeSum, SkipNode
from neon.transforms import Rectlin

def projection_skip(nfm, stride=1):
    # Residual path: two 3x3 convs; the second defers its ReLU until after the merge.
    mainpath = [Conv(**conv_params(3, nfm, stride=stride)),
                Conv(**conv_params(3, nfm, relu=False))]
    # Identity shortcut when stride == 1, else a strided 1x1 projection conv
    # so the shortcut's output shape matches the main path.
    sidepath = [SkipNode() if stride == 1 else Conv(**conv_params(1, nfm, stride, relu=False))]
    # Sum the two paths, then apply the deferred rectifier.
    module = [MergeSum([mainpath, sidepath]),
              Activation(Rectlin())]
    return module
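All four examples use layer classes from the neon deep learning framework and assume a conv_params helper is in scope. A minimal sketch of that helper, modeled on the one in neon's ResNet example scripts (the padding rule and the Kaiming initializer are assumptions, not taken from these snippets):

from neon.initializers import Kaiming
from neon.transforms import Rectlin

def conv_params(fsize, nfm, stride=1, relu=True, batch_norm=True):
    # Keyword arguments for neon's Conv layer: square fsize x fsize filters
    # producing nfm feature maps, 'same'-style padding for 3x3 filters,
    # with optional ReLU activation and batch normalization.
    return dict(fshape=(fsize, fsize, nfm),
                strides=stride,
                padding=(1 if fsize > 1 else 0),
                activation=(Rectlin() if relu else None),
                init=Kaiming(local=True),
                batch_norm=batch_norm)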
Example 2
from neon.initializers import GlorotUniform
from neon.layers import Affine, BranchNode, Dropout, SkipNode, Tree
from neon.models import Model
from neon.optimizers import Adam
from neon.transforms import Logistic, Rectlin, Softmax


class ClassifierNetwork(Model):  # base class assumed from the super() call below

    def __init__(self,
                 overlapping_classes=None,
                 exclusive_classes=None,
                 analytics_input=True,
                 network_type='conv_net',
                 num_words=60,
                 width=100,
                 lookup_size=0,
                 lookup_dim=0,
                 optimizer=Adam()):
        # At least one of the two label sets must be provided.
        assert (overlapping_classes is not None) or (exclusive_classes is not None)

        self.width = width
        self.num_words = num_words
        self.overlapping_classes = overlapping_classes
        self.exclusive_classes = exclusive_classes
        self.analytics_input = analytics_input
        self.recurrent = network_type == 'lstm'
        self.lookup_size = lookup_size
        self.lookup_dim = lookup_dim

        init = GlorotUniform()
        activation = Rectlin(slope=1E-05)
        gate = Logistic()

        # input_layers is a method defined elsewhere in this class; it
        # presumably builds the convolutional or LSTM front end selected
        # by network_type.
        input_layers = self.input_layers(analytics_input, init, activation,
                                         gate)

        # Only exclusive classes: a single softmax output head.
        if self.overlapping_classes is None:
            output_layers = [
                Affine(len(self.exclusive_classes), init, activation=Softmax())
            ]
        # Only overlapping classes: a single multi-label logistic head.
        elif self.exclusive_classes is None:
            output_layers = [
                Affine(len(self.overlapping_classes),
                       init,
                       activation=Logistic())
            ]
        # Both: branch the shared trunk into a softmax head for the
        # exclusive classes and a logistic head for the overlapping ones.
        else:
            output_branch = BranchNode(name='exclusive_overlapping')
            output_layers = Tree([
                [SkipNode(),
                 output_branch,
                 Affine(len(self.exclusive_classes), init, activation=Softmax())],
                [output_branch,
                 Affine(len(self.overlapping_classes), init, activation=Logistic())]
            ])
        layers = [
            input_layers,
            # this is where inputs meet, and where we may want to add depth or
            # additional functionality
            Dropout(keep=0.8),
            output_layers
        ]
        super(ClassifierNetwork, self).__init__(layers, optimizer=optimizer)
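A hedged usage sketch (the label sets are invented for illustration; the input_layers method and a data pipeline must exist for this to run):

# Hypothetical instantiation with both kinds of label sets, so the
# two-headed Tree branch above is exercised.
net = ClassifierNetwork(exclusive_classes=['spam', 'ham'],
                        overlapping_classes=['urgent', 'finance'],
                        network_type='conv_net')
# As a neon Model subclass, training would then go through Model.fit,
# e.g. net.fit(train_set, cost=..., optimizer=net.optimizer,
#              num_epochs=5, callbacks=...)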
Example 3
def module_s1(nfm, first=False):
    '''
    Non-strided (stride 1) pre-activation bottleneck module.
    '''
    # Identity shortcut, except for the first module, which needs a 1x1
    # projection to widen its input to nfm * 4 feature maps.
    sidepath = Conv(
        **conv_params(1, nfm * 4, 1, False, False)) if first else SkipNode()
    # Pre-activation ordering: BatchNorm + ReLU come before the convs,
    # skipped on the first module (presumably because the preceding layer
    # already applies them).
    mainpath = [] if first else [BatchNorm(), Activation(Rectlin())]
    mainpath.append(Conv(**conv_params(1, nfm)))
    mainpath.append(Conv(**conv_params(3, nfm)))
    mainpath.append(
        Conv(**conv_params(1, nfm * 4, relu=False, batch_norm=False)))

    return MergeSum([sidepath, mainpath])
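A hedged sketch of chaining these modules (the stem layer and the depth are illustrative, not from the source):

# The first module projects the stem's output up to 4*nfm feature maps;
# the subsequent modules can then use identity shortcuts.
nfm = 64
layers = [Conv(**conv_params(7, nfm, stride=2))]  # illustrative stem
layers.append(module_s1(nfm, first=True))
layers.append(module_s1(nfm))
layers.append(module_s1(nfm))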
Example 4
def module_factory(nfm, bottleneck=True, stride=1):
    # Bottleneck modules expand to 4x the base feature maps on output.
    nfm_out = nfm * 4 if bottleneck else nfm
    # An identity skip is only valid when nothing is strided; a negative
    # stride therefore forces a (non-strided) projection shortcut.
    use_skip = stride == 1
    stride = abs(stride)
    sidepath = [SkipNode() if use_skip else Conv(
        **conv_params(1, nfm_out, stride, False))]

    if bottleneck:
        # 1x1 reduce -> 3x3 -> 1x1 expand, with the final ReLU deferred.
        mainpath = [Conv(**conv_params(1, nfm, stride)),
                    Conv(**conv_params(3, nfm)),
                    Conv(**conv_params(1, nfm_out, relu=False))]
    else:
        # Plain two-layer residual block.
        mainpath = [Conv(**conv_params(3, nfm, stride)),
                    Conv(**conv_params(3, nfm, relu=False))]
    return [MergeSum([mainpath, sidepath]),
            Activation(Rectlin())]
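A hedged sketch of assembling these modules into a small ResNet-style model (the depth schedule, class count, and pooling choice are illustrative, not from the source):

from neon.initializers import GlorotUniform
from neon.layers import Affine, Pooling
from neon.models import Model
from neon.transforms import Softmax

# Stem convolution, then a residual stack that doubles the feature maps
# (with a strided transition) twice, as in small CIFAR-style ResNets.
layers = [Conv(**conv_params(3, 16))]
for nfm, stride in [(16, 1), (16, 1), (32, 2), (32, 1), (64, 2), (64, 1)]:
    layers.extend(module_factory(nfm, bottleneck=False, stride=stride))
layers.append(Pooling('all', op='avg'))  # global average pooling
layers.append(Affine(10, GlorotUniform(), activation=Softmax()))
model = Model(layers=layers)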