Example #1
    def __init__(self, **kwargs):
        self._numhid = kwargs.pop('numhid', 64)
        self._outnum = kwargs.pop('outnum', 10)
        self._outact = kwargs.pop('outact', None)
        self._patest = kwargs.pop('patest', 'linear')
        self._outest = kwargs.pop('outest', self._patest)
        super().__init__(**kwargs)
        with self.name_scope():
            # input x 28 x 28
            self.add(
                Conv2D(self._numhid,
                       4,
                       strides=2,
                       padding=1,
                       use_bias=False,
                       regimes=estimators[self._patest](),
                       isinput=True))
            self.add(ReLU(regimes=estimators[self._patest]()))
            # _numhid x 14 x 14

            self.add(
                Conv2D(self._numhid * 2,
                       4,
                       strides=2,
                       padding=1,
                       use_bias=False,
                       regimes=estimators[self._patest]()))
            self.add(ReLU(regimes=estimators[self._patest]()))
            # _numhid*2 x 7 x 7

            self.add(
                Conv2D(self._numhid * 4,
                       4,
                       strides=1,
                       padding=0,
                       use_bias=False,
                       regimes=estimators[self._patest]()))
            self.add(ReLU(regimes=estimators[self._patest]()))
            # _numhid*4 x 4 x 4

            self.add(
                Conv2D(self._numhid * 8,
                       4,
                       strides=1,
                       padding=0,
                       use_bias=False,
                       regimes=estimators[self._patest]()))
            self.add(ReLU(regimes=estimators[self._patest]()))
            # filters x 1 x 1

            self.add(Dense(self._outnum, regimes=estimators[self._outest]()))
            if self._outact == 'relu':
                self.add(ReLU(regimes=estimators[self._outest]()))
            else:
                self.add(Identity(regimes=estimators[self._outest]()))
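
A minimal usage sketch, assuming the __init__ above belongs to a Sequential-style block; the class name PatternClassifier is hypothetical and only illustrates how the popped keyword arguments would be passed in:

from mxnet import nd

# Hypothetical wrapper class built around the __init__ shown above.
net = PatternClassifier(numhid=64, outnum=10, outact=None,
                        patest='linear', outest='linear')
net.initialize()
logits = net(nd.zeros((1, 1, 28, 28)))  # one 28 x 28 input -> (1, 10) outputs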
Example #2
    def __init__(self, **kwargs):
        self._numhid = kwargs.pop('numhid', 64)
        self._outnum = kwargs.pop('outnum', 10)
        self._patest = kwargs.pop('patest', 'linear')
        self._outest = kwargs.pop('outest', self._patest)
        super().__init__(**kwargs)
        with self.name_scope():
            # 3 x 28 x 28
            self.add(
                Conv2D(self._numhid,
                       5,
                       strides=1,
                       padding=0,
                       use_bias=True,
                       regimes=estimators[self._patest]()))
            self.add(ReLU(regimes=estimators[self._patest]()))
            # filt x 24 x 24
            self.add(MaxPool2D(pool_size=2, strides=2))
            # filt x 12 x 12

            self.add(
                Conv2D(self._numhid * 2,
                       4,
                       strides=1,
                       padding=0,
                       use_bias=True,
                       regimes=estimators[self._patest]()))
            # filt x  9 x  9
            self.add(ReLU(regimes=estimators[self._patest]()))
            self.add(MaxPool2D(pool_size=2, strides=2))
            # filt x  4 x  4

            self.add(
                Dense(self._numhid * 2, regimes=estimators[self._patest]()))
            self.add(Dense(self._outnum, regimes=estimators[self._outest]()))
            self.add(Identity(regimes=estimators[self._outest]()))
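
For reference, the spatial sizes in the comments follow the usual output-size formula floor((H + 2*padding - kernel) / stride) + 1; a quick check of the chain for the 28 x 28 input assumed above:

def out_size(h, kernel, stride=1, padding=0):
    """Spatial size after a convolution or floor-mode pooling layer."""
    return (h + 2 * padding - kernel) // stride + 1

h = out_size(28, 5)           # Conv2D 5x5, stride 1, no padding -> 24
h = out_size(h, 2, stride=2)  # MaxPool2D 2x2, stride 2          -> 12
h = out_size(h, 4)            # Conv2D 4x4, stride 1, no padding -> 9
h = out_size(h, 2, stride=2)  # MaxPool2D 2x2, stride 2          -> 4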
Example #3
    def __init__(self, **kwargs):
        outnum = kwargs.pop('outnum', 1)
        outact = kwargs.pop('outact', None)
        numhid = kwargs.pop('numhid', 512)
        droprate = kwargs.pop('droprate', 0.25)
        use_bias = kwargs.pop('use_bias', False)

        # patest = dict(relu='relu', out='clip', pixel='relu', gauss='relu')
        patest = dict(relu='linear',
                      out='linear',
                      pixel='linear',
                      gauss='linear')
        patest.update(kwargs.pop('patest', {}))
        explain = dict(relu='zplus', out='zplus', pixel='zb', gauss='wsquare')
        explain.update(kwargs.pop('explain', {}))
        super().__init__(**kwargs)
        with self.name_scope():
            # 28 x 28
            self.add(
                Conv2D(128,
                       3,
                       strides=1,
                       padding=1,
                       use_bias=use_bias,
                       explain=explain['pixel'],
                       regimes=estimators[patest['pixel']]()))
            self.add(ReLU())
            self.add(MaxPool2D(pool_size=2, strides=2))
            # 14 x 14

            self.add(
                Conv2D(128,
                       3,
                       strides=1,
                       padding=1,
                       use_bias=use_bias,
                       explain=explain['pixel'],
                       regimes=estimators[patest['pixel']]()))
            self.add(ReLU())
            self.add(MaxPool2D(pool_size=2, strides=2))
            #  7 x  7

            self.add(
                Conv2D(128,
                       3,
                       strides=1,
                       padding=1,
                       use_bias=use_bias,
                       explain=explain['pixel'],
                       regimes=estimators[patest['pixel']]()))
            self.add(ReLU())
            self.add(MaxPool2D(pool_size=2, strides=2))
            #  3 x  3

            self.add(
                Conv2D(128,
                       3,
                       strides=1,
                       padding=1,
                       use_bias=use_bias,
                       explain=explain['pixel'],
                       regimes=estimators[patest['pixel']]()))
            self.add(ReLU())
            self.add(MaxPool2D(pool_size=2, strides=2))
            #  1 x  1 (with default floor-mode pooling)

            self.add(Flatten())

            self.add(
                Dense(numhid,
                      explain=explain['relu'],
                      regimes=estimators[patest['relu']]()))
            self.add(ReLU())
            self.add(Dropout(droprate))

            self.add(
                Dense(outnum,
                      explain=explain['relu'],
                      regimes=estimators[patest['relu']]()))

            if outact == 'relu':
                self.add(ReLU())
            else:
                self.add(Identity())
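
A sketch of how the per-group patest/explain defaults could be overridden at construction time; the class name MnistPatternNet is hypothetical, while the keys and values ('relu', 'clip', 'zb', 'zplus') are taken from the defaults and the commented-out line above:

# Only the given keys are overridden; the remaining groups keep the defaults above.
net = MnistPatternNet(outnum=10,
                      numhid=512,
                      droprate=0.25,
                      patest={'pixel': 'relu', 'out': 'clip'},   # pattern estimators per layer group
                      explain={'pixel': 'zb', 'relu': 'zplus'})  # relevance rules per layer group
net.initialize()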
Example #4
    def __init__(self, **kwargs):
        outnum = kwargs.pop('outnum', 1)
        outact = kwargs.pop('outact', None)
        numhid = kwargs.pop('numhid', 64)
        leakage = kwargs.pop('leakage', 0.1)
        use_bias = kwargs.pop('use_bias', False)

        # patest = dict(relu='relu', out='clip', pixel='relu', gauss='relu')
        patest = dict(relu='linear',
                      out='linear',
                      pixel='linear',
                      gauss='linear')
        patest.update(kwargs.pop('patest', {}))
        explain = dict(relu='zplus', out='zplus', pixel='zb', gauss='wsquare')
        explain.update(kwargs.pop('explain', {}))
        super().__init__(**kwargs)
        with self.name_scope():
            # input x 32 x 32
            self.add(
                Conv2D(numhid,
                       4,
                       strides=2,
                       padding=1,
                       use_bias=use_bias,
                       explain=explain['pixel'],
                       regimes=estimators[patest['pixel']]()))
            self.add(LeakyReLU(leakage))
            # numhid x 16 x 16

            self.add(
                Conv2D(numhid * 2,
                       4,
                       strides=2,
                       padding=1,
                       use_bias=use_bias,
                       explain=explain['relu'],
                       regimes=estimators[patest['relu']]()))
            self.add(BatchNorm())
            self.add(LeakyReLU(leakage))
            # numhid*2 x 8 x 8

            self.add(
                Conv2D(numhid * 4,
                       4,
                       strides=2,
                       padding=1,
                       use_bias=use_bias,
                       explain=explain['relu'],
                       regimes=estimators[patest['relu']]()))
            self.add(BatchNorm())
            self.add(LeakyReLU(leakage))
            # numhid*4 x 4 x 4

            self.add(
                Conv2D(outnum,
                       4,
                       strides=1,
                       padding=0,
                       use_bias=use_bias,
                       explain=explain['out'],
                       regimes=estimators[patest['out']]()))
            self.add(Flatten())
            # self.add(BatchNorm())
            # self.add(LeakyReLU(leakage))
            # # filters x 1 x 1

            # self.add(Dense(outnum,
            #                explain=explain['relu'], regimes=estimators[patest['relu']]()))
            if outact == 'relu':
                self.add(ReLU())
            else:
                self.add(Identity())
Example #5
#patterntest
import numpy as np
from mxnet import nd
from ecGAN.layer import Conv2D
from ecGAN.explain.pattern.estimator import estimators

# A small Conv2D layer with a linear pattern estimator attached.
lay = Conv2D(20, 2, strides=2, padding=0, regimes=estimators['linear']())
lay.initialize()

# 1000 normal samples (mean 5) of shape 3 x 8 x 8; the 2x2 stride-2 kernel yields 4 x 4 outputs.
data = nd.random.normal(5, shape=[1000, 3, 8, 8])
out = lay(data)

# Set up and initialize the pattern parameters of the layer.
lay.init_pattern()
lay.collect_pparams().initialize()

# Accumulate pattern statistics over mini-batches, then finalize the pattern.
for mdat in [data[i::100] for i in range(100)]:
    lay.forward_logged(mdat)
    lay.learn_pattern()
lay.compute_pattern()

# Rearrange the input into the 2x2 stride-2 patches seen by each output position
# (im2col layout): 1000*4*4 rows of 3*2*2 = 12 patch values each.
resdat = (data.reshape([1000, 3, 4, 2, 4, 2])
              .transpose([0, 2, 4, 1, 3, 5])
              .reshape([1000 * 4 * 4, 3 * 4]))
# Matching output activations: one row of 20 filter responses per patch.
resout = out.transpose([0, 2, 3, 1]).reshape([1000 * 4 * 4, 20])
# Covariance between output activations and input patches, cov(y, x): 20 x 12.
rescov = nd.dot((resout - resout.mean(0)).T,
                (resdat - resdat.mean(0))) / resout.shape[0]

#TODO check whether correlation is correct!
# Per-output scale estimate from the weights and cov(y, x) (mean over the 12 patch entries).
var_y = (lay.weight.data().flatten() * rescov).mean(1, keepdims=True)
# Per-output standard deviation of the activations.
std_y = nd.sqrt(nd.square(resout - resout.mean(0)).mean(0))
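
A possible continuation of this check, assuming the 'linear' regime follows the PatternNet linear estimator a = cov(x, y) / (w^T cov(x, y)); the comparison against the pattern parameters actually learned by the layer is omitted because their accessor is not shown in this snippet:

# Filter matrix reshaped to the patch layout of rescov: 20 outputs x 12 patch values.
w = lay.weight.data().reshape([20, -1])
# Per-output variance of y under the linear model: sigma_y^2 = w^T cov(x, y).
sigma_y2 = (w * rescov).sum(axis=1, keepdims=True)
# Linear pattern estimate, one 12-vector per output filter.
a_manual = rescov / sigma_y2
print(a_manual.shape)  # (20, 12)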