Example no. 1
0
def batch_normalization():
    """Single-input single-output batch-normalization module."""
    def compile_fn(di, dh):
        # Instantiate the layer once at compile time; the forward function
        # reuses it so the layer's state is shared across calls.
        layer = keras.layers.BatchNormalization()
        def forward_fn(di):
            out = layer(di['in'])
            return {'out' : out}
        return forward_fn
    return siso_tfm('BatchNormalization', compile_fn, {})
Example no. 2
0
def dropout(h_keep_prob):
    """Dropout module parameterized by the probability of *keeping* a unit.

    Args:
        h_keep_prob: hyperparameter giving the keep probability in [0, 1].
    """
    def compile_fn(di, dh):
        # keras.layers.Dropout expects the *drop* rate, while the
        # hyperparameter expresses the *keep* probability — convert,
        # otherwise keep_prob=0.9 would drop 90% of the units.
        Dropout = keras.layers.Dropout(1.0 - dh['keep_prob'])
        def fn(di):
            return {'out' : Dropout(di['in'])}
        return fn
    return siso_tfm('Dropout', compile_fn, {'keep_prob' : h_keep_prob})
Example no. 3
0
def dense(h_units):
    """Fully-connected (Dense) module.

    Args:
        h_units: hyperparameter for the number of output units.
    """
    def compile_fn(di, dh):
        # Build the layer once so its weights persist across forward calls.
        layer = keras.layers.Dense(dh['units'])
        def forward_fn(di):
            return {'out' : layer(di['in'])}
        return forward_fn
    return siso_tfm('Dense', compile_fn, {'units' : h_units})
Example no. 4
0
def flatten():
    """Module that flattens its input to a rank-2 tensor (batch, features)."""
    def compile_fn(di, dh):
        # Stateless layer, but created once for consistency with the
        # other modules in this file.
        layer = keras.layers.Flatten()
        def forward_fn(di):
            out = layer(di['in'])
            return {'out': out}
        return forward_fn
    return siso_tfm('Flatten', compile_fn, {}) # use siso_tfm for now
Example no. 5
0
def dense(h_units):
    """Dense (fully connected) module built on tf.keras.

    Args:
        h_units: hyperparameter for the number of output units.
    """

    def compile_fn(di, dh):  # called once: materializes the layer
        layer = tf.keras.layers.Dense(dh['units'])

        def forward(di):  # called per forward pass, reusing the layer
            out = layer(di['in'])
            return {'out': out}

        return forward

    return siso_tfm('Dense', compile_fn, {'units': h_units})
Example no. 6
0
def nonlinearity(h_nonlin_name):
    """Activation module; the hyperparameter selects the nonlinearity.

    Args:
        h_nonlin_name: hyperparameter taking one of 'relu', 'tanh', 'elu'.

    Raises:
        ValueError: if the hyperparameter value is not a supported name
            (raised at compile time, so misconfiguration fails fast).
    """
    supported = ('relu', 'tanh', 'elu')
    def compile_fn(di, dh):
        nonlin_name = dh['nonlin_name']
        # Validate and build the layer once here — consistent with the other
        # modules in this file — instead of re-creating it on every forward
        # call. The hyperparameter value doubles as the Keras activation
        # name, so the per-name branches collapse into one constructor call.
        if nonlin_name not in supported:
            raise ValueError("unsupported nonlinearity: %r" % (nonlin_name,))
        Activation = keras.layers.Activation(nonlin_name)
        def fn(di):
            return {"out" : Activation(di['in'])}
        return fn
    return siso_tfm('Nonlinearity', compile_fn, {'nonlin_name' : h_nonlin_name})