Example 1
import tensorflow as tf

# Layer_CD, Activate_CD, Sampling_CD, Messages_CD and properties() are
# project-level helpers (enum-like registries of layer/activation/sampling
# constructors and a member-listing utility); they are assumed to be in scope.
def create_sequence(lay_shapes, isConv=True, kernel_shape=3, sampling_rate=2, addBatchNorm=True, addDropout=True,
                    activate=Activate_CD.relu, last_lay=Sampling_CD.DownSampling):

    assert activate in properties(Activate_CD), Messages_CD.USV.format(activate, 'Activations', properties(Activate_CD))
    x = []
    if isConv:
        lay_cd = Layer_CD.Conv
    else:
        lay_cd = Layer_CD.Dense

    # Pad lay_shapes to an even length by duplicating its first entry, so the
    # down-/up-sampling alternation in the loop below stays balanced.
    if len(lay_shapes) % 2 != 0:
        lay_shapes = [lay_shapes[0]] + lay_shapes

    for i, lay in enumerate(lay_shapes):
        # Prepend a full layer block (layer plus optional batch norm, dropout
        # and activation) for each requested shape.
        x = create_layer(lay_cd, lay, kernel_shape=kernel_shape, addBatchNorm=addBatchNorm, addDropout=addDropout, activate=activate) + x

        if isConv:
            # Alternate sampling: downsample after even-indexed blocks,
            # upsample after odd-indexed ones.
            if i % 2 == 0:
                x = [Sampling_CD.DownSampling((sampling_rate, sampling_rate), padding='same')] + x
            else:
                x = [Sampling_CD.UpSampling((sampling_rate, sampling_rate))] + x

    # Closing block: reuses the last entry of lay_shapes, followed by the
    # requested final sampling layer.
    x = create_layer(lay_cd, lay, kernel_shape=kernel_shape, addBatchNorm=addBatchNorm, addDropout=addDropout, activate=activate) + x
    if last_lay == Sampling_CD.DownSampling:
        x = [Sampling_CD.DownSampling((sampling_rate, sampling_rate), padding='same')] + x
    else:
        # UpSampling takes no padding argument (cf. the call in the loop above).
        x = [Sampling_CD.UpSampling((1, 1))] + x

    return x


def create_layer(layer_cd,
                 lay_dim,
                 kernel_shape=None,
                 addBatchNorm=True,
                 addDropout=True,
                 activate=None):
    assert layer_cd in properties(Layer_CD), Messages_CD.USV.format(
        layer_cd, 'Layers', properties(Layer_CD))
    assert activate in properties(Activate_CD), Messages_CD.USV.format(
        activate, 'Activations', properties(Activate_CD))
    x = []  # built by prepending, so the result is in reverse order of construction
    # Conv/Deconv layers need a kernel size; Dense layers only take a width.
    if layer_cd in [Layer_CD.Conv, Layer_CD.Deconv]:
        x = [layer_cd(lay_dim, kernel_shape, padding='same')] + x
    else:
        x = [layer_cd(lay_dim)] + x

    if addBatchNorm:
        x = [tf.keras.layers.BatchNormalization()] + x

    if addDropout:
        x = [tf.keras.layers.Dropout(0.2)] + x

    if activate:
        x = [tf.keras.layers.Activation(activate)] + x

    return x
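
Note that create_sequence returns a plain Python list of layer objects, not a
built model, and the list is assembled by prepending, so it comes back in
reverse order of construction. A minimal usage sketch, assuming the Layer_CD,
Activate_CD and Sampling_CD members resolve to the corresponding
tf.keras.layers constructors, that the shapes below are hypothetical, and that
the intended application order is construction order:

layers = create_sequence([32, 64], isConv=True, kernel_shape=3)

# Reverse into construction order before stacking into a model.
model = tf.keras.Sequential(list(reversed(layers)))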
Example 3
def __init__(self):
    # Copy onto this instance every attribute of default_config whose name
    # properties() reports as a valid configuration key.
    ddict = {key: item for key, item in default_config.__dict__.items()
             if key in properties(default_config)}
    for k, v in ddict.items():
        setattr(self, k, v)
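
Example 3 illustrates a common configuration pattern: hydrating an instance
from a module-level default_config object, keeping only the attribute names
that properties() whitelists. A self-contained sketch of the same idiom, with
hypothetical stand-ins for default_config and properties():

class default_config:
    learning_rate = 0.001
    batch_size = 32
    _internal = 'not copied'

def properties(obj):
    # Hypothetical stand-in: expose the non-underscore attribute names.
    return [k for k in vars(obj) if not k.startswith('_')]

class Config:
    def __init__(self):
        # Same filter-and-copy idiom as Example 3.
        for key, item in dict(default_config.__dict__).items():
            if key in properties(default_config):
                setattr(self, key, item)

cfg = Config()
print(cfg.learning_rate, cfg.batch_size)  # 0.001 32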