def __init__(self, depthlen=288, gen_features=8, numparticle=6,
             overscale=10.0, expscale=False,
             activation=tf.keras.activations.relu, **kwargs):
    """Build the dense generator network.

    Args:
        depthlen: number of depth bins in the generated profile.
        gen_features: number of features produced per depth bin.
        numparticle: particle-type count forwarded to ``utils.LabelMerger``.
        overscale: scaling factor stored on the instance (presumably used
            by ``call`` — not visible here).
        expscale: if True, the final layer uses ``elu`` (unbounded above)
            and the flag is forwarded to the LabelMerger; otherwise the
            output is squashed into (0, 1) with ``sigmoid``.
        activation: activation for the hidden Dense layers.
        **kwargs: forwarded to the Keras base-class constructor.
    """
    super().__init__(**kwargs)

    self.depthlen = depthlen
    self.gen_features = gen_features
    # BUG FIX: previously hard-coded to 10.0, silently ignoring the
    # `overscale` argument.
    self.overscale = overscale
    self.expscale = expscale
    self.labelmerger = utils.LabelMerger(numparticle=numparticle,
                                         expscale=expscale)
    self.activation = activation

    # Exponentially scaled targets are unbounded, so use elu; otherwise
    # constrain outputs to (0, 1) with sigmoid.
    if self.expscale:
        last_activation = tf.keras.activations.elu
    else:
        last_activation = tf.keras.activations.sigmoid

    self.layer1 = layers.Dense(512, activation=self.activation)
    self.layer1_norm = layers.LayerNormalization(epsilon=1e-6)
    self.layer2 = layers.Dense(1024, activation=self.activation)
    self.layer2_norm = layers.LayerNormalization(epsilon=1e-6)
    # Final layer emits one flat vector covering all depth bins/features.
    self.layer3 = layers.Dense(self.depthlen * self.gen_features,
                               activation=last_activation)
def __init__(self, numparticle=6, expscale=False, epsilon=1e-25,
             activation=tf.keras.activations.tanh, **kwargs):
    """Build the dense discriminator network.

    Flattens label and data inputs, then scores them with an MLP
    (1024 -> 512 -> 256 -> 1) using layer normalization between the
    hidden layers. The final layer produces an unactivated scalar logit.

    Args:
        numparticle: particle-type count forwarded to ``utils.LabelMerger``.
        expscale: forwarded to the LabelMerger.
        epsilon: small constant stored on the instance (presumably used
            in ``call`` for numerical stability — not visible here).
        activation: activation for all hidden layers.
        **kwargs: forwarded to the Keras base-class constructor.
    """
    super().__init__(**kwargs)

    self._numparticle = numparticle
    self.expscale = expscale
    self.epsilon = epsilon
    self.labelmerger = utils.LabelMerger(numparticle=numparticle,
                                         expscale=expscale)
    self.activation = activation

    # Flatten label and data inputs separately before scoring.
    self.layer_label_flatten = layers.Flatten()
    self.layer_data_flatten = layers.Flatten()

    # Hidden MLP stack: each Dense is paired with a LayerNormalization.
    # (Creation order matches the original so Keras auto-naming is stable.)
    for index, width in enumerate((1024, 512, 256), start=1):
        setattr(self, f"layer_dense{index}",
                layers.Dense(width, activation=self.activation))
        setattr(self, f"layer_dense{index}_norm",
                layers.LayerNormalization(epsilon=1e-6))

    # Scalar output logit; no activation (raw critic/discriminator score).
    self.layer_dense4 = layers.Dense(1, activation=None)
def __init__(self, depthlen=288, gen_features=8, numparticle=6,
             overscale=10.0, expscale=False,
             activation=tf.keras.activations.tanh, **kwargs):
    """Build the convolutional (transposed-conv upsampling) generator.

    A dense label/noise combination is reshaped into a small feature map
    and upsampled through five Conv2DTranspose/Conv2D pairs, each
    transposed conv doubling the width (stride (1, 2)).

    Args:
        depthlen: number of depth bins in the generated profile.
        gen_features: number of output channels of the final conv.
        numparticle: particle-type count forwarded to ``utils.LabelMerger``.
        overscale: scaling factor stored on the instance (presumably used
            by ``call`` — not visible here).
        expscale: if True, the final conv uses ``elu``; otherwise
            ``sigmoid``. Also forwarded to the LabelMerger.
        activation: activation for all intermediate layers.
        **kwargs: forwarded to the Keras base-class constructor.
    """
    super().__init__(**kwargs)

    self.depthlen = depthlen
    self.gen_features = gen_features
    # BUG FIX: previously hard-coded to 10.0, silently ignoring the
    # `overscale` argument.
    self.overscale = overscale
    self.expscale = expscale
    self.labelmerger = utils.LabelMerger(numparticle=numparticle,
                                         expscale=expscale)
    self.activation = activation

    # Exponentially scaled targets are unbounded, so use elu; otherwise
    # constrain outputs to (0, 1) with sigmoid.
    if self.expscale:
        last_activation = tf.keras.activations.elu
    else:
        last_activation = tf.keras.activations.sigmoid

    # Seed feature-map geometry: five stride-(1,2) transposed convs
    # multiply the width by 2**5 = 32, so start from ceil(depthlen / 32).
    self._nheight = 4
    self._nwidth = int(np.ceil(self.depthlen / (2 * 2 * 2 * 2 * 2)))
    self._nfilter = 32

    # dense label/noise combination
    self.layer_combine1 = layers.Dense(512, activation=self.activation)
    self.layer_combine2 = layers.Dense(
        self._nheight * self._nwidth * self._nfilter,
        activation=self.activation)

    # 1st transpose then conv
    self.layer_tc1 = layers.Conv2DTranspose(64, (2, 5), strides=(1, 2),
                                            padding="same",
                                            activation=self.activation)
    self.layer_rc1 = layers.Conv2D(64, (2, 5), padding="same",
                                   activation=self.activation)

    # 2nd transpose then conv
    self.layer_tc2 = layers.Conv2DTranspose(32, (2, 10), strides=(1, 2),
                                            padding="same",
                                            activation=self.activation)
    self.layer_rc2 = layers.Conv2D(32, (2, 10), padding="same",
                                   activation=self.activation)

    # 3rd transpose then conv
    self.layer_tc3 = layers.Conv2DTranspose(16, (2, 10), strides=(1, 2),
                                            padding="same",
                                            activation=self.activation)
    self.layer_rc3 = layers.Conv2D(16, (2, 10), padding="same",
                                   activation=self.activation)

    # 4th transpose then conv
    self.layer_tc4 = layers.Conv2DTranspose(32, (2, 5), strides=(1, 2),
                                            padding="same",
                                            activation=self.activation)
    self.layer_rc4 = layers.Conv2D(32, (2, 5), padding="same",
                                   activation=self.activation)

    # 5th transpose then conv; the final "valid" conv with kernel height
    # _nheight collapses the height dimension to 1.
    self.layer_tc5 = layers.Conv2DTranspose(16, (2, 5), strides=(1, 2),
                                            padding="same",
                                            activation=self.activation)
    self.layer_rc5 = layers.Conv2D(self.gen_features, (self._nheight, 5),
                                   padding="valid",
                                   activation=last_activation)
def __init__(self, numparticle=6, expscale=False, epsilon=1e-25,
             activation=None, **kwargs):
    """Build the convolutional discriminator network.

    Combines the label with the data, then scores it through a stack of
    seven Conv1D layers (shrinking filters/kernels 64/16 down to 16/4),
    each followed by instance normalization, and a final dense head
    (256 -> 1) emitting an unactivated scalar logit.

    Args:
        numparticle: particle-type count forwarded to ``utils.LabelMerger``.
        expscale: forwarded to the LabelMerger.
        epsilon: small constant stored on the instance (presumably used
            in ``call`` for numerical stability — not visible here).
        activation: activation for all hidden layers; defaults to a fresh
            ``LeakyReLU(alpha=0.01)`` per instance.
        **kwargs: forwarded to the Keras base-class constructor.
    """
    super().__init__(**kwargs)

    self._numparticle = numparticle
    self.expscale = expscale
    self.epsilon = epsilon
    self.labelmerger = utils.LabelMerger(numparticle=numparticle,
                                         expscale=expscale)
    # BUG FIX: the default used to be a LeakyReLU *layer instance* created
    # once at function-definition time (mutable default argument), so all
    # discriminators built with the default shared the same layer object.
    # Build a fresh instance per discriminator instead.
    if activation is None:
        activation = layers.LeakyReLU(alpha=0.01)
    self.activation = activation

    # transform label to data format
    self.layer_combine_flatten = layers.Flatten()
    self.layer_combine1 = layers.Dense(256, activation=self.activation)
    self.layer_combine1_norm = layers.LayerNormalization(epsilon=1e-6)

    # downstream
    self.layer_conv1 = layers.Conv1D(64, 16, padding="same",
                                     activation=self.activation)
    self.layer_conv2 = layers.Conv1D(56, 14, padding="same",
                                     activation=self.activation)
    self.layer_conv3 = layers.Conv1D(48, 12, padding="same",
                                     activation=self.activation)
    self.layer_conv4 = layers.Conv1D(40, 10, padding="same",
                                     activation=self.activation)
    self.layer_conv5 = layers.Conv1D(32, 8, padding="same",
                                     activation=self.activation)
    self.layer_conv6 = layers.Conv1D(24, 6, padding="same",
                                     activation=self.activation)
    self.layer_conv7 = layers.Conv1D(16, 4, padding="same",
                                     activation=self.activation)

    self.layer_conv1_norm = tfa.layers.InstanceNormalization(epsilon=1e-6)
    self.layer_conv2_norm = tfa.layers.InstanceNormalization(epsilon=1e-6)
    self.layer_conv3_norm = tfa.layers.InstanceNormalization(epsilon=1e-6)
    self.layer_conv4_norm = tfa.layers.InstanceNormalization(epsilon=1e-6)
    self.layer_conv5_norm = tfa.layers.InstanceNormalization(epsilon=1e-6)
    self.layer_conv6_norm = tfa.layers.InstanceNormalization(epsilon=1e-6)
    self.layer_conv7_norm = tfa.layers.InstanceNormalization(epsilon=1e-6)

    self.layer_end_flatten = layers.Flatten()
    self.layer_end_dense1 = layers.Dense(256, activation=self.activation)
    self.layer_end_dense1_norm = layers.LayerNormalization(epsilon=1e-6)
    # Scalar output logit; no activation (raw critic/discriminator score).
    self.layer_end_dense2 = layers.Dense(1, activation=None)