def __init__(self, inputs, n_layers, n_neurons, activation, output_size, name='decoder'):
    with tf.name_scope(name):
        self.input = inputs

        # Slicer: second slicer, whose output is multiplied with the PE outcome.
        # l1 and l2 are slice boundaries defined outside this method.
        self.slice_output = RToC.get_c(self.input)
        self.slice_output = tf.slice(self.slice_output, [0, l1], [-1, l2 - l1 + 1])
        #print("output from slice_output: ", self.slice_output.shape)

        # FE branch
        self._fe = util.build_neural_net(input=self.input, n_layers=1, n_neurons=n_neurons,
                                         activation=activation, n_outputs=output_size)
        self._fe = tf.contrib.layers.fully_connected(self._fe, num_outputs=8, activation_fn=None)
        self._fe = RToC.get_c(self._fe)
        #print("output from FE: ", self._fe.shape)

        # PE branch
        self._pe = util.build_neural_net(input=self.input, n_layers=1, n_neurons=n_neurons,
                                         activation=activation, n_outputs=output_size)
        self._pe = tf.contrib.layers.fully_connected(self._pe, num_outputs=2, activation_fn=None)
        self._pe = RToC.get_c(self._pe)

        # Multiply the sliced input with the PE output.
        self._mul = tf.multiply(self.slice_output, self._pe)

        # Concatenate both branches and convert back to a real-valued tensor.
        self._concat = tf.concat([self._mul, self._fe], 1)
        self._concat = CToR.get_r(self._concat)

        # RX network
        self._decoding = util.build_neural_net(input=self._concat, n_layers=n_layers,
                                               n_neurons=n_neurons, activation=activation,
                                               n_outputs=output_size)
        #print("self._decoding shape is: ", self._decoding.shape)

        # Reshape the output back to (batch_size, 1, 256).
        self._decoding = tf.reshape(self._decoding, [-1, 1, 256])
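# The decoder above relies on the project helpers RToC.get_c and CToR.get_r, whose
# implementations are not shown in this section. The sketch below is an assumption,
# reconstructed from the inline RtoC/CtoR steps in the complex-normalizing encoder
# further down (split the last axis into real/imaginary halves and back). The class
# and method names mirror the calls above, but the real helpers may handle shapes
# differently; a static last dimension is assumed here.
import tensorflow as tf


class RToC:
    """Hypothetical real-to-complex helper: first half of the last axis is the
    real part, second half is the imaginary part."""

    @staticmethod
    def get_c(x):
        half = x.get_shape().as_list()[-1] // 2
        return tf.complex(x[..., :half], x[..., half:])


class CToR:
    """Hypothetical complex-to-real helper: concatenate real and imaginary
    parts along the last axis."""

    @staticmethod
    def get_r(z):
        return tf.concat([tf.math.real(z), tf.math.imag(z)], axis=-1)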
def __init__(self, inputs, n_layers, n_neurons, activation, latent_size, name='encoder'):
    with tf.name_scope(name):
        self._input = inputs
        self._w = util.build_neural_net(input=inputs, n_layers=n_layers, n_neurons=n_neurons,
                                        activation=activation, n_outputs=latent_size)
        self._encoding = tf.contrib.layers.fully_connected(
            self._w, num_outputs=latent_size, activation_fn=tf.nn.relu)

        # Normalization: standardize the encoding, then scale by 0.7071 (= 1/sqrt(2)).
        axis = list(range(len(self._encoding.get_shape()) - 1))
        mean, variance = tf.nn.moments(self._encoding, axis)
        self._encoding = tf.nn.batch_normalization(self._encoding, mean, variance,
                                                   offset=None, scale=0.7071,
                                                   variance_epsilon=1e-8)
        #print("self._encoding shape is: ", self._encoding.shape)

        # Reshape: combine the group_num (13) messages into one block.
        self._encoding = tf.reshape(self._encoding, [-1, 1, group_num * latent_size])
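# Why scale by 0.7071 (= 1/sqrt(2)) after normalization? One plausible reading
# (an assumption, not stated in the code): each normalized real component has unit
# variance, so scaling by 1/sqrt(2) gives variance 1/2 per component, and a complex
# symbol built from a real/imaginary pair then has average power
# E[re^2 + im^2] = 1. A quick numerical check with synthetic data:
import numpy as np

rng = np.random.default_rng(0)
x = rng.normal(size=(100000, 2))        # standardized real components
x = x / np.sqrt(2.0)                    # scale = 0.7071
symbols = x[:, 0] + 1j * x[:, 1]        # form complex symbols from pairs
print(np.mean(np.abs(symbols) ** 2))    # ~1.0 average power per symbol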
def __init__(self, inputs, n_layers, n_neurons, activation, latent_size, name='encoder'):
    with tf.name_scope(name):
        self._input = inputs
        self._w = util.build_neural_net(input=self._input, n_layers=n_layers, n_neurons=n_neurons,
                                        activation=activation, n_outputs=latent_size)
        self._encoding = tf.contrib.layers.fully_connected(
            self._w, num_outputs=latent_size, activation_fn=tf.nn.relu)  # shape (?, 13, 8)

        # RtoC: split the 8 real outputs into 4 real / 4 imaginary parts.
        self._real = tf.slice(self._encoding, [0, 0, 0], [-1, -1, 4])
        self._image = tf.slice(self._encoding, [0, 0, 4], [-1, -1, 4])
        self._complex = tf.complex(self._real, self._image)
        #print("output is: ", self._complex.shape)

        # Normalization over the last (symbol) axis.
        mean, variance = tf.nn.moments(self._complex, 2, keep_dims=True)
        variance = variance + 1e-8
        minus_mean = tf.math.subtract(self._complex, mean)
        self._encoding = tf.math.divide(minus_mean, tf.math.sqrt(variance))
        #print("self._encoding shape is: ", self._encoding.shape)
        # Shape of the normalized output is (?, 13, 4).

        # CtoR: flatten real and imaginary parts back into a real tensor.
        self._real = tf.math.real(self._encoding)
        self._real = tf.reshape(self._real, [-1, 1, 52])
        self._image = tf.math.imag(self._encoding)
        self._image = tf.reshape(self._image, [-1, 1, 52])
        self._encoding = tf.concat([self._real, self._image], 2)

        # Earlier normalization attempts, kept for reference:
        # self.normalized = tf.math.l2_normalize(self._encoding, axis=2)
        # lenth = tf.dtypes.cast(self.normalized.get_shape()[2], tf.float32)
        # sqrt_len = tf.math.sqrt(lenth)
        # self._encoding = tf.multiply(sqrt_len/2, self.normalized)
        # axis = list(range(len(self._encoding.get_shape()) - 1))
        # #mean, variance = tf.nn.moments(self._encoding, axis)
        # print("axis is: ", axis)
        # self._encoding = tf.compat.v1.layers.batch_normalization(self._encoding, axis=2, center=False, scale=False, trainable=False)  # tf.nn.batch_normalization, tf.contrib.layers.batch_norm
        # Reshape-combine 13 messages
        # self._encoding = tf.concat([self._real, self._image], 2)

        print("self._encoding shape is: ", self._encoding.shape)
        # Shape of the output is (?, 1, 104).
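# A minimal numpy sketch of the shape path through this encoder, based on the
# shape comments above: (batch, 13, 8) real -> (batch, 13, 4) complex ->
# (batch, 1, 104) real. The variance here uses |x - mean|^2, one common
# convention for complex data; treating that as what the tf.nn.moments call
# intends is an assumption.
import numpy as np

batch = 2
enc = np.random.randn(batch, 13, 8).astype(np.float32)

z = enc[..., :4] + 1j * enc[..., 4:]                         # RtoC: (batch, 13, 4)
mean = z.mean(axis=2, keepdims=True)
var = np.mean(np.abs(z - mean) ** 2, axis=2, keepdims=True) + 1e-8
z_norm = (z - mean) / np.sqrt(var)                           # per-message normalization

out = np.concatenate([z_norm.real.reshape(batch, 1, 52),
                      z_norm.imag.reshape(batch, 1, 52)], axis=2)
print(out.shape)                                             # (2, 1, 104)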
def __init__(self, inputs, n_layers, n_neurons, activation, output_size, name='decision'):
    with tf.name_scope(name):
        self._decision = util.build_neural_net(input=inputs, n_layers=n_layers,
                                               n_neurons=n_neurons, activation=activation,
                                               n_outputs=output_size)
def __init__(self, inputs, n_layers, n_neurons, activation, latent_size, name='encoder'):
    with tf.name_scope(name):
        self._encoding = util.build_neural_net(input=inputs, n_layers=n_layers,
                                               n_neurons=n_neurons, activation=activation,
                                               n_outputs=latent_size)
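# All of the blocks above delegate to util.build_neural_net, which is not part of
# this section. The sketch below is an assumption inferred only from the keyword
# arguments used above (input, n_layers, n_neurons, activation, n_outputs): a stack
# of fully connected hidden layers followed by a linear output layer, written
# against the same TF1.x tf.contrib API used elsewhere in this code.
import tensorflow as tf


def build_neural_net(input, n_layers, n_neurons, activation, n_outputs):
    """Hypothetical sketch: n_layers hidden layers of n_neurons units with the
    given activation, then a linear layer producing n_outputs units."""
    hidden = input
    for _ in range(n_layers):
        hidden = tf.contrib.layers.fully_connected(hidden, num_outputs=n_neurons,
                                                   activation_fn=activation)
    return tf.contrib.layers.fully_connected(hidden, num_outputs=n_outputs,
                                             activation_fn=None)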