def cnn(mini_timeslice):
    """input: event tensor, numpy shape (1, 13, 13, 18, 3)"""
    # Stack of four 3D convolutions with batch normalisation,
    # and max pooling after the second and third convolution.
    conv1 = tf.nn.relu(conv3d(mini_timeslice, weights["l1"]) + biases["l1"])
    conv1 = tf.contrib.layers.batch_norm(conv1)
    conv2 = tf.nn.relu(conv3d(conv1, weights["l2"]) + biases["l2"])
    conv2 = maxpool3d(conv2)
    conv2 = tf.contrib.layers.batch_norm(conv2)
    conv3 = tf.nn.relu(conv3d(conv2, weights["l3"]) + biases["l3"])
    conv3 = tf.contrib.layers.batch_norm(conv3)
    conv3 = maxpool3d(conv3)
    conv4 = tf.nn.relu(conv3d(conv3, weights["l4"]) + biases["l4"])
    return conv4
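The listings in this section call the helper functions weight, bias, conv3d and maxpool3d and, except for km3nnet below, read the weights and biases dictionaries and keep_prob from the surrounding scope; none of these are shown here. A minimal sketch of what the helpers could look like, assuming standard TensorFlow 1.x ops; the truncated-normal initialisation, SAME padding and 2x2x2 pooling are assumptions, chosen to be consistent with the 7 * 7 * 9 flattened sizes used in the listings:

import numpy as np
import tensorflow as tf


def weight(shape):
    # Assumed truncated-normal initialisation.
    return tf.Variable(tf.truncated_normal(shape, stddev=0.1))


def bias(num_nodes):
    # Assumed small constant bias per node.
    return tf.Variable(tf.constant(0.1, shape=[num_nodes]))


def conv3d(x, w):
    # 3D convolution with unit strides; SAME padding keeps the spatial size.
    return tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding="SAME")


def maxpool3d(x):
    # 2x2x2 max pooling with stride 2; with SAME padding this reduces a
    # 13x13x18 grid to 7x7x9.
    return tf.nn.max_pool3d(x, ksize=[1, 2, 2, 2, 1],
                            strides=[1, 2, 2, 2, 1], padding="SAME")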
def cnn(mini_timeslice):
    """input: event tensor, numpy shape (1, 13, 13, 18, 3)"""
    # Two 3D convolutions with batch normalisation and max pooling,
    # followed by two fully connected layers with dropout.
    conv1 = tf.nn.relu(conv3d(mini_timeslice, weights["l1"]) + biases["l1"])
    conv1 = tf.contrib.layers.batch_norm(conv1)
    conv2 = tf.nn.relu(conv3d(conv1, weights["l2"]) + biases["l2"])
    conv2 = maxpool3d(conv2)
    conv2 = tf.contrib.layers.batch_norm(conv2)
    # Flatten the pooled feature maps: 7 * 7 * 9 * 25 = 11025 elements.
    # elements = np.prod(conv2._shape_as_list()[1:])
    fc = tf.reshape(conv2, [-1, 11025])
    fc = tf.nn.relu(tf.matmul(fc, weights["l3"]) + biases["l3"])
    fc = tf.nn.dropout(fc, keep_prob)
    fc = tf.contrib.layers.batch_norm(fc)
    fc = tf.nn.relu(tf.matmul(fc, weights["l4"]) + biases["l4"])
    return fc
def cnn(input_slice):
    """input: event tensor, numpy shape (1, 13, 13, 18, 3)"""
    # Three 3D convolutions with max pooling after the second,
    # followed by two fully connected sigmoid layers.
    conv1 = tf.nn.relu(conv3d(input_slice, weights["l1"]) + biases["l1"])
    conv2 = tf.nn.relu(conv3d(conv1, weights["l2"]) + biases["l2"])
    conv2 = maxpool3d(conv2)
    conv3 = tf.nn.relu(conv3d(conv2, weights["l3"]) + biases["l3"])
    # Flatten the conv3 output before the fully connected layers.
    fc = tf.reshape(conv3, [-1, 7 * 7 * 9 * nodes["l3"]])
    fc = tf.nn.sigmoid(tf.matmul(fc, weights["l4"]) + biases["l4"])
    fc = tf.nn.sigmoid(tf.matmul(fc, weights["l5"]) + biases["l5"])
    return fc
def km3nnet(x):
    """input: event tensor, numpy shape (num_minitimeslices, 13, 13, 18, 3)
       output: label prediction, shape (3,) (one hot encoded)"""
    # Layer sizes and parameters of the convolutional, fully connected
    # and LSTM parts of the network.
    nodes = {"l1": 25, "l2": 25, "l3": 80, "l4": 40, "l5": 20}
    weights = {"l1": weight([4, 4, 4, 3, nodes["l1"]]),
               "l2": weight([3, 3, 3, nodes["l1"], nodes["l2"]]),
               "l3": weight([11025, nodes["l3"]]),
               "l4": weight([nodes["l3"], nodes["l4"]])}
    biases = {"l1": bias(nodes["l1"]),
              "l2": bias(nodes["l2"]),
              "l3": bias(nodes["l3"]),
              "l4": bias(nodes["l4"])}
    # CNN part, applied to all mini timeslices at once (batched along axis 0).
    conv1 = tf.nn.relu(conv3d(x, weights["l1"]) + biases["l1"])
    conv2 = tf.nn.relu(conv3d(conv1, weights["l2"]) + biases["l2"])
    conv2 = maxpool3d(conv2)
    # Flatten and apply two fully connected layers.
    elements = np.prod(conv2.get_shape().as_list()[1:])
    fc = tf.reshape(conv2, [-1, elements])
    fc = tf.nn.relu(tf.matmul(fc, weights["l3"]) + biases["l3"])
    fc = tf.nn.relu(tf.matmul(fc, weights["l4"]) + biases["l4"])
    # RNN part: one 40-dimensional feature vector per mini timeslice is fed
    # into an LSTM as a sequence.
    fc = tf.reshape(fc, [50, 1, 40])
    c = tf.unstack(fc, num_mini_timeslices, 0)
    lstm_layer = tf.contrib.rnn.BasicLSTMCell(nodes["l5"], forget_bias=1.)
    outputs, _ = tf.contrib.rnn.static_rnn(lstm_layer, c, dtype=tf.float32)
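The km3nnet listing ends at the static RNN and does not show how the 3-class one-hot prediction promised by the docstring is obtained. A hedged sketch of one possible output stage, mapping the last LSTM output (nodes["l5"] = 20 units) to three classes; the extra weight matrix, bias and softmax below are assumptions, not taken from the original code:

    # Assumed output stage (not part of the original listing): project the
    # final LSTM output onto three classes and normalise with a softmax.
    out_weights = weight([nodes["l5"], 3])
    out_biases = bias(3)
    prediction = tf.nn.softmax(tf.matmul(outputs[-1], out_weights) + out_biases)
    return prediction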
def cnn(input_slice):
    """input: event tensor, numpy shape (1, 13, 13, 18, 3)"""
    # Two 3D convolutions and a max pooling layer,
    # followed by two fully connected sigmoid layers with dropout.
    conv1 = tf.nn.relu(conv3d(input_slice, weights["l1"]) + biases["l1"])
    conv2 = tf.nn.relu(conv3d(conv1, weights["l2"]) + biases["l2"])
    conv2 = maxpool3d(conv2)
    # Flatten the pooled feature maps before the fully connected layers.
    elements = np.prod(conv2.get_shape().as_list()[1:])
    fc = tf.reshape(conv2, [-1, elements])
    fc = tf.nn.sigmoid(tf.matmul(fc, weights["l3"]) + biases["l3"])
    fc = tf.nn.dropout(fc, keep_prob)
    fc = tf.nn.sigmoid(tf.matmul(fc, weights["l4"]) + biases["l4"])
    return fc
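As an illustration of how the single-timeslice cnn graph above might be wired up and evaluated: the weight shapes below follow the km3nnet listing, but the placeholder names, dropout value and dummy input are illustrative assumptions, not part of the original code.

# Hypothetical usage sketch; layer sizes follow km3nnet, the dropout rate
# and the dummy input are assumptions.
nodes = {"l1": 25, "l2": 25, "l3": 80, "l4": 40}
weights = {"l1": weight([4, 4, 4, 3, nodes["l1"]]),
           "l2": weight([3, 3, 3, nodes["l1"], nodes["l2"]]),
           "l3": weight([11025, nodes["l3"]]),
           "l4": weight([nodes["l3"], nodes["l4"]])}
biases = {"l1": bias(nodes["l1"]), "l2": bias(nodes["l2"]),
          "l3": bias(nodes["l3"]), "l4": bias(nodes["l4"])}

x = tf.placeholder(tf.float32, shape=[1, 13, 13, 18, 3])
keep_prob = tf.placeholder(tf.float32)
features = cnn(x)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    mini_timeslice = np.zeros((1, 13, 13, 18, 3), dtype=np.float32)
    out = sess.run(features, feed_dict={x: mini_timeslice, keep_prob: 0.8})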