import argparse
import sys

import numpy as np
import tensorflow as tf

# Project-local modules assumed by this example: `dataset` provides
# ImageNetProducer and `models` provides the model classes and data specs.
import dataset
import models


def main():
    # Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('model_path',
                        help='Path to the converted model parameters (.npy)')
    parser.add_argument('val_gt',
                        help='Path to validation set ground truth (.txt)')
    parser.add_argument('imagenet_data_dir',
                        help='ImageNet validation set images directory path')
    parser.add_argument('--model',
                        default='GoogleNet',
                        help='The name of the model to evaluate')
    args = parser.parse_args()

    # Load the network
    net = load_model(args.model)
    if net is None:
        sys.exit(-1)

    # Load the dataset
    data_spec = models.get_data_spec(model_instance=net)
    image_producer = dataset.ImageNetProducer(val_path=args.val_gt,
                                              data_path=args.imagenet_data_dir,
                                              data_spec=data_spec)

    # Evaluate its performance on the ILSVRC12 validation set
    validate(net, args.model_path, image_producer)
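
# Example invocation (script name and paths are illustrative placeholders):
#   python evaluate.py converted_googlenet.npy val_gt.txt /data/ILSVRC12/val --model GoogleNet
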
def validate(net, model_path, image_producer, top_k=5):
    '''Compute the top_k classification accuracy for the given network and images.'''
    # Get the data specifications for the given network
    spec = models.get_data_spec(model_instance=net)
    # Get the input node for feeding in the images
    input_node = net.inputs['data']
    # Create a placeholder for the ground truth labels
    label_node = tf.placeholder(tf.int32)
    # Get the output of the network (class probabilities)
    probs = net.get_output()
    # in_top_k yields one boolean per image: True if the true label is among
    # the top_k predictions
    top_k_op = tf.nn.in_top_k(probs, label_node, top_k)
    # The number of images processed
    count = 0
    # The number of correctly classified images
    correct = 0
    # The total number of images
    total = len(image_producer)

    with tf.Session() as sesh:
        coordinator = tf.train.Coordinator()
        # Load the converted parameters
        net.load(data_path=model_path, session=sesh)
        # Start the image processing workers
        threads = image_producer.start(session=sesh, coordinator=coordinator)
        # Iterate over and classify mini-batches
        for (labels, images) in image_producer.batches(sesh):
            correct += np.sum(
                sesh.run(top_k_op,
                         feed_dict={
                             input_node: images,
                             label_node: labels
                         }))
            count += len(labels)
            cur_accuracy = float(correct) * 100 / count
            print('{:>6}/{:<6} {:>6.2f}%'.format(count, total, cur_accuracy))
        # Stop the worker threads
        coordinator.request_stop()
        coordinator.join(threads, stop_grace_period_secs=2)
    print('Top {} Accuracy: {}'.format(top_k, float(correct) / total))


def load_model(name):
    '''Creates and returns an instance of the model given its class name.
    The created model has a single placeholder node for feeding images.
    '''
    # Find the model class from its name
    all_models = models.get_models()
    lut = {model.__name__: model for model in all_models}
    if name not in lut:
        print('Invalid model name. Options are:')
        # Display a list of valid model names
        for model in all_models:
            print('\t* {}'.format(model.__name__))
        return None
    NetClass = lut[name]

    # Create a placeholder for the input image
    spec = models.get_data_spec(model_class=NetClass)
    data_node = tf.placeholder(tf.float32,
                               shape=(None, spec.crop_size, spec.crop_size,
                                      spec.channels))

    # Construct and return the model
    return NetClass({'data': data_node})


if __name__ == '__main__':
    main()
Example #4
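
# Assumed context for this and the following noisy-network examples (not shown
# in the snippets): TensorFlow 1.x imported as `tf`, a project `helper` module
# providing add_noise and get_data_spec, and layer helpers conv,
# batch_normalization, max_pool, avg_pool, fc, add, relu, concat, and lrn.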
def resnet50_noisy(input_node, netparams, err_mean, err_stddev, train_vars):
	weights_noisy, biases_noisy, err_w, err_b = helper.add_noise(netparams['weights'], netparams['biases'], err_mean, err_stddev, train_vars)
	mean, variance, scale, offset = netparams['mean'], netparams['variance'], netparams['scale'], netparams['offset']
	err_lyr = {}
	layers_err = {}
	data_spec = helper.get_data_spec('resnet50')
	err_lyr['input'] = tf.get_variable(name='input_lyr_err', shape=(1, data_spec.crop_size, data_spec.crop_size, data_spec.channels), initializer=tf.random_normal_initializer(mean=err_mean[0], stddev=err_stddev[0]), trainable=train_vars[0])
	input_node_noisy = tf.add(input_node, err_lyr['input'])
	conv1 = conv(input_node_noisy, weights_noisy['conv1'], biases_noisy['conv1'], 2, 2, relu=False)
	err_lyr['conv1'] = tf.get_variable(name='conv1_lyr_err', shape=conv1.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['conv1'] = tf.add(conv1, err_lyr['conv1'])
	bn_conv1 = batch_normalization(layers_err['conv1'], scale['bn_conv1'], offset['bn_conv1'], mean['bn_conv1'], variance['bn_conv1'], relu=True)
	pool1 = max_pool(bn_conv1, 3, 3, 2, 2)
	res2a_branch1 = conv(pool1, weights_noisy['res2a_branch1'], biases_noisy['res2a_branch1'], 1, 1, biased=False, relu=False)
	err_lyr['res2a_branch1'] = tf.get_variable(name='res2a_branch1_lyr_err', shape=res2a_branch1.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2a_branch1'] = tf.add(res2a_branch1, err_lyr['res2a_branch1'])
	bn2a_branch1 = batch_normalization(layers_err['res2a_branch1'], scale['bn2a_branch1'], offset['bn2a_branch1'], mean['bn2a_branch1'], variance['bn2a_branch1'])
	res2a_branch2a = conv(pool1, weights_noisy['res2a_branch2a'], biases_noisy['res2a_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res2a_branch2a'] = tf.get_variable(name='res2a_branch2a_lyr_err', shape=res2a_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2a_branch2a'] = tf.add(res2a_branch2a, err_lyr['res2a_branch2a'])
	bn2a_branch2a = batch_normalization(layers_err['res2a_branch2a'], scale['bn2a_branch2a'], offset['bn2a_branch2a'], mean['bn2a_branch2a'], variance['bn2a_branch2a'], relu=True)
	res2a_branch2b = conv(bn2a_branch2a, weights_noisy['res2a_branch2b'], biases_noisy['res2a_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res2a_branch2b'] = tf.get_variable(name='res2a_branch2b_lyr_err', shape=res2a_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2a_branch2b'] = tf.add(res2a_branch2b, err_lyr['res2a_branch2b'])
	bn2a_branch2b = batch_normalization(layers_err['res2a_branch2b'], scale['bn2a_branch2b'], offset['bn2a_branch2b'], mean['bn2a_branch2b'], variance['bn2a_branch2b'], relu=True)
	res2a_branch2c = conv(bn2a_branch2b, weights_noisy['res2a_branch2c'], biases_noisy['res2a_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res2a_branch2c'] = tf.get_variable(name='res2a_branch2c_lyr_err', shape=res2a_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2a_branch2c'] = tf.add(res2a_branch2c, err_lyr['res2a_branch2c'])
	bn2a_branch2c = batch_normalization(layers_err['res2a_branch2c'], scale['bn2a_branch2c'], offset['bn2a_branch2c'], mean['bn2a_branch2c'], variance['bn2a_branch2c'])
	res2a = add([bn2a_branch1, bn2a_branch2c])
	err_lyr['res2a'] = tf.get_variable(name='res2a_lyr_err', shape=res2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2a'] = tf.add(res2a, err_lyr['res2a'])
	res2a_relu = relu(layers_err['res2a'])
	res2b_branch2a = conv(res2a_relu, weights_noisy['res2b_branch2a'], biases_noisy['res2b_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res2b_branch2a'] = tf.get_variable(name='res2b_branch2a_lyr_err', shape=res2b_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2b_branch2a'] = tf.add(res2b_branch2a, err_lyr['res2b_branch2a'])
	bn2b_branch2a = batch_normalization(layers_err['res2b_branch2a'], scale['bn2b_branch2a'], offset['bn2b_branch2a'], mean['bn2b_branch2a'], variance['bn2b_branch2a'], relu=True)
	res2b_branch2b = conv(bn2b_branch2a, weights_noisy['res2b_branch2b'], biases_noisy['res2b_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res2b_branch2b'] = tf.get_variable(name='res2b_branch2b_lyr_err', shape=res2b_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2b_branch2b'] = tf.add(res2b_branch2b, err_lyr['res2b_branch2b'])
	bn2b_branch2b = batch_normalization(layers_err['res2b_branch2b'], scale['bn2b_branch2b'], offset['bn2b_branch2b'], mean['bn2b_branch2b'], variance['bn2b_branch2b'], relu=True)
	res2b_branch2c = conv(bn2b_branch2b, weights_noisy['res2b_branch2c'], biases_noisy['res2b_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res2b_branch2c'] = tf.get_variable(name='res2b_branch2c_lyr_err', shape=res2b_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2b_branch2c'] = tf.add(res2b_branch2c, err_lyr['res2b_branch2c'])
	bn2b_branch2c = batch_normalization(layers_err['res2b_branch2c'], scale['bn2b_branch2c'], offset['bn2b_branch2c'], mean['bn2b_branch2c'], variance['bn2b_branch2c'])
	res2b = add([res2a_relu, bn2b_branch2c])
	err_lyr['res2b'] = tf.get_variable(name='res2b_lyr_err', shape=res2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2b'] = tf.add(res2b, err_lyr['res2b'])
	res2b_relu = relu(layers_err['res2b'])
	res2c_branch2a = conv(res2b_relu, weights_noisy['res2c_branch2a'], biases_noisy['res2c_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res2c_branch2a'] = tf.get_variable(name='res2c_branch2a_lyr_err', shape=res2c_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2c_branch2a'] = tf.add(res2c_branch2a, err_lyr['res2c_branch2a'])
	bn2c_branch2a = batch_normalization(layers_err['res2c_branch2a'], scale['bn2c_branch2a'], offset['bn2c_branch2a'], mean['bn2c_branch2a'], variance['bn2c_branch2a'], relu=True)
	res2c_branch2b = conv(bn2c_branch2a, weights_noisy['res2c_branch2b'], biases_noisy['res2c_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res2c_branch2b'] = tf.get_variable(name='res2c_branch2b_lyr_err', shape=res2c_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2c_branch2b'] = tf.add(res2c_branch2b, err_lyr['res2c_branch2b'])
	bn2c_branch2b = batch_normalization(layers_err['res2c_branch2b'], scale['bn2c_branch2b'], offset['bn2c_branch2b'], mean['bn2c_branch2b'], variance['bn2c_branch2b'], relu=True)
	res2c_branch2c = conv(bn2c_branch2b, weights_noisy['res2c_branch2c'], biases_noisy['res2c_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res2c_branch2c'] = tf.get_variable(name='res2c_branch2c_lyr_err', shape=res2c_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2c_branch2c'] = tf.add(res2c_branch2c, err_lyr['res2c_branch2c'])
	bn2c_branch2c = batch_normalization(layers_err['res2c_branch2c'], scale['bn2c_branch2c'], offset['bn2c_branch2c'], mean['bn2c_branch2c'], variance['bn2c_branch2c'])
	res2c = add([res2b_relu, bn2c_branch2c])
	err_lyr['res2c'] = tf.get_variable(name='res2c_lyr_err', shape=res2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res2c'] = tf.add(res2c, err_lyr['res2c'])
	res2c_relu = relu(layers_err['res2c'])
	res3a_branch1 = conv(res2c_relu, weights_noisy['res3a_branch1'], biases_noisy['res3a_branch1'], 2, 2, biased=False, relu=False)
	err_lyr['res3a_branch1'] = tf.get_variable(name='res3a_branch1_lyr_err', shape=res3a_branch1.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3a_branch1'] = tf.add(res3a_branch1, err_lyr['res3a_branch1'])
	bn3a_branch1 = batch_normalization(layers_err['res3a_branch1'], scale['bn3a_branch1'], offset['bn3a_branch1'], mean['bn3a_branch1'], variance['bn3a_branch1'])
	res3a_branch2a = conv(res2c_relu, weights_noisy['res3a_branch2a'], biases_noisy['res3a_branch2a'], 2, 2, biased=False, relu=False)
	err_lyr['res3a_branch2a'] = tf.get_variable(name='res3a_branch2a_lyr_err', shape=res3a_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3a_branch2a'] = tf.add(res3a_branch2a, err_lyr['res3a_branch2a'])
	bn3a_branch2a = batch_normalization(layers_err['res3a_branch2a'], scale['bn3a_branch2a'], offset['bn3a_branch2a'], mean['bn3a_branch2a'], variance['bn3a_branch2a'], relu=True)
	res3a_branch2b = conv(bn3a_branch2a, weights_noisy['res3a_branch2b'], biases_noisy['res3a_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res3a_branch2b'] = tf.get_variable(name='res3a_branch2b_lyr_err', shape=res3a_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3a_branch2b'] = tf.add(res3a_branch2b, err_lyr['res3a_branch2b'])
	bn3a_branch2b = batch_normalization(layers_err['res3a_branch2b'], scale['bn3a_branch2b'], offset['bn3a_branch2b'], mean['bn3a_branch2b'], variance['bn3a_branch2b'], relu=True)
	res3a_branch2c = conv(bn3a_branch2b, weights_noisy['res3a_branch2c'], biases_noisy['res3a_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res3a_branch2c'] = tf.get_variable(name='res3a_branch2c_lyr_err', shape=res3a_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3a_branch2c'] = tf.add(res3a_branch2c, err_lyr['res3a_branch2c'])
	bn3a_branch2c = batch_normalization(layers_err['res3a_branch2c'], scale['bn3a_branch2c'], offset['bn3a_branch2c'], mean['bn3a_branch2c'], variance['bn3a_branch2c'])
	res3a = add([bn3a_branch1, bn3a_branch2c])
	err_lyr['res3a'] = tf.get_variable(name='res3a_lyr_err', shape=res3a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3a'] = tf.add(res3a, err_lyr['res3a'])
	res3a_relu = relu(layers_err['res3a'])
	res3b_branch2a = conv(res3a_relu, weights_noisy['res3b_branch2a'], biases_noisy['res3b_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res3b_branch2a'] = tf.get_variable(name='res3b_branch2a_lyr_err', shape=res3b_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3b_branch2a'] = tf.add(res3b_branch2a, err_lyr['res3b_branch2a'])
	bn3b_branch2a = batch_normalization(layers_err['res3b_branch2a'], scale['bn3b_branch2a'], offset['bn3b_branch2a'], mean['bn3b_branch2a'], variance['bn3b_branch2a'], relu=True)
	res3b_branch2b = conv(bn3b_branch2a, weights_noisy['res3b_branch2b'], biases_noisy['res3b_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res3b_branch2b'] = tf.get_variable(name='res3b_branch2b_lyr_err', shape=res3b_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3b_branch2b'] = tf.add(res3b_branch2b, err_lyr['res3b_branch2b'])
	bn3b_branch2b = batch_normalization(layers_err['res3b_branch2b'], scale['bn3b_branch2b'], offset['bn3b_branch2b'], mean['bn3b_branch2b'], variance['bn3b_branch2b'], relu=True)
	res3b_branch2c = conv(bn3b_branch2b, weights_noisy['res3b_branch2c'], biases_noisy['res3b_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res3b_branch2c'] = tf.get_variable(name='res3b_branch2c_lyr_err', shape=res3b_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3b_branch2c'] = tf.add(res3b_branch2c, err_lyr['res3b_branch2c'])
	bn3b_branch2c = batch_normalization(layers_err['res3b_branch2c'], scale['bn3b_branch2c'], offset['bn3b_branch2c'], mean['bn3b_branch2c'], variance['bn3b_branch2c'])
	res3b = add([res3a_relu, bn3b_branch2c])
	err_lyr['res3b'] = tf.get_variable(name='res3b_lyr_err', shape=res3b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3b'] = tf.add(res3b, err_lyr['res3b'])
	res3b_relu = relu(layers_err['res3b'])
	res3c_branch2a = conv(res3b_relu, weights_noisy['res3c_branch2a'], biases_noisy['res3c_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res3c_branch2a'] = tf.get_variable(name='res3c_branch2a_lyr_err', shape=res3c_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3c_branch2a'] = tf.add(res3c_branch2a, err_lyr['res3c_branch2a'])
	bn3c_branch2a = batch_normalization(layers_err['res3c_branch2a'], scale['bn3c_branch2a'], offset['bn3c_branch2a'], mean['bn3c_branch2a'], variance['bn3c_branch2a'], relu=True)
	res3c_branch2b = conv(bn3c_branch2a, weights_noisy['res3c_branch2b'], biases_noisy['res3c_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res3c_branch2b'] = tf.get_variable(name='res3c_branch2b_lyr_err', shape=res3c_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3c_branch2b'] = tf.add(res3c_branch2b, err_lyr['res3c_branch2b'])
	bn3c_branch2b = batch_normalization(layers_err['res3c_branch2b'], scale['bn3c_branch2b'], offset['bn3c_branch2b'], mean['bn3c_branch2b'], variance['bn3c_branch2b'], relu=True)
	res3c_branch2c = conv(bn3c_branch2b, weights_noisy['res3c_branch2c'], biases_noisy['res3c_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res3c_branch2c'] = tf.get_variable(name='res3c_branch2c_lyr_err', shape=res3c_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3c_branch2c'] = tf.add(res3c_branch2c, err_lyr['res3c_branch2c'])
	bn3c_branch2c = batch_normalization(layers_err['res3c_branch2c'], scale['bn3c_branch2c'], offset['bn3c_branch2c'], mean['bn3c_branch2c'], variance['bn3c_branch2c'])
	res3c = add([res3b_relu, bn3c_branch2c])
	err_lyr['res3c'] = tf.get_variable(name='res3c_lyr_err', shape=res3c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3c'] = tf.add(res3c, err_lyr['res3c'])
	res3c_relu = relu(layers_err['res3c'])
	res3d_branch2a = conv(res3c_relu, weights_noisy['res3d_branch2a'], biases_noisy['res3d_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res3d_branch2a'] = tf.get_variable(name='res3d_branch2a_lyr_err', shape=res3d_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3d_branch2a'] = tf.add(res3d_branch2a, err_lyr['res3d_branch2a'])
	bn3d_branch2a = batch_normalization(layers_err['res3d_branch2a'], scale['bn3d_branch2a'], offset['bn3d_branch2a'], mean['bn3d_branch2a'], variance['bn3d_branch2a'], relu=True)
	res3d_branch2b = conv(bn3d_branch2a, weights_noisy['res3d_branch2b'], biases_noisy['res3d_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res3d_branch2b'] = tf.get_variable(name='res3d_branch2b_lyr_err', shape=res3d_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3d_branch2b'] = tf.add(res3d_branch2b, err_lyr['res3d_branch2b'])
	bn3d_branch2b = batch_normalization(layers_err['res3d_branch2b'], scale['bn3d_branch2b'], offset['bn3d_branch2b'], mean['bn3d_branch2b'], variance['bn3d_branch2b'], relu=True)
	res3d_branch2c = conv(bn3d_branch2b, weights_noisy['res3d_branch2c'], biases_noisy['res3d_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res3d_branch2c'] = tf.get_variable(name='res3d_branch2c_lyr_err', shape=res3d_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3d_branch2c'] = tf.add(res3d_branch2c, err_lyr['res3d_branch2c'])
	bn3d_branch2c = batch_normalization(layers_err['res3d_branch2c'], scale['bn3d_branch2c'], offset['bn3d_branch2c'], mean['bn3d_branch2c'], variance['bn3d_branch2c'])
	res3d = add([res3c_relu, bn3d_branch2c])
	err_lyr['res3d'] = tf.get_variable(name='res3d_lyr_err', shape=res3d.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res3d'] = tf.add(res3d, err_lyr['res3d'])
	res3d_relu = relu(layers_err['res3d'])
	res4a_branch1 = conv(res3d_relu, weights_noisy['res4a_branch1'], biases_noisy['res4a_branch1'], 2, 2, biased=False, relu=False)
	err_lyr['res4a_branch1'] = tf.get_variable(name='res4a_branch1_lyr_err', shape=res4a_branch1.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4a_branch1'] = tf.add(res4a_branch1, err_lyr['res4a_branch1'])
	bn4a_branch1 = batch_normalization(layers_err['res4a_branch1'], scale['bn4a_branch1'], offset['bn4a_branch1'], mean['bn4a_branch1'], variance['bn4a_branch1'])
	res4a_branch2a = conv(res3d_relu, weights_noisy['res4a_branch2a'], biases_noisy['res4a_branch2a'], 2, 2, biased=False, relu=False)
	err_lyr['res4a_branch2a'] = tf.get_variable(name='res4a_branch2a_lyr_err', shape=res4a_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4a_branch2a'] = tf.add(res4a_branch2a, err_lyr['res4a_branch2a'])
	bn4a_branch2a = batch_normalization(layers_err['res4a_branch2a'], scale['bn4a_branch2a'], offset['bn4a_branch2a'], mean['bn4a_branch2a'], variance['bn4a_branch2a'], relu=True)
	res4a_branch2b = conv(bn4a_branch2a, weights_noisy['res4a_branch2b'], biases_noisy['res4a_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res4a_branch2b'] = tf.get_variable(name='res4a_branch2b_lyr_err', shape=res4a_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4a_branch2b'] = tf.add(res4a_branch2b, err_lyr['res4a_branch2b'])
	bn4a_branch2b = batch_normalization(layers_err['res4a_branch2b'], scale['bn4a_branch2b'], offset['bn4a_branch2b'], mean['bn4a_branch2b'], variance['bn4a_branch2b'], relu=True)
	res4a_branch2c = conv(bn4a_branch2b, weights_noisy['res4a_branch2c'], biases_noisy['res4a_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res4a_branch2c'] = tf.get_variable(name='res4a_branch2c_lyr_err', shape=res4a_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4a_branch2c'] = tf.add(res4a_branch2c, err_lyr['res4a_branch2c'])
	bn4a_branch2c = batch_normalization(layers_err['res4a_branch2c'], scale['bn4a_branch2c'], offset['bn4a_branch2c'], mean['bn4a_branch2c'], variance['bn4a_branch2c'])
	res4a = add([bn4a_branch1, bn4a_branch2c])
	err_lyr['res4a'] = tf.get_variable(name='res4a_lyr_err', shape=res4a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4a'] = tf.add(res4a, err_lyr['res4a'])
	res4a_relu = relu(layers_err['res4a'])
	res4b_branch2a = conv(res4a_relu, weights_noisy['res4b_branch2a'], biases_noisy['res4b_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res4b_branch2a'] = tf.get_variable(name='res4b_branch2a_lyr_err', shape=res4b_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4b_branch2a'] = tf.add(res4b_branch2a, err_lyr['res4b_branch2a'])
	bn4b_branch2a = batch_normalization(layers_err['res4b_branch2a'], scale['bn4b_branch2a'], offset['bn4b_branch2a'], mean['bn4b_branch2a'], variance['bn4b_branch2a'], relu=True)
	res4b_branch2b = conv(bn4b_branch2a, weights_noisy['res4b_branch2b'], biases_noisy['res4b_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res4b_branch2b'] = tf.get_variable(name='res4b_branch2b_lyr_err', shape=res4b_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4b_branch2b'] = tf.add(res4b_branch2b, err_lyr['res4b_branch2b'])
	bn4b_branch2b = batch_normalization(layers_err['res4b_branch2b'], scale['bn4b_branch2b'], offset['bn4b_branch2b'], mean['bn4b_branch2b'], variance['bn4b_branch2b'], relu=True)
	res4b_branch2c = conv(bn4b_branch2b, weights_noisy['res4b_branch2c'], biases_noisy['res4b_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res4b_branch2c'] = tf.get_variable(name='res4b_branch2c_lyr_err', shape=res4b_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4b_branch2c'] = tf.add(res4b_branch2c, err_lyr['res4b_branch2c'])
	bn4b_branch2c = batch_normalization(layers_err['res4b_branch2c'], scale['bn4b_branch2c'], offset['bn4b_branch2c'], mean['bn4b_branch2c'], variance['bn4b_branch2c'])
	res4b = add([res4a_relu, bn4b_branch2c])
	err_lyr['res4b'] = tf.get_variable(name='res4b_lyr_err', shape=res4b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4b'] = tf.add(res4b, err_lyr['res4b'])
	res4b_relu = relu(layers_err['res4b'])
	res4c_branch2a = conv(res4b_relu, weights_noisy['res4c_branch2a'], biases_noisy['res4c_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res4c_branch2a'] = tf.get_variable(name='res4c_branch2a_lyr_err', shape=res4c_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4c_branch2a'] = tf.add(res4c_branch2a, err_lyr['res4c_branch2a'])
	bn4c_branch2a = batch_normalization(layers_err['res4c_branch2a'], scale['bn4c_branch2a'], offset['bn4c_branch2a'], mean['bn4c_branch2a'], variance['bn4c_branch2a'], relu=True)
	res4c_branch2b = conv(bn4c_branch2a, weights_noisy['res4c_branch2b'], biases_noisy['res4c_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res4c_branch2b'] = tf.get_variable(name='res4c_branch2b_lyr_err', shape=res4c_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4c_branch2b'] = tf.add(res4c_branch2b, err_lyr['res4c_branch2b'])
	bn4c_branch2b = batch_normalization(layers_err['res4c_branch2b'], scale['bn4c_branch2b'], offset['bn4c_branch2b'], mean['bn4c_branch2b'], variance['bn4c_branch2b'], relu=True)
	res4c_branch2c = conv(bn4c_branch2b, weights_noisy['res4c_branch2c'], biases_noisy['res4c_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res4c_branch2c'] = tf.get_variable(name='res4c_branch2c_lyr_err', shape=res4c_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4c_branch2c'] = tf.add(res4c_branch2c, err_lyr['res4c_branch2c'])
	bn4c_branch2c = batch_normalization(layers_err['res4c_branch2c'], scale['bn4c_branch2c'], offset['bn4c_branch2c'], mean['bn4c_branch2c'], variance['bn4c_branch2c'])
	res4c = add([res4b_relu, bn4c_branch2c])
	err_lyr['res4c'] = tf.get_variable(name='res4c_lyr_err', shape=res4c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4c'] = tf.add(res4c, err_lyr['res4c'])
	res4c_relu = relu(layers_err['res4c'])
	res4d_branch2a = conv(res4c_relu, weights_noisy['res4d_branch2a'], biases_noisy['res4d_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res4d_branch2a'] = tf.get_variable(name='res4d_branch2a_lyr_err', shape=res4d_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4d_branch2a'] = tf.add(res4d_branch2a, err_lyr['res4d_branch2a'])
	bn4d_branch2a = batch_normalization(layers_err['res4d_branch2a'], scale['bn4d_branch2a'], offset['bn4d_branch2a'], mean['bn4d_branch2a'], variance['bn4d_branch2a'], relu=True)
	res4d_branch2b = conv(bn4d_branch2a, weights_noisy['res4d_branch2b'], biases_noisy['res4d_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res4d_branch2b'] = tf.get_variable(name='res4d_branch2b_lyr_err', shape=res4d_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4d_branch2b'] = tf.add(res4d_branch2b, err_lyr['res4d_branch2b'])
	bn4d_branch2b = batch_normalization(layers_err['res4d_branch2b'], scale['bn4d_branch2b'], offset['bn4d_branch2b'], mean['bn4d_branch2b'], variance['bn4d_branch2b'], relu=True)
	res4d_branch2c = conv(bn4d_branch2b, weights_noisy['res4d_branch2c'], biases_noisy['res4d_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res4d_branch2c'] = tf.get_variable(name='res4d_branch2c_lyr_err', shape=res4d_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4d_branch2c'] = tf.add(res4d_branch2c, err_lyr['res4d_branch2c'])
	bn4d_branch2c = batch_normalization(layers_err['res4d_branch2c'], scale['bn4d_branch2c'], offset['bn4d_branch2c'], mean['bn4d_branch2c'], variance['bn4d_branch2c'])
	res4d = add([res4c_relu, bn4d_branch2c])
	err_lyr['res4d'] = tf.get_variable(name='res4d_lyr_err', shape=res4d.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4d'] = tf.add(res4d, err_lyr['res4d'])
	res4d_relu = relu(layers_err['res4d'])
	res4e_branch2a = conv(res4d_relu, weights_noisy['res4e_branch2a'], biases_noisy['res4e_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res4e_branch2a'] = tf.get_variable(name='res4e_branch2a_lyr_err', shape=res4e_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4e_branch2a'] = tf.add(res4e_branch2a, err_lyr['res4e_branch2a'])
	bn4e_branch2a = batch_normalization(layers_err['res4e_branch2a'], scale['bn4e_branch2a'], offset['bn4e_branch2a'], mean['bn4e_branch2a'], variance['bn4e_branch2a'], relu=True)
	res4e_branch2b = conv(bn4e_branch2a, weights_noisy['res4e_branch2b'], biases_noisy['res4e_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res4e_branch2b'] = tf.get_variable(name='res4e_branch2b_lyr_err', shape=res4e_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4e_branch2b'] = tf.add(res4e_branch2b, err_lyr['res4e_branch2b'])
	bn4e_branch2b = batch_normalization(layers_err['res4e_branch2b'], scale['bn4e_branch2b'], offset['bn4e_branch2b'], mean['bn4e_branch2b'], variance['bn4e_branch2b'], relu=True)
	res4e_branch2c = conv(bn4e_branch2b, weights_noisy['res4e_branch2c'], biases_noisy['res4e_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res4e_branch2c'] = tf.get_variable(name='res4e_branch2c_lyr_err', shape=res4e_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4e_branch2c'] = tf.add(res4e_branch2c, err_lyr['res4e_branch2c'])
	bn4e_branch2c = batch_normalization(layers_err['res4e_branch2c'], scale['bn4e_branch2c'], offset['bn4e_branch2c'], mean['bn4e_branch2c'], variance['bn4e_branch2c'])
	res4e = add([res4d_relu, bn4e_branch2c])
	err_lyr['res4e'] = tf.get_variable(name='res4e_lyr_err', shape=res4e.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4e'] = tf.add(res4e, err_lyr['res4e'])
	res4e_relu = relu(layers_err['res4e'])
	res4f_branch2a = conv(res4e_relu, weights_noisy['res4f_branch2a'], biases_noisy['res4f_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res4f_branch2a'] = tf.get_variable(name='res4f_branch2a_lyr_err', shape=res4f_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4f_branch2a'] = tf.add(res4f_branch2a, err_lyr['res4f_branch2a'])
	bn4f_branch2a = batch_normalization(layers_err['res4f_branch2a'], scale['bn4f_branch2a'], offset['bn4f_branch2a'], mean['bn4f_branch2a'], variance['bn4f_branch2a'], relu=True)
	res4f_branch2b = conv(bn4f_branch2a, weights_noisy['res4f_branch2b'], biases_noisy['res4f_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res4f_branch2b'] = tf.get_variable(name='res4f_branch2b_lyr_err', shape=res4f_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4f_branch2b'] = tf.add(res4f_branch2b, err_lyr['res4f_branch2b'])
	bn4f_branch2b = batch_normalization(layers_err['res4f_branch2b'], scale['bn4f_branch2b'], offset['bn4f_branch2b'], mean['bn4f_branch2b'], variance['bn4f_branch2b'], relu=True)
	res4f_branch2c = conv(bn4f_branch2b, weights_noisy['res4f_branch2c'], biases_noisy['res4f_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res4f_branch2c'] = tf.get_variable(name='res4f_branch2c_lyr_err', shape=res4f_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4f_branch2c'] = tf.add(res4f_branch2c, err_lyr['res4f_branch2c'])
	bn4f_branch2c = batch_normalization(layers_err['res4f_branch2c'], scale['bn4f_branch2c'], offset['bn4f_branch2c'], mean['bn4f_branch2c'], variance['bn4f_branch2c'])
	res4f = add([res4e_relu, bn4f_branch2c])
	err_lyr['res4f'] = tf.get_variable(name='res4f_lyr_err', shape=res4f.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res4f'] = tf.add(res4f, err_lyr['res4f'])
	res4f_relu = relu(layers_err['res4f'])
	res5a_branch1 = conv(res4f_relu, weights_noisy['res5a_branch1'], biases_noisy['res5a_branch1'], 2, 2, biased=False, relu=False)
	err_lyr['res5a_branch1'] = tf.get_variable(name='res5a_branch1_lyr_err', shape=res5a_branch1.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5a_branch1'] = tf.add(res5a_branch1, err_lyr['res5a_branch1'])
	bn5a_branch1 = batch_normalization(layers_err['res5a_branch1'], scale['bn5a_branch1'], offset['bn5a_branch1'], mean['bn5a_branch1'], variance['bn5a_branch1'])
	res5a_branch2a = conv(res4f_relu, weights_noisy['res5a_branch2a'], biases_noisy['res5a_branch2a'], 2, 2, biased=False, relu=False)
	err_lyr['res5a_branch2a'] = tf.get_variable(name='res5a_branch2a_lyr_err', shape=res5a_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5a_branch2a'] = tf.add(res5a_branch2a, err_lyr['res5a_branch2a'])
	bn5a_branch2a = batch_normalization(layers_err['res5a_branch2a'], scale['bn5a_branch2a'], offset['bn5a_branch2a'], mean['bn5a_branch2a'], variance['bn5a_branch2a'], relu=True)
	res5a_branch2b = conv(bn5a_branch2a, weights_noisy['res5a_branch2b'], biases_noisy['res5a_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res5a_branch2b'] = tf.get_variable(name='res5a_branch2b_lyr_err', shape=res5a_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5a_branch2b'] = tf.add(res5a_branch2b, err_lyr['res5a_branch2b'])
	bn5a_branch2b = batch_normalization(layers_err['res5a_branch2b'], scale['bn5a_branch2b'], offset['bn5a_branch2b'], mean['bn5a_branch2b'], variance['bn5a_branch2b'], relu=True)
	res5a_branch2c = conv(bn5a_branch2b, weights_noisy['res5a_branch2c'], biases_noisy['res5a_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res5a_branch2c'] = tf.get_variable(name='res5a_branch2c_lyr_err', shape=res5a_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5a_branch2c'] = tf.add(res5a_branch2c, err_lyr['res5a_branch2c'])
	bn5a_branch2c = batch_normalization(layers_err['res5a_branch2c'], scale['bn5a_branch2c'], offset['bn5a_branch2c'], mean['bn5a_branch2c'], variance['bn5a_branch2c'])
	res5a = add([bn5a_branch1, bn5a_branch2c])
	err_lyr['res5a'] = tf.get_variable(name='res5a_lyr_err', shape=res5a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5a'] = tf.add(res5a, err_lyr['res5a'])
	res5a_relu = relu(layers_err['res5a'])
	res5b_branch2a = conv(res5a_relu, weights_noisy['res5b_branch2a'], biases_noisy['res5b_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res5b_branch2a'] = tf.get_variable(name='res5b_branch2a_lyr_err', shape=res5b_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5b_branch2a'] = tf.add(res5b_branch2a, err_lyr['res5b_branch2a'])
	bn5b_branch2a = batch_normalization(layers_err['res5b_branch2a'], scale['bn5b_branch2a'], offset['bn5b_branch2a'], mean['bn5b_branch2a'], variance['bn5b_branch2a'], relu=True)
	res5b_branch2b = conv(bn5b_branch2a, weights_noisy['res5b_branch2b'], biases_noisy['res5b_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res5b_branch2b'] = tf.get_variable(name='res5b_branch2b_lyr_err', shape=res5b_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5b_branch2b'] = tf.add(res5b_branch2b, err_lyr['res5b_branch2b'])
	bn5b_branch2b = batch_normalization(layers_err['res5b_branch2b'], scale['bn5b_branch2b'], offset['bn5b_branch2b'], mean['bn5b_branch2b'], variance['bn5b_branch2b'], relu=True)
	res5b_branch2c = conv(bn5b_branch2b, weights_noisy['res5b_branch2c'], biases_noisy['res5b_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res5b_branch2c'] = tf.get_variable(name='res5b_branch2c_lyr_err', shape=res5b_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5b_branch2c'] = tf.add(res5b_branch2c, err_lyr['res5b_branch2c'])
	bn5b_branch2c = batch_normalization(layers_err['res5b_branch2c'], scale['bn5b_branch2c'], offset['bn5b_branch2c'], mean['bn5b_branch2c'], variance['bn5b_branch2c'])
	res5b = add([res5a_relu, bn5b_branch2c])
	err_lyr['res5b'] = tf.get_variable(name='res5b_lyr_err', shape=res5b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5b'] = tf.add(res5b, err_lyr['res5b'])
	res5b_relu = relu(layers_err['res5b'])
	res5c_branch2a = conv(res5b_relu, weights_noisy['res5c_branch2a'], biases_noisy['res5c_branch2a'], 1, 1, biased=False, relu=False)
	err_lyr['res5c_branch2a'] = tf.get_variable(name='res5c_branch2a_lyr_err', shape=res5c_branch2a.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5c_branch2a'] = tf.add(res5c_branch2a, err_lyr['res5c_branch2a'])
	bn5c_branch2a = batch_normalization(layers_err['res5c_branch2a'], scale['bn5c_branch2a'], offset['bn5c_branch2a'], mean['bn5c_branch2a'], variance['bn5c_branch2a'], relu=True)
	res5c_branch2b = conv(bn5c_branch2a, weights_noisy['res5c_branch2b'], biases_noisy['res5c_branch2b'], 1, 1, biased=False, relu=False)
	err_lyr['res5c_branch2b'] = tf.get_variable(name='res5c_branch2b_lyr_err', shape=res5c_branch2b.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5c_branch2b'] = tf.add(res5c_branch2b, err_lyr['res5c_branch2b'])
	bn5c_branch2b = batch_normalization(layers_err['res5c_branch2b'], scale['bn5c_branch2b'], offset['bn5c_branch2b'], mean['bn5c_branch2b'], variance['bn5c_branch2b'], relu=True)
	res5c_branch2c = conv(bn5c_branch2b, weights_noisy['res5c_branch2c'], biases_noisy['res5c_branch2c'], 1, 1, biased=False, relu=False)
	err_lyr['res5c_branch2c'] = tf.get_variable(name='res5c_branch2c_lyr_err', shape=res5c_branch2c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5c_branch2c'] = tf.add(res5c_branch2c, err_lyr['res5c_branch2c'])
	bn5c_branch2c = batch_normalization(layers_err['res5c_branch2c'], scale['bn5c_branch2c'], offset['bn5c_branch2c'], mean['bn5c_branch2c'], variance['bn5c_branch2c'])
	res5c = add([res5b_relu, bn5c_branch2c])
	err_lyr['res5c'] = tf.get_variable(name='res5c_lyr_err', shape=res5c.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['res5c'] = tf.add(res5c, err_lyr['res5c'])
	res5c_relu = relu(layers_err['res5c'])
	pool5 = avg_pool(res5c_relu, 7, 7, 1, 1, padding='VALID')
	fc1000 = fc(pool5, weights_noisy['fc1000'], biases_noisy['fc1000'], relu=False)
	err_lyr['fc1000'] = tf.get_variable(name='fc1000_lyr_err', shape=fc1000.shape[1:], initializer=tf.random_normal_initializer(mean=err_mean[3], stddev=err_stddev[3]), trainable=train_vars[3])
	layers_err['fc1000'] = tf.add(fc1000, err_lyr['fc1000'])
	return layers_err['fc1000'], err_w, err_b, err_lyr
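
# A minimal helper sketch (hypothetical, not part of the original example) that
# factors out the pattern repeated above: attach a trainable additive Gaussian
# error tensor to a layer's activation and record both the variable and the
# noisy result.
def add_layer_error(x, name, mean_val, stddev_val, trainable, err_lyr, layers_err):
	# One error variable per layer, shaped like a single example's activation
	err_lyr[name] = tf.get_variable(
		name=name + '_lyr_err',
		shape=x.shape[1:],
		initializer=tf.random_normal_initializer(mean=mean_val, stddev=stddev_val),
		trainable=trainable)
	layers_err[name] = tf.add(x, err_lyr[name])
	return layers_err[name]

# With it, each block above reduces to, e.g.:
#   conv1 = conv(input_node_noisy, weights_noisy['conv1'], biases_noisy['conv1'], 2, 2, relu=False)
#   conv1_noisy = add_layer_error(conv1, 'conv1', err_mean[3], err_stddev[3],
#                                 train_vars[3], err_lyr, layers_err)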

Example #5
def nin_noisy(input_node, netparams, err_mean, err_stddev, train_vars):
    weights_noisy, biases_noisy, err_w, err_b = helper.add_noise(
        netparams['weights'], netparams['biases'], err_mean, err_stddev,
        train_vars)
    mean, variance, scale, offset = netparams['mean'], netparams[
        'variance'], netparams['scale'], netparams['offset']
    err_lyr = {}
    layers_err = {}
    data_spec = helper.get_data_spec('nin')
    err_lyr['input'] = tf.get_variable(
        name='input_lyr_err',
        shape=(1, data_spec.crop_size, data_spec.crop_size,
               data_spec.channels),
        initializer=tf.random_normal_initializer(mean=err_mean[0],
                                                 stddev=err_stddev[0]),
        trainable=train_vars[0])
    input_node_noisy = tf.add(input_node, err_lyr['input'])
    conv1 = conv(input_node_noisy,
                 weights_noisy['conv1'],
                 biases_noisy['conv1'],
                 4,
                 4,
                 padding='VALID')
    err_lyr['conv1'] = tf.get_variable(
        name='conv1_lyr_err',
        shape=conv1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['conv1'] = tf.add(conv1, err_lyr['conv1'])
    cccp1 = conv(layers_err['conv1'], weights_noisy['cccp1'],
                 biases_noisy['cccp1'], 1, 1)
    err_lyr['cccp1'] = tf.get_variable(
        name='cccp1_lyr_err',
        shape=cccp1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['cccp1'] = tf.add(cccp1, err_lyr['cccp1'])
    cccp2 = conv(layers_err['cccp1'], weights_noisy['cccp2'],
                 biases_noisy['cccp2'], 1, 1)
    err_lyr['cccp2'] = tf.get_variable(
        name='cccp2_lyr_err',
        shape=cccp2.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['cccp2'] = tf.add(cccp2, err_lyr['cccp2'])
    pool1 = max_pool(layers_err['cccp2'], 3, 3, 2, 2)
    conv2 = conv(pool1, weights_noisy['conv2'], biases_noisy['conv2'], 1, 1)
    err_lyr['conv2'] = tf.get_variable(
        name='conv2_lyr_err',
        shape=conv2.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['conv2'] = tf.add(conv2, err_lyr['conv2'])
    cccp3 = conv(layers_err['conv2'], weights_noisy['cccp3'],
                 biases_noisy['cccp3'], 1, 1)
    err_lyr['cccp3'] = tf.get_variable(
        name='cccp3_lyr_err',
        shape=cccp3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['cccp3'] = tf.add(cccp3, err_lyr['cccp3'])
    cccp4 = conv(layers_err['cccp3'], weights_noisy['cccp4'],
                 biases_noisy['cccp4'], 1, 1)
    err_lyr['cccp4'] = tf.get_variable(
        name='cccp4_lyr_err',
        shape=cccp4.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['cccp4'] = tf.add(cccp4, err_lyr['cccp4'])
    pool2 = max_pool(layers_err['cccp4'], 3, 3, 2, 2, padding='VALID')
    conv3 = conv(pool2, weights_noisy['conv3'], biases_noisy['conv3'], 1, 1)
    err_lyr['conv3'] = tf.get_variable(
        name='conv3_lyr_err',
        shape=conv3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['conv3'] = tf.add(conv3, err_lyr['conv3'])
    cccp5 = conv(layers_err['conv3'], weights_noisy['cccp5'],
                 biases_noisy['cccp5'], 1, 1)
    err_lyr['cccp5'] = tf.get_variable(
        name='cccp5_lyr_err',
        shape=cccp5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['cccp5'] = tf.add(cccp5, err_lyr['cccp5'])
    cccp6 = conv(layers_err['cccp5'], weights_noisy['cccp6'],
                 biases_noisy['cccp6'], 1, 1)
    err_lyr['cccp6'] = tf.get_variable(
        name='cccp6_lyr_err',
        shape=cccp6.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['cccp6'] = tf.add(cccp6, err_lyr['cccp6'])
    pool3 = max_pool(layers_err['cccp6'], 3, 3, 2, 2, padding='VALID')
    conv4_1024 = conv(pool3, weights_noisy['conv4_1024'],
                      biases_noisy['conv4_1024'], 1, 1)
    err_lyr['conv4_1024'] = tf.get_variable(
        name='conv4_1024_lyr_err',
        shape=conv4_1024.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['conv4_1024'] = tf.add(conv4_1024, err_lyr['conv4_1024'])
    cccp7_1024 = conv(layers_err['conv4_1024'], weights_noisy['cccp7_1024'],
                      biases_noisy['cccp7_1024'], 1, 1)
    err_lyr['cccp7_1024'] = tf.get_variable(
        name='cccp7_1024_lyr_err',
        shape=cccp7_1024.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['cccp7_1024'] = tf.add(cccp7_1024, err_lyr['cccp7_1024'])
    cccp8_1024 = conv(layers_err['cccp7_1024'], weights_noisy['cccp8_1024'],
                      biases_noisy['cccp8_1024'], 1, 1)
    err_lyr['cccp8_1024'] = tf.get_variable(
        name='cccp8_1024_lyr_err',
        shape=cccp8_1024.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['cccp8_1024'] = tf.add(cccp8_1024, err_lyr['cccp8_1024'])
    pool4 = avg_pool(layers_err['cccp8_1024'], 6, 6, 1, 1, padding='VALID')
    return pool4, err_w, err_b, err_lyr
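
# Usage sketch (assumed context: `netparams` is loaded from converted
# parameters, and err_mean/err_stddev/train_vars are the per-group lists
# indexed above):
#   data_spec = helper.get_data_spec('nin')
#   images = tf.placeholder(tf.float32, (None, data_spec.crop_size,
#                                        data_spec.crop_size, data_spec.channels))
#   out, err_w, err_b, err_lyr = nin_noisy(images, netparams, err_mean,
#                                          err_stddev, train_vars)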
Example #6
def googlenet_noisy(input_node, netparams, err_mean, err_stddev, train_vars):
    weights_noisy, biases_noisy, err_w, err_b = helper.add_noise(
        netparams['weights'], netparams['biases'], err_mean, err_stddev,
        train_vars)
    mean, variance, scale, offset = netparams['mean'], netparams[
        'variance'], netparams['scale'], netparams['offset']
    err_lyr = {}
    layers_err = {}
    data_spec = helper.get_data_spec('googlenet')
    err_lyr['input'] = tf.get_variable(
        name='input_lyr_err',
        shape=(1, data_spec.crop_size, data_spec.crop_size,
               data_spec.channels),
        initializer=tf.random_normal_initializer(mean=err_mean[0],
                                                 stddev=err_stddev[0]),
        trainable=train_vars[0])
    input_node_noisy = tf.add(input_node, err_lyr['input'])
    conv1_7x7_s2 = conv(input_node_noisy, weights_noisy['conv1_7x7_s2'],
                        biases_noisy['conv1_7x7_s2'], 2, 2)
    err_lyr['conv1_7x7_s2'] = tf.get_variable(
        name='conv1_7x7_s2_lyr_err',
        shape=conv1_7x7_s2.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['conv1_7x7_s2'] = tf.add(conv1_7x7_s2, err_lyr['conv1_7x7_s2'])
    pool1_3x3_s2 = max_pool(layers_err['conv1_7x7_s2'], 3, 3, 2, 2)
    pool1_norm1 = lrn(pool1_3x3_s2, 2, 1.99999994948e-05, 0.75)
    conv2_3x3_reduce = conv(pool1_norm1, weights_noisy['conv2_3x3_reduce'],
                            biases_noisy['conv2_3x3_reduce'], 1, 1)
    err_lyr['conv2_3x3_reduce'] = tf.get_variable(
        name='conv2_3x3_reduce_lyr_err',
        shape=conv2_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['conv2_3x3_reduce'] = tf.add(conv2_3x3_reduce,
                                            err_lyr['conv2_3x3_reduce'])
    conv2_3x3 = conv(layers_err['conv2_3x3_reduce'],
                     weights_noisy['conv2_3x3'], biases_noisy['conv2_3x3'], 1,
                     1)
    err_lyr['conv2_3x3'] = tf.get_variable(
        name='conv2_3x3_lyr_err',
        shape=conv2_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['conv2_3x3'] = tf.add(conv2_3x3, err_lyr['conv2_3x3'])
    conv2_norm2 = lrn(layers_err['conv2_3x3'], 2, 1.99999994948e-05, 0.75)
    pool2_3x3_s2 = max_pool(conv2_norm2, 3, 3, 2, 2)
    inception_3a_1x1 = conv(pool2_3x3_s2, weights_noisy['inception_3a_1x1'],
                            biases_noisy['inception_3a_1x1'], 1, 1)
    err_lyr['inception_3a_1x1'] = tf.get_variable(
        name='inception_3a_1x1_lyr_err',
        shape=inception_3a_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3a_1x1'] = tf.add(inception_3a_1x1,
                                            err_lyr['inception_3a_1x1'])
    inception_3a_3x3_reduce = conv(pool2_3x3_s2,
                                   weights_noisy['inception_3a_3x3_reduce'],
                                   biases_noisy['inception_3a_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_3a_3x3_reduce'] = tf.get_variable(
        name='inception_3a_3x3_reduce_lyr_err',
        shape=inception_3a_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3a_3x3_reduce'] = tf.add(
        inception_3a_3x3_reduce, err_lyr['inception_3a_3x3_reduce'])
    inception_3a_3x3 = conv(layers_err['inception_3a_3x3_reduce'],
                            weights_noisy['inception_3a_3x3'],
                            biases_noisy['inception_3a_3x3'], 1, 1)
    err_lyr['inception_3a_3x3'] = tf.get_variable(
        name='inception_3a_3x3_lyr_err',
        shape=inception_3a_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3a_3x3'] = tf.add(inception_3a_3x3,
                                            err_lyr['inception_3a_3x3'])
    inception_3a_5x5_reduce = conv(pool2_3x3_s2,
                                   weights_noisy['inception_3a_5x5_reduce'],
                                   biases_noisy['inception_3a_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_3a_5x5_reduce'] = tf.get_variable(
        name='inception_3a_5x5_reduce_lyr_err',
        shape=inception_3a_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3a_5x5_reduce'] = tf.add(
        inception_3a_5x5_reduce, err_lyr['inception_3a_5x5_reduce'])
    inception_3a_5x5 = conv(layers_err['inception_3a_5x5_reduce'],
                            weights_noisy['inception_3a_5x5'],
                            biases_noisy['inception_3a_5x5'], 1, 1)
    err_lyr['inception_3a_5x5'] = tf.get_variable(
        name='inception_3a_5x5_lyr_err',
        shape=inception_3a_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3a_5x5'] = tf.add(inception_3a_5x5,
                                            err_lyr['inception_3a_5x5'])
    inception_3a_pool = max_pool(pool2_3x3_s2, 3, 3, 1, 1)
    inception_3a_pool_proj = conv(inception_3a_pool,
                                  weights_noisy['inception_3a_pool_proj'],
                                  biases_noisy['inception_3a_pool_proj'], 1, 1)
    err_lyr['inception_3a_pool_proj'] = tf.get_variable(
        name='inception_3a_pool_proj_lyr_err',
        shape=inception_3a_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3a_pool_proj'] = tf.add(
        inception_3a_pool_proj, err_lyr['inception_3a_pool_proj'])
    inception_3a_output = concat([
        layers_err['inception_3a_1x1'], layers_err['inception_3a_3x3'],
        layers_err['inception_3a_5x5'], layers_err['inception_3a_pool_proj']
    ], 3)
    inception_3b_1x1 = conv(inception_3a_output,
                            weights_noisy['inception_3b_1x1'],
                            biases_noisy['inception_3b_1x1'], 1, 1)
    err_lyr['inception_3b_1x1'] = tf.get_variable(
        name='inception_3b_1x1_lyr_err',
        shape=inception_3b_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3b_1x1'] = tf.add(inception_3b_1x1,
                                            err_lyr['inception_3b_1x1'])
    inception_3b_3x3_reduce = conv(inception_3a_output,
                                   weights_noisy['inception_3b_3x3_reduce'],
                                   biases_noisy['inception_3b_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_3b_3x3_reduce'] = tf.get_variable(
        name='inception_3b_3x3_reduce_lyr_err',
        shape=inception_3b_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3b_3x3_reduce'] = tf.add(
        inception_3b_3x3_reduce, err_lyr['inception_3b_3x3_reduce'])
    inception_3b_3x3 = conv(layers_err['inception_3b_3x3_reduce'],
                            weights_noisy['inception_3b_3x3'],
                            biases_noisy['inception_3b_3x3'], 1, 1)
    err_lyr['inception_3b_3x3'] = tf.get_variable(
        name='inception_3b_3x3_lyr_err',
        shape=inception_3b_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3b_3x3'] = tf.add(inception_3b_3x3,
                                            err_lyr['inception_3b_3x3'])
    inception_3b_5x5_reduce = conv(inception_3a_output,
                                   weights_noisy['inception_3b_5x5_reduce'],
                                   biases_noisy['inception_3b_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_3b_5x5_reduce'] = tf.get_variable(
        name='inception_3b_5x5_reduce_lyr_err',
        shape=inception_3b_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3b_5x5_reduce'] = tf.add(
        inception_3b_5x5_reduce, err_lyr['inception_3b_5x5_reduce'])
    inception_3b_5x5 = conv(layers_err['inception_3b_5x5_reduce'],
                            weights_noisy['inception_3b_5x5'],
                            biases_noisy['inception_3b_5x5'], 1, 1)
    err_lyr['inception_3b_5x5'] = tf.get_variable(
        name='inception_3b_5x5_lyr_err',
        shape=inception_3b_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3b_5x5'] = tf.add(inception_3b_5x5,
                                            err_lyr['inception_3b_5x5'])
    inception_3b_pool = max_pool(inception_3a_output, 3, 3, 1, 1)
    inception_3b_pool_proj = conv(inception_3b_pool,
                                  weights_noisy['inception_3b_pool_proj'],
                                  biases_noisy['inception_3b_pool_proj'], 1, 1)
    err_lyr['inception_3b_pool_proj'] = tf.get_variable(
        name='inception_3b_pool_proj_lyr_err',
        shape=inception_3b_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_3b_pool_proj'] = tf.add(
        inception_3b_pool_proj, err_lyr['inception_3b_pool_proj'])
    inception_3b_output = concat([
        layers_err['inception_3b_1x1'], layers_err['inception_3b_3x3'],
        layers_err['inception_3b_5x5'], layers_err['inception_3b_pool_proj']
    ], 3)
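    # Grid reduction (3x3 max pool, stride 2), then the inception 4a module.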
    pool3_3x3_s2 = max_pool(inception_3b_output, 3, 3, 2, 2)
    inception_4a_1x1 = conv(pool3_3x3_s2, weights_noisy['inception_4a_1x1'],
                            biases_noisy['inception_4a_1x1'], 1, 1)
    err_lyr['inception_4a_1x1'] = tf.get_variable(
        name='inception_4a_1x1_lyr_err',
        shape=inception_4a_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4a_1x1'] = tf.add(inception_4a_1x1,
                                            err_lyr['inception_4a_1x1'])
    inception_4a_3x3_reduce = conv(pool3_3x3_s2,
                                   weights_noisy['inception_4a_3x3_reduce'],
                                   biases_noisy['inception_4a_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_4a_3x3_reduce'] = tf.get_variable(
        name='inception_4a_3x3_reduce_lyr_err',
        shape=inception_4a_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4a_3x3_reduce'] = tf.add(
        inception_4a_3x3_reduce, err_lyr['inception_4a_3x3_reduce'])
    inception_4a_3x3 = conv(layers_err['inception_4a_3x3_reduce'],
                            weights_noisy['inception_4a_3x3'],
                            biases_noisy['inception_4a_3x3'], 1, 1)
    err_lyr['inception_4a_3x3'] = tf.get_variable(
        name='inception_4a_3x3_lyr_err',
        shape=inception_4a_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4a_3x3'] = tf.add(inception_4a_3x3,
                                            err_lyr['inception_4a_3x3'])
    inception_4a_5x5_reduce = conv(pool3_3x3_s2,
                                   weights_noisy['inception_4a_5x5_reduce'],
                                   biases_noisy['inception_4a_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_4a_5x5_reduce'] = tf.get_variable(
        name='inception_4a_5x5_reduce_lyr_err',
        shape=inception_4a_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4a_5x5_reduce'] = tf.add(
        inception_4a_5x5_reduce, err_lyr['inception_4a_5x5_reduce'])
    inception_4a_5x5 = conv(layers_err['inception_4a_5x5_reduce'],
                            weights_noisy['inception_4a_5x5'],
                            biases_noisy['inception_4a_5x5'], 1, 1)
    err_lyr['inception_4a_5x5'] = tf.get_variable(
        name='inception_4a_5x5_lyr_err',
        shape=inception_4a_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4a_5x5'] = tf.add(inception_4a_5x5,
                                            err_lyr['inception_4a_5x5'])
    inception_4a_pool = max_pool(pool3_3x3_s2, 3, 3, 1, 1)
    inception_4a_pool_proj = conv(inception_4a_pool,
                                  weights_noisy['inception_4a_pool_proj'],
                                  biases_noisy['inception_4a_pool_proj'], 1, 1)
    err_lyr['inception_4a_pool_proj'] = tf.get_variable(
        name='inception_4a_pool_proj_lyr_err',
        shape=inception_4a_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4a_pool_proj'] = tf.add(
        inception_4a_pool_proj, err_lyr['inception_4a_pool_proj'])
    inception_4a_output = concat([
        layers_err['inception_4a_1x1'], layers_err['inception_4a_3x3'],
        layers_err['inception_4a_5x5'], layers_err['inception_4a_pool_proj']
    ], 3)
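    # Inception 4b, fed by the noisy 4a output.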
    inception_4b_1x1 = conv(inception_4a_output,
                            weights_noisy['inception_4b_1x1'],
                            biases_noisy['inception_4b_1x1'], 1, 1)
    err_lyr['inception_4b_1x1'] = tf.get_variable(
        name='inception_4b_1x1_lyr_err',
        shape=inception_4b_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4b_1x1'] = tf.add(inception_4b_1x1,
                                            err_lyr['inception_4b_1x1'])
    inception_4b_3x3_reduce = conv(inception_4a_output,
                                   weights_noisy['inception_4b_3x3_reduce'],
                                   biases_noisy['inception_4b_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_4b_3x3_reduce'] = tf.get_variable(
        name='inception_4b_3x3_reduce_lyr_err',
        shape=inception_4b_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4b_3x3_reduce'] = tf.add(
        inception_4b_3x3_reduce, err_lyr['inception_4b_3x3_reduce'])
    inception_4b_3x3 = conv(layers_err['inception_4b_3x3_reduce'],
                            weights_noisy['inception_4b_3x3'],
                            biases_noisy['inception_4b_3x3'], 1, 1)
    err_lyr['inception_4b_3x3'] = tf.get_variable(
        name='inception_4b_3x3_lyr_err',
        shape=inception_4b_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4b_3x3'] = tf.add(inception_4b_3x3,
                                            err_lyr['inception_4b_3x3'])
    inception_4b_5x5_reduce = conv(inception_4a_output,
                                   weights_noisy['inception_4b_5x5_reduce'],
                                   biases_noisy['inception_4b_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_4b_5x5_reduce'] = tf.get_variable(
        name='inception_4b_5x5_reduce_lyr_err',
        shape=inception_4b_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4b_5x5_reduce'] = tf.add(
        inception_4b_5x5_reduce, err_lyr['inception_4b_5x5_reduce'])
    inception_4b_5x5 = conv(layers_err['inception_4b_5x5_reduce'],
                            weights_noisy['inception_4b_5x5'],
                            biases_noisy['inception_4b_5x5'], 1, 1)
    err_lyr['inception_4b_5x5'] = tf.get_variable(
        name='inception_4b_5x5_lyr_err',
        shape=inception_4b_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4b_5x5'] = tf.add(inception_4b_5x5,
                                            err_lyr['inception_4b_5x5'])
    inception_4b_pool = max_pool(inception_4a_output, 3, 3, 1, 1)
    inception_4b_pool_proj = conv(inception_4b_pool,
                                  weights_noisy['inception_4b_pool_proj'],
                                  biases_noisy['inception_4b_pool_proj'], 1, 1)
    err_lyr['inception_4b_pool_proj'] = tf.get_variable(
        name='inception_4b_pool_proj_lyr_err',
        shape=inception_4b_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4b_pool_proj'] = tf.add(
        inception_4b_pool_proj, err_lyr['inception_4b_pool_proj'])
    inception_4b_output = concat([
        layers_err['inception_4b_1x1'], layers_err['inception_4b_3x3'],
        layers_err['inception_4b_5x5'], layers_err['inception_4b_pool_proj']
    ], 3)
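    # Inception 4c, fed by the noisy 4b output.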
    inception_4c_1x1 = conv(inception_4b_output,
                            weights_noisy['inception_4c_1x1'],
                            biases_noisy['inception_4c_1x1'], 1, 1)
    err_lyr['inception_4c_1x1'] = tf.get_variable(
        name='inception_4c_1x1_lyr_err',
        shape=inception_4c_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4c_1x1'] = tf.add(inception_4c_1x1,
                                            err_lyr['inception_4c_1x1'])
    inception_4c_3x3_reduce = conv(inception_4b_output,
                                   weights_noisy['inception_4c_3x3_reduce'],
                                   biases_noisy['inception_4c_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_4c_3x3_reduce'] = tf.get_variable(
        name='inception_4c_3x3_reduce_lyr_err',
        shape=inception_4c_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4c_3x3_reduce'] = tf.add(
        inception_4c_3x3_reduce, err_lyr['inception_4c_3x3_reduce'])
    inception_4c_3x3 = conv(layers_err['inception_4c_3x3_reduce'],
                            weights_noisy['inception_4c_3x3'],
                            biases_noisy['inception_4c_3x3'], 1, 1)
    err_lyr['inception_4c_3x3'] = tf.get_variable(
        name='inception_4c_3x3_lyr_err',
        shape=inception_4c_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4c_3x3'] = tf.add(inception_4c_3x3,
                                            err_lyr['inception_4c_3x3'])
    inception_4c_5x5_reduce = conv(inception_4b_output,
                                   weights_noisy['inception_4c_5x5_reduce'],
                                   biases_noisy['inception_4c_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_4c_5x5_reduce'] = tf.get_variable(
        name='inception_4c_5x5_reduce_lyr_err',
        shape=inception_4c_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4c_5x5_reduce'] = tf.add(
        inception_4c_5x5_reduce, err_lyr['inception_4c_5x5_reduce'])
    inception_4c_5x5 = conv(layers_err['inception_4c_5x5_reduce'],
                            weights_noisy['inception_4c_5x5'],
                            biases_noisy['inception_4c_5x5'], 1, 1)
    err_lyr['inception_4c_5x5'] = tf.get_variable(
        name='inception_4c_5x5_lyr_err',
        shape=inception_4c_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4c_5x5'] = tf.add(inception_4c_5x5,
                                            err_lyr['inception_4c_5x5'])
    inception_4c_pool = max_pool(inception_4b_output, 3, 3, 1, 1)
    inception_4c_pool_proj = conv(inception_4c_pool,
                                  weights_noisy['inception_4c_pool_proj'],
                                  biases_noisy['inception_4c_pool_proj'], 1, 1)
    err_lyr['inception_4c_pool_proj'] = tf.get_variable(
        name='inception_4c_pool_proj_lyr_err',
        shape=inception_4c_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4c_pool_proj'] = tf.add(
        inception_4c_pool_proj, err_lyr['inception_4c_pool_proj'])
    inception_4c_output = concat([
        layers_err['inception_4c_1x1'], layers_err['inception_4c_3x3'],
        layers_err['inception_4c_5x5'], layers_err['inception_4c_pool_proj']
    ], 3)
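    # Inception 4d, fed by the noisy 4c output.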
    inception_4d_1x1 = conv(inception_4c_output,
                            weights_noisy['inception_4d_1x1'],
                            biases_noisy['inception_4d_1x1'], 1, 1)
    err_lyr['inception_4d_1x1'] = tf.get_variable(
        name='inception_4d_1x1_lyr_err',
        shape=inception_4d_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4d_1x1'] = tf.add(inception_4d_1x1,
                                            err_lyr['inception_4d_1x1'])
    inception_4d_3x3_reduce = conv(inception_4c_output,
                                   weights_noisy['inception_4d_3x3_reduce'],
                                   biases_noisy['inception_4d_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_4d_3x3_reduce'] = tf.get_variable(
        name='inception_4d_3x3_reduce_lyr_err',
        shape=inception_4d_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4d_3x3_reduce'] = tf.add(
        inception_4d_3x3_reduce, err_lyr['inception_4d_3x3_reduce'])
    inception_4d_3x3 = conv(layers_err['inception_4d_3x3_reduce'],
                            weights_noisy['inception_4d_3x3'],
                            biases_noisy['inception_4d_3x3'], 1, 1)
    err_lyr['inception_4d_3x3'] = tf.get_variable(
        name='inception_4d_3x3_lyr_err',
        shape=inception_4d_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4d_3x3'] = tf.add(inception_4d_3x3,
                                            err_lyr['inception_4d_3x3'])
    inception_4d_5x5_reduce = conv(inception_4c_output,
                                   weights_noisy['inception_4d_5x5_reduce'],
                                   biases_noisy['inception_4d_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_4d_5x5_reduce'] = tf.get_variable(
        name='inception_4d_5x5_reduce_lyr_err',
        shape=inception_4d_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4d_5x5_reduce'] = tf.add(
        inception_4d_5x5_reduce, err_lyr['inception_4d_5x5_reduce'])
    inception_4d_5x5 = conv(layers_err['inception_4d_5x5_reduce'],
                            weights_noisy['inception_4d_5x5'],
                            biases_noisy['inception_4d_5x5'], 1, 1)
    err_lyr['inception_4d_5x5'] = tf.get_variable(
        name='inception_4d_5x5_lyr_err',
        shape=inception_4d_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4d_5x5'] = tf.add(inception_4d_5x5,
                                            err_lyr['inception_4d_5x5'])
    inception_4d_pool = max_pool(inception_4c_output, 3, 3, 1, 1)
    inception_4d_pool_proj = conv(inception_4d_pool,
                                  weights_noisy['inception_4d_pool_proj'],
                                  biases_noisy['inception_4d_pool_proj'], 1, 1)
    err_lyr['inception_4d_pool_proj'] = tf.get_variable(
        name='inception_4d_pool_proj_lyr_err',
        shape=inception_4d_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4d_pool_proj'] = tf.add(
        inception_4d_pool_proj, err_lyr['inception_4d_pool_proj'])
    inception_4d_output = concat([
        layers_err['inception_4d_1x1'], layers_err['inception_4d_3x3'],
        layers_err['inception_4d_5x5'], layers_err['inception_4d_pool_proj']
    ], 3)
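    # Inception 4e, fed by the noisy 4d output.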
    inception_4e_1x1 = conv(inception_4d_output,
                            weights_noisy['inception_4e_1x1'],
                            biases_noisy['inception_4e_1x1'], 1, 1)
    err_lyr['inception_4e_1x1'] = tf.get_variable(
        name='inception_4e_1x1_lyr_err',
        shape=inception_4e_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4e_1x1'] = tf.add(inception_4e_1x1,
                                            err_lyr['inception_4e_1x1'])
    inception_4e_3x3_reduce = conv(inception_4d_output,
                                   weights_noisy['inception_4e_3x3_reduce'],
                                   biases_noisy['inception_4e_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_4e_3x3_reduce'] = tf.get_variable(
        name='inception_4e_3x3_reduce_lyr_err',
        shape=inception_4e_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4e_3x3_reduce'] = tf.add(
        inception_4e_3x3_reduce, err_lyr['inception_4e_3x3_reduce'])
    inception_4e_3x3 = conv(layers_err['inception_4e_3x3_reduce'],
                            weights_noisy['inception_4e_3x3'],
                            biases_noisy['inception_4e_3x3'], 1, 1)
    err_lyr['inception_4e_3x3'] = tf.get_variable(
        name='inception_4e_3x3_lyr_err',
        shape=inception_4e_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4e_3x3'] = tf.add(inception_4e_3x3,
                                            err_lyr['inception_4e_3x3'])
    inception_4e_5x5_reduce = conv(inception_4d_output,
                                   weights_noisy['inception_4e_5x5_reduce'],
                                   biases_noisy['inception_4e_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_4e_5x5_reduce'] = tf.get_variable(
        name='inception_4e_5x5_reduce_lyr_err',
        shape=inception_4e_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4e_5x5_reduce'] = tf.add(
        inception_4e_5x5_reduce, err_lyr['inception_4e_5x5_reduce'])
    inception_4e_5x5 = conv(layers_err['inception_4e_5x5_reduce'],
                            weights_noisy['inception_4e_5x5'],
                            biases_noisy['inception_4e_5x5'], 1, 1)
    err_lyr['inception_4e_5x5'] = tf.get_variable(
        name='inception_4e_5x5_lyr_err',
        shape=inception_4e_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4e_5x5'] = tf.add(inception_4e_5x5,
                                            err_lyr['inception_4e_5x5'])
    inception_4e_pool = max_pool(inception_4d_output, 3, 3, 1, 1)
    inception_4e_pool_proj = conv(inception_4e_pool,
                                  weights_noisy['inception_4e_pool_proj'],
                                  biases_noisy['inception_4e_pool_proj'], 1, 1)
    err_lyr['inception_4e_pool_proj'] = tf.get_variable(
        name='inception_4e_pool_proj_lyr_err',
        shape=inception_4e_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_4e_pool_proj'] = tf.add(
        inception_4e_pool_proj, err_lyr['inception_4e_pool_proj'])
    inception_4e_output = concat([
        layers_err['inception_4e_1x1'], layers_err['inception_4e_3x3'],
        layers_err['inception_4e_5x5'], layers_err['inception_4e_pool_proj']
    ], 3)
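    # Second grid reduction (3x3 max pool, stride 2), then inception 5a.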
    pool4_3x3_s2 = max_pool(inception_4e_output, 3, 3, 2, 2)
    inception_5a_1x1 = conv(pool4_3x3_s2, weights_noisy['inception_5a_1x1'],
                            biases_noisy['inception_5a_1x1'], 1, 1)
    err_lyr['inception_5a_1x1'] = tf.get_variable(
        name='inception_5a_1x1_lyr_err',
        shape=inception_5a_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5a_1x1'] = tf.add(inception_5a_1x1,
                                            err_lyr['inception_5a_1x1'])
    inception_5a_3x3_reduce = conv(pool4_3x3_s2,
                                   weights_noisy['inception_5a_3x3_reduce'],
                                   biases_noisy['inception_5a_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_5a_3x3_reduce'] = tf.get_variable(
        name='inception_5a_3x3_reduce_lyr_err',
        shape=inception_5a_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5a_3x3_reduce'] = tf.add(
        inception_5a_3x3_reduce, err_lyr['inception_5a_3x3_reduce'])
    inception_5a_3x3 = conv(layers_err['inception_5a_3x3_reduce'],
                            weights_noisy['inception_5a_3x3'],
                            biases_noisy['inception_5a_3x3'], 1, 1)
    err_lyr['inception_5a_3x3'] = tf.get_variable(
        name='inception_5a_3x3_lyr_err',
        shape=inception_5a_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5a_3x3'] = tf.add(inception_5a_3x3,
                                            err_lyr['inception_5a_3x3'])
    inception_5a_5x5_reduce = conv(pool4_3x3_s2,
                                   weights_noisy['inception_5a_5x5_reduce'],
                                   biases_noisy['inception_5a_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_5a_5x5_reduce'] = tf.get_variable(
        name='inception_5a_5x5_reduce_lyr_err',
        shape=inception_5a_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5a_5x5_reduce'] = tf.add(
        inception_5a_5x5_reduce, err_lyr['inception_5a_5x5_reduce'])
    inception_5a_5x5 = conv(layers_err['inception_5a_5x5_reduce'],
                            weights_noisy['inception_5a_5x5'],
                            biases_noisy['inception_5a_5x5'], 1, 1)
    err_lyr['inception_5a_5x5'] = tf.get_variable(
        name='inception_5a_5x5_lyr_err',
        shape=inception_5a_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5a_5x5'] = tf.add(inception_5a_5x5,
                                            err_lyr['inception_5a_5x5'])
    inception_5a_pool = max_pool(pool4_3x3_s2, 3, 3, 1, 1)
    inception_5a_pool_proj = conv(inception_5a_pool,
                                  weights_noisy['inception_5a_pool_proj'],
                                  biases_noisy['inception_5a_pool_proj'], 1, 1)
    err_lyr['inception_5a_pool_proj'] = tf.get_variable(
        name='inception_5a_pool_proj_lyr_err',
        shape=inception_5a_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5a_pool_proj'] = tf.add(
        inception_5a_pool_proj, err_lyr['inception_5a_pool_proj'])
    inception_5a_output = concat([
        layers_err['inception_5a_1x1'], layers_err['inception_5a_3x3'],
        layers_err['inception_5a_5x5'], layers_err['inception_5a_pool_proj']
    ], 3)
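    # Inception 5b, the final inception module.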
    inception_5b_1x1 = conv(inception_5a_output,
                            weights_noisy['inception_5b_1x1'],
                            biases_noisy['inception_5b_1x1'], 1, 1)
    err_lyr['inception_5b_1x1'] = tf.get_variable(
        name='inception_5b_1x1_lyr_err',
        shape=inception_5b_1x1.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5b_1x1'] = tf.add(inception_5b_1x1,
                                            err_lyr['inception_5b_1x1'])
    inception_5b_3x3_reduce = conv(inception_5a_output,
                                   weights_noisy['inception_5b_3x3_reduce'],
                                   biases_noisy['inception_5b_3x3_reduce'], 1,
                                   1)
    err_lyr['inception_5b_3x3_reduce'] = tf.get_variable(
        name='inception_5b_3x3_reduce_lyr_err',
        shape=inception_5b_3x3_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5b_3x3_reduce'] = tf.add(
        inception_5b_3x3_reduce, err_lyr['inception_5b_3x3_reduce'])
    inception_5b_3x3 = conv(layers_err['inception_5b_3x3_reduce'],
                            weights_noisy['inception_5b_3x3'],
                            biases_noisy['inception_5b_3x3'], 1, 1)
    err_lyr['inception_5b_3x3'] = tf.get_variable(
        name='inception_5b_3x3_lyr_err',
        shape=inception_5b_3x3.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5b_3x3'] = tf.add(inception_5b_3x3,
                                            err_lyr['inception_5b_3x3'])
    inception_5b_5x5_reduce = conv(inception_5a_output,
                                   weights_noisy['inception_5b_5x5_reduce'],
                                   biases_noisy['inception_5b_5x5_reduce'], 1,
                                   1)
    err_lyr['inception_5b_5x5_reduce'] = tf.get_variable(
        name='inception_5b_5x5_reduce_lyr_err',
        shape=inception_5b_5x5_reduce.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5b_5x5_reduce'] = tf.add(
        inception_5b_5x5_reduce, err_lyr['inception_5b_5x5_reduce'])
    inception_5b_5x5 = conv(layers_err['inception_5b_5x5_reduce'],
                            weights_noisy['inception_5b_5x5'],
                            biases_noisy['inception_5b_5x5'], 1, 1)
    err_lyr['inception_5b_5x5'] = tf.get_variable(
        name='inception_5b_5x5_lyr_err',
        shape=inception_5b_5x5.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5b_5x5'] = tf.add(inception_5b_5x5,
                                            err_lyr['inception_5b_5x5'])
    inception_5b_pool = max_pool(inception_5a_output, 3, 3, 1, 1)
    inception_5b_pool_proj = conv(inception_5b_pool,
                                  weights_noisy['inception_5b_pool_proj'],
                                  biases_noisy['inception_5b_pool_proj'], 1, 1)
    err_lyr['inception_5b_pool_proj'] = tf.get_variable(
        name='inception_5b_pool_proj_lyr_err',
        shape=inception_5b_pool_proj.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['inception_5b_pool_proj'] = tf.add(
        inception_5b_pool_proj, err_lyr['inception_5b_pool_proj'])
    inception_5b_output = concat([
        layers_err['inception_5b_1x1'], layers_err['inception_5b_3x3'],
        layers_err['inception_5b_5x5'], layers_err['inception_5b_pool_proj']
    ], 3)
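    # Classifier head: 7x7 average pooling over the full feature map, then the
    # final fully connected layer (no ReLU); error is injected into the logits
    # as well.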
    pool5_7x7_s1 = avg_pool(inception_5b_output, 7, 7, 1, 1, padding='VALID')
    loss3_classifier = fc(pool5_7x7_s1,
                          weights_noisy['loss3_classifier'],
                          biases_noisy['loss3_classifier'],
                          relu=False)
    err_lyr['loss3_classifier'] = tf.get_variable(
        name='loss3_classifier_lyr_err',
        shape=loss3_classifier.shape[1:],
        initializer=tf.random_normal_initializer(mean=err_mean[3],
                                                 stddev=err_stddev[3]),
        trainable=train_vars[3])
    layers_err['loss3_classifier'] = tf.add(loss3_classifier,
                                            err_lyr['loss3_classifier'])
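    # Return the noisy logits together with the error-variable collections
    # (err_w and err_b are presumably the per-weight and per-bias error
    # variables created earlier in this function, alongside the per-layer
    # err_lyr terms built above).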
    return layers_err['loss3_classifier'], err_w, err_b, err_lyr