Example #1
                moving_mean = [
                    v for v in tf.global_variables() if v.name == mean_name
                ][0]
                saver_dict.update({"l" + str(l) + "/BN/gamma": gamma})
                saver_dict.update({"l" + str(l) + "/BN/beta": beta})
                saver_dict.update(
                    {"l" + str(l) + "/BN/moving_variance": moving_variance})
                saver_dict.update(
                    {"l" + str(l) + "/BN/moving_mean": moving_mean})
            saver_initvars = tf.train.Saver(saver_dict)
            saver_initvars.restore(sess, save_name)
    else:
        #save_name = LDAMP.GenLDAMPFilename(alg, tie_weights, LayerbyLayer) + ".ckpt"
        save_name = LDAMP.GenLDAMPFilename(
            alg,
            tie_weights,
            LayerbyLayer,
            sampling_rate_override=sampling_rate_train,
            loss_func=TrainLoss) + ".ckpt"
        saver.restore(sess, save_name)

    print("Reconstructing Signal")
    start_time = time.time()

    Final_PSNRs = []
    for offset in range(
            0, n_Test_Images - BATCH_SIZE + 1, BATCH_SIZE
    ):  # Subtract BATCH_SIZE - 1 so the loop skips a final partial batch when n_Test_Images is not a multiple of the batch size (see the sketch after this example)
        end = offset + BATCH_SIZE
        # batch_y_test = y_test[:, offset:end] #To be used when using precomputed measurements

        # Generate a new measurement matrix
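
A minimal sketch of the batching idiom used in the loop above; n_Test_Images, BATCH_SIZE, and images are hypothetical placeholders rather than values from the original code:

import numpy as np

n_Test_Images = 10  # hypothetical test-set size
BATCH_SIZE = 4      # hypothetical batch size
images = np.arange(n_Test_Images)

# Stopping the range at n_Test_Images - BATCH_SIZE + 1 guarantees every slice
# [offset:offset + BATCH_SIZE] is a full batch; trailing images that cannot
# fill a batch are skipped.
for offset in range(0, n_Test_Images - BATCH_SIZE + 1, BATCH_SIZE):
    batch = images[offset:offset + BATCH_SIZE]
    print(offset, batch)  # prints 0 [0 1 2 3], then 4 [4 5 6 7]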
Example #2
            optimizer = optimizer0.minimize(cost, var_list=vars_to_train)

        saver_best = tf.train.Saver()  # defaults to saving all variables
        saver_dict = {}
        with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
            sess.run(tf.global_variables_initializer())  # Seems to be necessary for the batch normalization layers for some reason.

            if FLAGS.debug:
                sess = tf_debug.LocalCLIDebugWrapperSession(sess)
                sess.add_tensor_filter("has_inf_or_nan", tf_debug.has_inf_or_nan)

            start_time = time.time()
            print("Load Initial Weights ...")
            if ResumeTraining or learning_rate != learning_rates[0]:
                # Load previous values for the weights
                saver_initvars_name_chckpt = LDAMP.GenLDAMPFilename(alg, tie_weights, LayerbyLayer, loss_func=loss_func) + ".ckpt"
                for iter in range(n_layers_trained):  # Build a dictionary of all the variables to restore, excluding those associated with the optimizer (see the sketch after this example).
                    for l in range(0, n_DnCNN_layers):
                        saver_dict.update({"Iter" + str(iter) + "/l" + str(l) + "/w": theta[iter][0][l]})#,
                                           #"Iter" + str(iter) + "/l" + str(l) + "/b": theta[iter][1][l]})
                    for l in range(1, n_DnCNN_layers - 1):  # Associate the BN gamma, beta, moving variance, and moving mean
                        gamma_name = "Iter" + str(iter) + "/l" + str(l) + "/BN/gamma:0"
                        beta_name = "Iter" + str(iter) + "/l" + str(l) + "/BN/beta:0"
                        var_name = "Iter" + str(iter) + "/l" + str(l) + "/BN/moving_variance:0"
                        mean_name = "Iter" + str(iter) + "/l" + str(l) + "/BN/moving_mean:0"
                        gamma = [v for v in tf.global_variables() if v.name == gamma_name][0]
                        beta = [v for v in tf.global_variables() if v.name == beta_name][0]
                        moving_variance = [v for v in tf.global_variables() if v.name == var_name][0]
                        moving_mean = [v for v in tf.global_variables() if v.name == mean_name][0]
                        saver_dict.update({"Iter" + str(iter) + "/l" + str(l) + "/BN/gamma": gamma})
                        saver_dict.update({"Iter" + str(iter) + "/l" + str(l) + "/BN/beta": beta})