Code example #1
  def generate_plots(self, input_data, input_labels=None):
    """
    Plot weights, reconstruction, and gradients
    Inputs:
      input_data: data object containing the current image batch
      input_labels: data object containing the current label batch
    """
    super(MlpListaModel, self).generate_plots(input_data, input_labels)
    feed_dict = self.get_feed_dict(input_data, input_labels)

    eval_list = [self.global_step, self.w, self.a]
    eval_out = tf.compat.v1.get_default_session().run(eval_list, feed_dict)
    current_step = str(eval_out[0])
    filename_suffix = "_v"+self.params.version+"_"+current_step.zfill(5)+".png"
    weights, lista_activity = eval_out[1:]

    fig = pf.plot_activity_hist(input_data, title="Image Histogram",
      save_filename=self.params.disp_dir+"img_hist"+filename_suffix)
    weights = dp.reshape_data(weights.T, flatten=False)[0] # [num_neurons, height, width]
    #Scale image by max and min of images and/or recon
    r_max = np.max(input_data)
    r_min = np.min(input_data)
    input_data = dp.reshape_data(input_data, flatten=False)[0]
    fig = pf.plot_data_tiled(input_data, normalize=False,
      title="Scaled Images at step "+current_step, vmin=r_min, vmax=r_max,
      save_filename=self.params.disp_dir+"images"+filename_suffix)
    fig = pf.plot_activity_hist(lista_activity, title="LISTA Activity Histogram",
      save_filename=self.params.disp_dir+"lista_act_hist"+filename_suffix)
    fig = pf.plot_data_tiled(weights, normalize=False,
      title="Dictionary at step "+current_step, vmin=None, vmax=None,
      save_filename=self.params.disp_dir+"w_lista"+filename_suffix)
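All of these listings rely on the same module-level aliases, which the extracted snippets omit: np for numpy, tf for TensorFlow in 1.x compatibility mode, pf for the project's plotting helpers, and dp for its data-processing helpers. A minimal header that makes the snippets resolve; the exact module paths inside DeepSparseCoding are an assumption:

import numpy as np
import tensorflow as tf

# Assumed project-internal paths for the pf/dp aliases used in the examples.
import utils.plot_functions as pf
import utils.data_processing as dp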
Code example #2
  def generate_plots(self, input_data, input_labels=None):
    """
    Plot weights, reconstruction, and gradients
    Inputs:
      input_data: data object containing the current image batch
      input_labels: data object containing the current label batch
    """
    super(ListaModel, self).generate_plots(input_data, input_labels)
    feed_dict = self.get_feed_dict(input_data, input_labels)
    eval_list = [self.global_step, self.lca_module.w, self.w,
      self.lca_module.reconstruction, self.lca_module.a, self.get_encodings()]
    eval_out = tf.compat.v1.get_default_session().run(eval_list, feed_dict)
    current_step = str(eval_out[0])
    filename_suffix = "_v"+self.params.version+"_"+current_step.zfill(5)+".png"
    lca_weights, lista_weights, recon, lca_activity, lista_activity = eval_out[1:]
    lca_weights_norm = np.linalg.norm(lca_weights, axis=0, keepdims=False)
    lista_weights_norm = np.linalg.norm(lista_weights, axis=0, keepdims=False)
    recon = dp.reshape_data(recon, flatten=False)[0]
    lca_weights = dp.reshape_data(lca_weights.T, flatten=False)[0] # [num_neurons, height, width]
    lista_weights = dp.reshape_data(lista_weights.T, flatten=False)[0] # [num_neurons, height, width]

    fig = pf.plot_activity_hist(input_data, title="Image Histogram",
      save_filename=self.params.disp_dir+"img_hist"+filename_suffix)

    #Scale image by max and min of images and/or recon
    r_max = np.max([np.max(input_data), np.max(recon)])
    r_min = np.min([np.min(input_data), np.min(recon)])

    input_data = dp.reshape_data(input_data, flatten=False)[0]
    fig = pf.plot_data_tiled(input_data, normalize=False,
      title="Scaled Images at step "+current_step, vmin=r_min, vmax=r_max,
      save_filename=self.params.disp_dir+"images"+filename_suffix)
    fig = pf.plot_data_tiled(recon, normalize=False,
      title="Recons at step "+current_step, vmin=r_min, vmax=r_max,
      save_filename=self.params.disp_dir+"recons"+filename_suffix)

    fig = pf.plot_activity_hist(lca_activity, title="LCA Activity Histogram",
      save_filename=self.params.disp_dir+"lca_act_hist"+filename_suffix)

    fig = pf.plot_activity_hist(lista_activity, title="LISTA Activity Histogram",
      save_filename=self.params.disp_dir+"lista_act_hist"+filename_suffix)

    fig = pf.plot_data_tiled(lca_weights, normalize=False,
      title="LCA Dictionary at step "+current_step, vmin=None, vmax=None,
      save_filename=self.params.disp_dir+"lca_w"+filename_suffix)

    fig = pf.plot_data_tiled(lista_weights, normalize=False,
      title="LISTA Dictionary at step "+current_step, vmin=None, vmax=None,
      save_filename=self.params.disp_dir+"lista_w"+filename_suffix)
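For context, a sketch of how generate_plots is typically driven from a training loop. The driver below is hypothetical (names such as model.train_op, dataset.next_batch, params.num_batches, and params.gen_plot_int are assumptions, not taken from the project); it only illustrates that the method must run while a tf.compat.v1 session is installed as the default, because the plotting code calls tf.compat.v1.get_default_session().

# Hypothetical driver loop (sketch only).
with tf.compat.v1.Session(graph=model.graph) as sess:
    sess.run(tf.compat.v1.global_variables_initializer())
    for step in range(model.params.num_batches):
        data_batch, label_batch = dataset.next_batch(model.params.batch_size)
        sess.run(model.train_op,
                 feed_dict=model.get_feed_dict(data_batch, label_batch))
        if step % model.params.gen_plot_int == 0:
            # Must run inside the `with` block so a default session exists.
            model.generate_plots(data_batch, label_batch)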
Code example #3
 def generate_plots(self, input_data, input_labels=None):
     """
     Plot weights, reconstruction, and gradients
     Inputs:
       input_data: data object containing the current image batch
       input_labels: data object containing the current label batch
     """
     super(IcaModel, self).generate_plots(input_data, input_labels)
     feed_dict = self.get_feed_dict(input_data, input_labels)
     eval_list = [self.global_step, self.w_analysis, self.a, self.z]
     eval_out = tf.compat.v1.get_default_session().run(eval_list, feed_dict)
     current_step = str(eval_out[0])
     weights, a_vals, z_vals = eval_out[1:]
     #input_data = dp.reshape_data(input_data, flatten=False)[0]
     #pf.plot_data_tiled(input_data, normalize=False,
     #  title="Images at step "+current_step, vmin=np.min(input_data), vmax=np.max(input_data),
     #  save_filename=(self.params.disp_dir+"images-"+current_step.zfill(5)+".png"))
     weights_norm = np.linalg.norm(weights, axis=0,
                                   keepdims=False)  # norm across pixels
     pf.plot_bar(weights_norm,
                 num_xticks=5,
                 title="$W_{analysis}$ l$_{2}$ norm",
                 xlabel="Basis Index",
                 ylabel="L2 Norm",
                 save_filename=(self.params.disp_dir + "w_analysis_norm_v" +
                                self.params.version + "-" +
                                current_step.zfill(5) + ".png"))
     weights = dp.reshape_data(
         weights.T, flatten=False)[0]  #[neurons, pixels_y, pixels_x]
     pf.plot_weights(
         weights.squeeze(),
         title="Unnormalized weights at step " + current_step,
         save_filename=(self.params.disp_dir + "w_analysis_unnormalized_v" +
                        self.params.version + "-" + current_step.zfill(5) +
                        ".png"))
     #pf.plot_data_tiled(weights, normalize=True,
     #  title="Weights at step "+current_step, vmin=-1.0, vmax=1.0,
     #  save_filename=(self.params.disp_dir+"w_analysis_v"+self.params.version+"-"
     #  +current_step.zfill(5)+".png"))
     pf.plot_activity_hist(
         a_vals,
         num_bins=1000,
         title="a Activity Histogram at step " + current_step,
         save_filename=(self.params.disp_dir + "act_hist_v" +
                        self.params.version + "-" + current_step.zfill(5) +
                        ".png"))
Code example #4
 def generate_plots(self, input_data, input_labels=None):
   """
   Plot weights, reconstruction, and gradients
   Inputs:
     input_data: data object containing the current image batch
     input_labels: data object containing the current label batch
   """
   super(RicaModel, self).generate_plots(input_data, input_labels)
   feed_dict = self.get_feed_dict(input_data, input_labels)
   eval_list = [self.global_step, self.w, self.reconstruction,  self.a]
   eval_out = tf.compat.v1.get_default_session().run(eval_list, feed_dict)
   current_step = str(eval_out[0])
   filename_suffix = "_v"+self.params.version+"_"+current_step.zfill(5)+".png"
   weights, recon, activity = eval_out[1:]
   #w_lengths = np.sqrt(np.sum(np.square(weights), axis=0))
   recon = dp.reshape_data(recon, flatten=False)[0]
   weights = dp.reshape_data(weights.T, flatten=False)[0] # [units, pixels]
   fig = pf.plot_activity_hist(input_data, title="Image Histogram",
     save_filename=self.params.disp_dir+"img_hist"+filename_suffix)
   input_data = dp.reshape_data(input_data, flatten=False)[0]
   fig = pf.plot_data_tiled(input_data, normalize=False,
     title="Images at step "+current_step, vmin=None, vmax=None,
     save_filename=self.params.disp_dir+"images"+filename_suffix)
   fig = pf.plot_activity_hist(activity, title="Activity Histogram",
     save_filename=self.params.disp_dir+"act_hist"+filename_suffix)
   fig = pf.plot_data_tiled(weights, normalize=False,
     title="Dictionary at step "+current_step, vmin=None, vmax=None,
     save_filename=self.params.disp_dir+"w"+filename_suffix)
   #fig = pf.plot_bar(w_lengths, title="Weight L2 Norms", xlabel="Weight Index", ylabel="L2 Norm",
   #  save_filename=self.params.disp_dir+"w_norms"+filename_suffix)
   fig = pf.plot_data_tiled(recon, normalize=False,
     title="Recons at step "+current_step, vmin=None, vmax=None,
     save_filename=self.params.disp_dir+"recons"+filename_suffix)
   if self.params.optimizer != "lbfgsb":
     for weight_grad_var in self.grads_and_vars[self.sched_idx]:
       grad = weight_grad_var[0][0].eval(feed_dict)
       shape = grad.shape
       name = weight_grad_var[0][1].name.split('/')[1].split(':')[0]#np.split
       grad = dp.reshape_data(grad.T, flatten=False)[0]
       fig = pf.plot_data_tiled(grad, normalize=True,
         title="Gradient for w at step "+current_step, vmin=None, vmax=None,
         save_filename=self.params.disp_dir+"dw"+filename_suffix)
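The gradient loop above indexes each entry as weight_grad_var[0][0] (gradient tensor) and weight_grad_var[0][1] (variable), which implies that self.grads_and_vars[self.sched_idx] holds a one-element list of (gradient, variable) pairs per weight. A hypothetical construction consistent with that indexing, not the project's actual optimizer setup:

# Sketch: total_loss, learning_rate, and weight_variables are placeholders.
optimizer = tf.compat.v1.train.AdamOptimizer(learning_rate=learning_rate)
grads_and_vars_for_schedule = [
    optimizer.compute_gradients(total_loss, var_list=[weight])  # -> [(grad, var)]
    for weight in weight_variables
]
# Each entry e then satisfies: e[0][0] is the gradient, e[0][1] is the variable.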
Code example #5
    def generate_plots(self, input_data, input_labels=None):
        """
        Plot weights, reconstruction, and gradients
        Inputs:
          input_data: data object containing the current image batch
          input_labels: data object containing the current label batch
        """
        if input_data.shape[-1] == 3:
            cmap = None
        elif input_data.shape[-1] == 1:
            cmap = "Greys_r"
        else:
            assert False, (
                "Input_data.shape[-1] should indicate color channel, and should be 1 or 3"
            )
        feed_dict = self.get_feed_dict(input_data, input_labels)
        weights, recon, activity = tf.compat.v1.get_default_session().run(
            [self.module.w, self.module.reconstruction,
             self.get_encodings()], feed_dict)

        recon = dp.rescale_data_to_one(recon)[0]
        weights = np.transpose(dp.rescale_data_to_one(weights.T)[0].T,
                               axes=(3, 0, 1, 2))
        current_step = str(self.global_step.eval())
        filename_suffix = "_v" + self.params.version + "_" + current_step.zfill(
            5) + ".png"
        input_data = dp.rescale_data_to_one(input_data)[0]

        num_features = activity.shape[-1]
        activity = np.reshape(activity, [-1, num_features])
        fig = pf.plot_activity_hist(activity,
                                    title="LCA Activity Histogram",
                                    save_filename=self.params.disp_dir +
                                    "lca_act_hist" + filename_suffix)

        pf.plot_data_tiled(input_data[0, ...],
                           normalize=False,
                           title="Images at step " + current_step,
                           vmin=None,
                           vmax=None,
                           cmap=cmap,
                           save_filename=self.params.disp_dir + "images" +
                           filename_suffix)
        pf.plot_data_tiled(recon[0, ...],
                           normalize=False,
                           title="Recons at step " + current_step,
                           vmin=None,
                           vmax=None,
                           cmap=cmap,
                           save_filename=self.params.disp_dir + "recons" +
                           filename_suffix)
        pf.plot_data_tiled(weights,
                           normalize=False,
                           title="Dictionary at step " + current_step,
                           vmin=np.min(weights),
                           vmax=np.max(weights),
                           cmap=cmap,
                           save_filename=self.params.disp_dir + "phi" +
                           filename_suffix)
        for weight_grad_var in self.grads_and_vars[self.sched_idx]:
            grad = weight_grad_var[0][0].eval(feed_dict)
            shape = grad.shape
            name = weight_grad_var[0][1].name.split('/')[1].split(':')[
                0]  #np.split
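The gradient loop in this listing stops after extracting the variable name. In the fully connected siblings (code examples #4 and #7) the same loop continues by reshaping the gradient the way the weights were reshaped and tiling it; the continuation below follows that pattern and is only a sketch for this convolutional model:

        for weight_grad_var in self.grads_and_vars[self.sched_idx]:
            grad = weight_grad_var[0][0].eval(feed_dict)
            # Fully connected case, as in examples #4 and #7; a convolutional
            # gradient would instead be transposed like the 4-D weights above.
            grad = dp.reshape_data(grad.T, flatten=False)[0]
            pf.plot_data_tiled(grad, normalize=True,
                               title="Gradient for w at step " + current_step,
                               vmin=None, vmax=None, cmap=cmap,
                               save_filename=self.params.disp_dir + "dw" + filename_suffix)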
Code example #6
File: ae_model.py (project: dpaiton/DeepSparseCoding)
    def generate_plots(self, input_data, input_labels=None):
        """
        Plot weights, reconstruction, and gradients
        Inputs:
          input_data: data object containing the current image batch
          input_labels: data object containing the current label batch
        """
        super(AeModel, self).generate_plots(input_data, input_labels)
        feed_dict = self.get_feed_dict(input_data, input_labels)
        eval_list = [
            self.global_step, self.module.w_list[0], self.module.w_list[-1],
            self.module.b_list, self.module.u_list[1:]
        ]
        eval_out = tf.compat.v1.get_default_session().run(eval_list, feed_dict)
        current_step = str(eval_out[0])
        w_enc, w_dec, b_list, activations = eval_out[1:]
        recon = activations[-1]
        # compute weight norms
        num_features = w_enc.shape[-1]
        w_enc_norm = np.linalg.norm(np.reshape(w_enc, (-1, num_features)),
                                    axis=0,
                                    keepdims=False)
        # reshapes flat data into image & normalize
        if (len(w_enc.shape) == 2):
            w_enc_img = dp.reshape_data(w_enc.T, flatten=False)[0]
        else:
            w_enc_img = np.transpose(w_enc, (3, 0, 1, 2))
        w_enc_img = dp.norm_weights(w_enc_img)

        if (not self.params.tie_dec_weights):
            if (len(w_dec.shape) == 2):
                w_dec_norm = np.linalg.norm(w_dec, axis=1, keepdims=False)
                w_dec_img = dp.reshape_data(w_dec, flatten=False)[0]
            else:
                #Decoder in same shape as encoder if multi dimensional
                #conv2d_transpose requires it to be the same shape as decoder
                w_dec_norm = np.linalg.norm(np.reshape(w_dec,
                                                       (-1, num_features)),
                                            axis=0,
                                            keepdims=False)
                w_dec_img = np.transpose(w_dec, (3, 0, 1, 2))
            w_dec_img = dp.norm_weights(w_dec_img)

        # generate figures
        filename_suffix = "_v" + self.params.version + "_" + current_step.zfill(
            5) + ".png"

        fig = pf.plot_data_tiled(
            w_enc_img,
            normalize=False,
            title="Encoding weights at step " + current_step,
            vmin=None,
            vmax=None,
            save_filename=self.params.disp_dir + "w_enc" + filename_suffix)

        fig = pf.plot_bar(w_enc_norm,
                          num_xticks=5,
                          title="w_enc l2 norm",
                          xlabel="Basis Index",
                          ylabel="L2 Norm",
                          save_filename=self.params.disp_dir + "w_enc_norm" +
                          filename_suffix)

        if (not self.params.tie_dec_weights):
            fig = pf.plot_data_tiled(
                w_dec_img,
                normalize=False,
                title="Decoding weights at step " + current_step,
                vmin=None,
                vmax=None,
                save_filename=self.params.disp_dir + "w_dec" + filename_suffix)
            fig = pf.plot_bar(w_dec_norm,
                              num_xticks=5,
                              title="w_dec l2 norm",
                              xlabel="Basis Index",
                              ylabel="L2 Norm",
                              save_filename=self.params.disp_dir +
                              "w_dec_norm" + filename_suffix)

        for layer_id, activity in enumerate(activations[:-1]):
            num_features = activity.shape[-1]
            fig = pf.plot_activity_hist(
                np.reshape(activity, (-1, num_features)),
                title="Activity Encoder " + str(layer_id) + " Histogram",
                save_filename=self.params.disp_dir + "act_enc_" +
                str(layer_id) + "_hist" + filename_suffix)

        for layer_id, bias in enumerate(b_list):
            fig = pf.plot_activity_hist(
                np.squeeze(bias),
                title="Bias " + str(layer_id) + " Histogram",
                save_filename=self.params.disp_dir + "bias_" + str(layer_id) +
                "_hist" + filename_suffix)
        if eval_out[0] * 10 % self.params.cp_int == 0:
            #Scale image by max and min of images and/or recon
            r_max = np.max([np.max(input_data), np.max(recon)])
            r_min = np.min([np.min(input_data), np.min(recon)])
            batch_size = input_data.shape[0]
            fig = pf.plot_activity_hist(np.reshape(input_data,
                                                   (batch_size, -1)),
                                        title="Image Histogram",
                                        save_filename=self.params.disp_dir +
                                        "img_hist" + filename_suffix)
            input_data = dp.reshape_data(input_data, flatten=False)[0]
            fig = pf.plot_data_tiled(
                input_data,
                normalize=False,
                title="Scaled Images at step " + current_step,
                vmin=r_min,
                vmax=r_max,
                save_filename=self.params.disp_dir + "images" +
                filename_suffix)
            #TODO: This plot hangs sometimes?
            #fig = pf.plot_activity_hist(recon, title="Recon Histogram",
            #save_filename=self.params.disp_dir+"recon_hist"+filename_suffix)
            recon = dp.reshape_data(recon, flatten=False)[0]
            fig = pf.plot_data_tiled(recon,
                                     normalize=False,
                                     title="Recons at step " + current_step,
                                     vmin=r_min,
                                     vmax=r_max,
                                     save_filename=self.params.disp_dir +
                                     "recons" + filename_suffix)
Code example #7
File: lca_model.py (project: dpaiton/DeepSparseCoding)
 def generate_plots(self, input_data, input_labels=None):
     """
     Plot weights, reconstruction, and gradients
     Inputs:
       input_data: data object containing the current image batch
       input_labels: data object containing the current label batch
     """
     super(LcaModel, self).generate_plots(input_data, input_labels)
     feed_dict = self.get_feed_dict(input_data, input_labels)
     eval_list = [
         self.global_step, self.module.w, self.module.reconstruction,
         self.get_encodings()
     ]
     eval_out = tf.compat.v1.get_default_session().run(eval_list, feed_dict)
     current_step = str(eval_out[0])
     filename_suffix = "_v" + self.params.version + "_" + current_step.zfill(
         5) + ".png"
     weights, recon, activity = eval_out[1:]
     weights_norm = np.linalg.norm(weights, axis=0, keepdims=False)
     recon = dp.reshape_data(recon, flatten=False)[0]
     weights = dp.reshape_data(
         weights.T, flatten=False)[0]  # [num_neurons, height, width]
     fig = pf.plot_activity_hist(input_data,
                                 title="Image Histogram",
                                 save_filename=self.params.disp_dir +
                                 "img_hist" + filename_suffix)
     #Scale image by max and min of images and/or recon
     r_max = np.max([np.max(input_data), np.max(recon)])
     r_min = np.min([np.min(input_data), np.min(recon)])
     input_data = dp.reshape_data(input_data, flatten=False)[0]
     fig = pf.plot_data_tiled(input_data,
                              normalize=False,
                              title="Scaled Images at step " + current_step,
                              vmin=r_min,
                              vmax=r_max,
                              save_filename=self.params.disp_dir +
                              "images" + filename_suffix)
     fig = pf.plot_data_tiled(recon,
                              normalize=False,
                              title="Recons at step " + current_step,
                              vmin=r_min,
                              vmax=r_max,
                              save_filename=self.params.disp_dir +
                              "recons" + filename_suffix)
     fig = pf.plot_activity_hist(activity,
                                 title="Activity Histogram",
                                 save_filename=self.params.disp_dir +
                                 "act_hist" + filename_suffix)
     fig = pf.plot_data_tiled(weights,
                              normalize=False,
                              title="Dictionary at step " + current_step,
                              vmin=None,
                              vmax=None,
                              save_filename=self.params.disp_dir + "phi" +
                              filename_suffix)
     for weight_grad_var in self.grads_and_vars[self.sched_idx]:
         grad = weight_grad_var[0][0].eval(feed_dict)
         shape = grad.shape
         name = weight_grad_var[0][1].name.split('/')[1].split(':')[
             0]  #np.split
         grad = dp.reshape_data(grad.T, flatten=False)[0]
         fig = pf.plot_data_tiled(
             grad,
             normalize=True,
             title="Gradient for w at step " + current_step,
             vmin=None,
             vmax=None,
             save_filename=self.params.disp_dir + "dphi" + filename_suffix)
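Several listings turn a flat dictionary of shape [num_pixels, num_neurons] into image patches via dp.reshape_data(weights.T, flatten=False)[0]. A numpy-only sketch of that step, assuming square single-channel patches (the project's helper may handle more cases, such as non-square or multi-channel data):

import numpy as np

def unflatten_weights(weights):
    # weights: [num_pixels, num_neurons] with num_pixels a perfect square.
    num_pixels, num_neurons = weights.shape
    side = int(np.sqrt(num_pixels))
    assert side * side == num_pixels, "expected square patches"
    # Transpose to [num_neurons, num_pixels], then fold each row into an image.
    return weights.T.reshape(num_neurons, side, side)

w = np.random.randn(256, 100)        # e.g. 16x16 patches, 100 neurons
print(unflatten_weights(w).shape)    # (100, 16, 16)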
Code example #8
File: mlp_model.py (project: dpaiton/DeepSparseCoding)
 def generate_plots(self, input_data, input_labels=None):
     """
     Plot weights, gradients, etc
     Inputs: input_data and input_labels used for the session
     """
     super(MlpModel, self).generate_plots(input_data, input_labels)
     feed_dict = self.get_feed_dict(input_data, input_labels)
     eval_list = [
         self.global_step,
         self.get_encodings(), self.mlp_module.weight_list
     ]
     train_on_adversarial = feed_dict[self.train_on_adversarial]
     if (train_on_adversarial):
         eval_list += [self.adv_module.get_adv_input()]
     eval_out = tf.compat.v1.get_default_session().run(eval_list, feed_dict)
     current_step = str(eval_out[0])
     filename_suffix = "_v" + self.params.version + "_" + current_step.zfill(
         5) + ".png"
     activity = eval_out[1]
     fig = pf.plot_activity_hist(activity,
                                 title="Logit Histogram",
                                 save_filename=self.params.disp_dir +
                                 "act_hist" + filename_suffix)
     #First layer weights
     mlp_weights = eval_out[2]
     w_enc = mlp_weights[0]
     if self.params.mlp_layer_types[0] == "fc":
         w_enc_norm = np.linalg.norm(w_enc, axis=0, keepdims=False)
         # Don't plot weights as images if input is not square
         w_input_sqrt = np.sqrt(w_enc.shape[0])
         if (np.floor(w_input_sqrt) == np.ceil(w_input_sqrt)):
             w_enc = dp.reshape_data(
                 w_enc.T, flatten=False)[0]  # [num_neurons, height, width]
             fig = pf.plot_data_tiled(
                 w_enc,
                 normalize=False,
                 title="Weights at step " + current_step,
                 vmin=None,
                 vmax=None,
                 save_filename=self.params.disp_dir + "w_enc" +
                 filename_suffix)
     else:  # conv
         w_enc = np.transpose(dp.rescale_data_to_one(w_enc.T)[0].T,
                              axes=(3, 0, 1, 2))
         if (w_enc.shape[-1] == 1 or w_enc.shape[-1] == 3):
             pf.plot_data_tiled(w_enc,
                                normalize=False,
                                title="Weights at step " + current_step,
                                save_filename=self.params.disp_dir +
                                "w_enc" + filename_suffix)
     for (w, tf_w) in zip(mlp_weights, self.mlp_module.weight_list):
         #simplify tensorflow node name to only be the last one
         w_name = tf_w.name.split("/")[-1].split(":")[0]
         num_f = w.shape[-1]
         w_reshape = np.reshape(w, [-1, num_f])
         w_norm = np.linalg.norm(w_reshape, axis=0, keepdims=False)
         fig = pf.plot_bar(w_norm,
                           num_xticks=5,
                           title=w_name + " l2 norm",
                           xlabel="w index",
                           ylabel="L2 Norm",
                           save_filename=self.params.disp_dir + "w_norm_" +
                           w_name + filename_suffix)
     if (train_on_adversarial):
         adv_input = eval_out[-1]
         adv_input = dp.reshape_data(adv_input, flatten=False)[0]
         fig = pf.plot_data_tiled(adv_input,
                                  normalize=False,
                                  title="Adv inputs at " + current_step,
                                  save_filename=self.params.disp_dir +
                                  "adv_input" + filename_suffix)
Code example #9
 def generate_plots(self, input_data, input_labels=None):
     """
     Plot weights, reconstruction, and gradients
     Inputs:
       input_data: data object containing the current image batch
       input_labels: data object containing the current label batch
     """
     super(MlpLcaModel, self).generate_plots(input_data, input_labels)
     feed_dict = self.get_feed_dict(input_data, input_labels)
     eval_list = [
         self.global_step, self.lca_module.w,
         self.lca_module.reconstruction, self.lca_module.a
     ]
     eval_out = tf.compat.v1.get_default_session().run(eval_list, feed_dict)
     current_step = str(eval_out[0])
     filename_suffix = "_v" + self.params.version + "_" + current_step.zfill(
         5) + ".png"
     weights, recon, lca_activity = eval_out[1:]
     batch_size = input_data.shape[0]
     fig = pf.plot_activity_hist(np.reshape(input_data, [batch_size, -1]),
                                 title="Image Histogram",
                                 save_filename=self.params.disp_dir +
                                 "img_hist" + filename_suffix)
     fig = pf.plot_activity_hist(np.reshape(recon, [batch_size, -1]),
                                 title="Recon Histogram",
                                 save_filename=self.params.disp_dir +
                                 "recon_hist" + filename_suffix)
     recon = dp.reshape_data(recon, flatten=False)[0]
     #Scale image by max and min of images and/or recon
     r_max = np.max([np.max(input_data), np.max(recon)])
     r_min = np.min([np.min(input_data), np.min(recon)])
     input_data = dp.reshape_data(input_data, flatten=False)[0]
     fig = pf.plot_data_tiled(input_data,
                              normalize=False,
                              title="Scaled Images at step " + current_step,
                              vmin=r_min,
                              vmax=r_max,
                              save_filename=self.params.disp_dir +
                              "images" + filename_suffix)
     fig = pf.plot_data_tiled(recon,
                              normalize=False,
                              title="Recons at step " + current_step,
                              vmin=r_min,
                              vmax=r_max,
                              save_filename=self.params.disp_dir +
                              "recons" + filename_suffix)
     num_features = lca_activity.shape[-1]
     lca_activity = np.reshape(lca_activity, [-1, num_features])
     fig = pf.plot_activity_hist(lca_activity,
                                 title="LCA Activity Histogram",
                                 save_filename=self.params.disp_dir +
                                 "lca_act_hist" + filename_suffix)
     if (len(weights.shape) == 4):  # conv
         weights = np.transpose(weights, (0, 2, 3, 1))
     else:  # fc
         weights = dp.reshape_data(
             weights.T, flatten=False)[0]  # [num_neurons, height, width]
     fig = pf.plot_data_tiled(weights,
                              normalize=False,
                              title="Dictionary at step " + current_step,
                              vmin=None,
                              vmax=None,
                              save_filename=self.params.disp_dir + "phi" +
                              filename_suffix)