Example #1
    def create_hooks(self, config):
        hooks = super().create_hooks(config)

        # LOGGING HOOKS

        # TODO am I ignoring the number of channels here, is that correct? (Luigi)
        # TODO the channels seem to be included by np.prod...
        # dim_with_channels = np.prod(self.x_shape["train"])

        # The train and eval shapes could differ, so use tf.shape(self.raw_x)
        # instead of np.prod(self.x_shape["train"]) or np.prod(self.x_shape["eval"]). (Riccardo)
        dim_with_channels = tf.cast(tf.reduce_prod(tf.shape(self.raw_x)[1:]),
                                    tf.float32)

        # TODO this formula may still not be correct: it uses self.loss (which
        # already includes beta and warm_up), but it should be closer than the
        # previous one.
        # check https://www.reddit.com/r/MachineLearning/comments/56m5o2/discussion_calculation_of_bitsdims/
        bits_dim = ((self.loss / dim_with_channels) +
                    tf.log(256.0 / 2.0)) / tf.log(2.0)
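        # Note (assuming the input is 8-bit data rescaled to [-1, 1]): each
        # integer level then covers a bin of width 2/256, so a per-dimension
        # negative log-likelihood `nll` in nats under the continuous density
        # converts to bits per dimension as
        #     bits/dim = (nll + log(256/2)) / log(2)
        # i.e. the expression above with nll = self.loss / dim_with_channels.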

        tensors_to_average = [[[-self.loss], [bits_dim]],
                              self.loss_nodes_to_log]
        tensors_to_average_names = [[["LB_log(p)"], ["b/d"]],
                                    self.loss_nodes_to_log_names]
        tensors_to_average_plots = [[{
            "fileName": "loss"
        }, {
            "fileName": "bits_dim"
        }], self.loss_nodes_to_log_filenames]
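        # The three lists above are parallel: presumably each group of tensors
        # is averaged and logged under the matching names and written to the
        # matching plot file by LoggingMeanTensorsHook.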
        #[*[name for name in self.loss_nodes_to_track]],

        hooks.append(
            LoggingMeanTensorsHook(
                model=self,
                fileName="log",
                dirName=self.dirName,
                tensors_to_average=tensors_to_average,
                tensors_to_average_names=tensors_to_average_names,
                tensors_to_average_plots=tensors_to_average_plots,
                average_steps=self._n_steps_stats,
                tensorboard_dir=self._tensorboard_dir,
                trigger_summaries=config["save_summaries"],
                plot_offset=self._plot_offset,
                train_loop_key=TRAIN_LOOP,
                # If you want to remove a dataset from here, add config support for
                # choosing which datasets to log; if in doubt ask me please. (Riccardo)
                datasets_keys=[TRAIN, VALIDATION, TEST],
                time_reference=self._time_reference_str))

        kwargs = config.get("ImagesReconstructHook", None)
        if kwargs:
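            # Config values override the model defaults here: later entries win
            # when unpacking dicts with {**defaults, **overrides}.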
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                VAEImagesReconstructHook(model=self,
                                         dirName=self.dirName,
                                         **kwargs))

        kwargs = config.get("ImagesGenerateHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                ImagesGenerateHook(model=self, dirName=self.dirName, **kwargs))

        kwargs = config.get("ImportanceSamplingHook", None)
        if kwargs:
            # Create the importance-sampling node only when needed: this
            # function fails when z is fully convolutional... (Riccardo)
            self.importance_sampling_node = self._create_importance_sampling_node(
            )

            if not isinstance(kwargs, list):
                kwargs = [kwargs]
            for kw in kwargs:
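                # The config entry may be a single dict or a list of dicts; each
                # one yields its own ImportanceSamplingHook.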
                kws = {**self._plot_model_hooks_kwargs, **kw}
                hooks.append(
                    ImportanceSamplingHook(
                        model=self,
                        dirName=self.dirName,
                        tensors_to_average=[self.importance_sampling_node],
                        # don't change the order (Luigi)
                        datasets_keys=[TRAIN, VALIDATION],
                        **kws))

        kwargs = config.get("TwoDimPCALatentVariablesHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                TwoDimPCALatentVariablesHook(
                    model=self,
                    dirName=self.dirName,
                    tensors=[self.z] +
                    list(self._approximate_posterior_params),
                    tensors_names=['z', 'mu'],
                    # don't change the order (Luigi)
                    datasets_keys=[TRAIN, VALIDATION],
                    **kwargs))

        kwargs = config.get("PCALatentVariablesHook", None)
        if kwargs:
            kwargs = {**self._plot_model_hooks_kwargs, **kwargs}
            hooks.append(
                PCALatentVariablesHook(
                    model=self,
                    dirName=self.dirName,
                    tensors=[self.z, self._approximate_posterior_params[0]],
                    tensors_names=['z', 'mu'],
                    # don't change the order (Luigi)
                    datasets_keys=[TRAIN, VALIDATION],
                    **kwargs))

        kwargs = config.get("VAELinearInterpolationHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                VAELinearInterpolationHook(model=self,
                                           dirName=self.dirName,
                                           **kwargs))

        kwargs = config.get("LatentVarsClassificationHook", None)
        if kwargs:
            kwargs = {**self._plot_model_hooks_kwargs, **kwargs}
            hooks.append(
                LatentVarsClassificationHook(
                    model=self,
                    dirName=self.dirName,
                    tensors=[
                        self.z, self._approximate_posterior_params[0],
                        tf.concat(list(self._approximate_posterior_params),
                                  axis=1)
                    ],
                    tensors_names=['z', 'mu', 'mu_cov'],
                    # don't change the order (Luigi)
                    datasets_keys=[TRAIN, VALIDATION],
                    **kwargs))

        # frechet inception distance
        kwargs = config.get("FrechetInceptionDistanceHook", None)
        if kwargs:
            for kw in kwargs:
                kws = {**self._default_model_hooks_kwargs, **kw}
                hooks.append(
                    FrechetInceptionDistanceHook(model=self,
                                                 dirName=self.dirName,
                                                 **kws))

        # latent traversals hook
        kwargs = config.get("LatentTraversalsHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                LatentTraversalsHook(model=self,
                                     dirName=self.dirName,
                                     **kwargs))

        kwargs = config.get("LatentVarsGeometricClassificationHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                LatentVarsGeometricClassificationHook(
                    model=self,
                    dirName=self.dirName,
                    tensors=[
                        self.z, self._approximate_posterior_params[0],
                        tf.concat(list(self._approximate_posterior_params),
                                  axis=1)
                    ],
                    tensors_names=['z', 'mu', 'mu_cov'],
                    datasets_keys=[TRAIN, VALIDATION, TEST],
                    **kwargs))

        return hooks
Example #2
    def create_hooks(self, config):
        hooks = super().create_hooks(config)

        tensors_to_average = [
            [[self.cost]],
            [[self._prior.mean()], [self.n_z_samples]],
        ]
        tensors_to_average_names = [
            [["loss_wake"]],
            [["prior_mean"], ["samples"]],
        ]
        tensors_to_average_plots = [[
            {
                "fileName": "loss"
            },
        ], [{
            "fileName": "mean"
        }, {
            "fileName": "samples"
        }]]

        hooks.append(
            LoggingMeanTensorsHook(
                model=self,
                fileName="log",
                dirName=self.dirName,
                tensors_to_average=tensors_to_average,
                tensors_to_average_names=tensors_to_average_names,
                tensors_to_average_plots=tensors_to_average_plots,
                average_steps=self._n_steps_stats,
                tensorboard_dir=self._tensorboard_dir,
                trigger_summaries=config["save_summaries"],
                plot_offset=self._plot_offset,
                train_loop_key=TRAIN_LOOP,
                datasets_keys=[VALIDATION],
                time_reference=self._time_reference_str))

        tensors_to_average = [
            self.loss_nodes_to_log,
        ]
        tensors_to_average_names = [
            self.loss_nodes_to_log_names,
        ]
        tensors_to_average_plots = [
            self.loss_nodes_to_log_filenames,
        ]

        hooks.append(
            LoggingMeanTensorsHook(
                model=self,
                fileName="log2",
                dirName=self.dirName,
                tensors_to_average=tensors_to_average,
                tensors_to_average_names=tensors_to_average_names,
                tensors_to_average_plots=tensors_to_average_plots,
                average_steps=self._n_steps_stats,
                tensorboard_dir=self._tensorboard_dir,
                print_to_screen=False,
                trigger_summaries=config["save_summaries"],
                plot_offset=self._plot_offset,
                train_loop_key=TRAIN_LOOP,
                datasets_keys=[],
                time_reference=self._time_reference_str))

        kwargs = config.get("HMFisherMatrixHook2", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}

            hooks.append(
                HMFisherMatrixHook2(model=self, dirName=self.dirName,
                                    **kwargs))
        else:
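            # No Fisher-matrix hook configured: if the optimizer exposes a
            # diagonal padding term (presumably a damping term added to the
            # Fisher diagonal), log its mean instead.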
            if self._optimizer._d_p is not None:
                t_to_average = [[[self._optimizer._diagonal_pad]]]
                t_to_average_names = [
                    [["diagonal_pad"]],
                ]
                t_to_average_plots = [[{"fileName": "diagonal_pad"}]]
                hooks.append(
                    LoggingMeanTensorsHook(
                        model=self,
                        fileName="dp",
                        dirName=self.dirName,
                        tensors_to_average=t_to_average,
                        tensors_to_average_names=t_to_average_names,
                        tensors_to_average_plots=t_to_average_plots,
                        average_steps=self._n_steps_stats,
                        tensorboard_dir=self._tensorboard_dir,
                        trigger_summaries=config["save_summaries"],
                        print_to_screen=False,
                        plot_offset=self._plot_offset,
                        train_loop_key=TRAIN_LOOP,
                        datasets_keys=[]))

        kwargs = config.get("ImagesGenerateHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}

            hooks.append(
                ImagesGenerateHook(model=self,
                                   dirName=self.dirName,
                                   pm_one=self._pm_one,
                                   **kwargs))
        kwargs = config.get("ImagesInputHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}

            hooks.append(
                ImagesInputHook(model=self,
                                dirName=self.dirName,
                                pm_one=self._pm_one,
                                **kwargs))

        kwargs = config.get("ImagesReconstructHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}

            hooks.append(
                ImagesReconstructHook(model=self,
                                      dirName=self.dirName,
                                      pm_one=self._pm_one,
                                      **kwargs))

        kwargs = config.get("ThreeByThreeHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}

            hooks.append(
                ThreeByThreeHook(model=self,
                                 tensorboard_dir=self._tensorboard_dir,
                                 dirName=self.dirName,
                                 **kwargs))

        kwargs = config.get("LogpImportanceSamplingHook", None)
        if kwargs:
            if not isinstance(kwargs, list):
                kwargs = [kwargs]
            for kw in kwargs:
                kws = {**self._plot_model_hooks_kwargs, **kw}
                hooks.append(
                    ImportanceSamplingHook(
                        model=self,
                        dirName=self.dirName,
                        tensors_to_average=[self.importance_sampling_node],
                        datasets_keys=[TRAIN, VALIDATION, TEST],
                        **kws))
        return hooks
Example #3
    def _create_gradient_hook(self, config):

        # gradienthook
        tensors_to_average = [
            [
                [self.gradient_weight_global_norms[PHASE_WAKE][0]],
                self.gradient_norms[PHASE_WAKE],
            ],
            [[self.gradient_weight_global_norms[PHASE_SLEEP][0]],
             self.gradient_norms[PHASE_SLEEP]],
            [
                [self.gradient_weight_global_norms[PHASE_WAKE][1]],
                self.weight_norms[PHASE_WAKE],
            ],
            [[self.gradient_weight_global_norms[PHASE_SLEEP][1]],
             self.weight_norms[PHASE_SLEEP]],
        ]

        # Per-layer labels derived from the gradient tensor names: the ":0"
        # suffix is stripped and an "L" prefix added.
        layer_names_wake = [
            "L" + name.split(":")[0]
            for name in self.gradient_names[PHASE_WAKE]
        ]

        layer_names_sleep = [
            "L" + name.split(":")[0]
            for name in self.gradient_names[PHASE_SLEEP]
        ]

        tensors_to_average_names = [
            [
                ["gradient_global_norms_wake"],
                layer_names_wake,
            ],
            [["gradient_global_norms_sleep"], layer_names_sleep],
            [
                ["weight_global_norms_wake"],
                layer_names_wake,
            ],
            [["weight_global_norms_sleep"], layer_names_sleep],
        ]

        tensors_to_average_plots = [
            [
                {
                    "fileName": "gradient_global_norms_wake",
                    "logscale-y": 1,
                    "compose-label": 0
                },
                {
                    "fileName": "gradient_norms_wake",
                    "logscale-y": 1,
                    "compose-label": 0
                },
            ],
            [{
                "fileName": "gradient_global_norms_sleep",
                "logscale-y": 1,
                "compose-label": 0
            }, {
                "fileName": "gradient_norms_sleep",
                "logscale-y": 1,
                "compose-label": 0
            }],
            [
                {
                    "fileName": "weight_global_norms_wake",
                    "logscale-y": 1,
                    "compose-label": 0
                },
                {
                    "fileName": "weight_norms_wake",
                    "logscale-y": 1,
                    "compose-label": 0
                },
            ],
            [{
                "fileName": "weight_global_norms_sleep",
                "logscale-y": 1,
                "compose-label": 0
            }, {
                "fileName": "weight_norms_sleep",
                "logscale-y": 1,
                "compose-label": 0
            }],
        ]

        kwargs = config.get("GradientsHook", None)
        hook = None
        if kwargs:
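            # Convert the logging period from the config into a number of steps,
            # according to the model's time reference (presumably epochs or
            # global steps).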
            gradient_period = config["GradientsHook"]["period"]
            gradient_steps = self._get_steps(gradient_period,
                                             self._time_reference_str)
            hook = LoggingMeanTensorsHook(
                model=self,
                fileName="gradient",
                dirName=self.dirName,
                tensors_to_average=tensors_to_average,
                tensors_to_average_names=tensors_to_average_names,
                tensors_to_average_plots=tensors_to_average_plots,
                average_steps=gradient_steps,
                time_reference=self._time_reference_str,
                tensorboard_dir=self._tensorboard_dir,
                trigger_summaries=config["save_summaries"],
                # trigger_plot = True,
                print_to_screen=False,
                plot_offset=self._plot_offset,
                train_loop_key=TRAIN_LOOP,
                datasets_keys=[])

        return hook
Example #4
    def create_hooks(self, config):
        hooks = super().create_hooks(config)

        # LOGGING HOOKS

        # the shape of the input could be different from train to eval
        dim_with_channels = tf.cast(tf.reduce_prod(tf.shape(self.x)[1:]),
                                    tf.float32)

        # check https://www.reddit.com/r/MachineLearning/comments/56m5o2/discussion_calculation_of_bitsdims/
        bits_dim = (self.loss / dim_with_channels) / tf.log(
            2.0)  # - tf.log(256.0)
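        # Note: the commented-out tf.log(256.0) term is the discretization
        # offset used for 8-bit image data; it is omitted here, presumably
        # because the (audio) input is continuous rather than quantized.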

        psnr_reconstruction_quality = tf.image.psnr(
            self.x,
            self.x_reconstruction_node_tf,
            max_val=tf.reduce_max(self.x))
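        # PSNR uses the batch maximum as max_val, presumably because the signals
        # are not normalized to a fixed dynamic range.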

        sample_rate = self.dataset.sample_rate
        mfcc_original = mfcc(self.x, samplerate=sample_rate, preemph=0)
        mfcc_reconstructed = mfcc(self.x_reconstruction_node_tf,
                                  samplerate=sample_rate,
                                  preemph=0)
        mcd_reconstruction_quality = mcd(mfcc_original, mfcc_reconstructed)
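        # Mel-cepstral distortion (MCD) between the MFCCs of the original and
        # reconstructed signals; lower values mean less spectral distortion.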

        tensors_to_average = [[[psnr_reconstruction_quality]],
                              [[mcd_reconstruction_quality]],
                              [[-self.loss]],
                              [[bits_dim]],
                              self.nodes_to_track]

        tensors_to_average_names = [[['PSNR_reconstruction_quality']],
                                    [['MCD_reconstruction_distortion']],
                                    [["LB_log(p)"]], [["b/d"]],
                                    self.names_nodes_to_track]

        tensors_to_average_plots = [[{
            "fileName": 'psnr_reconstr_quality'
        }], [{
            "fileName": 'mcd_reconstr_distortion'
        }], [{
            "fileName": "loss"
        }], [{
            "fileName": "bits_dim"
        }], self.loss_nodes_to_log_filenames]

        hooks.append(
            LoggingMeanTensorsHook(
                model=self,
                fileName="log",
                dirName=self.dirName,
                tensors_to_average=tensors_to_average,
                tensors_to_average_names=tensors_to_average_names,
                tensors_to_average_plots=tensors_to_average_plots,
                average_steps=self._n_steps_stats,
                tensorboard_dir=self._tensorboard_dir,
                trigger_summaries=config["save_summaries"],
                plot_offset=self._plot_offset,
                train_loop_key=TRAIN_LOOP,
                datasets_keys=[VALIDATION],
                time_reference=self._time_reference_str))

        kwargs = config.get("WavReconstructHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                WavReconstructHook(model=self, dirName=self.dirName, **kwargs))

        kwargs = config.get("WavenetGaussianVisualizationHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                WavenetGaussianVisualizationHook(model=self,
                                                 dirName=self.dirName,
                                                 **kwargs))

        kwargs = config.get("WavGenerateHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                WavGenerateHook(model=self, dirName=self.dirName, **kwargs))

        kwargs = config.get("TwoDimPCALatentVariablesHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                TwoDimPCALatentVariablesHook(
                    model=self,
                    dirName=self.dirName,
                    tensors=[self.z],
                    tensors_names=['z'],
                    datasets_keys=[TRAIN, VALIDATION],
                    # don't change the order (Luigi)
                    **kwargs))

        kwargs = config.get("PCALatentVariablesHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                PCALatentVariablesHook(
                    model=self,
                    dirName=self.dirName,
                    tensors=[tf.reshape(self.z, [-1, self.z.shape[-1]])],
                    tensors_names=['z'],
                    # don't change the order (Luigi)
                    datasets_keys=[TRAIN, VALIDATION],
                    **kwargs))

        kwargs = config.get("WavLatentPCAHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                WavLatentPCAHook(
                    model=self,
                    dirName=self.dirName,
                    # don't change the order (Luigi)
                    dataset_keys=[TRAIN, VALIDATION],
                    **kwargs))

        kwargs = config.get('WavClusterAnomalyDetectionHook', None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}
            hooks.append(
                WavClusterAnomalyDetectionHook(
                    model=self,
                    dirName=self.dirName,
                    # don't change the order (Luigi)
                    dataset_keys=[TRAIN, VALIDATION],
                    **kwargs))

        return hooks
Example #5
    def create_hooks(self, config):
        hooks = super().create_hooks(config)

        # LOGGING HOOKS
        tensors_to_average = self.loss_nodes_to_log
        tensors_to_average_names = self.loss_nodes_to_log_names
        tensors_to_average_plots = self.loss_nodes_to_log_filenames
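        # The tensors, their names and their plot specifications are parallel
        # lists provided by the model's loss definition.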

        hooks.append(LoggingMeanTensorsHook(model=self,
                                            fileName="log",
                                            dirName=self.dirName,
                                            tensors_to_average=tensors_to_average,
                                            tensors_to_average_names=tensors_to_average_names,
                                            tensors_to_average_plots=tensors_to_average_plots,
                                            average_steps=self._n_steps_stats,
                                            tensorboard_dir=self._tensorboard_dir,
                                            trigger_summaries=config["save_summaries"],
                                            plot_offset=self._plot_offset,
                                            train_loop_key=TRAIN_LOOP,
                                            # If you want to remove a dataset from here, add config support
                                            # for choosing which datasets to log; if in doubt ask me please. (Riccardo)
                                            datasets_keys=[TRAIN, VALIDATION, TEST],
                                            time_reference=self._time_reference_str
                                            )
                     )

        kwargs = config.get("ImagesReconstructHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs,
                      **kwargs}
            hooks.append(AEImagesReconstructHook(model=self,
                                                 dirName=self.dirName,
                                                 **kwargs)
                         )

        kwargs = config.get("ImagesGenerateHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs,
                      **kwargs}
            hooks.append(ImagesGenerateHook(model=self,
                                            dirName=self.dirName,
                                            **kwargs
                                            )
                         )

        # kwargs = config.get("ImportanceSamplingHook", None)
        # if kwargs:
        #     if not isinstance(kwargs, list):
        #         kwargs = [kwargs]
        #     for kw in kwargs:
        #         kws = {**self._plot_model_hooks_kwargs,
        #                **kw}
        #         hooks.append(ImportanceSamplingHook(model = self,
        #                                             dirName = self.dirName,
        #                                             tensors_to_average = [self.importance_sampling_node],
        #                                             datasets_keys = [TRAIN, VALIDATION], # don't change the order (Luigi)
        #                                             **kws
        #                                            )
        #                     )
        #
        # kwargs = config.get("TwoDimPCALatentVariablesHook", None)
        # if kwargs:
        #     kwargs = {**self._default_model_hooks_kwargs,
        #               **kwargs}
        #     hooks.append(TwoDimPCALatentVariablesHook(model = self,
        #                                               dirName = self.dirName,
        #                                               tensors = [self.z] + list(self._approximate_posterior_params),
        #                                               tensors_names = ['z',
        #                                                                'mu'],
        #                                               datasets_keys = [TRAIN, VALIDATION], # don't change the order (Luigi)
        #                                               **kwargs
        #                                              )
        #                  )
        #
        # kwargs = config.get("PCALatentVariablesHook", None)
        # if kwargs:
        #     kwargs = {**self._plot_model_hooks_kwargs,
        #               **kwargs}
        #     hooks.append(PCALatentVariablesHook(model = self,
        #                                          dirName = self.dirName,
        #                                          tensors = [self.z,
        #                                                     self._approximate_posterior_params[0]],
        #                                          tensors_names = ['z',
        #                                                           'mu'],
        #                                          datasets_keys = [TRAIN, VALIDATION], # don't change the order (Luigi)
        #                                          **kwargs
        #                                        )
        #                  )
        #
        # kwargs = config.get("VAELinearInterpolationHook", None)
        # if kwargs:
        #     kwargs = {**self._default_model_hooks_kwargs,
        #               **kwargs}
        #     hooks.append(VAELinearInterpolationHook(model = self,
        #                                             dirName = self.dirName,
        #                                             **kwargs)
        #                  )
        #
        #
        # kwargs = config.get("LatentVarsClassificationHook", None)
        # if kwargs:
        #     kwargs = {**self._plot_model_hooks_kwargs,
        #                **kwargs}
        #     hooks.append(LatentVarsClassificationHook(model = self,
        #                                               dirName = self.dirName,
        #                                               tensors = [self.z,
        #                                                          self._approximate_posterior_params[0],
        #                                                          tf.concat(list(self._approximate_posterior_params), axis=1)],
        #                                               tensors_names = ['z',
        #                                                                'mu',
        #                                                                'mu_cov'],
        #                                               datasets_keys = [TRAIN,VALIDATION], # don't change the order (Luigi)
        #                                               **kwargs)
        #                   )
        #
        #
        # # frechet inception distance
        # kwargs = config.get("FrechetInceptionDistanceHook", None)
        # if kwargs:
        #     for kw in kwargs:
        #         kws = {**self._default_model_hooks_kwargs,
        #                **kw}
        #         hooks.append(FrechetInceptionDistanceHook(model = self,
        #                                                   dirName = self.dirName,
        #                                                   **kws)
        #                     )
        #
        # # latent traversals hook
        # kwargs = config.get("LatentTraversalsHook", None)
        # if kwargs:
        #     kwargs = {**self._default_model_hooks_kwargs,
        #               **kwargs}
        #     hooks.append(LatentTraversalsHook(model=self,
        #                                       dirName=self.dirName,
        #                                       **kwargs)
        #                                       )
        #
        # kwargs = config.get("LatentVarsGeometricClassificationHook", None)
        # if kwargs:
        #     kwargs = {**self._default_model_hooks_kwargs,
        #               **kwargs}
        #     hooks.append(LatentVarsGeometricClassificationHook(model=self,
        #                                                        dirName=self.dirName,
        #                                                        tensors=[self.z,
        #                                                                 self._approximate_posterior_params[0],
        #                                                                 tf.concat(list(self._approximate_posterior_params),
        #                                                                           axis=1)],
        #                                                        tensors_names=['z',
        #                                                                       'mu',
        #                                                                       'mu_cov'],
        #
        #                                                        datasets_keys=[TRAIN, VALIDATION, TEST],
        #                                                        **kwargs)
        #                  )

        return hooks
Example #6
    def create_hooks(self, config):
        hooks = super().create_hooks(config)

        # logging hooks

        log_tensors_to_average = self.loss_nodes_to_log

        log_tensors_to_average_names = self.loss_nodes_to_log_names

        log_tensors_to_average_plots = self.loss_nodes_to_log_filenames

        hooks.append(
            LoggingMeanTensorsHook(
                model=self,
                fileName="log",
                dirName=self.dirName,
                tensors_to_average=log_tensors_to_average,
                tensors_to_average_names=log_tensors_to_average_names,
                tensors_to_average_plots=log_tensors_to_average_plots,
                time_reference=self._time_reference_str,
                average_steps=self._n_steps_stats,
                tensorboard_dir=self._tensorboard_dir,
                trigger_summaries=config["save_summaries"],
                #trigger_plot = True,
                print_to_screen=True,
                plot_offset=self._plot_offset,
                train_loop_key=TRAIN_LOOP,
                # If you want to remove a dataset from here, add config support for
                # choosing which datasets to log; if in doubt ask me please. (Riccardo)
                datasets_keys=[TRAIN, VALIDATION, TEST]))

        kwargs = config.get("CorrelationHook", None)
        if kwargs:
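            # 'datasets_keys' is placed before **kwargs so it acts as a default
            # that the config can still override.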
            kwargs = {
                **self._default_model_hooks_kwargs,
                'datasets_keys': [TRAIN, VALIDATION],
                **kwargs
            }

            hooks.append(
                CorrelationHook(model=self, dirName=self.dirName, **kwargs))

        kwargs = config.get("MCDropoutHook", None)
        if kwargs:
            kwargs = {
                **self._default_model_hooks_kwargs,
                'datasets_keys': [VALIDATION, TEST],
                **kwargs
            }

            hooks.append(
                MCDropoutHook(model=self, dirName=self.dirName, **kwargs))

        kwargs = config.get("MCRegressionHook", None)
        if kwargs:
            kwargs = {
                **self._default_model_hooks_kwargs,
                'datasets_keys': [VALIDATION, TEST],
                **kwargs
            }

            hooks.append(
                MCRegressionHook(model=self, dirName=self.dirName, **kwargs))

        kwargs = config.get("MCClassificationHook", None)
        if kwargs:
            kwargs = {
                **self._default_model_hooks_kwargs,
                'datasets_keys': [VALIDATION, TEST],
                **kwargs
            }

            hooks.append(
                MCClassificationHook(model=self,
                                     dirName=self.dirName,
                                     **kwargs))

        # kwargs = config.get("MCDropoutHook_alpha", None)
        # if kwargs:
        #     kwargs = {**self._default_model_hooks_kwargs,
        #               'datasets_keys' : [TEST,VALIDATION],
        #               **kwargs}
        #
        #     hooks.append(MCDropoutHook_alpha(model=self,
        #                                dirName=self.dirName,
        #                                **kwargs))

        kwargs = config.get("WeightsHistogramHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs, **kwargs}

            hooks.append(
                WeightsHistogramHook(model=self,
                                     dirName=self.dirName,
                                     **kwargs))
        '''
        kwargs = config.get("HessianHook", None)
        if kwargs:
            kwargs = {**self._default_model_hooks_kwargs,
                      **kwargs}
            hooks.append(HessianHook(model = self,
                                     dirName = self.dirName,
                                     #tensors = [self.z,
                                     #           self._gaussian_model_latent_mean],
                                     #tensors_names = ['z',
                                     #                 'mu'],
                                     datasets_keys = [TRAIN, VALIDATION], # don't change the order (Luigi)
                                     **kwargs
                                    )
                         )
        '''

        return hooks