Example #1
    def __init__(self,
                 volume_manager,
                 input_size,
                 hidden_sizes,
                 target_dims,
                 k,
                 m,
                 seed,
                 use_previous_direction=False,
                 use_layer_normalization=False,
                 drop_prob=0.,
                 use_zoneout=False,
                 **_):
        """
        Parameters
        ----------
        volume_manager : :class:`VolumeManager` object
            Used to evaluate the diffusion signal at specific coordinates using multiple subjects.
        input_size : int
            Number of units each element Xi in the input sequence X has.
        hidden_sizes : int, list of int
            Number of hidden units each GRU should have.
        target_dims : int
            Number of dimensions of the multivariate Gaussian to estimate; the model outputs two distribution parameters (mu and sigma) for each dimension.
        k : int
            Number of steps ahead to predict (the model will predict all steps up to k).
        m : int
            Number of Monte Carlo samples used to estimate the Gaussian parameters.
        seed : int
            Random seed to initialize the random noise used for sampling and dropout.
        use_previous_direction : bool
            Use the previous direction as an additional input.
        use_layer_normalization : bool
            Use LayerNormalization to normalize preactivations and stabilize hidden layer evolution.
        drop_prob : float
            Dropout/Zoneout probability for recurrent networks. See: https://arxiv.org/pdf/1512.05287.pdf & https://arxiv.org/pdf/1606.01305.pdf
        use_zoneout : bool
            Use zoneout implementation instead of dropout.
        """
        super().__init__(input_size, hidden_sizes, use_layer_normalization,
                         drop_prob, use_zoneout, seed)
        self.target_dims = target_dims
        self.target_size = 2 * self.target_dims  # Output distribution parameters mu and sigma for each dimension

        self.volume_manager = volume_manager

        self.k = k
        self.m = m
        self.seed = seed

        self.use_previous_direction = use_previous_direction

        # Theano MRG random stream, used for sampling noise and dropout masks
        self.srng = MRG_RandomStreams(self.seed)

        # Do not use dropout/zoneout in last hidden layer
        self.layer_regression = LayerRegression(self.hidden_sizes[-1],
                                                self.target_size,
                                                normed=False)
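
MRG_RandomStreams above is Theano's MRG31k3p random stream (theano.sandbox.rng_mrg), while LayerRegression is a project-local layer assumed importable in the snippet's module. Below is a minimal instantiation sketch; the class name GRU_Multistep_Gaussian and all argument values are illustrative assumptions, since the snippet shows only the constructor body, and volume_manager is assumed to be set up elsewhere.

# Hypothetical usage sketch -- class name and values are assumptions.
model = GRU_Multistep_Gaussian(
    volume_manager=volume_manager,  # a VolumeManager wrapping the diffusion data
    input_size=100,                 # size of each element Xi of the input sequence
    hidden_sizes=[500],             # one GRU layer with 500 hidden units
    target_dims=3,                  # 3-D target -> 2 * 3 = 6 outputs (mu and sigma per dim)
    k=1,                            # predict one step ahead
    m=10,                           # 10 Monte Carlo samples per prediction
    seed=1234)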
Example #2

    def __init__(self, input_size, hidden_sizes, output_size, **_):
        """
        Parameters
        ----------
        input_size : int
            Number of units each element Xi in the input sequence X has.
        hidden_sizes : int, list of int
            Number of hidden units each GRU should have.
        output_size : int
            Number of units the regression layer should have.
        """
        super().__init__(input_size, hidden_sizes)
        self.output_size = output_size
        self.layer_regression = LayerRegression(self.hidden_sizes[-1],
                                                self.output_size)
        # Predict whether the sequence should stop, from the last hidden state
        # concatenated with the current input
        self.stopping_layer = LayerDense(self.hidden_sizes[-1] + input_size,
                                         1,
                                         activation="sigmoid",
                                         name="stopping")
Example #3
    def __init__(self,
                 volume_manager,
                 input_size,
                 hidden_sizes,
                 output_size,
                 n_gaussians,
                 activation='tanh',
                 use_previous_direction=False,
                 use_layer_normalization=False,
                 drop_prob=0.,
                 use_zoneout=False,
                 use_skip_connections=False,
                 seed=1234,
                 **_):
        """
        Parameters
        ----------
        volume_manager : :class:`VolumeManager` object
            Used to evaluate the diffusion signal at specific coordinates.
        input_size : int
            Number of units each element Xi in the input sequence X has.
        hidden_sizes : int, list of int
            Number of hidden units each GRU should have.
        output_size : int
            Number of units the regression layer should have.
        n_gaussians : int
            Number of Gaussians in the mixture.
        activation : str
            Activation function to apply on the "cell candidate".
        use_previous_direction : bool
            Use the previous direction as an additional input.
        use_layer_normalization : bool
            Use LayerNormalization to normalize preactivations and stabilize hidden layer evolution.
        drop_prob : float
            Dropout/Zoneout probability for recurrent networks. See: https://arxiv.org/pdf/1512.05287.pdf & https://arxiv.org/pdf/1606.01305.pdf
        use_zoneout : bool
            Use zoneout implementation instead of dropout.
        use_skip_connections : bool
            Use skip connections from the input to all hidden layers in the network, and from all hidden layers to the output layer.
        seed : int
            Random seed used for dropout normalization.
        """
        # Note: super(GRU_Regression, self) deliberately skips
        # GRU_Regression.__init__ in the MRO and calls the base GRU
        # constructor directly; this model builds its own, larger
        # regression layer below.
        super(GRU_Regression,
              self).__init__(input_size,
                             hidden_sizes,
                             activation=activation,
                             use_layer_normalization=use_layer_normalization,
                             drop_prob=drop_prob,
                             use_zoneout=use_zoneout,
                             use_skip_connections=use_skip_connections,
                             seed=seed)
        self.volume_manager = volume_manager
        self.n_gaussians = n_gaussians

        assert output_size == 3  # Only 3-dimensional targets are supported for now
        self.output_size = output_size

        self.use_previous_direction = use_previous_direction

        # GRU_Mixture does not predict a direction, so it cannot predict an offset
        self.predict_offset = False

        # Do not use dropout/zoneout in last hidden layer
        self.layer_regression_size = sum([
            n_gaussians,                 # Mixture weights
            n_gaussians * output_size,   # Means
            n_gaussians * output_size,   # Stds
        ])
        output_layer_input_size = (sum(self.hidden_sizes)
                                   if self.use_skip_connections
                                   else self.hidden_sizes[-1])
        self.layer_regression = LayerRegression(output_layer_input_size,
                                                self.layer_regression_size)
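
For concreteness, with n_gaussians=5 and output_size=3 the regression head emits 5 mixture weights, 15 means, and 15 standard deviations, i.e. 35 values per time step. A hedged instantiation sketch follows; all argument values are illustrative assumptions, and volume_manager is assumed to be set up elsewhere:

# Hypothetical usage sketch -- values are illustrative assumptions.
n_gaussians, output_size = 5, 3
layer_regression_size = n_gaussians + 2 * n_gaussians * output_size  # 5 + 15 + 15 = 35

model = GRU_Mixture(volume_manager=volume_manager,  # assumed available
                    input_size=100,
                    hidden_sizes=[500, 500],
                    output_size=3,              # the assert requires 3-D targets
                    n_gaussians=5,
                    use_skip_connections=True,  # output layer then reads sum(hidden_sizes) = 1000 units
                    seed=1234)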
Example #4
    def __init__(self,
                 volume_manager,
                 input_size,
                 hidden_sizes,
                 output_size,
                 use_previous_direction=False,
                 use_layer_normalization=False,
                 drop_prob=0.,
                 use_zoneout=False,
                 use_skip_connections=False,
                 neighborhood_radius=None,
                 learn_to_stop=False,
                 seed=1234,
                 **_):
        """
        Parameters
        ----------
        volume_manager : :class:`VolumeManager` object
            Used to evaluate the diffusion signal at specific coordinates.
        input_size : int
            Number of units each element Xi in the input sequence X has.
        hidden_sizes : int, list of int
            Number of hidden units each GRU should have.
        output_size : int
            Number of units the regression layer should have.
        use_previous_direction : bool
            Use the previous direction as an additional input.
        use_layer_normalization : bool
            Use LayerNormalization to normalize preactivations and stabilize hidden layer evolution.
        drop_prob : float
            Dropout/Zoneout probability for recurrent networks. See: https://arxiv.org/pdf/1512.05287.pdf & https://arxiv.org/pdf/1606.01305.pdf
        use_zoneout : bool
            Use zoneout implementation instead of dropout.
        use_skip_connections : bool
            Use skip connections from the input to all hidden layers in the network, and from all hidden layers to the output layer.
        neighborhood_radius : float
            Add the signal at positions around the current streamline coordinate to the input (with the given radius in voxel space); None = no neighborhood.
        learn_to_stop : bool
            Predict whether the streamline being generated should stop or not.
        seed : int
            Random seed used for dropout normalization.
        """
        self.neighborhood_radius = neighborhood_radius
        self.model_input_size = input_size
        if self.neighborhood_radius:
            self.neighborhood_directions = get_neighborhood_directions(
                self.neighborhood_radius)
            # Model input size is increased when using neighborhood
            self.model_input_size = (input_size *
                                     self.neighborhood_directions.shape[0])

        # Note: super(GRU_Regression, self) deliberately skips
        # GRU_Regression.__init__ in the MRO and calls the base GRU
        # constructor directly, here with the enlarged model_input_size.
        super(GRU_Regression,
              self).__init__(self.model_input_size,
                             hidden_sizes,
                             use_layer_normalization=use_layer_normalization,
                             drop_prob=drop_prob,
                             use_zoneout=use_zoneout,
                             use_skip_connections=use_skip_connections,
                             seed=seed)
        # Restore the original input size (the base constructor stored model_input_size)
        self.input_size = input_size

        self.volume_manager = volume_manager

        assert output_size == 3  # Only 3-dimensional targets are supported for now
        self.output_size = output_size

        self.use_previous_direction = use_previous_direction

        # GRU_Gaussian does not predict a direction, so it cannot predict an offset
        self.predict_offset = False
        self.learn_to_stop = learn_to_stop

        # Do not use dropout/zoneout in last hidden layer
        self.layer_regression_size = sum([
            output_size,   # Means
            output_size,   # Stds
        ])
        output_layer_input_size = (sum(self.hidden_sizes)
                                   if self.use_skip_connections
                                   else self.hidden_sizes[-1])
        self.layer_regression = LayerRegression(output_layer_input_size,
                                                self.layer_regression_size)
        if self.learn_to_stop:
            # Predict whether a streamline should stop or keep growing
            self.layer_stopping = LayerDense(output_layer_input_size,
                                             1,
                                             activation='sigmoid',
                                             name="GRU_Gaussian_stopping")