Example #1
0
    def __init__(self,
                 cluster_number: int,
                 embedding_dimension: int,
                 hidden_dimension: int,
                 ae: torch.nn.Module,
                 alpha: float = 1.0):
        """
        Module which holds all the moving parts of the DEC algorithm, as described in
        Xie/Girshick/Farhadi; this includes the AutoEncoder stage and the ClusterAssignment stage.

        :param cluster_number: number of clusters
        :param embedding_dimension: embedding dimension, input to the encoder
        :param hidden_dimension: hidden dimension, output of the encoder
        :param ae: autoencoder to use, must have .encoder attribute
        :param alpha: parameter representing the degrees of freedom in the t-distribution, default 1.0
        :raises ValueError: if ``ae`` does not expose a ``.encoder`` attribute
        """
        super(DEC, self).__init__()
        # Validate the autoencoder BEFORE storing it, so a rejected module is
        # never left behind as instance state on a half-constructed object.
        if not hasattr(ae, 'encoder'):
            raise ValueError('Autoencoder must have a .encoder attribute.')
        self.ae = ae  # AutoEncoder stage
        self.embedding_dimension = embedding_dimension
        self.hidden_dimension = hidden_dimension
        self.cluster_number = cluster_number
        self.alpha = alpha
        self.assignment = ClusterAssignment(cluster_number,
                                            self.hidden_dimension, alpha)
Example #2
0
 def setUpClass(cls):
     """Build the shared fixture: a two-cluster assignment with fixed centers."""
     # Centers placed symmetrically about the origin, one per cluster.
     centers = torch.Tensor([[-1, -1], [1, 1]]).float()
     cls.ca = ClusterAssignment(cluster_number=2,
                                embedding_dimension=2,
                                cluster_centers=centers)
Example #3
0
    def __init__(self,
                 cluster_number: int,
                 hidden_dimension: int,
                 encoder: torch.nn.Module,
                 alpha: float = 1.0):
        """
        Container for the moving parts of the DEC algorithm of
        Xie/Girshick/Farhadi: an encoder followed by a soft cluster
        assignment stage.

        :param cluster_number: number of clusters
        :param hidden_dimension: hidden dimension, output of the encoder
        :param encoder: encoder to use
        :param alpha: parameter representing the degrees of freedom in the t-distribution, default 1.0
        """
        super(DEC, self).__init__()
        # Plain configuration first, then the submodules built from it.
        self.cluster_number = cluster_number
        self.hidden_dimension = hidden_dimension
        self.alpha = alpha
        self.encoder = encoder
        self.assignment = ClusterAssignment(cluster_number,
                                            hidden_dimension,
                                            alpha)
Example #4
0
 def setUpClass(cls):
     """Create the shared two-cluster fixture with centers (-1,-1) and (1,1)."""
     cls.ca = ClusterAssignment(
         2,  # cluster_number
         2,  # embedding_dimension
         cluster_centers=torch.Tensor([[-1, -1], [1, 1]]).float(),
     )