Example 1
    def test_big_k(self):
        x = np.random.random((300, 3))
        reader = DataInMemory(x)
        k = 151
        c = api.cluster_uniform_time(k=k)

        c.estimate(reader)
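
These snippets come from unit tests and omit their imports and test-class context. A minimal standalone sketch of the same call, assuming the names resolve to PyEMMA's coordinates package (pyemma.coordinates.api and its DataInMemory reader; adjust the import paths if your version differs):

import numpy as np

# Assumed imports -- not shown in the snippets above; these paths follow
# PyEMMA's coordinates package layout.
from pyemma.coordinates import api
from pyemma.coordinates.data.data_in_memory import DataInMemory

x = np.random.random((300, 3))       # 300 frames with 3 features each
reader = DataInMemory(x)             # wrap the array as an in-memory data source

c = api.cluster_uniform_time(k=151)  # request 151 uniformly spaced centers in time
c.estimate(reader)                   # run the clustering on the reader
print(c.dtrajs[0][:10])              # discrete trajectory: one center index per frame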
Example 2
    def test_discretizer(self):
        reader_gen = DataInMemory(data=self.generated_data)
        # check if exception safe
        api.discretizer(reader_gen)._chain[-1].get_output()
        api.discretizer(reader_gen, transform=api.tica())._chain[-1].get_output()
        api.discretizer(reader_gen, cluster=api.cluster_uniform_time())._chain[-1].get_output()
        api.discretizer(reader_gen, transform=api.pca(), cluster=api.cluster_regspace(dmin=10))._chain[-1].get_output()
Example 3
    def test_big_k(self):
        x = np.random.random((300, 3))
        reader = DataInMemory(x)
        k = 151
        c = api.cluster_uniform_time(k=k)

        c.data_producer = reader
        c.parametrize()
Example 4
    def test_2d_skip(self):
        x = np.random.random((300, 3))
        reader = DataInMemory(x)

        k = 2
        c = api.cluster_uniform_time(k=k, skip=100)

        c.estimate(reader)
Example 5
    def test_no_transform(self):
        reader_xtc = api.source(self.traj_files, top=self.pdb_file)
        api.pipeline([reader_xtc,
                      api.cluster_kmeans(k=10)])._chain[-1].get_output()
        api.pipeline([reader_xtc,
                      api.cluster_regspace(dmin=10)])._chain[-1].get_output()
        api.pipeline([reader_xtc,
                      api.cluster_uniform_time()])._chain[-1].get_output()
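
The trajectory-based tests above rely on test fixtures (self.traj_files, self.pdb_file). A rough standalone sketch of the same pipeline, with hypothetical file names standing in for those fixtures:

from pyemma.coordinates import api

# Hypothetical input files -- placeholders for the self.traj_files / self.pdb_file fixtures.
traj_files = ['traj01.xtc', 'traj02.xtc']
pdb_file = 'structure.pdb'

reader_xtc = api.source(traj_files, top=pdb_file)              # trajectory reader
pipe = api.pipeline([reader_xtc, api.cluster_uniform_time(k=20)])
dtrajs = pipe._chain[-1].get_output()                          # output of the last pipeline stage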
Example 6
    def test_2d_skip(self):
        x = np.random.random((300, 3))
        reader = DataInMemory(x)

        k = 2
        c = api.cluster_uniform_time(k=k, skip=100)

        c.data_producer = reader
        c.parametrize()
Example 7
    def test_1d(self):
        x = np.random.random(1000)
        reader = DataInMemory(x)

        k = 2
        c = api.cluster_uniform_time(k=k)

        c.data_producer = reader
        c.parametrize()
Example 8
    def test_2d(self):
        x = np.random.random((300, 3))
        reader = DataInMemory(x)

        k = 2
        c = api.cluster_uniform_time(k=k)

        c.data_producer = reader
        c.parametrize()
Example 9
    def test_big_k(self):
        # TODO: fix this (some error handling should be done in _param_init)
        x = np.random.random((300, 3))
        reader = DataInMemory(x)

        k = 298
        c = api.cluster_uniform_time(k=k)

        c.data_producer = reader
        c.parametrize()
Example 10
    def test_is_parametrized(self):
        # construct pipeline with all possible transformers
        p = api.pipeline(
            [
                api.source(self.traj_files, top=self.pdb_file),
                api.tica(),
                api.pca(),
                api.cluster_kmeans(k=50),
                api.cluster_regspace(dmin=50),
                api.cluster_uniform_time(k=20)
            ], run=False
        )
        self.assertFalse(p._is_parametrized(),
                         "If run=False, the pipeline should not be parametrized.")
        p.parametrize()
        self.assertTrue(p._is_parametrized(),
                        "After parametrize() has been called, the pipeline should be parametrized.")
Example 11
    def test_no_transform(self):
        reader_xtc = api.source(self.traj_files, top=self.pdb_file)
        api.pipeline([reader_xtc, api.cluster_kmeans(k=10)])._chain[-1].get_output()
        api.pipeline([reader_xtc, api.cluster_regspace(dmin=10)])._chain[-1].get_output()
        api.pipeline([reader_xtc, api.cluster_uniform_time()])._chain[-1].get_output()
Example 12
    def test_1d(self):
        x = np.random.random(1000)
        reader = DataInMemory(x)

        k = 2
        c = api.cluster_uniform_time(reader, k=k)