def test_noprejoin_run_and_analyze(self):
        moves = [(move[0], move[4], move[2], move[3], move[5]) 
                 for move in common.tps_shooting_moves]
        self.nojoin_pseudosim.run(moves)
        self.storage.close()
        # open the file for analysis, check that its content is reasonable
        analysis = paths.AnalysisStorage(data_filename(self.fname))
        assert_equal(len(analysis.steps), 5) # initial + 4 steps
        scheme = analysis.schemes[0]
        assert_equal(list(scheme.movers.keys()), ['shooting'])
        assert_equal(len(scheme.movers['shooting']), 1)
        mover = scheme.movers['shooting'][0]

        # use several OPS tools to analyze this file
        ## scheme.move_summary
        devnull = open(os.devnull, 'w')
        scheme.move_summary(analysis.steps, output=devnull) 
        mover_keys = [k for k in scheme._mover_acceptance.keys()
                      if k[0] == mover]
        assert_equal(len(mover_keys), 1)
        assert_equal(scheme._mover_acceptance[mover_keys[0]], [3,4])

        ## move history tree
        import openpathsampling.visualize as ops_vis
        history = ops_vis.PathTree(
            analysis.steps,
            ops_vis.ReplicaEvolution(replica=0)
        )
        assert_equal(len(history.generator.decorrelated_trajectories), 2)

        ## path length histogram
        path_lengths = [len(step.active[0].trajectory) 
                        for step in analysis.steps]
        assert_equal(path_lengths, [11, 9, 7, 7, 7])
        analysis.close()
Example #2
    def test_storage(self):
        import os
        fname = data_filename("interface_set_storage_test.nc")
        if os.path.isfile(fname):
            os.remove(fname)
        template_traj = make_1d_traj([0.0])
        template = template_traj[0]
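        # save a template trajectory and the interface set, then flush to disk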
        storage_w = paths.Storage(fname, "w")
        storage_w.save(template_traj)
        storage_w.save(self.increasing_set)
        storage_w.sync_all()

        storage_r = paths.AnalysisStorage(fname)
        reloaded = storage_r.interfacesets[0]

        assert_items_equal(reloaded.lambdas, self.increasing_set.lambdas)
        assert_equal(reloaded.period_min, self.increasing_set.period_min)
        assert_equal(reloaded.period_max, self.increasing_set.period_max)
        for (truth, beauty) in zip(self.increasing_set, reloaded):
            assert_equal(truth, beauty)

        for (v, l) in zip(reloaded.volumes, reloaded.lambdas):
            assert_equal(reloaded.get_lambda(v), l)

        if os.path.isfile(fname):
            os.remove(fname)
Example #3
    def test_run_and_analyze(self):
        moves = [tuple(move[0:4]) for move in common.tps_shooting_moves]

        trajs = list(zip(*moves))[1]
        init_traj = self.pseudosim.initial_conditions[0].trajectory
        # print hex(id(init_traj)), hex(id(trajs[0]))
        shared = init_traj.shared_subtrajectory(trajs[0])
        # print len(shared), [s for s in shared]
        # print len(init_traj.shared_subtrajectory(trajs[0]))
        # print [len(trajs[i].shared_subtrajectory(trajs[i+1]))
        # for i in range(len(trajs)-1)]

        self.pseudosim.run(moves)
        self.storage.close()

        # open the file for analysis, check that its content is reasonable
        analysis = paths.AnalysisStorage(data_filename(self.fname))
        assert_equal(len(analysis.steps), 5)  # initial + 4 steps
        scheme = analysis.schemes[0]
        assert_equal(list(scheme.movers.keys()), ['shooting'])
        assert_equal(len(scheme.movers['shooting']), 1)
        mover = scheme.movers['shooting'][0]

        # use several OPS tools to analyze this file
        ## scheme.move_summary
        devnull = open(os.devnull, 'w')
        scheme.move_summary(analysis.steps, output=devnull)
        mover_keys = [
            k for k in scheme._mover_acceptance._trials.keys() if k[0] == mover
        ]
        assert_equal(len(mover_keys), 1)
        assert_equal(scheme._mover_acceptance._trials[mover_keys[0]], 4)
        assert_equal(scheme._mover_acceptance._accepted[mover_keys[0]], 3)
        # assert_equal(scheme._mover_acceptance[mover_keys[0]], [3,4])

        ## move history tree
        import openpathsampling.visualize as ops_vis
        history = ops_vis.PathTree(analysis.steps,
                                   ops_vis.ReplicaEvolution(replica=0))
        assert_equal(len(history.generator.decorrelated_trajectories), 2)

        ## path length histogram
        path_lengths = [
            len(step.active[0].trajectory) for step in analysis.steps
        ]
        assert_equal(path_lengths, [11, 9, 7, 7, 7])
        analysis.close()
Example #4
    def test_run_with_negative_shooting_point(self):
        shoot = oink.ShootingStub(self.network.sampling_ensembles[0],
                                  pre_joined=False)
        converter = StupidOneWayTPSConverter(
            storage=paths.Storage(self.data_filename("neg_sp.nc"), "w"),
            initial_file="file0.data",
            mover=shoot,
            network=self.network,
            options=oink.TPSConverterOptions(includes_shooting_point=False,
                                             trim=False))
        converter.run(self.data_filename("summary_neg_sp.txt"))
        converter.storage.close()

        # reopen the converted file and run the shared sanity checks
        analysis = paths.AnalysisStorage(self.data_filename("neg_sp.nc"))
        self._standard_analysis_checks(analysis)
        analysis.close()
        if os.path.isfile(self.data_filename("neg_sp.nc")):
            os.remove(self.data_filename("neg_sp.nc"))
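Example #5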
    def test_sim_with_storage(self):
        tmpfile = data_filename("direct_sim_test.nc")
        if os.path.isfile(tmpfile):
            os.remove(tmpfile)

        storage = paths.Storage(tmpfile, "w", self.snap0)
        sim = DirectSimulation(storage=storage,
                               engine=self.engine,
                               states=[self.center, self.outside],
                               initial_snapshot=self.snap0)

        sim.run(200)
        storage.close()
        read_store = paths.AnalysisStorage(tmpfile)
        assert_equal(len(read_store.trajectories), 1)
        traj = read_store.trajectories[0]
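        # 200 dynamics steps plus the initial snapshot give a 201-frame trajectory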
        assert_equal(len(traj), 201)
        read_store.close()
        os.remove(tmpfile)
Example #6
    def test_storage(self):
        import os
        fname = data_filename("mistis_storage_test.nc")
        if os.path.isfile(fname):
            os.remove(fname)
        template = self.traj['AA'][0]
        storage_w = paths.Storage(fname, "w")
        storage_w.snapshots.save(template)
        storage_w.save(self.mistis)
        storage_w.sync_all()

        # reload the network and check that its settings survive the round trip
        storage_r = paths.AnalysisStorage(fname)
        reloaded = storage_r.networks[0]
        assert_equal(reloaded.strict_sampling, False)
        assert_equal(reloaded.sampling_transitions[0].ensembles[0],
                     self.mistis.sampling_transitions[0].ensembles[0])

        if os.path.isfile(fname):
            os.remove(fname)
Example #7
    def test_run_with_neg_sp_retrim(self):
        storage_file = self.data_filename("retrim_negsp.nc")
        storage = paths.Storage(storage_file, 'w')
        converter = StupidOneWayTPSConverter(
            storage=storage,
            initial_file="file0_extra.data",
            mover=self.shoot,
            network=self.network,
            options=oink.TPSConverterOptions(
                trim=True,
                retrim_shooting=True,
                auto_reverse=False,
                full_trajectory=True
            )
        )
        converter.run(self.data_filename("summary_full_retrim_negsp.txt"))
        storage.close()
        analysis = paths.AnalysisStorage(storage_file)
        step4 = analysis.steps[4]
        self._standard_analysis_checks(analysis)
        analysis.close()
        if os.path.isfile(storage_file):
            os.remove(storage_file)
Example #8
    def test_store_snapshots(self):
        fname = data_filename("cv_storage_test.nc")
        if os.path.isfile(fname):
            os.remove(fname)

        traj = paths.Trajectory(list(self.traj))
        template = traj[0]

        for use_cache in (False, True):
            # print '=========================================================='
            # print 'UUID', use_uuid, 'CACHE', use_cache
            # print '=========================================================='

            storage_w = paths.Storage(fname, "w")
            storage_w.snapshots.save(template)

            # let's mess up the order in which we save and include
            # reversed ones as well

            assert (len(storage_w.snapshots) == 2)
            assert (len(storage_w.trajectories) == 0)
            assert (len(storage_w.stores['snapshot0']) == 2)
            storage_w.snapshots.save(traj[8].reversed)
            assert (len(storage_w.snapshots) == 4)
            assert (len(storage_w.trajectories) == 0)
            assert (len(storage_w.stores['snapshot0']) == 4)
            # this will store traj[6:] under pos IDX #0
            storage_w.trajectories.save(traj[6:])
            assert (len(storage_w.snapshots) == 10)
            assert (len(storage_w.trajectories) == 1)
            assert (len(storage_w.stores['snapshot0']) == 10)

            traj_rev = traj.reversed

            # this will store traj_rev under pos IDX #1
            storage_w.trajectories.mention(traj_rev)
            assert (len(storage_w.snapshots) == 20)
            assert (len(storage_w.trajectories) == 2)
            assert (len(storage_w.stores['snapshot0']) == 10)

            # this will not do anything since traj_rev was already mentioned
            storage_w.trajectories.save(traj_rev)
            assert (len(storage_w.snapshots) == 20)
            assert (len(storage_w.trajectories) == 2)
            assert (len(storage_w.stores['snapshot0']) == 10)

            # this will store traj under pos IDX #2
            storage_w.trajectories.save(traj)
            assert (len(storage_w.snapshots) == 20)
            assert (len(storage_w.trajectories) == 3)
            assert (len(storage_w.stores['snapshot0']) == 20)

            # this will not store since traj is already stored
            storage_w.trajectories.save(traj)
            assert (len(storage_w.snapshots) == 20)
            assert (len(storage_w.trajectories) == 3)
            assert (len(storage_w.stores['snapshot0']) == 20)

            # we saved in this order [0f, 8r, 6f, 7f, 9f, 5r, 4r, 3r, 2r, 1r ]
            # these are indices      [ 0, 17, 12, 14, 18,  3,  5,  7,  9, 11 ]

            storage_w.close()

            if use_cache:
                storage_r = paths.AnalysisStorage(fname)
            else:
                storage_r = paths.Storage(fname, 'r')
                storage_r.snapshots.set_caching(False)
                storage_r.stores['snapshot0'].set_caching(False)

            # check if the loaded trajectory is reproduced
            for s1, s2 in zip(traj, storage_r.trajectories[2]):
                compare_snapshot(s1, s2, True)

            # this is the expected order in which it is saved
            eff_traj = [
                traj[0],
                traj[8].reversed,
                traj[6],
                traj[7],
                traj[9],
                traj[5].reversed,
                traj[4].reversed,
                traj[3].reversed,
                traj[2].reversed,
                traj[1].reversed,
            ]

            # load from the hidden store and see if it looks as expected:
            # snapshots alternate forward/reversed, so index 2*idx is the
            # forward snapshot and 2*idx + 1 is its reversed partner

            hidden_snapshots = storage_r.stores['snapshot0'][:]
            for idx in range(10):
                s1 = eff_traj[idx]
                s1r = s1.reversed
                s2 = hidden_snapshots[2 * idx]
                s2r = hidden_snapshots[2 * idx + 1]
                compare_snapshot(s1, s2, True)
                compare_snapshot(s1r, s2r, True)

            storage_r.close()
Example #9
    def test_storage_cv_function(self):
        import os

        # test with the partial (allow_incomplete) disk cache both enabled
        # and disabled; all of these must work
        for allow_incomplete in (True, False):

            # print '=========================================================='
            # print 'PARTIAL', allow_incomplete
            # print '=========================================================='

            fname = data_filename("cv_storage_test.nc")
            if os.path.isfile(fname):
                os.remove(fname)

            traj = paths.Trajectory(list(self.traj_simple))
            template = traj[0]

            storage_w = paths.Storage(fname, "w")
            storage_w.snapshots.save(template)

            cv1 = paths.CoordinateFunctionCV(
                'f1', lambda x: x.coordinates[0]).with_diskcache(
                    allow_incomplete=allow_incomplete)

            storage_w.save(cv1)

            # let's mess up the order in which we save and
            # include reversed ones as well
            assert (len(storage_w.snapshots) == 2)
            storage_w.trajectories.save(traj[3:])
            assert (len(storage_w.snapshots) == 16)
            storage_w.snapshots.save(traj[1].reversed)
            assert (len(storage_w.snapshots) == 18)
            storage_w.trajectories.save(traj.reversed)
            assert (len(storage_w.snapshots) == 20)

            # this should be ignored since everything is saved already
            storage_w.trajectories.save(traj)
            storage_w.close()

            storage_r = paths.AnalysisStorage(fname)
            rcv1 = storage_r.cvs['f1']

            assert (rcv1._store_dict)

            cv_cache = rcv1._store_dict.value_store

            assert (cv_cache.allow_incomplete == allow_incomplete)

            for idx, snap in enumerate(storage_r.trajectories[1]):
                # print idx, snap
                # if hasattr(snap, '_idx'):
                #     print 'Proxy IDX', snap._idx

                # print 'ITEMS', storage_r.snapshots.index.items()
                # print snap, type(snap), snap.__dict__

                # print snap.__uuid__
                # print snap.reversed.__uuid__
                # print snap.create_reversed().__uuid__
                #
                # print 'POS', cv_cache.object_pos(snap),
                # print 'POS', storage_r.snapshots.pos(snap),
                # print 'POS', storage_r.snapshots.index[snap]
                #
                # print 'POS', cv_cache.object_pos(snap.reversed),
                # print 'POS', storage_r.snapshots.pos(snap.reversed),
                # print 'POS', storage_r.snapshots.index[snap.reversed]

                # if len(cv_cache.cache._chunkdict) > 0:
                #
                #     if allow_incomplete:
                #         print cv_cache.index
                #         print cv_cache.vars['value'][:]
                #
                #     for n, v in enumerate(cv_cache.cache._chunkdict[0]):
                #         print n, v
                #
                # print cv1(snap)
                # print cv1(snap.reversed)
                # print cv_cache[snap]
                #
                # print cv_cache[snap.reversed]

                if not allow_incomplete or cv_cache[snap] is not None:
                    assert_close_unit(cv_cache[snap], cv1(snap))
                    assert_close_unit(cv_cache[snap.reversed],
                                      cv1(snap.reversed))

            storage_r.close()

            if os.path.isfile(fname):
                os.remove(fname)
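Example #10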
    def test_run(self):
        self.converter.run(self.data_filename("summary.txt"))
        self.converter.storage.close()
        analysis = paths.AnalysisStorage(self.data_filename("output.nc"))
        self._standard_analysis_checks(analysis)
        analysis.close()