Example No. 1
 def cmd_rebin(self):
     mapper = self.binning.mapper
     assert mapper is not None    
     if self.n_iter == 1:
         sys.stderr.write('rebin is not supported for the first iteration; reinitialize with w_init instead\n')
         sys.exit(1)
     n_target_states = len(self.data_reader.get_target_states(self.n_iter))
     we_driver = westpa.rc.get_we_driver()
     data_manager = self.data_reader.data_manager
     
     segments = data_manager.get_segments(self.n_iter, load_pcoords=True)
     last_iter_segments = data_manager.get_segments(self.n_iter - 1, load_pcoords=False, load_auxdata=False)
             
     # Bin on this iteration's initial points
     # We don't have to worry about recycling because we are binning on
     # initial points rather than final points, so recycling has already
     # occurred for this iteration.
     # We do need initial states, in case we merge a newly-created walker out of existence
     #avail_initial_states = {state.state_id: state
     #                        for state in data_manager.get_unused_initial_states(n_iter = self.n_iter)}
     avail_initial_states = data_manager.get_unused_initial_states(n_iter=self.n_iter)
     used_initial_states = data_manager.get_segment_initial_states(segments)
     we_driver.new_iteration(initial_states=avail_initial_states,
                             bin_mapper=mapper, bin_target_counts=self.binning.bin_target_counts)
     we_driver.used_initial_states = {state.state_id: state for state in used_initial_states}
     we_driver.assign(segments, initializing=True)
     we_driver.rebin_current(parent_segments=last_iter_segments)
     
     weights = numpy.array([segment.weight for segment in we_driver.next_iter_segments])
     assignments = numpy.fromiter(we_driver.next_iter_assignments, dtype=int, count=len(weights))
     write_bin_info(mapper, assignments, weights, n_target_states, detailed=self.args.detail)
     
     if self.args.confirm:
         data_manager.prepare_iteration(self.n_iter, list(we_driver.next_iter_segments))
         
         # manually update endpoint statuses only
         endpoint_types = sorted([(segment.seg_id, segment.endpoint_type) for segment in last_iter_segments])
         last_iter_group = data_manager.get_iter_group(self.n_iter-1)
         last_iter_index = last_iter_group['seg_index'][...]
         last_iter_index['endpoint_type'] = [pair[1] for pair in endpoint_types]
         last_iter_group['seg_index'][...] = last_iter_index
         
         data_manager.save_iter_binning(self.n_iter, self.binning.mapper_hash, self.binning.mapper_pickle,
                                        we_driver.bin_target_counts)
         data_manager.update_initial_states(we_driver.all_initial_states)
         data_manager.flush_backing()
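
The assignments and weights arrays built near the end of cmd_rebin are what write_bin_info summarizes per bin. A minimal sketch of that kind of per-bin summary with plain numpy (the summarize_bins helper below is illustrative, not the actual write_bin_info implementation):

import numpy

def summarize_bins(assignments, weights, n_bins):
    # number of walkers that landed in each bin
    bin_counts = numpy.bincount(assignments, minlength=n_bins)
    # total probability (sum of walker weights) per bin
    bin_weights = numpy.bincount(assignments, weights=weights, minlength=n_bins)
    for ibin in range(n_bins):
        print('bin {:d}: {:d} walkers, weight {:.6g}'.format(
            ibin, int(bin_counts[ibin]), bin_weights[ibin]))

# toy data: 5 walkers distributed over 3 bins
summarize_bins(numpy.array([0, 0, 1, 2, 2]),
               numpy.array([0.3, 0.2, 0.1, 0.25, 0.15]),
               n_bins=3)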
Example No. 2
 def cmd_info(self):
     mapper = self.binning.mapper
     
     # Get target states and their assignments
     target_states = self.data_reader.get_target_states(self.n_iter)
     n_target_states = len(target_states)
     
     iter_group = self.data_reader.get_iter_group(self.n_iter)
     
     # bin initial pcoords for iteration n_iter
     initial_pcoords = iter_group['pcoord'][:, 0, :]
     assignments = mapper.assign(initial_pcoords)
     del initial_pcoords
     
     print('Bin information for iteration {:d}'.format(self.n_iter))
     
     # Get bin counts and weights
     weights = iter_group['seg_index']['weight']
     
     write_bin_info(mapper, assignments, weights, n_target_states, detailed=self.args.detail)
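
mapper.assign maps each segment's initial progress coordinate to an integer bin index. A minimal sketch with a rectilinear mapper, assuming the WESTPA 2.x import path (older releases place the same class under the westpa.binning package):

import numpy
from westpa.core.binning import RectilinearBinMapper

# one-dimensional progress coordinate, 5 bins between 6 boundaries
mapper = RectilinearBinMapper([[0.0, 1.0, 2.0, 3.0, 4.0, 5.0]])

# shape (n_segments, pcoord_ndim), like iter_group['pcoord'][:, 0, :]
initial_pcoords = numpy.array([[0.5], [1.2], [3.7], [4.9]])
assignments = mapper.assign(initial_pcoords)
print(assignments)    # expected: [0 1 3 4]
print(mapper.nbins)   # expected: 5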
Example No. 3
    def cmd_info(self):
        mapper = self.binning.mapper

        # Get target states and their assignments
        target_states = self.data_reader.get_target_states(self.n_iter)
        n_target_states = len(target_states)

        iter_group = self.data_reader.get_iter_group(self.n_iter)

        # bin initial pcoords for iteration n_iter
        initial_pcoords = iter_group['pcoord'][:, 0, :]
        assignments = mapper.assign(initial_pcoords)
        del initial_pcoords

        print('Bin information for iteration {:d}'.format(self.n_iter))

        # Get bin counts and weights
        weights = iter_group['seg_index']['weight']

        write_bin_info(mapper,
                       assignments,
                       weights,
                       n_target_states,
                       detailed=self.args.detail)
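
The [:, 0, :] slice in both cmd_info listings works because each iteration's pcoord dataset is laid out as (n_segments, n_timepoints, pcoord_ndim), so timepoint 0 gives every segment's initial progress coordinate. A toy illustration with a plain numpy array standing in for the HDF5 dataset:

import numpy

# stand-in for iter_group['pcoord']: 4 segments, 3 timepoints, 2-D pcoord
pcoord = numpy.arange(4 * 3 * 2, dtype=float).reshape(4, 3, 2)

initial_pcoords = pcoord[:, 0, :]   # first timepoint of every segment
print(initial_pcoords.shape)        # (4, 2)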
Example No. 4
    def cmd_rebin(self):
        mapper = self.binning.mapper
        assert mapper is not None
        if self.n_iter == 1:
            sys.stderr.write(
                'rebin is not supported for the first iteration; reinitialize with w_init instead\n'
            )
            sys.exit(1)
        n_target_states = len(self.data_reader.get_target_states(self.n_iter))
        we_driver = westpa.rc.get_we_driver()
        data_manager = self.data_reader.data_manager

        segments = data_manager.get_segments(self.n_iter, load_pcoords=True)
        last_iter_segments = data_manager.get_segments(self.n_iter - 1,
                                                       load_pcoords=False,
                                                       load_auxdata=False)

        # Bin on this iteration's initial points
        # We don't have to worry about recycling because we are binning on
        # initial points rather than final points, so recycling has already
        # occurred for this iteration.
        # We do need initial states, in case we merge a newly-created walker out of existence
        #avail_initial_states = {state.state_id: state
        #                        for state in data_manager.get_unused_initial_states(n_iter = self.n_iter)}
        avail_initial_states = data_manager.get_unused_initial_states(
            n_iter=self.n_iter)
        used_initial_states = data_manager.get_segment_initial_states(segments)
        we_driver.new_iteration(
            initial_states=avail_initial_states,
            bin_mapper=mapper,
            bin_target_counts=self.binning.bin_target_counts)
        we_driver.used_initial_states = {
            state.state_id: state
            for state in used_initial_states
        }
        we_driver.assign(segments, initializing=True)
        we_driver.rebin_current(parent_segments=last_iter_segments)

        weights = numpy.array(
            [segment.weight for segment in we_driver.next_iter_segments])
        assignments = numpy.fromiter(we_driver.next_iter_assignments,
                                     dtype=int,
                                     count=len(weights))
        write_bin_info(mapper,
                       assignments,
                       weights,
                       n_target_states,
                       detailed=self.args.detail)

        if self.args.confirm:
            data_manager.prepare_iteration(self.n_iter,
                                           list(we_driver.next_iter_segments))

            # manually update endpoint statuses only
            endpoint_types = sorted([(segment.seg_id, segment.endpoint_type)
                                     for segment in last_iter_segments])
            last_iter_group = data_manager.get_iter_group(self.n_iter - 1)
            last_iter_index = last_iter_group['seg_index'][...]
            last_iter_index['endpoint_type'] = [
                pair[1] for pair in endpoint_types
            ]
            last_iter_group['seg_index'][...] = last_iter_index

            data_manager.save_iter_binning(self.n_iter,
                                           self.binning.mapper_hash,
                                           self.binning.mapper_pickle,
                                           we_driver.bin_target_counts)
            data_manager.update_initial_states(we_driver.all_initial_states)
            data_manager.flush_backing()
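
The endpoint-type update in cmd_rebin reads the whole compound seg_index dataset into memory, overwrites one field, and writes it back; sorting the (seg_id, endpoint_type) pairs by seg_id lines the values up with the seg_id-ordered rows. A minimal sketch of the same pattern, with a numpy structured array standing in for the HDF5 dataset:

import numpy

# stand-in for last_iter_group['seg_index']: one row per segment, ordered by seg_id
seg_index = numpy.zeros(3, dtype=[('weight', float), ('endpoint_type', int)])

# (seg_id, endpoint_type) pairs collected from the segments, in arbitrary order
endpoint_types = sorted([(2, 1), (0, 3), (1, 1)])

# sorting by seg_id aligns the values with the row order
seg_index['endpoint_type'] = [pair[1] for pair in endpoint_types]
print(seg_index['endpoint_type'])   # expected: [3 1 1]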