# Example 1
    def get_segments_by_id(self, n_iter, seg_ids, include_pcoords = True):
        '''Get segments from the data manager, employing caching where possible.

        :param n_iter: iteration number whose segments are requested
        :param seg_ids: sequence of segment IDs to load
        :param include_pcoords: when True, also load and attach progress
            coordinates to each returned segment
        :return: list of ``Segment`` objects (empty for empty ``seg_ids``)
        '''

        if len(seg_ids) == 0: return []

        seg_index  = self.get_seg_index(n_iter)
        all_wtg_parent_ids = self.get_wtg_parent_array(n_iter)

        segments = []

        # Fetch all requested progress coordinates in a single call rather
        # than one lookup per segment.
        if include_pcoords:
            pcoords = self.get_pcoords(n_iter, seg_ids)

        for (isegid, seg_id) in enumerate(seg_ids):
            row = seg_index[seg_id]
            # Each index row records where this segment's weight-transfer
            # parent IDs live in the flat parent array.
            parents_offset = row['wtg_offset']
            n_parents = row['wtg_n_parents']
            segment = Segment(seg_id = seg_id,
                              n_iter = n_iter,
                              status = row['status'],
                              endpoint_type = row['endpoint_type'],
                              walltime = row['walltime'],
                              cputime = row['cputime'],
                              weight = row['weight'],
                              )
            if include_pcoords:
                segment.pcoord = pcoords[isegid]

            parent_ids = all_wtg_parent_ids[parents_offset:parents_offset+n_parents]
            # Fixed: ``long`` was removed in Python 3 (plain ``int`` is
            # unbounded); this also matches the other copy of this method.
            segment.wtg_parent_ids = {int(parent_id) for parent_id in parent_ids}
            segment.parent_id = int(parent_ids[0])
            segments.append(segment)

        return segments
# Example 2
    def get_segments_by_id(self, n_iter, seg_ids, include_pcoords = True):
        '''Get segments from the data manager, employing caching where possible'''

        if not len(seg_ids):
            return []

        index_table = self.get_seg_index(n_iter)
        wtg_parents = self.get_wtg_parent_array(n_iter)

        # Progress coordinates are fetched in bulk, once, when requested.
        if include_pcoords:
            pcoord_data = self.get_pcoords(n_iter, seg_ids)

        result = []
        for pos, sid in enumerate(seg_ids):
            entry = index_table[sid]
            offset = entry['wtg_offset']
            count = entry['wtg_n_parents']

            seg = Segment(seg_id = sid,
                          n_iter = n_iter,
                          status = entry['status'],
                          endpoint_type = entry['endpoint_type'],
                          walltime = entry['walltime'],
                          cputime = entry['cputime'],
                          weight = entry['weight'],
                          )
            if include_pcoords:
                seg.pcoord = pcoord_data[pos]

            # Slice this segment's weight-transfer parents out of the flat array.
            ids = wtg_parents[offset:offset + count]
            seg.wtg_parent_ids = set(int(pid) for pid in ids)
            seg.parent_id = int(ids[0])
            result.append(seg)

        return result
    def _merge_walkers(self, segments, cumul_weight, bin):
        '''Merge the given ``segments`` in ``bin``, previously sorted by weight, into one conglomerate segment.
        ``cumul_weight`` is the cumulative sum of the weights of the ``segments``; this may be None to calculate here.'''

        # Compute the running (cumulative) weight sum lazily when the caller
        # did not supply it.
        if cumul_weight is None:
            cumul_weight = numpy.add.accumulate(
                [segment.weight for segment in segments])

        # The conglomerate ("glom") walker carries the combined weight of all
        # merged walkers; the final entry of the running sum is that total.
        glom = Segment(
            n_iter=segments[0].
            n_iter,  # assumed correct (and equal among all segments)
            weight=cumul_weight[len(segments) - 1],
            status=Segment.SEG_STATUS_PREPARED,
            pcoord=self.system.new_pcoord_array(),
        )

        # Select the history to use
        # The following takes a random number in the interval 0 <= x < glom.weight, then
        # sees where this value falls among the (sorted) weights of the segments being merged;
        # this ensures that a walker with (e.g.) twice the weight of its brethren has twice the
        # probability of having its history selected for continuation
        iparent = numpy.digitize((random.uniform(0, glom.weight), ),
                                 cumul_weight)[0]
        gparent_seg = segments[iparent]

        # Inherit history from this segment ("gparent" stands for "glom parent", as opposed to historical
        # parent).
        glom.parent_id = gparent_seg.parent_id
        glom.pcoord[0, :] = gparent_seg.pcoord[0, :]

        # Weight comes from all segments being merged, and therefore all their
        # parent segments
        glom.wtg_parent_ids = set()
        for segment in segments:
            glom.wtg_parent_ids |= segment.wtg_parent_ids

        # Remove merged walkers from consideration before treating initial states
        bin.difference_update(segments)

        # The historical parent of gparent is continued; all others are marked as merged
        for segment in segments:
            if segment is gparent_seg:
                # we must ignore initial states here...
                if segment.parent_id >= 0:
                    self._parent_map[
                        segment.
                        parent_id].endpoint_type = Segment.SEG_ENDPOINT_CONTINUES
            else:
                # and "unuse" an initial state here (recall that initial states are in 1:1 correspondence
                # with the segments they initiate), except when a previously-split particle is being
                # merged
                if segment.parent_id >= 0:
                    self._parent_map[
                        segment.
                        parent_id].endpoint_type = Segment.SEG_ENDPOINT_MERGED
                else:
                    # NOTE(review): the comprehension below reuses the name
                    # ``segment``, shadowing the outer loop variable. Set
                    # comprehensions have their own scope, so behavior is
                    # correct (it iterates the walkers remaining in ``bin``),
                    # but the shadowing is easy to misread.
                    if segment.initial_state_id in {
                            segment.initial_state_id
                            for segment in bin
                    }:
                        log.debug(
                            'initial state in use by other walker; not removing'
                        )
                    else:
                        # Return the now-unreferenced initial state to the
                        # available pool so a future iteration may reuse it.
                        initial_state = self.used_initial_states.pop(
                            segment.initial_state_id)
                        log.debug(
                            'freeing initial state {!r} for future use (merged)'
                            .format(initial_state))
                        self.avail_initial_states[
                            initial_state.state_id] = initial_state
                        initial_state.iter_used = None

        if log.isEnabledFor(logging.DEBUG):
            log.debug('merging ({:d}) {!r} into 1:\n    {!r}'.format(
                len(segments), segments, glom))

        bin.add(glom)
# Example 4
    def _merge_walkers(self, segments, cumul_weight, bin):
        '''Merge the given ``segments`` in ``bin``, previously sorted by weight, into one conglomerate segment.
        ``cumul_weight`` is the cumulative sum of the weights of the ``segments``; this may be None to calculate here.

        Side effects: removes ``segments`` from ``bin`` and adds the merged
        segment; updates endpoint types in ``self._parent_map``; may move an
        initial state from ``self.used_initial_states`` back to
        ``self.avail_initial_states``.'''

        # Compute the running weight sum lazily when the caller did not supply it.
        if cumul_weight is None:
            cumul_weight = numpy.add.accumulate([segment.weight for segment in segments])

        # The conglomerate ("glom") walker carries the combined weight of all
        # merged walkers; the last entry of the running sum is that total.
        glom = Segment(n_iter = segments[0].n_iter, # assumed correct (and equal among all segments)
                       weight = cumul_weight[len(segments)-1],
                       status = Segment.SEG_STATUS_PREPARED,
                       pcoord = self.system.new_pcoord_array(),
                       )

        # Select the history to use
        # The following takes a random number in the interval 0 <= x < glom.weight, then
        # sees where this value falls among the (sorted) weights of the segments being merged;
        # this ensures that a walker with (e.g.) twice the weight of its brethren has twice the
        # probability of having its history selected for continuation
        iparent = numpy.digitize((random.uniform(0,glom.weight),),cumul_weight)[0]
        gparent_seg = segments[iparent]

        # Inherit history from this segment ("gparent" stands for "glom parent", as opposed to historical
        # parent).
        glom.parent_id = gparent_seg.parent_id
        glom.pcoord[0,:] = gparent_seg.pcoord[0,:]

        # Weight comes from all segments being merged, and therefore all their
        # parent segments
        glom.wtg_parent_ids = set()
        for segment in segments:
            glom.wtg_parent_ids |= segment.wtg_parent_ids

        # Remove merged walkers from consideration before treating initial states
        bin.difference_update(segments)

        # The historical parent of gparent is continued; all others are marked as merged
        for segment in segments:
            if segment is gparent_seg:
                # we must ignore initial states here...
                if segment.parent_id >= 0:
                    self._parent_map[segment.parent_id].endpoint_type = Segment.SEG_ENDPOINT_CONTINUES
            else:
                # and "unuse" an initial state here (recall that initial states are in 1:1 correspondence
                # with the segments they initiate), except when a previously-split particle is being
                # merged
                if segment.parent_id >= 0:
                    self._parent_map[segment.parent_id].endpoint_type = Segment.SEG_ENDPOINT_MERGED
                else:
                    # Fixed: the comprehension previously reused the name ``segment``,
                    # shadowing the outer loop variable; use a distinct name. (Behavior
                    # is unchanged — set comprehensions have their own scope — but the
                    # shadowing was a readability/lint hazard.)
                    if segment.initial_state_id in {other.initial_state_id for other in bin}:
                        log.debug('initial state in use by other walker; not removing')
                    else:
                        # Return the now-unreferenced initial state to the available pool.
                        initial_state = self.used_initial_states.pop(segment.initial_state_id)
                        log.debug('freeing initial state {!r} for future use (merged)'.format(initial_state))
                        self.avail_initial_states[initial_state.state_id] = initial_state
                        initial_state.iter_used = None

        if log.isEnabledFor(logging.DEBUG):
            log.debug('merging ({:d}) {!r} into 1:\n    {!r}'.format(len(segments), segments, glom))

        bin.add(glom)
                    istate_type=InitialState.ISTATE_TYPE_GENERATED,
                    istate_status=InitialState.ISTATE_STATUS_PREPARED,
                    pcoord=all_pcoord[struct_id])
                initial_states[struct_id] = istate
                istate_filename = 'istates/struct_{:06d}.gro'.format(istate_id)
                uv.selectAtoms('all').positions = coord_ds[struct_id]
                uv.selectAtoms('all').write(istate_filename)
                print(
                    '  wrote {} containing initial state {} from structure {} with weight {}'
                    .format(istate_filename, istate_id, struct_id, weight))
                istate_id += 1

            segment = Segment(seg_id=seg_id,
                              n_iter=1,
                              weight=weight,
                              pcoord=system.new_pcoord_array(),
                              status=Segment.SEG_STATUS_PREPARED)
            segment.parent_id = -(istate.state_id + 1)
            segment.wtg_parent_ids = [segment.parent_id]
            segment.pcoord[0, ] = istate.pcoord[:]
            segments.append(segment)
            seg_id += 1
        sys.stdout.flush()

    data_manager.save_target_states([], n_iter=1)
    data_manager.create_initial_states(len(initial_states), 1)
    data_manager.update_initial_states(initial_states.values(), n_iter=1)
    data_manager.prepare_iteration(1, segments)
    data_manager.flush_backing()
    data_manager.close_backing()