Example #1
def _def_gconst(pattern, _):
    """General constraints for the energy interval abstraction pattern"""
    verify(pattern.hypothesis.lateend < np.inf)
    #The margin to group consecutive fragments is 1 mm
    #Limits for the detection.
    beg = int(pattern.hypothesis.earlystart)
    end = int(pattern.hypothesis.lateend)
    #Now we get the energy accumulated in all leads.
    energy = None
    for lead in sig_buf.get_available_leads():
        lenerg, fbeg, fend = sig_buf.get_energy_fragment(
            beg, end, TWINDOW, lead)
        energy = lenerg if energy is None else energy + lenerg
    if energy is None:
        return 0.0
    #We get the already published fragments affecting our temporal support.
    conflictive = []
    published = SortedList(obs_buf.get_observations(o.Deflection))
    idx = published.bisect_left(pattern.hypothesis)
    if idx > 0 and published[idx - 1].lateend > beg:
        idx -= 1
    while (idx < len(published) and Iv(beg, end).overlap(
            Iv(published[idx].earlystart, published[idx].lateend))):
        conflictive.append(
            Iv(published[idx].earlystart - beg + fbeg,
               published[idx].lateend - beg + fbeg))
        idx += 1
    #We obtain the relative limits of the energy interval wrt the fragment
    iv_start = Iv(fbeg, fbeg + int(pattern.hypothesis.latestart - beg))
    iv_end = Iv(fend - int(end - pattern.hypothesis.earlyend), fend)
    #We look for the highest-level interval satisfying the limits.
    interval = None
    lev = 0
    while interval is None and lev <= 20:
        areas = [
            iv for iv in get_energy_intervals(energy, lev, group=TMARGIN)
            if iv.start in iv_start and iv.end in iv_end and all(
                not iv.overlapm(ein) for ein in conflictive)
        ]
        #We sort the areas by energy, with the highest energy first.
        areas.sort(
            key=lambda interv: np.sum(energy[interv.start:interv.end + 1]),
            reverse=True)
        #Now we take the element indicated by the index.
        if len(areas) > int_idx:
            interval = areas[int_idx]
        else:
            lev += 1
    verify(interval is not None)
    pattern.hypothesis.start.set(interval.start + beg - fbeg,
                                 interval.start + beg - fbeg)
    pattern.hypothesis.end.set(interval.end + beg - fbeg,
                               interval.end + beg - fbeg)
    for lead in sig_buf.get_available_leads():
        pattern.hypothesis.level[lead] = lev
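
The level sweep in the loop above can be isolated into a small standalone sketch: relax the energy level step by step until some interval satisfies the positional constraints, then take the int_idx-th most energetic candidate. The code below is only an illustration of that strategy; _intervals_at_level is an assumed, simplified stand-in for the project's get_energy_intervals (the real thresholding and grouping are not reproduced), and find_interval/is_valid are hypothetical names.

import numpy as np

def _intervals_at_level(energy, lev, nlevels=20):
    """Yields (start, end) index pairs where the energy exceeds a
    level-dependent threshold (assumed stand-in for get_energy_intervals)."""
    thres = energy.max() * (1.0 - lev / float(nlevels))
    above = np.flatnonzero(energy >= thres)
    if len(above) == 0:
        return
    start = prev = above[0]
    for i in above[1:]:
        if i > prev + 1:
            yield (start, prev)
            start = i
        prev = i
    yield (start, prev)

def find_interval(energy, is_valid, int_idx=0, max_lev=20):
    """Relaxes the level until enough valid intervals appear, returning the
    int_idx-th most energetic one and the level that produced it."""
    for lev in range(max_lev + 1):
        areas = [iv for iv in _intervals_at_level(energy, lev, max_lev)
                 if is_valid(iv)]
        areas.sort(key=lambda iv: np.sum(energy[iv[0]:iv[1] + 1]),
                   reverse=True)
        if len(areas) > int_idx:
            return areas[int_idx], lev
    return None, None

#Example: accept only intervals starting in the first half of the fragment.
energ = np.abs(np.sin(np.linspace(0.0, 6 * np.pi, 256)))
print(find_interval(energ, lambda iv: iv[0] < 128))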
Example #2
    def get_observations(self,
                         clazz=Observable,
                         start=0,
                         end=np.inf,
                         filt=lambda obs: True,
                         reverse=False):
        """
        Yields the observations matching the search criteria, ordered by the
        earliest start time of the observation.

        Parameters
        ----------
        clazz:
            Only instances of the *clazz* class (or any subclass) are returned.
        start:
            Only observations whose earlystart attribute is greater than or
            equal to this parameter are returned.
        end:
            Only observations whose lateend attribute is less than or equal to
            this parameter are returned.
        filt:
            General filter provided as a boolean function that accepts an
            observation as a parameter. Only the observations satisfying this
            filter are returned.
        reverse:
            Boolean parameter. If True, observations are returned in reversed
            order, from last to first.
        """
        #We perform a combination of the observations from the global buffer
        #and from the interpretation.
        geng = obsbuf.get_observations(clazz, start, end, filt, reverse)
        genl = self._get_proper_obs(clazz, start, end, filt, reverse)
        dummy = EventObservable()
        dummy.start.set(np.inf, np.inf)
        nxtg = next(geng, dummy)
        nxtl = next(genl, dummy)
        while True:
            nxt = min(nxtg, nxtl)
            if nxt is dummy:
                return
            elif nxt is nxtg:
                nxtg = next(geng, dummy)
            else:
                nxtl = next(genl, dummy)
            yield nxt
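
The body of get_observations is an instance of a generic sentinel-based merge of two already-sorted iterators. The sketch below reproduces the same pattern in isolation with plain numbers, so it can be run without the observation buffers; merge_sorted and its arguments are illustrative names, not part of the project API.

import math

def merge_sorted(gen_a, gen_b):
    """Merges two already-sorted iterators, yielding items in global order."""
    sentinel = math.inf
    nxt_a = next(gen_a, sentinel)
    nxt_b = next(gen_b, sentinel)
    while True:
        nxt = min(nxt_a, nxt_b)
        if nxt is sentinel:
            #Both iterators are exhausted.
            return
        elif nxt is nxt_a:
            nxt_a = next(gen_a, sentinel)
        else:
            nxt_b = next(gen_b, sentinel)
        yield nxt

print(list(merge_sorted(iter([1, 4, 6]), iter([2, 3, 7]))))  #[1, 2, 3, 4, 6, 7]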
Example #3
#Input system configuration
IN.reset()
IN.set_record(args.r, args.a)
IN.set_offset(args.f)
IN.set_duration(args.l)
IN.set_tfactor(TFACTOR)
IN.start()
print('Preloading buffer...')
time.sleep(sp2ms(MIN_DELAY) / (1000.0 * TFACTOR))
#Load the initial evidence
IN.get_more_evidence()

#Trivial interpretation
interp = Interpretation()
#The focus is initially set on the first observation
interp.focus.push(next(obs_buffer.get_observations()), None)
##########################
### Construe searching ###
##########################
print('Starting interpretation')
t0 = time.time()
cntr = searching.Construe(interp, KFACTOR)
ltime = (cntr.last_time, t0)
#Main loop
while cntr.best is None:
    IN.get_more_evidence()
    acq_time = IN.get_acquisition_point()
    #HINT debug code
    fstr = 'Int: {0:05d} '
    for i in range(int(sp2ms(acq_time - cntr.last_time) / 1000.0)):
        fstr += '-'
Example #4
ANNOTATOR = 'gqrs'
#Record used
REC = '/home/local/tomas.teijeiro/cinc_challenge15/training/v111l'

IN.set_record(REC, ANNOTATOR, True)
IN.set_offset(INIT)
IN.set_duration(LENGTH)
IN.set_tfactor(1000.0)
IN.start()
time.sleep(1)
IN.get_more_evidence()

#Trivial interpretation
interp = Interpretation()
#The focus is initially set on the first observation
interp.focus.append(next(obs_buffer.get_observations()))
########################
### PEKBFS searching ###
########################
print('Starting interpretation')
t0 = time.time()
pekbfs = searching.PEKBFS(interp, KFACTOR)
ltime = (pekbfs.last_time, t0)
while pekbfs.best is None:
    IN.get_more_evidence()
    acq_time = IN.get_acquisition_point()
    #HINT debug code
    fstr = 'Int: {0:05d} '
    for i in range(int(sp2ms(acq_time - pekbfs.last_time) / 1000.0)):
        fstr += '-'
    fstr += ' Acq: {1}'
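
Examples #3 and #4 share the same driver structure: configure the input system, preload some evidence, seed an interpretation with the first observation, and then alternate evidence acquisition with search steps until a best interpretation exists. The skeleton below only restates that control flow for reference, under stated assumptions: searcher.step() and the print format are hypothetical placeholders, not the actual Construe/PEKBFS API, and the objects are passed in as parameters.

import time

def run_driver(IN, searcher, sp2ms):
    """Simplified acquisition/search loop (illustrative skeleton only)."""
    t0 = time.time()
    while searcher.best is None:
        #Pull the newly available evidence into the observations buffer.
        IN.get_more_evidence()
        acq_time = IN.get_acquisition_point()
        #One dash per second of signal between the search frontier and the
        #acquisition point, mimicking the debug string built above.
        progress = '-' * int(sp2ms(acq_time - searcher.last_time) / 1000.0)
        print('Int: {0:05d} {1} Acq: {2}'.format(int(searcher.last_time),
                                                 progress, acq_time))
        searcher.step()  #Hypothetical: advance the search one iteration.
    print('Finished in {0:.2f} s'.format(time.time() - t0))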