Example #1
    def predict(self, observations):
        # set the initial probabilities and backpointers
        probabs = defdict(lambda: defdict(lambda: 0.0))
        probabs[-1][self.START] = 1.0
        pointers = defdict(lambda: {})

        # update the probabilities for each observation
        i = -1  # stays -1 if observations is empty, so the backtracking below is a no-op
        for i, observation in enumerate(observations):
            for stateinp in self._statelevel:

                # calculate probabilities of making a transition
                # from a previous state input to this one and seeing
                # the current observation
                route_probabs = {}
                for prev_stateinp in self._statelevel:
                    route_probabs[prev_stateinp] = (
                        probabs[i - 1][prev_stateinp] *
                        self._transitions[prev_stateinp][stateinp] *
                        self._emissions[stateinp][observation])
                # select previous state input with the highest probability
                max_stateinp = max(route_probabs, key=route_probabs.get)
                probabs[i][stateinp] = route_probabs[max_stateinp]
                pointers[i][stateinp] = max_stateinp

        # get the best final state
        curr_stateinp = max(probabs[i], key=probabs[i].get)

        # follow the pointers to get the best state sequence
        statelevel = []
        for i in xrange(i, -1, -1):
            statelevel.append(curr_stateinp)
            curr_stateinp = pointers[i][curr_stateinp]
        statelevel.reverse()
        return statelevel
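The nested defdict(lambda: defdict(lambda: 0.0)) tables are what let the loops above multiply probabilities for arbitrary (state, state) and (state, observation) pairs without KeyError checks: anything never set simply comes back as 0.0. A minimal sketch, assuming defdict aliases collections.defaultdict and using made-up states:

from collections import defaultdict as defdict

# toy tables in the same nested shape the method above relies on
transitions = defdict(lambda: defdict(lambda: 0.0))
emissions = defdict(lambda: defdict(lambda: 0.0))
transitions['HOT']['COLD'] = 0.4
emissions['HOT']['3'] = 0.4

print(transitions['HOT']['COLD'])         # 0.4
print(transitions['COLD']['NEVER_SEEN'])  # 0.0 -- unseen pairs default to 0.0, no KeyError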
Example #2
    def __init__(self, maxima, equip, value_slots, single_slots):

        self.maxima = defdict(const_factory(1))
        self.maxima.update(maxima)
        self.equip = defdict(const_factory(0))
        self.equip.update(equip)
        self.value_slots = value_slots
        self.single_slots = single_slots
        self.slots = value_slots | single_slots
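const_factory is not shown in this snippet; presumably it builds a zero-argument callable returning a constant, so defdict(const_factory(1)) yields 1 for missing keys and defdict(const_factory(0)) yields 0. A hedged sketch of such a helper (name and behaviour are assumptions, not the project's actual code):

from collections import defaultdict as defdict

def const_factory(value):
    # a zero-argument callable that always returns `value`,
    # usable as a defaultdict default_factory
    return lambda: value

maxima = defdict(const_factory(1))
print(maxima['unseen_slot'])  # 1 -- missing keys fall back to the constant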
Example #3
def average_by(*params):
    '''average_by works as follows:
      >>> average_by([2,3,4],[0,1,1])
      [[2.0, 0], [3.5, 1]]
      >>> average_by([2,3,4],[0,1,1],[0,1,1])
      [[2.0, 0, 0], [3.5, 1, 1]]

      i.e. it groups by the unique value tuples of the later params and
      averages the corresponding values of the first parameter.'''
    # assumes numpy's isscalar/copy and operator.add are imported at module level
    items = defdict(list)
    for t in zip(*params):
        items[t[1:]].append(t[0])

    ans = []
    for k in sorted(items):
        v = items[k]
        v = reduce(add, v) / float(len(v))  # float() so integer inputs still average to e.g. 3.5
        if isscalar(k):
            ans.append([v, k])
        else:
            temp = [v]
            temp.extend(copy(k).tolist())
            ans.append(temp)
    return ans
Example #4
def map2bbh(mapping_f, input_type='bowtie2out'):
    if not mapping_f:
        ras, inpf = plain_read_and_split, sys.stdin
    else:
        if mapping_f.endswith(".bz2"):
            ras, inpf = read_and_split, bz2.BZ2File(mapping_f, "r")
        else:
            ras, inpf = plain_read_and_split, open(mapping_f)

    reads2markers, reads2maxb = {}, {}
    if input_type == 'bowtie2out':
        #for r,c in (l.strip().split('\t') for l in inpf):
        for r, c in ras(inpf):
            reads2markers[r] = c
    elif input_type == 'sam':
        #for o in (l.strip().split('\t') for l in inpf):
        for o in ras(inpf):
            if o[0][0] != '@' and o[2][-1] != '*':
                reads2markers[o[0]] = o[2]
    inpf.close()

    markers2reads = defdict(set)
    for r, m in reads2markers.items():
        markers2reads[m].add(r)

    return markers2reads
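The final loop is a common inversion idiom: a plain dict mapping reads to markers becomes a defdict(set) mapping each marker to the set of reads hitting it, with no membership checks. The same pattern in isolation, on toy data and assuming defdict aliases collections.defaultdict:

from collections import defaultdict as defdict

# toy read -> marker mapping
reads2markers = {'read1': 'markerA', 'read2': 'markerA', 'read3': 'markerB'}

markers2reads = defdict(set)
for r, m in reads2markers.items():
    markers2reads[m].add(r)  # the empty set is created on first access to a new marker

print(dict(markers2reads))
# e.g. {'markerA': {'read1', 'read2'}, 'markerB': {'read3'}} (element order may vary)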
Example #5
    def func(net, sta):
        # identify each row by its channel tuple:
        retdict = defdict(odict)
        strfrmt = "%0.4d-%0.2d-%0.2d"
        for cha in sta.channels:
            start = strfrmt % (cha.start_date.year, cha.start_date.month, cha.start_date.day)
            end = strfrmt % (cha.end_date.year, cha.end_date.month, cha.end_date.day)
            id_ = (sta.code, start, end)
            mydic = retdict[id_]
            mydic['Name'] = sta.code
            mydic['Lat'] = sta.latitude
            mydic['Lon'] = sta.longitude
            mydic['Ele'] = int_(cha.elevation)
            mydic['Azi'] = int_(max(mydic['Azi'], cha.azimuth) if 'Azi' in mydic else cha.azimuth)
            mydic['Rate'] = int_(cha.sample_rate)
            mydic['Sensor'] = cha.sensor.model
            mydic['ID'] = cha.sensor.serial_number
            mydic['Logger'] = cha.data_logger.model
            mydic['Id'] = cha.data_logger.serial_number
            mydic['Start'] = start
            mydic['End'] = end
            mydic['Channels'] = "%s %s" % (mydic['Channels'], cha.code) \
                if 'Channels' in mydic else cha.code

        return retdict.itervalues()
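defdict(odict) (odict presumably being collections.OrderedDict) means retdict[id_] creates an empty, insertion-ordered row the first time a (station code, start, end) tuple is seen, and later channels with the same key keep updating that row. A small sketch of the pattern with made-up keys and fields:

from collections import OrderedDict as odict, defaultdict as defdict

retdict = defdict(odict)

row = retdict[('STA1', '2010-01-01', '2011-01-01')]  # empty row created on first access
row['Name'] = 'STA1'
row['Channels'] = 'HHZ'

row = retdict[('STA1', '2010-01-01', '2011-01-01')]  # same row returned the second time
row['Channels'] = "%s %s" % (row['Channels'], 'HHN')

print(list(retdict.values()))  # one OrderedDict per (station, start, end) key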
Example #6
def ProcessDup(src):
    """Function saves file duplicates with new name, origin remains as is."""
    ourDict = defdict(list)
    destDict = {}
    for item in src:
        if "SOURCE" in item:
            t = item.split()[1]
            destDict[t] = t
            m = os.path.split(t)
            ourDict[m[1]].append(m[0])  # defdict(list) already supplies the empty list

    ourDict = dict((k, v) for k, v in ourDict.iteritems() if len(v) > 1)

    result = src
    if ourDict:
        result = []
        fixed = FindDup(ourDict)
        for item in src:
            if "SOURCE" in item:
                dst = item.split()[1]
                if dst in fixed:
                    d = item.replace(dst, fixed[dst])
                    result.append(d)
                else:
                    result.append(item)
    ourDict.clear()
    return result
Example #8
def aggregate(in_path):
    """Merge data in separate files and aggregate them by link.
    """
    links = defdict(lambda: [])
    for fname in os.listdir(in_path):
        fpath = os.path.sep.join((in_path, fname))
        load_data(fpath, links)
    return links
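load_data is not shown here; the point of defdict(lambda: []) (equivalent to defdict(list)) is that the loader can append to links[key] without checking whether the key exists yet. A hedged sketch of the kind of loader this implies, with an assumed one-pair-per-line file format:

from collections import defaultdict as defdict

def load_data(fpath, links):
    # hypothetical loader: one "link_id value" pair per line of the file
    with open(fpath) as fin:
        for line in fin:
            link_id, value = line.split()
            links[link_id].append(float(value))  # the list is created on demand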
Example #9
    def __init__(self, level_map, commenter, conf, **items):

        self.level_map = level_map
        actors = items['actors']
        monsters = items['monsters']
        things = items['things']
        self.actors = set(actors)
        self.monsters = set(monsters)
        acmo = actors + monsters
        self.coord_actors = dict(zip((actor.position for actor in acmo), acmo))
        self.coord_things = dict(zip((thing.position for thing in things), things))
        self.obstacles = items['obstacles']
        self.conf = conf

        self.future_moves = defdict(list)
        self.pos_move = dict()
        self.future_drops = defdict(list)
        self.future_attacks = defdict(list)
        self.commenter = commenter
Example #10
def analyze(in_path, *opts):
    """Merge data in separate files and measure variations.
    """
    links = defdict(lambda: [])
    for fname in os.listdir(in_path):
        fpath = os.path.sep.join((in_path, fname))
        meas = list(load_data(fpath, *opts))
        ts = [v[0] for v in meas]
        min_ts = np.min(ts)
        max_ts = np.max(ts)
        stats = compute_stats([v[-1] for v in meas])
        yield (fname, min_ts, max_ts, stats)
Example #11
    def set_actors(self, moves):
        """Selectively update  actor, monster positions"""
        is_new = set()
        for new_position, actor in moves.items():
            # old position could be new position for another actor
            if actor.position not in is_new:
                self.coord_actors.pop(actor.position)
            actor.position = new_position
            is_new.add(new_position)
            self.coord_actors[new_position] = actor

        self.future_moves = defdict(list)
        self.pos_move = {}
Example #12
    def resolve_moves(self):

        move_multipos = defdict(list)
        for move, actors in self.future_moves.items():
            for actor in actors:
                move_multipos[move].append(actor.position)

        static = set(self.coord_actors) - set(self.pos_move)
        static |= self.propagate_static(move_multipos, static)

        doable = {}
        for move in move_multipos:
            if move not in static and self.level_map[move] not in self.obstacles:
                doable[move] = move_multipos[move]

        # handle free places
        movings = {}
        for move, positions in doable.items():
            if move not in self.pos_move and move not in static:
                sortpos = sorted(positions, key=lambda p: self.coord_actors[p].priority)
                movings[move] = sortpos[0]
                static.update(sortpos[1:])

        static |= self.propagate_static(doable, static)

        # handle occupied places
        beset = {}
        for move in set(doable) - set(movings):
            beset[move] = doable[move]

        for move, positions in beset.items():
            if move not in static:
                for pos in positions:
                    if move == self.pos_move[pos] and pos == self.pos_move[move]:
                        static.add(move)
                        static.add(pos)
                        break
                valid_pos = [p for p in positions if p not in static]
                if not valid_pos:
                    continue
                first = min(valid_pos, key=lambda p: self.coord_actors[p].priority)
                movings[move] = first
                static.update(set(valid_pos) - set([first]))

        static |= self.propagate_static(doable, static)
        real_moves = {}
        for move, pos in movings.items():
            if move not in static:
                real_moves[move] = self.coord_actors[pos]

        return real_moves
Example #13
    def resolve_drops(self):

        valid_pos = set(self.future_drops) - \
          (set(self.coord_actors) | set(self.coord_things))
        for goal in valid_pos:
            actor, name = min(self.future_drops[goal], key=lambda x: x[0].priority)
            thing = actor.drop(name, *goal)
            if thing:
                self.coord_things[goal] = thing
                if thing.is_good:
                    actor.update_karma()
            else:
                self.commenter(2, "{0} can't drop {1}.".format(actor.name, name))
        self.future_drops = defdict(list)
Example #14
    def resolve_attacks(self):

        attacks = dict((self.coord_actors[pos], self.future_attacks[pos])
                       for pos in self.future_attacks if pos in self.coord_actors)
        pairs = graphs.node_pairs(attacks)
        queue = queues.PriorityQueue()
        queue.reset()
        queue.push_pairs(((a.attack_priority, d.attack_priority), (a, d))
                         for a, d in pairs)
        rounds = self.conf['rounds']
        dt = self.conf['dt']
        while len(queue) > 0:
            _, (attacker, defender) = queue.pop()
            fight(attacker, defender, dt, rounds)
            self.update_karma(attacker, defender)
            self.commenter.fight(attacker, defender)

        self.future_attacks = defdict(list)
Example #15
    def __init__(self):
        self._statelevel = []
        self._transitions = defdict(lambda: defdict(lambda: 0.0))
        self._emissions = defdict(lambda: defdict(lambda: 0.0))
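Because both nested tables default to 0.0, training code only has to assign the probabilities it actually estimates; predict() in Example #1 can then look up any transition or emission and get 0.0 for unseen pairs. For instance, with hypothetical values (and assuming the class above is called HMM):

model = HMM()  # assuming the class shown above is called HMM
model._statelevel = ['HOT', 'COLD']
model._transitions['HOT']['HOT'] = 0.6
model._transitions['HOT']['COLD'] = 0.4
model._emissions['HOT']['3'] = 0.4

print(model._emissions['COLD']['3'])  # 0.0 -- never set, so predict() treats it as impossible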
Example #16
    def reset(self):

        self.equip = defdict(const_factory(0))
Example #17
def one_line_patch():
    d = defdict(list)
    for k, v in oneline_patch_src:
        d[k].append(v)
    for file, val in d.iteritems():
        patch_line(file, val)
Example #18
def fix_src():
    d = defdict(list)
    for k, v in va_macro_src:
        d[k].append(v)
    for folder, file in d.iteritems():
        fix_va_macro(folder, file)
    one_line_patch()
Example #19
def process_all(session, segments_model_instances, run_id,
                notify_progress_func=lambda *a, **v: None, **processing_args):
    """
        Processes all segments_model_instances. FIXME: write detailed doc
    """
    # redirect standard error to devnull. FIXME if we can capture it segment-wise (that
    # would be great, but how much effort and how much performance loss?)
    # redirect_external_out(2)

    # how many processed segments to accumulate before committing. Setting it higher might speed up
    # calculations at the expense of losing up to max_session_new segments if just one is wrong
    max_session_new = 10
    # commit for safety:
    commit(session, on_exc=lambda exc: logger.error(str(exc)))

    calculated = 0
    saved = 0

    logger.info("Processing %d segments", len(segments_model_instances))
    ret = []

    sta2segs = defdict(lambda: [])
    for seg in segments_model_instances:
        sta2segs[seg.channel.station_id].append(seg)

    # process segments station by station, so that we load only one inventory at a time
    # and hopefully it will be garbage collected (inventory objects are big)
    for sta_id, segments in sta2segs.iteritems():
        inventory = None
        try:
            inventory = get_inventory(segments[0], session, timeout=30)
        except SQLAlchemyError as exc:
            logger.warning("Error while saving inventory (station id=%s), "
                           "%d segment will not be processed: %s",
                           str(sta_id), len(segments), str(exc))
            session.rollback()
        except (urllib2.HTTPError, urllib2.URLError, httplib.HTTPException, socket.error) as _:
            logger.warning("Error while downloading inventory (station id=%s), "
                           "%d segment will not be processed: %s URL: %s",
                           str(sta_id), len(segments), str(_), get_inventory_query(segments[0]))
        except Exception as exc:  # pylint:disable=broad-except
            logger.warning("Error while creating inventory (station id=%s), "
                           "%d segment will not be processed: %s",
                           str(sta_id), len(segments), str(exc))

        if inventory is None:
            notify_progress_func(len(segments))
            continue
            # pass

        # THIS IS THE METHOD WITHOUT MULTIPROCESS: 28, 24.7 secs on 30 segments
        for seg in segments:
            notify_progress_func(1)
            pro = models.Processing(run_id=run_id)
            # pro.segment = seg
            # session.flush()
            try:
                pro = process(pro, seg, seg.channel, seg.channel.station, seg.event,
                              seg.datacenter, inventory, **processing_args)
                pro.id = None
                pro.segment = seg
                calculated += 1
                ret.append(pro)
                # flush(session, on_exc=lambda exc: logger.error(str(exc)))
                if len(ret) >= max_session_new:
                    added = len(ret)
                    session.add_all(ret)
                    ret = []
                    if commit(session,
                              on_exc=lambda exc: logger.warning(msgs.db.dropped_seg(added,
                                                                                    None,
                                                                                    exc))):
                        saved += added
            except Exception as exc:  # pylint:disable=broad-except
                logger.warning(msgs.calc.dropped_seg(seg, "segments processing", exc))

    added = len(ret)
    if added and commit(session, on_exc=lambda exc: logger.warning(msgs.db.dropped_seg(added,
                                                                                       None,
                                                                                       exc))):
        saved += added
    logger.info("")
    logger.info("%d segments successfully processed, %d succesfully saved", calculated, saved)
    return ret