Example #1
import functools
import sys


class memoized(object):
    """A memoized decorator with reset functionality for instance methods
    """

    memo = makehash()

    n_call = 0
    n_hit = 0
    n_reset = 0

    def __init__(self, f):
        self.f = f
        functools.update_wrapper(self, f)

    @staticmethod
    def key(*args, **kwargs):
        args = flatten(list(args))
        return tuple(args), frozenset(kwargs.items())

    def __call__(self, obj, *args, **kwargs):
        memoized.n_call += 1

        key = memoized.key(*args, **kwargs)
        fn = self.f.__name__

        if key not in memoized.memo[obj][fn]:
            memoized.memo[obj][fn][key] = self.f(obj, *args, **kwargs)
        else:
            memoized.n_hit += 1

        return memoized.memo[obj][fn][key]

    def __get__(self, obj, objtype):
        return functools.partial(self.__call__, obj)

    @staticmethod
    def reset(name=None):
        def wrapper(f):
            @functools.wraps(f)
            def helper(obj, *args, **kwargs):
                if name and name in memoized.memo[obj]:
                    del memoized.memo[obj][name]
                elif obj in memoized.memo:
                    del memoized.memo[obj]
                memoized.n_reset += 1
                return f(obj, *args, **kwargs)

            return helper

        return wrapper

    @classmethod
    def statistics(cls):
        if cls.n_call:
            print('memoized hit rate: %d/%d=%f' %
                  (cls.n_hit, cls.n_call,
                   cls.n_hit / cls.n_call),
                  file=sys.stderr)

        if cls.n_reset:
            print('memoized reset times: %d' % cls.n_reset, file=sys.stderr)
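
The class above leans on two project helpers that are not shown here, `makehash` and `flatten`. Below is a minimal, hypothetical sketch of what they might look like (inferred from how the examples call them, e.g. `utils.makehash(1, list)` in Example #3), followed by a made-up use of the decorator; none of this is the project's actual code.

from collections import defaultdict


def makehash(depth=None, leaf=None):
    """Autovivifying nested dict; with (depth, leaf) it bottoms out in leaf()."""
    if depth is None:
        return defaultdict(makehash)
    if depth == 0:
        return defaultdict(leaf)
    return defaultdict(lambda: makehash(depth - 1, leaf))


def flatten(items):
    """Flatten one level of nested lists/tuples inside the argument list."""
    out = []
    for item in items:
        if isinstance(item, (list, tuple)):
            out.extend(item)
        else:
            out.append(item)
    return out


class Fib(object):
    @memoized
    def value(self, n):
        return n if n < 2 else self.value(n - 1) + self.value(n - 2)

    @memoized.reset('value')
    def clear(self):
        pass


fib = Fib()
print(fib.value(30))     # computed once; the recursive calls hit the cache
fib.clear()              # drops the cached 'value' results for this instance
memoized.statistics()    # hit/reset counters are printed to stderr

For this sketch to run, `makehash` and `flatten` would have to be in scope before the `memoized` class is defined, since the class body calls `makehash()` at definition time.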
Example #2
def load(opened_file):
    codes = makehash()
    loaded = makehash()

    for line in opened_file:
        splitted = line.strip().split('\t')

        if len(splitted) == 4:
            k1, k2, idx, code = splitted
            codes[k1][k2][int(idx)] = code

    for k1 in codes:
        for k2 in codes[k1]:
            joined = ''.join(codes[k1][k2][i]
                             for i in sorted(codes[k1][k2].keys()))
            loaded[k1][k2] = decode(joined)

    return loaded
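
`load` expects four tab-separated fields per line: two keys, a chunk index, and a chunk of encoded data; the chunks are reassembled in index order per (k1, k2) pair and handed to the project's `decode` helper. A hypothetical call with made-up values:

import io

sample = io.StringIO(
    'modelA\tstate1\t0\tQUJD\n'
    'modelA\tstate1\t1\tREVG\n'
    'modelB\tstate2\t0\tSEVM\n'
)
loaded = load(sample)
# loaded['modelA']['state1'] == decode('QUJDREVG')
# loaded['modelB']['state2'] == decode('SEVM')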
Example #3
def mat_to_dict(self, charlist, ignore_this={}):
    """Return a dict from the mat"""
    mat_val = utils.makehash(1, list)
    for node in self.tree.traverse():
        for i, val in enumerate(charlist):
            for st in self.state_mat[i, node.ind]:
                ignore_corr = (ignore_this.get(val, "") == st)
                if not ignore_corr:
                    mat_val[node.name][st].append(val)
    return mat_val
Example #4
    def flip_rea_forward(clc, nodestates):
        """Flip rea data dict"""
        new_dt = utils.makehash(1, set)
        state_map = defaultdict(set)
        for (genome, aarea) in nodestates.items():
            nl = [(c, aa) for aa, codons in aarea.items() for c in codons]
            for (c, aa) in nl:
                new_dt[genome][c].add(aa)
                state_map[c].add(aa)

        return new_dt, state_map
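
The method above inverts the nesting: per genome, an amino acid → codons mapping becomes codon → set of amino acids, and `state_map` aggregates the same codon → amino-acid relation across all genomes. A sketch with made-up data, treating it as a plain function since the first parameter `clc` is never used:

nodestates = {'g1': {'L': ['CTG', 'CTT'], 'F': ['TTT']}}
new_dt, state_map = flip_rea_forward(None, nodestates)
# dict(new_dt['g1']) == {'CTG': {'L'}, 'CTT': {'L'}, 'TTT': {'F'}}
# dict(state_map)    == {'CTG': {'L'}, 'CTT': {'L'}, 'TTT': {'F'}}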
Example #5
def main():
    loaded = stream.load(sys.stdin)
    merged = makehash()

    for k1 in loaded:
        merged[k1] = SemiMarkov()

        for k2 in loaded[k1]:
            merged[k1] += loaded[k1][k2]

        stream.dump(k1, merged[k1])
Example #6
def create_login(user, password, update=False, superuser=None):
    hash = makehash(password)
    result = web.ctx.db.where('user', login=user).first()
    data = {}
    if password:
        data['password'] = hash
    if superuser is not None:
        data['superuser'] = superuser
    if result and update:
        web.ctx.db.update(
            'user', where='login=$user', vars=locals(), **data)
    else:
        web.ctx.db.insert('user', login=user, **data)
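
In this example `makehash` is evidently a password-hashing helper rather than the nested-dict factory used in the other snippets. Hypothetical calls (the credentials are made up):

create_login('alice', 's3cr3t', superuser=True)    # insert a new superuser row
create_login('alice', 'n3w-s3cr3t', update=True)   # re-hash and update the existing row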
Example #7
def evaluate(samples, models,
             use_duration=True,
             window=semi_markov.WINDOW):
    confusion_matrix = makehash()

    for klass, file_names in samples.items():
        counters = statistics(
            file_names, models,
            use_duration=use_duration,
            window=window)

        for threshold in counters:
            for term, counter in counters[threshold].items():
                key = '_'.join([klass, term])
                confusion_matrix[threshold][key] = counter

    for threshold, matrix in sorted(confusion_matrix.items()):
        pv('threshold', stdout=True)
        print_statistics(matrix, threshold)
        print()