Example #1
def create_markers(kind,
                   batchsize=100,
                   attribute='__key__',
                   order='asc',
                   filters=None,
                   callback=None):
    # build a unique, memcache-safe key (dots from time.time() are replaced)
    mc_key = ('create_markers:%s-%s' % (
        time.time(), random.randint(1, sys.maxint))).replace('.', '-')
    # compute the markers in a background task and return the key right
    # away so the caller can poll memcache for the (partial) result
    defer(_compute_markers, [mc_key, kind, attribute, order, batchsize],
          dict(callback=callback, filters=filters or []), once_only=False)
    return mc_key
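A minimal usage sketch, under stated assumptions: the 'Article' kind, the ('published =', True) filter and the notify_done callback are illustrative names only, and the module-level imports plus the custom defer() helper with its once_only argument are taken from the surrounding excerpts. The caller starts the marker computation, keeps the returned key, and later reads the marker list back from memcache:

def notify_done(mc_key):
    # called from the task queue once all markers have been written
    logging.info('markers for %s are ready' % mc_key)

mc_key = create_markers('Article',
                        batchsize=500,
                        attribute='__key__',
                        order='asc',
                        filters=[('published =', True)],
                        callback=notify_done)
# later, e.g. in a request handler: partial results while the task is
# still running, the full marker list once notify_done has fired
markers = memcache.get(mc_key) or []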
Example #2
def _compute_markers(mc_key, kind, attribute, order, batchsize,
                     filters=None, callback=None):
    logging.info('_compute_markers %s' % repr(locals()))
    # keys-only query over the kind, ordered on the marker attribute
    q = datastore.Query(kind, keys_only=True)
    q.Order((attribute, _sorting[order]))
    # apply any additional filters, given as ('prop op', value) pairs
    for prop, value in filters or []:
        q[prop] = value
    # the intermediate result: markers computed so far (resumed on retry)
    i_res = memcache.get(mc_key) or []
    try:
        while True:
            if i_res:
                # continue after the last marker we already have
                q['%s %s' % (attribute, _operator[order])] = i_res[-1]
            if not q.Count():  # required due to a bug in the SDK (#2875)
                break
            # fetch one key at offset batchsize-1, i.e. the last key of
            # the current batch
            res = q.Get(1, batchsize-1)
            if not res:
                # this is the end: fewer than batchsize items are left
                break
            # if the attribute is __key__ we already have the value;
            # otherwise fetch the entity and read the attribute from it
            if attribute == '__key__':
                lastval = res[-1]
            else:
                e = db.get(res[-1])
                lastval = getattr(e, attribute)
            i_res.append(lastval)
        memcache.set(mc_key, i_res)
        # we are finished, so call the callback with the memcache key
        if callback:
            defer(callback, [mc_key], once_only=False)
    except DeadlineExceededError:
        # we did not finish in time; save progress and re-queue ourselves
        memcache.set(mc_key, i_res)
        defer(_compute_markers, [mc_key, kind, attribute, order, batchsize],
              dict(callback=callback, filters=filters), once_only=False)
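For context, a hedged sketch of how the stored markers might be consumed for pagination: page_for is a hypothetical helper, not part of the excerpts above, and it assumes the same _sorting/_operator maps and the low-level datastore.Query API. Marker i is the attribute value of the last entity of batch i, so page i+1 starts right after it:

def page_for(kind, markers, page, attribute='__key__', order='asc',
             batchsize=100, filters=None):
    # hypothetical helper: fetch the keys of one page, given the marker
    # list that _compute_markers stored under the memcache key
    q = datastore.Query(kind, keys_only=True)
    q.Order((attribute, _sorting[order]))
    for prop, value in filters or []:
        q[prop] = value
    if page > 0:
        # start after the last value of the previous page
        q['%s %s' % (attribute, _operator[order])] = markers[page - 1]
    return q.Get(batchsize)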