Example #1
 def helper():
   inq = tasklets.SerialQueueFuture()
   query.run_to_queue(inq, self._conn, options)
   is_ancestor_query = query.ancestor is not None
   while True:
     try:
       ent = yield inq.getq()
     except EOFError:
       break
     if isinstance(ent, model.Key):
       pass  # It was a keys-only query and ent is really a Key.
     else:
       key = ent.key
       if key in self._cache:
         # Assume the cache is more up to date.
         if self._cache[key] is None:
           # This is a weird case.  Apparently this entity was
           # deleted concurrently with the query.  Let's just
           # pretend the delete happened first.
           logging.info('Conflict: entity %s was deleted', key)
           continue
         # Replace the entity the callback will see with the one
         # from the cache.
         if ent != self._cache[key]:
           logging.info('Conflict: entity %s was modified', key)
         ent = self._cache[key]
       else:
         if is_ancestor_query and self.should_cache(key):
           self._cache[key] = ent
     if callback is None:
       val = ent
     else:
       val = callback(ent)  # TODO: If this raises, log and ignore
     mfut.putq(val)
   mfut.complete()
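The loop above is the standard way to drain a SerialQueueFuture: call getq() repeatedly and stop when it raises EOFError, which happens once the producer has called complete() and the queue is empty. A minimal, self-contained sketch of that idiom (assuming the classic GAE SDK import path google.appengine.ext.ndb.tasklets, as used by these examples):

    from google.appengine.ext.ndb import tasklets

    @tasklets.tasklet
    def drain(q):
      # Collect every value from a SerialQueueFuture until the producer
      # signals end-of-stream; getq() raises EOFError once complete()
      # has been called and the queue has been emptied.
      results = []
      while True:
        try:
          val = yield q.getq()
        except EOFError:
          break
        results.append(val)
      raise tasklets.Return(results)

Paired with a producer that calls putq() for each value and then complete(), as in the tests below, drain() returns the values in the order they were enqueued.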
Example #2
 def testSerialQueueFuture_PutQ_1(self):
     sqf = tasklets.SerialQueueFuture()
     f1 = Future()
     sqf.putq(f1)
     sqf.complete()
     f1.set_result(1)
     self.assertEqual(sqf.getq().get_result(), 1)
Example #3
    def testSerialQueueFuture(self):
        q = tasklets.SerialQueueFuture()

        @tasklets.tasklet
        def produce_one(i):
            yield tasklets.sleep(random.randrange(10) * 0.01)
            raise tasklets.Return(i)

        @tasklets.tasklet
        def producer():
            for i in range(10):
                q.add_dependent(produce_one(i))
            q.complete()

        @tasklets.tasklet
        def consumer():
            for i in range(10):
                val = yield q.getq()
                self.assertEqual(val, i)
            yield q
            self.assertRaises(EOFError, q.getq().get_result)
            yield q

        @tasklets.synctasklet
        def foo():
            yield producer(), consumer()

        foo()
Example #4
 def testSerialQueueFuture_ItemException(self):
     sqf = tasklets.SerialQueueFuture()
     g1 = sqf.getq()
     f1 = Future()
     sqf.putq(f1)
     sqf.complete()
     f1.set_exception(ZeroDivisionError())
     self.assertRaises(ZeroDivisionError, g1.get_result)
Example #5
 def helper():
     try:
         inq = tasklets.SerialQueueFuture()
         query.run_to_queue(inq, self._conn, options)
         is_ancestor_query = query.ancestor is not None
         while True:
             try:
                 batch, i, ent = yield inq.getq()
             except EOFError:
                 break
             if isinstance(ent, model.Key):
                 pass  # It was a keys-only query and ent is really a Key.
             else:
                 key = ent._key
                 if key in self._cache:
                     hit = self._cache[key]
                     if hit is not None and hit.key != key:
                         # The cached entry has been mutated to have a different key.
                         # That's a false hit.  Get rid of it.  See issue #13.
                         del self._cache[key]
                 if key in self._cache:
                     # Assume the cache is more up to date.
                     if self._cache[key] is None:
                         # This is a weird case.  Apparently this entity was
                         # deleted concurrently with the query.  Let's just
                         # pretend the delete happened first.
                         logging.info('Conflict: entity %s was deleted',
                                      key)
                         continue
                     # Replace the entity the callback will see with the one
                     # from the cache.
                     if ent != self._cache[key]:
                         logging.info(
                             'Conflict: entity %s was modified', key)
                     ent = self._cache[key]
                 else:
                     # Cache the entity only if this is an ancestor query;
                     # non-ancestor queries may return stale results, since in
                     # the HRD these queries are "eventually consistent".
                     # TODO: Shouldn't we check this before considering cache hits?
                     if is_ancestor_query and self.should_cache(key):
                         self._cache[key] = ent
             if callback is None:
                 val = ent
             else:
                 # TODO: If the callback raises, log and ignore.
                 if options is not None and options.produce_cursors:
                     val = callback(batch, i, ent)
                 else:
                     val = callback(ent)
             mfut.putq(val)
     except Exception as err:
         _, _, tb = sys.exc_info()
         mfut.set_exception(err, tb)
         raise
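The cache handling above amounts to three precedence rules: a cached None (recording a concurrent delete) wins over the query result, any other cached entity replaces the entity returned by the query, and only ancestor-query results are written back to the cache, because only ancestor queries are strongly consistent in the HRD. A hypothetical standalone restatement of those rules, with the function and parameter names invented for illustration:

    def reconcile_with_cache(cache, key, ent, is_ancestor_query, should_cache):
        # Sketch only: mirrors the precedence rules in the helper above.
        if key in cache:
            if cache[key] is None:
                # The entity was deleted concurrently with the query;
                # pretend the delete happened first and skip the result.
                return None
            # Assume the cache is more up to date than the query result.
            return cache[key]
        if is_ancestor_query and should_cache(key):
            # Only ancestor queries are strongly consistent, so only
            # their results are safe to cache.
            cache[key] = ent
        return ent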
Example #6
    def run_to_queue(self, queue, conn, options=None):
        """Run this query, putting entities into the given queue."""
        # Create a list of (first-entity, subquery-iterator) tuples.
        # TODO: Use the specified sort order.
        assert options is None  # Don't know what to do with these yet.
        state = []
        orderings = orders_to_orderings(self.__orders)
        for subq in self.__subqueries:
            subit = tasklets.SerialQueueFuture('MultiQuery.run_to_queue')
            subq.run_to_queue(subit, conn)
            try:
                batch, i, ent = yield subit.getq()
            except EOFError:
                continue
            else:
                state.append(_SubQueryIteratorState(ent, subit, orderings))

        # Now turn it into a sorted heap.  The heapq module claims that
        # calling heapify() is more efficient than calling heappush() for
        # each item.
        heapq.heapify(state)

        # Repeatedly yield the lowest entity from the state vector,
        # filtering duplicates.  This is essentially a multi-way merge
        # sort.  One would think it should be possible to filter
        # duplicates simply by dropping other entities already in the
        # state vector that are equal to the lowest entity, but because of
        # the weird sorting of repeated properties, we have to explicitly
        # keep a set of all keys, so we can remove later occurrences.
        # Yes, this means that the output may not be sorted correctly.
        # Too bad.  (I suppose you can do this in constant memory bounded
        # by the maximum number of entries in relevant repeated
        # properties, but I'm too lazy for now.  And yes, all this means
        # MultiQuery is a bit of a toy.  But where it works, it beats
        # expecting the user to do this themselves.)
        keys_seen = set()
        while state:
            item = heapq.heappop(state)
            ent = item.entity
            if ent._key not in keys_seen:
                keys_seen.add(ent._key)
                queue.putq((None, None, ent))
            subit = item.iterator
            try:
                batch, i, ent = yield subit.getq()
            except EOFError:
                pass
            else:
                item.entity = ent
                heapq.heappush(state, item)
        queue.complete()
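The comments above describe a multi-way merge with duplicate filtering: keep the head of each sorted sub-stream in a heap, repeatedly pop the smallest entry, and use a set of already-seen keys to drop later occurrences. A plain-Python sketch of the same technique over ordinary iterables (no tasklets or queues; the names are illustrative):

    import heapq
    import itertools

    def merge_sorted_streams(streams, key=lambda item: item):
        # Multi-way merge of already-sorted iterables, dropping later
        # occurrences of keys that have already been emitted.
        counter = itertools.count()  # tie-breaker so items are never compared
        state = []
        for it in map(iter, streams):
            try:
                first = next(it)
            except StopIteration:
                continue
            state.append((key(first), next(counter), first, it))
        # heapify() is linear, cheaper than pushing entries one at a time.
        heapq.heapify(state)
        seen = set()
        while state:
            k, _, item, it = heapq.heappop(state)
            if k not in seen:
                seen.add(k)
                yield item
            try:
                nxt = next(it)
            except StopIteration:
                pass
            else:
                heapq.heappush(state, (key(nxt), next(counter), nxt, it))

For example, list(merge_sorted_streams([[1, 3, 5], [2, 3, 6]])) yields [1, 2, 3, 5, 6]; the duplicate 3 is dropped, mirroring the keys_seen filtering above.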
Example #7
 def iter_query(self, query, options=None):
   return self.map_query(query, callback=None, options=options,
                         merge_future=tasklets.SerialQueueFuture())
Example #8
 def testSerialQueueFuture_GetQ(self):
     sqf = tasklets.SerialQueueFuture()
     sqf.set_exception(KeyError())
     self.assertRaises(KeyError, sqf.getq().get_result)
Example #9
 def testSerialQueueFuture_PutQ_3(self):
     sqf = tasklets.SerialQueueFuture()
     g1 = sqf.getq()
     sqf.putq(1)
     sqf.complete()
     self.assertEqual(g1.get_result(), 1)
Example #10
 def testSerialQueueFuture_Complete(self):
     sqf = tasklets.SerialQueueFuture()
     g1 = sqf.getq()
     sqf.complete()
     self.assertRaises(EOFError, g1.get_result)