    def get_results(self):
        if self.failed:
            return

        # Serve from the find cache when possible; otherwise issue the remote
        # request and parse the pickled response.
        if self.cachedResult is not None:
            results = self.cachedResult
        else:
            if self.connection is None:
                self.send()

            try:
                response = self.connection.getresponse()
                assert response.status == 200, "received error response %s - %s" % (response.status, response.reason)
                result_data = response.read()
                results = unpickle.loads(result_data)

            except Exception:
                log.exception("FindRequest.get_results(host=%s, query=%s) exception processing response" % (self.store.host, self.query))
                self.store.fail()
                return

            cache.set(self.cacheKey, results, settings.FIND_CACHE_DURATION)

        for node_info in results:
            # Leaf metrics get a RemoteReader so their datapoints can be fetched later;
            # everything else becomes a plain branch in the metric tree.
            if node_info.get('is_leaf'):
                reader = RemoteReader(self.store, node_info, bulk_query=self.query.pattern)
                node = LeafNode(node_info['path'], reader)
            else:
                node = BranchNode(node_info['path'])

            node.local = False
            yield node
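
# A standalone sketch (not part of the original class) of the HTTP round trip that
# get_results() depends on: ask a graphite-web style peer for metrics matching a
# pattern and unpickle the returned list of node-info dicts such as
# {'path': 'carbon.agents.host-a.cpuUsage', 'is_leaf': True}. The endpoint path and
# query parameters below are assumptions, not taken verbatim from this module.
import pickle
from http.client import HTTPConnection
from urllib.parse import urlencode

def remote_find(host, pattern, timeout=5.0):
    conn = HTTPConnection(host, timeout=timeout)
    query_string = urlencode({'query': pattern, 'format': 'pickle', 'local': '1'})
    conn.request('GET', '/metrics/find/?' + query_string)
    response = conn.getresponse()
    if response.status != 200:
        raise Exception("Error response %d %s from %s" % (response.status, response.reason, host))
    # Only unpickle responses from hosts you control; pickle is unsafe for untrusted data.
    return pickle.loads(response.read())
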
        # Note: wait_lock, connection, url and completion_event are closed over
        # from the enclosing fetch method; only the first caller does the real fetch.
        def wait_for_results():
            if wait_lock.acquire(False): # the FetchInProgress that gets waited on waits for the actual completion
                try:
                    response = connection.getresponse()
                    if response.status != 200:
                        raise Exception("Error response %d %s from %s" % (response.status, response.reason, url))

                    pickled_response = response.read()
                    results = unpickle.loads(pickled_response)
                    with self.cache_lock:
                        self.request_cache[url] = results
                    completion_event.set()
                    return results
                except Exception:
                    completion_event.set()
                    self.store.fail()
                    log.exception("Error requesting %s" % url)
                    raise

            else: # otherwise we just wait on the completion_event
                completion_event.wait(settings.REMOTE_FETCH_TIMEOUT)
                cached_results = self.request_cache.get(url)
                if cached_results is None:
                    raise Exception("Passive remote fetch failed to find cached results")
                else:
                    return cached_results
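
# A minimal, self-contained sketch of the "one caller fetches, the others wait"
# pattern used by wait_for_results() above. The names (SharedFetch, fetch_func,
# result_cache) are illustrative stand-ins; like the original, the object represents
# a single in-flight request, so wait_lock is deliberately never released.
import threading

class SharedFetch(object):
    def __init__(self, fetch_func, timeout=10.0):
        self.fetch_func = fetch_func
        self.timeout = timeout
        self.wait_lock = threading.Lock()
        self.completion_event = threading.Event()
        self.cache_lock = threading.Lock()
        self.result_cache = {}

    def get(self):
        if self.wait_lock.acquire(False):  # the first caller performs the real fetch
            try:
                results = self.fetch_func()
                with self.cache_lock:
                    self.result_cache['results'] = results
                return results
            finally:
                self.completion_event.set()  # wake up the passive waiters either way
        else:  # every other caller just waits for the fetcher to finish
            self.completion_event.wait(self.timeout)
            if 'results' not in self.result_cache:
                raise Exception("Passive fetch failed to find cached results")
            return self.result_cache['results']
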
Example #5
def recv_response(self, conn):
    # Read a 4-byte big-endian length prefix, then exactly that many bytes,
    # and unpickle the resulting payload.
    len_prefix = recv_exactly(conn, 4)
    body_size = struct.unpack("!L", len_prefix)[0]
    body = recv_exactly(conn, body_size)
    return unpickle.loads(body)
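
# recv_exactly() is defined elsewhere in this codebase; the sketch below shows what
# such a helper typically does: keep calling recv() until exactly num_bytes bytes
# have arrived, raising if the peer closes the connection first. The error message
# wording here is an assumption.
def recv_exactly(conn, num_bytes):
    buf = b''
    while len(buf) < num_bytes:
        chunk = conn.recv(num_bytes - len(buf))
        if not chunk:
            raise Exception("Connection lost: got %d of %d expected bytes" % (len(buf), num_bytes))
        buf += chunk
    return buf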