    def delta_store_to_queues(self, w_id, topic, delta):
        # Queue delta for graphql subscription resolving
        if self.delta_queues[w_id]:
            delta_store = create_delta_store(delta, w_id)
            for delta_queue in self.delta_queues[w_id].values():
                delta_queue.put((w_id, topic, delta_store))
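
To see the fan-out in isolation, here is a minimal, self-contained sketch. The MiniDataStoreMgr class, the stub create_delta_store and the literal workflow/subscription IDs are illustrative stand-ins, not the real Cylc data-store manager API; only the delta_store_to_queues body mirrors the method above.

import queue


def create_delta_store(delta=None, workflow_id=None):
    # Stub stand-in for the real helper: wrap the raw delta with its workflow ID.
    return {'id': workflow_id, 'delta': delta}


class MiniDataStoreMgr:
    def __init__(self):
        # Maps workflow ID -> {subscription ID: queue.Queue of pending deltas}.
        self.delta_queues = {}

    def delta_store_to_queues(self, w_id, topic, delta):
        # Queue delta for GraphQL subscription resolving.
        if self.delta_queues[w_id]:
            delta_store = create_delta_store(delta, w_id)
            for delta_queue in self.delta_queues[w_id].values():
                delta_queue.put((w_id, topic, delta_store))


# Two subscriptions to the same workflow each hold their own queue,
# so a single delta is fanned out to both of them.
mgr = MiniDataStoreMgr()
mgr.delta_queues['wf1'] = {'sub-a': queue.Queue(), 'sub-b': queue.Queue()}
mgr.delta_store_to_queues('wf1', 'workflow', {'status': 'running'})
print(mgr.delta_queues['wf1']['sub-a'].get_nowait())
print(mgr.delta_queues['wf1']['sub-b'].get_nowait())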
Example #2
    async def subscribe_delta(self, root, info, args):
        """Delta subscription async generator.

        Async generator mapping the incoming protobuf deltas to
        yielded GraphQL subscription objects.

        """
        workflow_ids = set(args.get('workflows', args.get('ids', ())))
        sub_id = uuid4()
        info.context['sub_id'] = sub_id
        self.delta_store[sub_id] = {}
        delta_queues = self.data_store_mgr.delta_queues
        deltas_queue = queue.Queue()
        try:
            # Iterate over the queue yielding deltas
            w_ids = workflow_ids
            sub_resolver = SUB_RESOLVERS.get(to_snake_case(info.field_name))
            interval = args['ignore_interval']
            old_time = time()
            while True:
                if not workflow_ids:
                    old_ids = w_ids
                    w_ids = set(delta_queues.keys())
                    for remove_id in old_ids.difference(w_ids):
                        if remove_id in self.delta_store[sub_id]:
                            del self.delta_store[sub_id][remove_id]
                for w_id in w_ids:
                    if w_id in self.data_store_mgr.data:
                        if sub_id not in delta_queues[w_id]:
                            delta_queues[w_id][sub_id] = deltas_queue
                            # On a new subscription, yield the whole
                            # workflow data-store as an added delta
                            if args.get('initial_burst'):
                                delta_store = create_delta_store(
                                    workflow_id=w_id)
                                delta_store[DELTA_ADDED] = (
                                    self.data_store_mgr.data[w_id])
                                self.delta_store[sub_id][w_id] = delta_store
                                if sub_resolver is None:
                                    yield delta_store
                                else:
                                    result = await sub_resolver(
                                        root, info, **args)
                                    if result:
                                        yield result
                    elif w_id in self.delta_store[sub_id]:
                        del self.delta_store[sub_id][w_id]
                try:
                    w_id, topic, delta_store = deltas_queue.get(False)
                    if topic != 'shutdown':
                        new_time = time()
                        elapsed = new_time - old_time
                        # Ignore deltas that arrive more frequently
                        # than the configured interval.
                        if elapsed <= interval:
                            continue
                        old_time = new_time
                    else:
                        delta_store['shutdown'] = True
                    self.delta_store[sub_id][w_id] = delta_store
                    if sub_resolver is None:
                        yield delta_store
                    else:
                        result = await sub_resolver(root, info, **args)
                        if result:
                            yield result
                except queue.Empty:
                    await asyncio.sleep(DELTA_SLEEP_INTERVAL)
        except (GeneratorExit, asyncio.CancelledError):
            raise
        except Exception:
            import traceback
            logger.warning(traceback.format_exc())
        finally:
            for w_id in w_ids:
                if delta_queues.get(w_id, {}).get(sub_id):
                    del delta_queues[w_id][sub_id]
            if sub_id in self.delta_store:
                del self.delta_store[sub_id]
            yield None
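
A subscription generator like this one is driven with async for: each yielded delta (or resolver result) becomes one subscription payload, and the finally block yields a final None when the stream is torn down. The consumer below is a hypothetical illustration only; the resolvers, root and info objects, and the args keys are assumptions based on what subscribe_delta reads.

async def drain_subscription(resolvers, root, info, max_payloads=5):
    # Hypothetical consumer: 'workflows', 'ignore_interval' and
    # 'initial_burst' mirror the keys subscribe_delta reads from args.
    args = {'workflows': [], 'ignore_interval': 0.5, 'initial_burst': True}
    received = 0
    async for payload in resolvers.subscribe_delta(root, info, args):
        if payload is None:
            break  # the finally block yields None once on clean-up
        received += 1
        print(f'payload {received}:', payload)
        if received >= max_payloads:
            break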
Example #3
File: resolvers.py  Project: lparkes/cylc
    async def subscribe_delta(self, root, info, args):
        """Delta subscription async generator.

        Async generator mapping the incoming protobuf deltas to
        yielded GraphQL subscription objects.

        """
        workflow_ids = set(args.get('workflows', args.get('ids', ())))
        sub_id = uuid4()
        info.variable_values['backend_sub_id'] = sub_id
        self.delta_store[sub_id] = {}

        op_id = root
        if 'ops_queue' not in info.context:
            info.context['ops_queue'] = {}
        info.context['ops_queue'][op_id] = queue.Queue()
        op_queue = info.context['ops_queue'][op_id]
        self.delta_processing_flows[sub_id] = set()
        delta_processing_flows = self.delta_processing_flows[sub_id]

        delta_queues = self.data_store_mgr.delta_queues
        deltas_queue = queue.Queue()

        counters = {}
        delta_yield_queue = queue.Queue()
        flow_delta_queues = {}
        try:
            # Iterate over the queue yielding deltas
            w_ids = workflow_ids
            sub_resolver = SUB_RESOLVERS.get(to_snake_case(info.field_name))
            interval = args['ignore_interval']
            old_time = time()
            while True:
                if not workflow_ids:
                    old_ids = w_ids
                    w_ids = set(delta_queues.keys())
                    for remove_id in old_ids.difference(w_ids):
                        if remove_id in self.delta_store[sub_id]:
                            del self.delta_store[sub_id][remove_id]
                for w_id in w_ids:
                    if w_id in self.data_store_mgr.data:
                        if sub_id not in delta_queues[w_id]:
                            delta_queues[w_id][sub_id] = deltas_queue
                            # On a new subscription, yield the whole
                            # workflow data-store as an added delta
                            if args.get('initial_burst'):
                                delta_store = create_delta_store(
                                    workflow_id=w_id)
                                delta_store[DELTA_ADDED] = (
                                    self.data_store_mgr.data[w_id])
                                deltas_queue.put(
                                    (w_id, 'initial_burst', delta_store))
                    elif w_id in self.delta_store[sub_id]:
                        del self.delta_store[sub_id][w_id]
                try:
                    with suppress(queue.Empty):
                        w_id, topic, delta_store = deltas_queue.get(False)

                        if w_id not in flow_delta_queues:
                            counters[w_id] = 0
                            flow_delta_queues[w_id] = queue.Queue()
                        flow_delta_queues[w_id].put((topic, delta_store))

                    # Only yield deltas from the same workflow if previous
                    # delta has finished processing.
                    for flow_id, flow_queue in flow_delta_queues.items():
                        if flow_queue.empty():
                            continue
                        elif flow_id in delta_processing_flows:
                            if counters[flow_id] < DELTA_PROC_WAIT:
                                continue
                            delta_processing_flows.remove(flow_id)
                        counters[flow_id] = 0
                        topic, delta_store = flow_queue.get()
                        delta_yield_queue.put((flow_id, topic, delta_store))

                    w_id, topic, delta_store = delta_yield_queue.get(False)

                    # Handle shutdown delta, don't ignore.
                    if topic == 'shutdown':
                        delta_store['shutdown'] = True
                    else:
                        # Ignore deltas that arrive more frequently
                        # than the configured interval.
                        new_time = time()
                        elapsed = new_time - old_time
                        if elapsed <= interval:
                            continue
                        old_time = new_time

                    delta_processing_flows.add(w_id)
                    op_queue.put((sub_id, w_id))
                    self.delta_store[sub_id][w_id] = delta_store
                    if sub_resolver is None:
                        yield delta_store
                    else:
                        result = await sub_resolver(root, info, **args)
                        if result:
                            yield result
                except queue.Empty:
                    await asyncio.sleep(DELTA_SLEEP_INTERVAL)
                    for flow_id in delta_processing_flows:
                        counters[flow_id] += 1
        except (GeneratorExit, asyncio.CancelledError):
            raise
        except Exception:
            import traceback
            logger.warning(traceback.format_exc())
        finally:
            for w_id in w_ids:
                if delta_queues.get(w_id, {}).get(sub_id):
                    del delta_queues[w_id][sub_id]
            if sub_id in self.delta_store:
                del self.delta_store[sub_id]
            yield None
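
The main addition over the previous example is the per-workflow gate: a workflow's next delta is released only after its previous delta has been marked processed (removed from delta_processing_flows), or after the wait counter exceeds DELTA_PROC_WAIT polling cycles. A stripped-down sketch of just that gate follows; the function name, the DELTA_PROC_WAIT value and the driver data are assumptions for illustration.

import queue

DELTA_PROC_WAIT = 10  # assumed value, for illustration only


def release_ready_deltas(flow_delta_queues, delta_processing_flows, counters,
                         delta_yield_queue):
    # Release at most one pending delta per workflow, but only if that
    # workflow is not still processing its previous delta, or has waited
    # more than DELTA_PROC_WAIT polling cycles.
    for flow_id, flow_queue in flow_delta_queues.items():
        if flow_queue.empty():
            continue
        if flow_id in delta_processing_flows:
            if counters[flow_id] < DELTA_PROC_WAIT:
                continue
            delta_processing_flows.remove(flow_id)
        counters[flow_id] = 0
        topic, delta_store = flow_queue.get()
        delta_yield_queue.put((flow_id, topic, delta_store))


# One workflow with a backlog of two deltas: only the first is released
# until the caller clears 'wf1' from delta_processing_flows.
flow_queues = {'wf1': queue.Queue()}
flow_queues['wf1'].put(('workflow', {'seq': 1}))
flow_queues['wf1'].put(('workflow', {'seq': 2}))
processing, counters, out = set(), {'wf1': 0}, queue.Queue()

release_ready_deltas(flow_queues, processing, counters, out)   # releases seq 1
processing.add('wf1')                   # as the resolver does after a yield
release_ready_deltas(flow_queues, processing, counters, out)   # seq 2 held back
processing.discard('wf1')               # delta marked processed elsewhere
release_ready_deltas(flow_queues, processing, counters, out)   # releases seq 2
print(out.qsize())  # -> 2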