Example #1
def test_reverse_dag():
    d = DAG()
    d.add_vertex('a')
    d.add_vertex('b', predecessors=['a'])
    d.add_vertex('c', predecessors=['b'])
    d.add_vertex('d', predecessors=['c'])

    it = DAGIterator(d)
    assert [k for k, _ in it] == ['a', 'b', 'c', 'd']

    reverse_d = d.reverse_graph()
    reverse_it = DAGIterator(reverse_d)
    assert [k for k, _ in reverse_it] == ['d', 'c', 'b', 'a']
Example #2
def test_reverse_dag():
    d = DAG()
    d.add_vertex("a")
    d.add_vertex("b", predecessors=["a"])
    d.add_vertex("c", predecessors=["b"])
    d.add_vertex("d", predecessors=["c"])

    it = DAGIterator(d)
    assert [k for k, _ in it] == ["a", "b", "c", "d"]

    reverse_d = d.reverse_graph()
    reverse_it = DAGIterator(reverse_d)
    assert [k for k, _ in reverse_it] == ["d", "c", "b", "a"]
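The two tests above exercise reverse_graph() on a simple chain. As a hedged extension (not taken from this page), here is the same check on a diamond-shaped DAG, assuming these snippets use e3-core's e3.collection.dag module: reversing the graph must yield the sink "d" first and the source "a" last, while "b" and "c" may appear in either order.

from e3.collection.dag import DAG, DAGIterator

# "a" fans out to "b" and "c", which both feed "d".
d = DAG()
d.add_vertex("a")
d.add_vertex("b", predecessors=["a"])
d.add_vertex("c", predecessors=["a"])
d.add_vertex("d", predecessors=["b", "c"])

# In the reversed graph, "d" has no predecessors and "a" depends on both
# "b" and "c", so a topological walk starts at "d" and ends at "a".
order = [k for k, _ in DAGIterator(d.reverse_graph())]
assert order[0] == "d" and order[-1] == "a"
assert set(order[1:3]) == {"b", "c"}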
Example #3
def test_iter_with_busy_state():
    d = DAG()
    d.add_vertex('a')
    d.add_vertex('b', predecessors=['a'])

    it = DAGIterator(d, enable_busy_state=True)
    for nid, data in it:
        if nid is None:
            it.leave('a')
Example #4
def test_iter_with_busy_state():
    d = DAG()
    d.add_vertex("a")
    d.add_vertex("b", predecessors=["a"])

    it = DAGIterator(d, enable_busy_state=True)
    for nid, _ in it:
        if nid is None:
            it.leave("a")
Example #5
    def __init__(
        self,
        dag: DAG,
        job_factory: JobFactoryCallback,
        collect_result: CollectResultCallback,
        jobs: int = 0,
        dyn_poll_interval: bool = True,
    ):
        """Initialize a MultiprocessScheduler instance.

        :param dag: DAG in which nodes represent units of work to do and edges
            represent dependencies between them.
        :param job_factory: Callback to turn DAG nodes into corresponding
            Worker instances.
        :param collect_result: Callback to extract work result from a worker.
        :param jobs: Maximum number of workers allowed to run in parallel. If
            left to 0, use the number of available cores on the current
            machine.
        :param dyn_poll_interval: If True, the interval between polling
            iterations is adjusted automatically. Otherwise it is fixed at
            0.1 seconds.
        """
        e = Env()
        self.parallelism = jobs or e.build.cpu.cores
        self.dag = dag

        self.workers: List[Optional[SomeWorker]] = [None] * self.parallelism
        """
        List of active workers. Indexes in this list correspond to slot IDs
        passed to workers: `self.workers[N].slot == N` for all present
        wor,kers. When the worker is done, we just replace it with None, and
        when a slot is None we can create a new worker for it.
        """

        self.iterator = DAGIterator(self.dag, enable_busy_state=True)
        """Iterator to get ready-to-run units of work."""

        self.job_factory = job_factory
        self.collect_result = collect_result

        self.active_workers = 0
        """Equivalent to the number of non-None slots in ``self.workers``."""

        self.poll_interval = 0.1
        """Time (in seconds) to wait between each round of worker polling."""

        self.dyn_poll_interval = dyn_poll_interval

        self.no_free_item = False
        """
        True if no unit of work is currently ready to run, i.e. all work left
        to schedule depends on work that has not completed yet.
        """

        self.no_work_left = False
        """
Example #6
    def init_state(self, dag: DAG) -> None:
        """Reinitialize the scheduler state (internal function).

        :param dag: the DAG representing the list of jobs to execute
        """
        # Active jobs
        self.active_jobs = []

        # Total number of jobs in the queues
        self.queued_jobs = 0

        # Have all jobs been queued?
        self.all_jobs_queued = False

        # Message queue to get job end notifications
        self.message_queue = Queue()

        self.dag = dag
        self.dag_iterator = DAGIterator(dag, enable_busy_state=True)
        self.start_time = datetime.now()
        self.stop_time = None
        self.max_active_jobs = 0