Example #1
def scheduler_and_workers(n=2):
    # Yield a Scheduler and n Workers, closing them all on exit.
    s = Scheduler()
    workers = [Worker(s.address_to_workers) for i in range(n)]
    try:
        yield s, workers
    finally:
        s.close()
        for w in workers:
            w.close()
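A minimal usage sketch for the helper above, assuming the generator is meant to be wrapped with contextlib.contextmanager (the decorator is not shown in the snippet):

from contextlib import contextmanager

scheduler_and_workers = contextmanager(scheduler_and_workers)

with scheduler_and_workers(n=2) as (s, workers):
    # s is the Scheduler, workers is the list of Worker instances
    print(s.address_to_workers, len(workers))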
Example #2
def scheduler_and_workers(n=2):
    s = Scheduler(hostname='127.0.0.1')
    workers = [Worker(s.address_to_workers, hostname='127.0.0.1') for i in range(n)]
    # Busy-wait until all n workers have registered with the scheduler.
    while len(s.workers) < n:
        sleep(1e-6)
    try:
        yield s, workers
    finally:
        s.close()
        for w in workers:
            w.close()
Example #3
def scheduler_and_workers(n=2):
    s = Scheduler()
    workers = [Worker(s.address_to_workers) for i in range(n)]
    while len(s.workers) < n:
        sleep(0.01)
    try:
        yield s, workers
    finally:
        s.close()
        for w in workers:
            w.close()
Example #4
def scheduler_and_workers(n=2):
    s = Scheduler(hostname='127.0.0.1')
    workers = [
        Worker(s.address_to_workers, hostname='127.0.0.1', nthreads=10)
        for i in range(n)
    ]
    while len(s.workers) < n:
        sleep(1e-6)
    try:
        yield s, workers
    finally:
        for w in workers:
            w.close()
        s.close()
Example #5
class DaskSchedulerWrapper:
    # TODO: allow user to customize dask scheduler startup
    def __init__(self, work_dir):
        self.logger = logging.getLogger(self.__class__.__name__)
        self.scheduler = Scheduler(port=const.DASK_SCHEDULER_PORT,
                                   dashboard=True,
                                   http_prefix="/dask")

    async def start(self, queue, **kwargs):
        self.logger.info("Starting DASK Scheduler ... ")
        self.scheduler = await self.scheduler
        await self.scheduler.finished()

    async def stop(self, queue, **kwargs):
        self.logger.info("Stoping DASK Scheduler ... ")
        try:
            for worker in self.scheduler.workers_list(workers=None):
                try:
                    await self.scheduler.close_worker(worker=worker)
                except Exception as worker_close_ex:
                    self.logger.error(f"Failed to close worker {worker}",
                                      exc_info=worker_close_ex)
                    raise worker_close_ex
        except Exception as ex:
            self.logger.error("Failed to close workers", exc_info=ex)

        try:
            await self.scheduler.close()
        except Exception as ex:
            self.logger.error("Failed to close Dask Scheduler", exc_info=ex)
Example #6
async def A():
    async with Scheduler(host='tcp://127.0.0.1', port=8786, protocol='tcp') as s:
        async with Worker(s.address, resources={'CPU':8})              as w0, \
                   Worker(s.address, resources={'GPU':128, 'CPU':128}) as w1:

            async with Client(s.address, asynchronous=True) as client:
                future = client.submit(lambda x: x + 1, 10, resources={'GPU': 16, 'CPU':16})
                result = await future
                print(result)
Example #7
async def run_scheduler():
    async with Scheduler(
            interface=interface,
            protocol=protocol,
            dashboard_address=dashboard_address,
    ) as scheduler:
        comm.bcast(scheduler.address, root=0)
        comm.Barrier()
        await scheduler.finished()
Example #8
async def _run_test():
    with instance_for_test() as instance:
        async with Scheduler() as scheduler:
            async with Worker(scheduler.address) as _:
                result = await asyncio.get_event_loop().run_in_executor(
                    None, _execute, scheduler.address, instance)
                assert result.success
                assert result.result_for_solid("simple").output_value() == 1
Example #9
async def run_scheduler():
    async with Scheduler(
            interface=interface,
            protocol=protocol,
            dashboard_address=dashboard_address,
            scheduler_file=scheduler_file,
    ) as s:
        comm.Barrier()
        await s.finished()
Example #10
async def test_run_spec(cleanup):
    async with Scheduler(port=0) as s:
        workers = await run_spec(worker_spec, s.address)
        async with Client(s.address, asynchronous=True) as c:
            await c.wait_for_workers(len(worker_spec))

            await asyncio.gather(*[w.close() for w in workers.values()])

            assert not s.workers

            await asyncio.gather(*[w.finished() for w in workers.values()])
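The worker_spec mapping is not defined in the snippet; a minimal sketch of a spec in the shape run_spec consumes, with the names and thread counts chosen here purely for illustration:

from dask.distributed import Worker

worker_spec = {
    0: {"cls": Worker, "options": {"nthreads": 1}},
    1: {"cls": Worker, "options": {"nthreads": 2}},
}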
Example #11
async def test_job(job_cls):
    async with Scheduler(port=0) as s:
        job = job_cls(scheduler=s.address, name="foo", cores=1, memory="1GB")
        job = await job
        async with Client(s.address, asynchronous=True) as client:
            await client.wait_for_workers(1)
            assert list(s.workers.values())[0].name == "foo"

        await job.close()

        # Wait up to ten seconds for the worker to deregister from the scheduler.
        start = time()
        while len(s.workers):
            await asyncio.sleep(0.1)
            assert time() < start + 10
Example #12
def __init__(self, work_dir):
    self.logger = logging.getLogger(self.__class__.__name__)
    self.scheduler = Scheduler(port=const.DASK_SCHEDULER_PORT,
                               dashboard=True,
                               http_prefix="/dask")
Example #13
from threading import Thread
import multiprocessing

from tornado.ioloop import IOLoop
from distributed import Scheduler, Worker


def square(x):
    return x**2

def start_worker():
    """Starts a single worker in the current process"""
    # Needs to be another process or you get: RuntimeError: IOLoop is already running
    #loop = IOLoop.current()
    t = Thread(target=loop.start)
    t.daemon = True
    t.start()
    w = Worker('tcp://127.0.0.1:8786', loop=loop)
    w.start()  # choose randomly assigned port
    print('Starting a worker')
    return


# Start the scheduler
loop = IOLoop.current()
t = Thread(target=loop.start)
t.daemon = True
t.start()
s = Scheduler(loop=loop)
s.start('tcp://:8786')
print(s.workers)

jobs = []
for i in range(8):
    p = multiprocessing.Process(target=start_worker)
    jobs.append(p)
    p.start()
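For comparison, a minimal sketch of the same submit-and-wait round trip using the async context-manager style seen in the earlier examples; the port and the commented-out asyncio.run call are choices made here for illustration:

import asyncio
from distributed import Scheduler, Worker, Client

async def main():
    async with Scheduler(port=8786) as s:
        async with Worker(s.address):
            async with Client(s.address, asynchronous=True) as client:
                result = await client.submit(square, 10)
                print(result)  # 100

# asyncio.run(main())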
Example #14
async def g(port):
    # scheduler_port comes from the enclosing scope (not shown in the snippet)
    s = Scheduler(port=scheduler_port)
    await s
    await s.finished()
Example #15
async def f(port):
    s = Scheduler(port=scheduler_port)
    s = await s
    await s.finished()
    return 1
Example #16
def start_scheduler():
    from dask.distributed import Scheduler
    scheduler = Scheduler()
    return scheduler.address_to_clients, scheduler.address_to_workers