Example #1
def test_work_item():

    q = JobQueue()

    js = JobSpec(command="cmd.exe /C sleep 2.5",
                 working_dir="d:/temp",
                 cores=2)

    job_id = q.submit([js])[0]
    job, q2 = q.find_job(job_id)

    wi = WorkItem(job=job, cores=[0, 1])

    print(wi)

    wi.start()

    # poll the work item until it finishes (at most ~5 s)
    for i in range(10):
        print(i)
        time.sleep(0.5)
        print(wi.get_status())
        if not wi.is_running():
            break

    print("done")
Example #2
def test_job_depends():

    if platform.platform().lower().startswith("linux"):
        job_spec = JobSpec(
            command=f"bash {data_path}/sleepy.sh",
            working_dir=pwd,
            log_file="test.log",
            priority=5,
            depends=[],
        )
    else:
        job_spec = JobSpec(
            command=f"{data_path}/sleepy.bat",
            working_dir=pwd,
            log_file="test.log",
            priority=5,
            depends=[],
        )

    q = JobQueue()

    # response = client.post("qsub", json=[job_spec.dict()])
    # dependency = response.json()
    # dependency = ujson.loads(dependency)
    job_ids = q.submit([job_spec])

    # submit a second job that will not start until the first one finishes
    job_spec = JobSpec(
        depends=job_ids,
        command=f'{data_path}/echo_it.bat "goodbye"',
        working_dir=pwd,
    )
    q.submit([job_spec])
Example #3
def test_is_queued():

    q = JobQueue()

    job_spec = JobSpec(
        command=f'{data_path}/echo_it.bat "hello"',
        working_dir=pwd,
        log_file="test.log",
        priority=5,
        depends=[],
    )

    job_id = q.submit([job_spec])[0]
    # job_id = q.submitted([job_spec])
    # response = client.post("qsub", json=[job_spec.dict()])

    assert job_id == JobID.parse_obj("1.0")
    job = q.find_job(job_id)[0]

    assert job.job_spec.command == job_spec.command

    assert job.job_spec.priority == 5
    assert job.job_spec.working_dir == pwd
    assert job.job_spec.log_file == "test.log"
    assert job.job_spec.depends == []

    assert len(q.queued_jobs) == 1
Example #4
def test_submit():

    q = JobQueue()

    assert len(q.queued_jobs) == 0
    js = get_job_spec()

    job_ids = q.submit([js])
    assert len(q.queued_jobs) == 1

    js_next = q.next_job()
    assert js_next.job_id == job_ids[0]
Example #5
def test_priority():

    q = JobQueue()

    js1 = JobSpec(command=hello, working_dir=".", priority=10)
    js2 = JobSpec(command=goodbye, working_dir=".", priority=15)

    job_ids = q.submit([js1, js2])

    exec_job_1 = q.next_job()
    q.on_job_started(exec_job_1.job_id)
    exec_job_2 = q.next_job()
    q.on_job_started(exec_job_2.job_id)

    # the job submitted with the higher priority value (js2, priority=15) is dispatched first
    assert exec_job_1.job_id == JobID(group=2, index=0)
    assert exec_job_2.job_id == JobID(group=1, index=0)
Example #6
def test_pool():

    q = JobQueue()

    js = JobSpec(command="cmd.exe /C sleep 2.5",
                 working_dir="d:/temp",
                 cores=1)

    q.submit([js] * 20)
    # job, q2 = q.find_job(job_id)

    pool = DynamicProcessPool(q, 20)

    pool._start_manager_thread()
    # give the pool a few seconds to pick jobs up off the queue
    time.sleep(4)
    print(q.running_jobs.values())
    assert len(q.running_jobs) > 0

    time.sleep(10)
    pool.join(wait=True)
    print(q.running_jobs.values())
    assert len(q.running_jobs) == 0
Example #7
def test_prune():

    # DEFAULT_CONFIG.prune_job_limt = 10

    q = JobQueue()
    q.completed_limit = 10

    for i in range(15):
        js1 = JobSpec(command=hello, working_dir=".", priority=10)
        job1 = q.submit([js1])

    assert len(q.queued_jobs) == 15
    assert len(q.running_jobs) == 0
    assert len(q.completed_jobs) == 0

    for i in range(15):
        q.on_job_started(q.next_job().job_id)

    assert len(q.queued_jobs) == 0
    assert len(q.running_jobs) == 15
    assert len(q.completed_jobs) == 0

    for job_id, job in list(q.running_jobs.items()):
        q.on_job_finished(job_id)

    assert len(q.queued_jobs) == 0
    assert len(q.running_jobs) == 0
    assert len(q.completed_jobs) == 15

    q.prune()

    assert len(q.queued_jobs) == 0
    assert len(q.running_jobs) == 0
    assert len(q.completed_jobs) == 5
Example #8
def test_save_and_read_queue():

    # DEFAULT_CONFIG.prune_job_limt = 10

    q = JobQueue(start_manager_thread=False)
    q.completed_limit = 10
    for i in range(15):
        js1 = JobSpec(command=hello, working_dir=".", priority=10)
        job1 = q.submit([js1])

    assert len(q.queued_jobs) == 15
    assert len(q.running_jobs) == 0
    assert len(q.completed_jobs) == 0

    for i in range(10):
        q.on_job_started(q.next_job().job_id)

    assert len(q.queued_jobs) == 5
    assert len(q.running_jobs) == 10
    assert len(q.completed_jobs) == 0

    i = 0
    for job_id, job in list(q.running_jobs.items()):
        q.on_job_finished(job_id)
        i += 1
        if i == 5:
            break

    assert len(q.queued_jobs) == 5
    assert len(q.running_jobs) == 5
    assert len(q.completed_jobs) == 5

    q.save()

    q2 = JobQueue()
    q2.load()

    # jobs that were running when the queue was saved show up as queued after loading
    assert len(q.queued_jobs) + len(q.running_jobs) == len(q2.queued_jobs)
    # assert len(q.running_jobs) == len(q2.running_jobs)
    assert len(q.completed_jobs) == len(q2.completed_jobs)
Example #9
class Application(FastAPI):
    """
    LoQuTuS Job Scheduling Server
    """

    config: object = None

    def __init__(self):
        super().__init__()

        if Path(".env").exists():
            self.config = Configuration.load_env_file(".env")
        else:
            self.config = Configuration()

        self._setup_logging(None)
        self.queue = JobQueue(
            name="default_queue",
            queue_file=self.config.queue_file,
            completed_limit=self.config.completed_limit,
            config=self.config,
        )

        self.queue._start_manager_thread()
        self.queue.log = self.log

        self.log.info(f"Starting up LoQuTuS server - {VERSION}")

        self.pool = None
        self.__start_workers(self.config.nworkers)

        self.log.info(f"Visit {self.config.url}/qstatus to view the queue status")

        if self.config.resume_on_start_up and Path(self.config.queue_file).exists():
            self.log.info("Attempting to resume queue")
            self.queue.load()

    def __start_workers(self, nworkers: int = DEFAULT_WORKERS):
        """Starts the worker pool

        Parameters
        ----------
        nworkers: int
            number of workers
        """
        # if nworkers is None:
        #     nworkers = self.config.nworkers

        # self.pool = cf.ProcessPoolExecutor(max_workers=nworkers)
        self.pool = DynamicProcessPool(
            queue=self.queue, max_workers=nworkers, feed_delay=0.05, manager_delay=2.0
        )
        self.pool._start_manager_thread()
        # self.pool.add_event_callback(self.receive_pool_events)
        self.log.info("Worker pool started with {} workers".format(nworkers))

    def _setup_logging(self, log_file: str):
        """
        Sets up logging. A console and file logger are used. The _DEBUG flag on
        the SQServer instance controls the log level.

        Parameters
        ----------
        log_file: str
        """
        from lqts.simple_logging import getLogger, Level

        self.log = getLogger("lqts", Level.INFO)