Example No. 1
def test_get_root_creates(db):
    assert Folder.get_or_none(name="root", parent=None) is None
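    # get_root lazily creates the root folder on first access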

    root = Folder.get_root()
    assert root.name == "root"
    assert root.parent is None

    assert Folder.get_or_none(name="root", parent=None) is not None
Example No. 2
def test_create_name_unique(db):
    root = Folder.get_root()
    f1 = root.add_folder("f1")
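    # a second folder with the same name under the same parent is rejected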

    with pytest.raises(pw.IntegrityError):
        Folder.create(name="f1", parent=root)
    with pytest.raises(pw.IntegrityError):
        root.add_folder("f1")

    # another one is fine
    assert root.add_folder("f2").name == "f2"
    # one further down is also fine
    assert f1.add_folder("f1").name == "f1"
Example No. 3
def test_complete_path(state, tree, repl):
    root = Folder.get_root()
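    # folder completions carry a trailing slash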
    alts = complete_path(root, "f")
    assert alts == ["f1/", "f2/", "f3/"]

    alts = complete_path(root, "f1")
    assert alts == ["f1/"]

    alts = complete_path(root, "f2/")
    assert alts == ["alpha/", "beta/", "gamma/"]

    state.cwd = Folder.find_by_path("/f2", state.cwd)

    alts = complete_path(root.subfolder("f2"), "a")
    assert alts == ["alpha/"]
Example No. 4
def test_job_resubmit_already_deleted(driver, state, monkeypatch):
    root = Folder.get_root()
    j1 = driver.create_job(
        command="echo 'begin'; sleep 0.2 ; echo 'end' ; exit 1", folder=root)
    j1.status = Job.Status.COMPLETED
    j1.save()

    # it never ran, so these shouldn't exist
    for path in ["exit_status_file", "stdout", "stderr"]:
        assert not os.path.exists(j1.data[path])
    for d in ["scratch_dir", "output_dir"]:
        path = j1.data[d]
        shutil.rmtree(path)

    submit = Mock()
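    # resubmitting a job whose files are already gone must not call os.remove or os.makedirs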
    with monkeypatch.context() as m:
        m.setattr(driver, "submit", submit)
        makedirs = Mock()
        m.setattr("os.makedirs", makedirs)
        remove = Mock()
        m.setattr("os.remove", remove)
        driver.resubmit(j1)
        assert makedirs.call_count == 0
        assert remove.call_count == 0
    submit.assert_called_once()
    for path in ["exit_status_file", "stdout", "stderr"]:
        assert not os.path.exists(j1.data[path])
Example No. 5
def test_get_jobs(state):
    root = Folder.get_root()
    j1 = state.create_job(command="sleep 1")

    f2 = root.add_folder("f2")
    state.cd("f2")
    j2 = state.create_job(command="sleep 1")
    j3 = state.create_job(command="sleep 1")

    assert state.get_jobs(f"{j1.job_id}")[0] == j1
    assert state.get_jobs(f"f2/{j2.job_id}")[0] == j2
    with pytest.raises(DoesNotExist):
        state.get_jobs("42")

    assert all(a == b for a, b in zip(state.get_jobs("../*"), [j1]))
    assert all(a == b for a, b in zip(state.get_jobs("*"), [j2, j3]))
    state.cwd = root
    assert all(a == b for a, b in zip(state.get_jobs("f2/*"), [j2, j3]))

    # get by id that does not exist
    with pytest.raises(DoesNotExist):
        state.get_jobs(42)

    # get path/id that does not exist
    with pytest.raises(DoesNotExist):
        state.get_jobs("/42")

    # get single job instance (this is a bit redundant)
    assert state.get_jobs(j1) == [j1]

    with pytest.raises(TypeError):
        state.get_jobs(4.2)
Example No. 6
def test_bulk_sync(driver, state):
    root = Folder.get_root()

    jobs = []
    for i in range(15):
        job = driver.create_job(
            folder=root,
            command=f"sleep {0.1 + random.random()*0.2} ; echo 'JOB{i}'")
        job.submit()
        jobs.append(job)

    sjobs = len(jobs)

    for i in range(15):
        job = driver.create_job(
            folder=root,
            command=(f"sleep {0.1 + random.random()*0.2} ; "
                     f"echo 'JOB{i+sjobs}' 1>&2 ; exit 1"),
        )
        job.submit()
        jobs.append(job)

    time.sleep(0.4)
    driver.bulk_sync_status(jobs)

    for i, job in enumerate(jobs[:15]):
        assert job.status == Job.Status.COMPLETED
        with job.stdout() as fh:
            assert fh.read().strip() == f"JOB{i}"
    for i, job in enumerate(jobs[15:]):
        assert job.status == Job.Status.FAILED
        with job.stderr() as fh:
            assert fh.read().strip() == f"JOB{i+sjobs}"
Example No. 7
def test_sync_status(driver, state, monkeypatch, tmpdir):
    root = Folder.get_root()
    j1 = driver.create_job(
        command="echo 'begin'; sleep 0.2 ; echo 'end' ; exit 1", folder=root)
    j1.data["pid"] = 123
    j1.status = Job.Status.RUNNING
    j1.save()

    proc = Mock()
    proc.is_running = Mock(return_value=False)
    monkeypatch.setattr("psutil.Process", Mock(return_value=proc))
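    # with the process reported as gone, sync_status reads the exit status file to decide the outcome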

    exit_status_file = tmpdir.join("exitcode.txt")
    exit_status_file.write("0")

    j1.data["exit_status_file"] = str(exit_status_file)
    j1u = driver.sync_status(j1)
    assert j1u.status == Job.Status.COMPLETED

    j1.status = Job.Status.RUNNING
    j1.save()

    exit_status_file.write("1")
    j1u = driver.sync_status(j1)
    assert j1u.status == Job.Status.FAILED
Example No. 8
def test_bulk_kill(driver, state, monkeypatch):
    root = Folder.get_root()

    jobs = [
        driver.create_job(folder=root, command=f"sleep 0.1; echo 'JOB{i}'")
        for i in range(15)
    ]

    for job in jobs:
        assert job.status == Job.Status.CREATED

    sbatch = Mock(side_effect=list(range(len(jobs))))
    monkeypatch.setattr(driver.slurm, "sbatch", sbatch)
    driver.bulk_submit(jobs)
    assert sbatch.call_count == len(jobs)

    for job in jobs:
        assert job.status == Job.Status.SUBMITTED

    scancel = Mock()
    monkeypatch.setattr(driver.slurm, "scancel", scancel)
    monkeypatch.setattr(driver.slurm, "sacct", Mock(return_value=[]))
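    # sacct reports nothing for these jobs, so bulk_kill leaves them marked FAILED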

    jobs = driver.bulk_kill(jobs)

    assert scancel.call_count == len(jobs)

    for job in jobs:
        assert job.status == Job.Status.FAILED
Example No. 9
def test_sync_status(driver, monkeypatch):
    root = Folder.get_root()
    j1 = driver.create_job(command="sleep 1", folder=root)

    # this context exits immediately, so the mock installed further below is the one submit uses
    with monkeypatch.context() as m:
        sbatch = Mock(return_value=1)
        m.setattr(driver.slurm, "sbatch", sbatch)

    assert j1.status == Job.Status.CREATED

    batch_job_id = 5_207_375
    monkeypatch.setattr(driver.slurm, "sbatch", Mock(return_value=batch_job_id))
    driver.submit(j1)
    assert j1.status == Job.Status.SUBMITTED
    assert j1.batch_job_id == str(batch_job_id)

    sacct_return = [
        [SlurmAccountingItem(batch_job_id, Job.Status.RUNNING, 0, {})],
        [SlurmAccountingItem(batch_job_id, Job.Status.FAILED, 0, {})],
    ]
    sacct = Mock(side_effect=sacct_return)
    monkeypatch.setattr(driver.slurm, "sacct", sacct)

    j1 = driver.sync_status(j1)
    assert j1.status == Job.Status.RUNNING
    j1 = driver.sync_status(j1)
    assert j1.status == Job.Status.FAILED
Example No. 10
def test_submit_job(driver, state, monkeypatch):
    root = Folder.get_root()
    j1 = driver.create_job(
        command="sleep 1",
        folder=root,
        cores=1,
        name="job1",
        queue="somequeue",
        walltime=timedelta(hours=5),
    )

    assert j1.status == Job.Status.CREATED

    j1.status = Job.Status.SUBMITTED
    j1.save()

    with pytest.raises(InvalidJobStatus):
        driver.submit(j1)

    j1.status = Job.Status.CREATED
    j1.save()

    batch_job_id = 5_207_375
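    # the batch job id is parsed from the sbatch output line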
    with monkeypatch.context() as m:
        sbatch = Mock(return_value=f"Submitted batch job {batch_job_id}")
        m.setattr(driver.slurm, "_sbatch", sbatch)
        driver.submit(j1)
        sbatch.assert_called_once_with(j1.data["batchfile"])

    assert j1.status == Job.Status.SUBMITTED
    assert j1.batch_job_id == str(batch_job_id)
Example No. 11
def test_resubmit_job(repl, state, capsys, monkeypatch):
    root = Folder.get_root()
    repl.do_create_job("sleep 1")
    j1 = root.jobs[-1]
    j1.ensure_driver_instance(state.config)

    assert j1.status == Job.Status.CREATED
    j1.submit()
    assert j1.status == Job.Status.SUBMITTED
    time.sleep(0.1)
    assert j1.get_status() == Job.Status.RUNNING

    monkeypatch.setattr("click.confirm", Mock(return_value=True))
    repl.do_kill_job(str(j1.job_id))

    assert j1.get_status() == Job.Status.FAILED

    repl.do_resubmit_job(str(j1.job_id))
    out, err = capsys.readouterr()
    j1.reload()
    assert j1.status == Job.Status.SUBMITTED

    with pytest.raises(UsageError):
        repl.onecmd("resubmit_job --nope")
    out, err = capsys.readouterr()
    assert "No such option" in out
Example No. 12
def test_submit_job(repl, state, capsys, monkeypatch):
    root = Folder.get_root()
    value = "VALUE VALUE VALUE"
    cmd = f"sleep 0.3; echo '{value}'"

    monkeypatch.setattr("click.confirm", Mock(return_value=True))

    repl.do_create_job(cmd)
    j1 = root.jobs[-1]
    j1.ensure_driver_instance(state.config)
    assert j1.status == Job.Status.CREATED

    repl.do_submit_job(f"{j1.job_id}")
    j1.reload()
    assert j1.status == Job.Status.SUBMITTED
    time.sleep(0.1)
    assert j1.get_status() == Job.Status.RUNNING
    time.sleep(0.3)
    assert j1.get_status() == Job.Status.COMPLETED

    out, err = capsys.readouterr()

    with pytest.raises(UsageError):
        repl.onecmd("submit_job --nope")
    out, err = capsys.readouterr()
    assert "No such option" in out
Example No. 13
def test_rm_job(state, repl, db, capsys, monkeypatch):
    root = Folder.get_root()
    j1 = state.default_driver.create_job(command="sleep 1", folder=root)
    assert len(root.jobs) == 1 and root.jobs[0] == j1
    assert Job.get_or_none(job_id=j1.job_id) is not None

    with monkeypatch.context() as m:
        confirm = Mock(return_value=True)
        m.setattr("click.confirm", confirm)
        repl.do_rm(str(j1.job_id))
        confirm.assert_called_once()

    out, err = capsys.readouterr()
    assert len(root.jobs) == 0
    assert Job.get_or_none(job_id=j1.job_id) is None

    # works in other cwd too
    alpha = root.add_folder("alpha")
    j2 = state.default_driver.create_job(command="sleep 1", folder=alpha)
    assert j1.job_id != j2.job_id
    assert Job.get_or_none(job_id=j2.job_id) is not None
    assert len(alpha.jobs) == 1 and alpha.jobs[0] == j2
    assert state.cwd == root
    with monkeypatch.context() as m:
        confirm = Mock(return_value=True)
        m.setattr("click.confirm", confirm)
        repl.do_rm(str(j2.job_id))
        confirm.assert_called_once()
    out, err = capsys.readouterr()
    assert Job.get_or_none(job_id=j2.job_id) is None
    assert len(alpha.jobs) == 0
Example No. 14
def test_rm(state, repl, db, capsys, monkeypatch):
    root = Folder.get_root()

    repl.do_rm("../nope")
    out, err = capsys.readouterr()
    assert "not exist" in out

    repl.do_rm("/")
    out, err = capsys.readouterr()
    assert "annot delete" in out and "root" in out

    root.add_folder("alpha")
    with monkeypatch.context() as m:
        confirm = Mock(return_value=False)
        m.setattr("click.confirm", confirm)
        repl.onecmd("rm -r alpha")
        confirm.assert_called_once()

    assert root.subfolder("alpha") is not None
    with monkeypatch.context() as m:
        confirm = Mock(return_value=True)
        m.setattr("click.confirm", confirm)
        repl.onecmd("rm -r alpha")
        confirm.assert_called_once()
    assert root.subfolder("alpha") is None
    out, err = capsys.readouterr()
    assert len(out) > 0
Example No. 15
def test_get_folders_jobs_pattern(state):
    root = Folder.get_root()

    folders_alpha = [root.add_folder(f"alpha_{n}") for n in range(10)]
    folders_beta = [root.add_folder(f"beta_{n}") for n in range(13)]

    jobs_alpha = []
    for f in folders_alpha:
        with state.pushd(f):
            jobs_alpha.append(state.create_job(command="sleep 1"))

    jobs_beta = []
    for f in folders_beta:
        with state.pushd(f):
            jobs_beta.append(state.create_job(command="sleep 1"))

    globbed_alpha = state.get_jobs("alpha_*/*")
    assert len(globbed_alpha) == len(jobs_alpha)
    assert all(a == b for a, b in zip(globbed_alpha, jobs_alpha))

    globbed_beta = state.get_jobs("beta_*/*")
    assert len(globbed_beta) == len(jobs_beta)
    jobid = lambda j: j.job_id
    assert all(a == b for a, b in zip(sorted(globbed_beta, key=jobid),
                                      sorted(jobs_beta, key=jobid)))
Example No. 16
def test_rm_job(state, db):
    root = Folder.get_root()
    j1 = state.create_job(command="sleep 1")
    assert len(root.jobs) == 1 and root.jobs[0] == j1
    assert Job.get_or_none(job_id=j1.job_id) is not None
    state.rm(str(j1.job_id))  # confirm default is True
    assert len(root.jobs) == 0
    assert Job.get_or_none(job_id=j1.job_id) is None

    # should also work with instance
    j2 = state.create_job(command="sleep 1")
    assert len(root.jobs) == 1 and root.jobs[0] == j2
    assert Job.get_or_none(job_id=j2.job_id) is not None
    confirm = Mock(return_value=False)
    state.rm(j2, confirm=confirm)
    confirm.assert_called_once()
    # no change
    assert len(root.jobs) == 1 and root.jobs[0] == j2
    assert Job.get_or_none(job_id=j2.job_id) is not None

    # now change
    confirm = Mock(return_value=True)
    state.rm(j2, confirm=confirm)
    confirm.assert_called_once()
    assert len(root.jobs) == 0
    assert Job.get_or_none(job_id=j2.job_id) is None

    # works in other cwd too
    alpha = root.add_folder("alpha")
    j3 = state.default_driver.create_job(command="sleep 1", folder=alpha)
    assert len(alpha.jobs) == 1 and alpha.jobs[0] == j3
    assert Job.get_or_none(job_id=j3.job_id) is not None
    state.rm(str(j3.job_id))
    assert len(alpha.jobs) == 0
    assert Job.get_or_none(job_id=j3.job_id) is None
Example No. 17
def test_mv_bulk_both(state):
    root = Folder.get_root()
    f1, f2 = [root.add_folder(n) for n in ("f1", "f2")]
    f3 = f1.add_folder("f3")

    with state.pushd(f1):
        j1 = state.create_job(command="sleep 1")

    state.mv("f1/*", "f2")
    j1.reload()
    assert j1.folder == f2
    f3.reload()
    assert f3.parent == f2

    f3.parent = root
    f3.save()
    j1.folder = root
    j1.save()

    # attempt to move all in root to f2. this will fail for f2, but only for f2
    state.mv("*", "f2")
    for o in (f1, f2, f3, j1):
        o.reload()
    assert f1.parent == f2
    assert f2.parent == root
    assert f3.parent == f2
    assert j1.folder == f2
Example No. 18
def test_job_resubmit(driver, state, monkeypatch):
    root = Folder.get_root()
    j1 = driver.create_job(
        command="echo 'begin'; sleep 0.2 ; echo 'end' ; exit 1", folder=root)
    j1.submit()
    j1.wait(poll_interval=0.1)
    assert j1.status == Job.Status.FAILED
    with j1.stdout() as fh:
        assert fh.read().strip() == "begin\nend"

    # we need to prevent driver from actually calling submit
    submit = Mock()
    with monkeypatch.context() as m:
        m.setattr(driver, "submit", submit)
        makedirs = Mock()
        m.setattr("os.makedirs", makedirs)
        remove = Mock(wraps=os.remove)
        m.setattr("os.remove", remove)
        driver.resubmit(j1)
        assert makedirs.call_count == 2
    submit.assert_called_once()
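    # the previous run's output files should have been removed during resubmit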
    remove.assert_has_calls(
        [call(j1.data[p]) for p in ["exit_status_file", "stdout", "stderr"]])

    # now actually hit submit
    driver.submit(j1)

    assert j1.status == Job.Status.SUBMITTED
    j1.wait(poll_interval=0.1)
    assert j1.status == Job.Status.FAILED
Example No. 19
def test_bulk_kill(driver, state, monkeypatch):
    root = Folder.get_root()

    jobs = [
        driver.create_job(folder=root, command=f"sleep 0.1; echo 'JOB{i}'")
        for i in range(15)
    ]

    for job in jobs:
        assert job.status == Job.Status.CREATED

    condor_submit = Mock(side_effect=list(range(len(jobs))))
    monkeypatch.setattr(driver.htcondor, "condor_submit", condor_submit)
    driver.bulk_submit(jobs)
    assert condor_submit.call_count == len(jobs)

    for job in jobs:
        assert job.status == Job.Status.SUBMITTED

    condor_rm = Mock()
    monkeypatch.setattr(driver.htcondor, "condor_rm", condor_rm)
    monkeypatch.setattr(driver.htcondor, "condor_q", Mock(return_value=[]))
    monkeypatch.setattr(driver.htcondor, "condor_history",
                        Mock(return_value=[]))
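    # with the queue and history empty, killed jobs end up marked FAILED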

    jobs = driver.bulk_kill(jobs)

    assert condor_rm.call_count == len(jobs)

    for job in jobs:
        assert job.status == Job.Status.FAILED
Example No. 20
def test_mv_bulk_job(state):
    root = Folder.get_root()

    f1, f2, f3 = [root.add_folder(n) for n in ("f1", "f2", "f3")]
    assert len(root.children) == 3

    state.cwd = f1
    j1, j2, j3, j4, j5 = [
        state.create_job(command="sleep 1") for _ in range(5)
    ]
    assert len(f1.jobs) == 5

    state.cwd = root

    state.mv("f1/*", f3)
    assert len(f1.jobs) == 0 and len(f3.jobs) == 5
    assert len(root.children) == 3
    for j in (j1, j2, j3, j4, j5):
        j.reload()
        assert j.folder == f3

    state.cwd = f2
    state.mv("../f3/*", ".")
    assert len(f3.jobs) == 0 and len(f2.jobs) == 5
    for j in (j1, j2, j3, j4, j5):
        j.reload()
        assert j.folder == f2
Example No. 21
def test_stdout_stderr(driver, state, monkeypatch):
    root = Folder.get_root()
    j1 = driver.create_job(
        command="sleep 1",
        folder=root,
        cores=1,
        name="job1",
        walltime=timedelta(hours=5),
    )

    assert j1.status == Job.Status.CREATED

    batch_job_id = 5_207_375
    condor_submit = Mock(return_value=batch_job_id)
    monkeypatch.setattr(driver.htcondor, "condor_submit", condor_submit)
    driver.submit(j1)

    assert j1.status == Job.Status.SUBMITTED
    assert j1.batch_job_id == str(batch_job_id)

    stdout = "VALUE VALUE VALUE"

    with open(j1.data["stdout"], "w") as fh:
        fh.write(stdout)

    with driver.stdout(j1) as fh:
        assert stdout == fh.read()

    with pytest.raises(NotImplementedError):
        driver.stderr(j1)
Example No. 22
def test_bulk_sync_status_invalid_id(driver, state, monkeypatch):

    root = Folder.get_root()

    jobs = driver.bulk_create_jobs([{
        "folder": root,
        "command": "sleep 1"
    } for i in range(10)])

    condor_submit = Mock(side_effect=[i + 1 for i in range(len(jobs))])
    monkeypatch.setattr(driver.htcondor, "condor_submit", condor_submit)
    driver.bulk_submit(jobs)

    t1 = datetime(2020, 8, 3, 20, 15)
    HTAI = HTCondorAccountingItem
    condor_q_return = [
        HTAI(i + 1, Job.Status.RUNNING, 0, t1, t1) for i in range(len(jobs))
    ]
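    # condor_history additionally reports an id that belongs to none of our jobs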
    condor_history_return = [HTAI(12_345_665, Job.Status.UNKNOWN, 0, t1, t1)]
    # pretend they're all running now
    monkeypatch.setattr(driver.htcondor, "condor_q",
                        Mock(return_value=condor_q_return))
    monkeypatch.setattr(driver.htcondor, "condor_history",
                        Mock(return_value=condor_history_return))
    jobs = driver.bulk_sync_status(jobs)

    for job in jobs:
        assert job.status == Job.Status.RUNNING
        assert job.updated_at == t1
Example No. 23
def test_job_bulk_resubmit_no_submit(driver, state, monkeypatch):
    root = Folder.get_root()

    jobs = [
        driver.create_job(
            command="echo 'begin'; sleep 0.2 ; echo 'end' ; exit 1",
            folder=root),
        driver.create_job(
            command="echo 'begin'; sleep 0.2 ; echo 'end' ; exit 1",
            folder=root),
        driver.create_job(
            command="echo 'begin'; sleep 0.2 ; echo 'end' ; exit 1",
            folder=root),
    ]

    condor_submit = Mock(side_effect=[1, 2, 3])
    monkeypatch.setattr(driver.htcondor, "condor_submit", condor_submit)
    driver.bulk_submit(jobs)
    assert condor_submit.call_count == 3

    for job in jobs:
        job.status = Job.Status.COMPLETED
        job.save()

    bulk_submit = Mock()
    with monkeypatch.context() as m:
        m.setattr(driver.htcondor, "condor_q", Mock(return_value=[]))
        m.setattr(driver.htcondor, "condor_history", Mock(return_value=[]))
        m.setattr(driver, "bulk_submit", bulk_submit)
        driver.bulk_resubmit(jobs, do_submit=False)
    assert bulk_submit.call_count == 0
Example No. 24
def test_submit_job(driver, state, monkeypatch):
    root = Folder.get_root()
    j1 = driver.create_job(
        command="sleep 1",
        folder=root,
        cores=1,
        name="job1",
        walltime=timedelta(hours=5),
    )

    assert j1.status == Job.Status.CREATED

    j1.status = Job.Status.SUBMITTED
    j1.save()

    with pytest.raises(InvalidJobStatus):
        driver.submit(j1)

    j1.status = Job.Status.CREATED
    j1.save()

    batch_job_id = 5_207_375

    with monkeypatch.context() as m:
        condor_submit = Mock(return_value=batch_job_id)
        m.setattr(driver.htcondor, "condor_submit", condor_submit)
        driver.submit(j1)
        condor_submit.assert_called_once_with(j1)

    assert j1.status == Job.Status.SUBMITTED
    assert j1.batch_job_id == str(batch_job_id)
Example No. 25
def test_cleanup_driver(driver, state, monkeypatch):
    root = Folder.get_root()
    j1 = driver.create_job(command="sleep 1", folder=root)

    assert j1.status == Job.Status.CREATED
    assert os.path.exists(j1.data["log_dir"])
    assert os.path.exists(j1.data["output_dir"])

    # disable job updates
    monkeypatch.setattr(driver, "sync_status", Mock(side_effect=lambda j: j))

    j1.status = Job.Status.SUBMITTED
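    # cleanup refuses to run while the job is still SUBMITTED or RUNNING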
    with pytest.raises(InvalidJobStatus):
        driver.cleanup(j1)
    assert os.path.exists(j1.data["log_dir"])
    assert os.path.exists(j1.data["output_dir"])
    j1.status = Job.Status.RUNNING
    with pytest.raises(InvalidJobStatus):
        driver.cleanup(j1)
    assert os.path.exists(j1.data["log_dir"])
    assert os.path.exists(j1.data["output_dir"])

    j1.status = Job.Status.COMPLETED

    driver.cleanup(j1)
    assert not os.path.exists(j1.data["log_dir"])
    assert not os.path.exists(j1.data["output_dir"])
Example No. 26
def test_bulk_create(driver, state):
    root = Folder.get_root()
    jobs = driver.bulk_create_jobs(
        [{"folder": root, "command": "sleep 1"} for i in range(10)]
    )
    assert len(jobs) == 10
    for job in jobs:
        assert job.status == Job.Status.CREATED
Example No. 27
def test_run_killed(driver, state):
    root = Folder.get_root()
    j1 = driver.create_job(command="sleep 10", folder=root)
    j1.submit()
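    # kill the underlying process out from under the driver; the job then ends up UNKNOWN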
    proc = psutil.Process(pid=j1.data["pid"])
    proc.kill()
    j1.wait()
    assert j1.status == Job.Status.UNKNOWN
Example No. 28
def test_path(db):
    root = Folder.get_root()
    assert root.path == "/"

    f1 = Folder.create(name="f1", parent=root)
    assert f1.path == "/f1"
    f2 = Folder.create(name="f2", parent=f1)
    assert f2.path == "/f1/f2"
    f3 = Folder.create(name="f3", parent=f2)
    assert f3.path == "/f1/f2/f3"
    f4 = Folder.create(name="f4", parent=f3)
    assert f4.path == "/f1/f2/f3/f4"

    # move stuff around
    f4.parent = f2
    f4.save()
    assert f4.path == "/f1/f2/f4"
Example No. 29
def test_mv_folder(state, repl, capsys):
    root = Folder.get_root()

    repl.onecmd("mv --help")
    out, err = capsys.readouterr()

    f1, f2, f3, f4, f5 = [
        root.add_folder(n) for n in ("f1", "f2", "f3", "f4", "f5")
    ]

    assert len(root.children) == 5
    assert len(f2.children) == 0

    # actual move
    repl.onecmd("mv f1 f2")
    assert len(root.children) == 4
    f2.reload()
    assert len(f2.children) == 1 and f2.children[0] == f1
    f1.reload()
    assert f1.parent == f2
    out, err = capsys.readouterr()

    # rename f3 -> f3x
    repl.onecmd("mv f3 f3x")
    out, err = capsys.readouterr()
    f3.reload()
    assert len(root.children) == 4
    assert f3.name == "f3x"

    # another move
    repl.onecmd("mv f3x f4")
    assert len(f4.children) == 1 and f4.children[0] == f3
    f3.reload()
    assert f3.parent == f4
    assert f3.name == "f3x"
    out, err = capsys.readouterr()

    # move rename at the same time
    repl.onecmd("cd f2")
    repl.onecmd("mv ../f5 ../f4/f5x")
    out, err = capsys.readouterr()
    f5.reload()
    assert len(f4.children) == 2
    assert f5.name == "f5x"
    assert f5.parent == f4

    # try move to a nonexistent folder
    repl.onecmd("cd /")
    with pytest.raises(ValueError):
        repl.onecmd("mv f2/f1 /nope/blub")
    out, err = capsys.readouterr()
    assert "/nope" in out and "not exist" in out

    # try to move a nonexistent folder
    with pytest.raises(DoesNotExist):
        repl.onecmd("mv ../nope f1")
    out, err = capsys.readouterr()
    assert "../nope" in out and "No such" in out
Example No. 30
def test_kill_job_recursive(state, sample_jobs):
    root = Folder.get_root()

    assert all(j.status == Job.Status.CREATED for j in sample_jobs)
    state.kill_job("/", recursive=True)

    for j in sample_jobs:
        j.reload()

    assert all(j.status == Job.Status.FAILED for j in sample_jobs)