Code Example #1
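Note: these snippets are excerpted from a larger test module. Each assumes the module-level imports visible in the calls (json, pathlib, shutil, tempfile, time, uuid, pytest, dcoraid, and the Qt bindings) as well as shared test helpers: a common module providing get_api() and make_upload_task(), dpath (the path to a sample .rtdc file), and names such as PersistentUploadJobList, load_task, create_dataset, and DCORAid imported from the package under test.
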
def test_persistent_upload_joblist_basic():
    """basic job tests"""
    api = common.get_api()
    td = pathlib.Path(tempfile.mkdtemp(prefix="persistent_uj_list_"))
    pujl_path = td / "joblistdir"
    task_path = common.make_upload_task()
    pujl = PersistentUploadJobList(pujl_path)
    uj = load_task(task_path, api=api)

    # add a job
    pujl.immortalize_job(uj)
    assert uj in pujl
    assert uj.dataset_id in pujl

    # find that job
    uj_same = pujl.summon_job(uj.dataset_id, api=api)
    assert uj_same is not uj, "not same instance"
    assert uj_same.__getstate__() == uj.__getstate__(), "same data"
    ids = pujl.get_queued_dataset_ids()
    assert uj.dataset_id in ids

    # remove a job
    assert pujl.job_exists(uj.dataset_id)
    assert pujl.is_job_queued(uj.dataset_id)
    assert not pujl.is_job_done(uj.dataset_id)
    pujl.obliterate_job(uj.dataset_id)
    assert uj not in pujl
    assert not pujl.job_exists(uj.dataset_id)
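The persistent-job-list tests above create their temporary directory with tempfile.mkdtemp and never remove it. A minimal sketch of a self-cleaning alternative using pytest's built-in tmp_path fixture (the fixture name pujl_path is hypothetical, not part of the test suite):

import pytest


@pytest.fixture
def pujl_path(tmp_path):
    # tmp_path is a per-test temporary directory that pytest
    # creates and garbage-collects automatically
    return tmp_path / "joblistdir"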
Code Example #2
def test_load_with_existing_dataset():
    api = common.get_api()
    # create some metadata
    dataset_dict = common.make_dataset_dict(hint="task_test")
    # post dataset creation request
    dataset_dict_with_id = create_dataset(dataset_dict=dataset_dict,
                                          resources=[dpath],
                                          api=api)
    task_path = common.make_upload_task(dataset_dict=dataset_dict_with_id,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name])
    uj = task.load_task(task_path, api=api)
    assert uj.dataset_id == dataset_dict_with_id["id"]
    # skipping the upload should work, since it's already uploaded
    uj.set_state("online")
    # wait for the resource verification to complete (up to 60 s)
    for _ in range(600):
        uj.task_verify_resources()
        if uj.state == "done":
            break
        time.sleep(.1)
    else:
        raise AssertionError("State not 'done' - no verification within 60s!")
Code Example #3
def test_load_basic():
    api = common.get_api()
    task_path = common.make_upload_task(task_id="zpowiemsnh",
                                        resource_names=["humdinger.rtdc"])
    assert task.task_has_circle(task_path)
    uj = task.load_task(task_path, api=api)
    assert uj.task_id == "zpowiemsnh"
    assert uj.resource_names == ["humdinger.rtdc"]
Code Example #4
def test_dataset_id_does_not_exist():
    api = common.get_api()
    # create a fake ID
    dataset_id = str(uuid.uuid4())
    # create a new task with the fake dataset ID
    task_path = common.make_upload_task(dataset_id=dataset_id)
    # loading the task should fail, because the dataset does not exist
    with pytest.raises(dcoraid.api.APINotFoundError, match=dataset_id):
        task.load_task(task_path, api=api)
Code Example #5
def test_missing_owner_org():
    api = common.get_api()
    # create some metadata
    dataset_dict = common.make_dataset_dict(hint="task_test")
    dataset_dict.pop("owner_org")
    task_path = common.make_upload_task(dataset_dict=dataset_dict)
    assert not task.task_has_circle(task_path)
    with pytest.raises(dcoraid.api.APIConflictError,
                       match="A circle must be provided"):
        task.load_task(task_path, api=api)
Code Example #6
def test_no_ids():
    api = common.get_api()
    # create some metadata
    dataset_dict = common.make_dataset_dict(hint="task_test")
    task_path = common.make_upload_task(dataset_dict=dataset_dict,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name],
                                        task_id=None)
    with pytest.raises(ValueError,
                       match="or pass the dataset_id via the dataset_kwargs"):
        task.load_task(task_path, api=api)
Code Example #7
def test_persistent_upload_joblist_error_exists():
    """test things when a job is done"""
    api = common.get_api()
    td = pathlib.Path(tempfile.mkdtemp(prefix="persistent_uj_list_"))
    pujl_path = td / "joblistdir"
    task_path = common.make_upload_task()
    pujl = PersistentUploadJobList(pujl_path)
    uj = load_task(task_path, api=api)
    pujl.immortalize_job(uj)
    with pytest.raises(FileExistsError, match="already present at"):
        pujl.immortalize_job(uj)
Code Example #8
def test_resource_supplements():
    task_path = common.make_upload_task(resource_paths=[dpath],
                                        resource_supplements=[{
                                            "chip": {
                                                "name": "7x2",
                                                "master name": "R1"
                                            }
                                        }])
    uj = task.load_task(task_path, api=common.get_api())
    assert uj.supplements[0]["chip"]["name"] == "7x2"
    assert uj.supplements[0]["chip"]["master name"] == "R1"
Code Example #9
def test_resource_supplements_with_other_files():
    task_path = common.make_upload_task(
        resource_paths=[__file__, dpath],
        resource_names=["test.py", "other_data.rtdc"],
        resource_supplements=[{}, {
            "chip": {
                "name": "7x2",
                "master name": "R1"
            }
        }])
    uj = task.load_task(task_path, api=common.get_api())
    assert len(uj.supplements[0]) == 0
Code Example #10
def test_load_with_update():
    api = common.get_api()
    task_path = common.make_upload_task(task_id="blackfalcon",
                                        resource_names=["marvel.rtdc"])
    assert task.task_has_circle(task_path)
    uj = task.load_task(task_path, api=api, update_dataset_id=True)
    assert uj.task_id == "blackfalcon"
    assert uj.resource_names == ["marvel.rtdc"]
    # the task file on disk should now contain the new dataset_id
    with open(task_path) as fd:
        task_dict = json.load(fd)
        assert task_dict["dataset_dict"]["id"] == uj.dataset_id
Code Example #11
def test_custom_dataset_dict_2():
    api = common.get_api()
    # create a task file (the dataset itself is created during load_task)
    task_path = common.make_upload_task(dataset_dict=True,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name])
    dataset_dict = common.make_dataset_dict()
    dataset_dict["authors"] = "Captain Hook!"
    uj = task.load_task(task_path, api=api, dataset_kwargs=dataset_dict)
    # now make sure the authors were set correctly
    ddict = api.get("package_show", id=uj.dataset_id)
    assert ddict["authors"] == "Captain Hook!"
Code Example #12
def test_resource_name_lengths():
    """Make sure ValueError is raised when list lengths do not match"""
    task_path = common.make_upload_task(resource_paths=[__file__, dpath],
                                        resource_names=["other_data.rtdc"],
                                        resource_supplements=[{}, {
                                            "chip": {
                                                "name": "7x2",
                                                "master name": "R1"
                                            }
                                        }])
    with pytest.raises(ValueError,
                       match="does not match number of resource names"):
        task.load_task(task_path, api=common.get_api())
Code Example #13
def test_upload_task(qtbot, monkeypatch):
    task_id = str(uuid.uuid4())
    tpath = common.make_upload_task(task_id=task_id)
    mw = DCORAid()
    QtWidgets.QApplication.processEvents(QtCore.QEventLoop.AllEvents, 300)
    monkeypatch.setattr(QtWidgets.QFileDialog, "getOpenFileNames",
                        lambda *args: ([tpath], None))
    act = QtWidgets.QAction("some unimportant text")
    act.setData("single")
    mw.panel_upload.on_upload_task(action=act)
    uj = mw.panel_upload.jobs[-1]
    assert uj.task_id == task_id
    mw.close()
Code Example #14
def test_wrong_ids():
    api = common.get_api()
    # create some metadata
    dataset_dict = common.make_dataset_dict(hint="task_test")
    dataset_dict["id"] = "peter"
    task_path = common.make_upload_task(
        dataset_dict=dataset_dict,
        dataset_id="hans",  # different id
        resource_paths=[str(dpath)],
        resource_names=[dpath.name])
    with pytest.raises(ValueError,
                       match="I got the following IDs: from upload job " +
                       "state: hans; from dataset dict: peter"):
        task.load_task(task_path, api=api)
Code Example #15
def test_load_with_existing_dataset_map_from_task_dict_update():
    api = common.get_api()
    # create some metadata
    dataset_dict = common.make_dataset_dict(hint="task_test")
    # post dataset creation request
    task_path = common.make_upload_task(dataset_dict=dataset_dict,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name],
                                        task_id="xwing")
    map_task_to_dataset_id = {}
    uj = task.load_task(task_path,
                        api=api,
                        map_task_to_dataset_id=map_task_to_dataset_id)
    assert uj.task_id == "xwing"
    assert map_task_to_dataset_id["xwing"] == uj.dataset_id
Code Example #16
def test_persistent_upload_joblist_done():
    """test things when a job is done"""
    api = common.get_api()
    td = pathlib.Path(tempfile.mkdtemp(prefix="persistent_uj_list_"))
    pujl_path = td / "joblistdir"
    task_path = common.make_upload_task()
    pujl = PersistentUploadJobList(pujl_path)
    uj = load_task(task_path, api=api)
    pujl.immortalize_job(uj)
    pujl.set_job_done(uj.dataset_id)
    assert pujl.job_exists(uj.dataset_id)
    assert not pujl.is_job_queued(uj.dataset_id)
    assert pujl.is_job_done(uj.dataset_id)
    assert uj in pujl

    ids = pujl.get_queued_dataset_ids()
    assert uj.dataset_id not in ids
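Taken together, the persistent-job-list tests pin down simple bookkeeping semantics: immortalize_job queues a job (and refuses duplicates), set_job_done moves it out of the queue, and job_exists covers both sets. A toy in-memory model of those semantics (not DCOR-Aid's PersistentUploadJobList, which persists jobs on disk):

class ToyJobList:
    """In-memory stand-in illustrating the queued/done bookkeeping
    asserted in the tests above."""

    def __init__(self):
        self.queued = set()
        self.done = set()

    def immortalize_job(self, dataset_id):
        if self.job_exists(dataset_id):
            raise FileExistsError(f"Job {dataset_id} already present")
        self.queued.add(dataset_id)

    def set_job_done(self, dataset_id):
        self.queued.discard(dataset_id)
        self.done.add(dataset_id)

    def is_job_queued(self, dataset_id):
        return dataset_id in self.queued

    def is_job_done(self, dataset_id):
        return dataset_id in self.done

    def job_exists(self, dataset_id):
        return self.is_job_queued(dataset_id) or self.is_job_done(dataset_id)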
Code Example #17
def test_load_with_existing_dataset_map_from_task_control():
    api = common.get_api()
    # create some metadata
    dataset_dict = common.make_dataset_dict(hint="task_test")
    # post dataset creation request
    dataset_dict_with_id = create_dataset(dataset_dict=dataset_dict,
                                          resources=[dpath],
                                          api=api)
    task_path = common.make_upload_task(dataset_dict=dataset_dict,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name],
                                        task_id="xwing")
    uj = task.load_task(
        task_path,
        api=api,
        map_task_to_dataset_id={"deathstar": dataset_dict_with_id["id"]})
    assert uj.dataset_id != dataset_dict_with_id["id"]
Code Example #18
def test_resource_supplements_must_be_empty_for_non_rtdc():
    task_path = common.make_upload_task(
        resource_paths=[__file__, dpath],
        resource_names=["test.py", "other_data.rtdc"],
        resource_supplements=[{
            "chip": {
                "name": "7x2",
                "master name": "R1"
            }
        }, {
            "chip": {
                "name": "7x2",
                "master name": "R1"
            }
        }])
    with pytest.raises(ValueError, match="supplements must be empty"):
        task.load_task(task_path, api=common.get_api())
Code Example #19
def test_upload_task_missing_circle(qtbot, monkeypatch):
    """When the organization is missing, DCOR-Aid should ask for it"""
    task_id = str(uuid.uuid4())
    dataset_dict = common.make_dataset_dict(hint="task_upload_no_org_")
    dataset_dict.pop("owner_org")
    tpath = common.make_upload_task(task_id=task_id,
                                    dataset_dict=dataset_dict)
    mw = DCORAid()
    QtWidgets.QApplication.processEvents(QtCore.QEventLoop.AllEvents, 300)
    monkeypatch.setattr(QtWidgets.QFileDialog, "getOpenFileNames",
                        lambda *args: ([tpath], None))
    # We actually only need this monkeypatch if there is more than
    # one circle for the present user.
    monkeypatch.setattr(QtWidgets.QInputDialog, "getItem",
                        # return the first item in the circle list
                        lambda *args: (args[3][0], True))
    act = QtWidgets.QAction("some unimportant text")
    act.setData("single")
    mw.panel_upload.on_upload_task(action=act)
    uj = mw.panel_upload.jobs[-1]
    assert uj.task_id == task_id
    mw.close()
Code Example #20
def test_dataset_id_already_exists_active_fails():
    api = common.get_api()
    # create some metadata
    dataset_dict = common.make_dataset_dict(hint="task_test")
    # post dataset creation request
    dataset_dict_with_id = create_dataset(dataset_dict=dataset_dict,
                                          resources=[dpath],
                                          api=api,
                                          activate=True)
    # create a new task with the same dataset ID but with different data
    task_path = common.make_upload_task(
        dataset_dict=dataset_dict_with_id,
        resource_paths=[str(dpath), str(dpath)],
        resource_names=["1.rtdc", "2.rtdc"])
    uj = task.load_task(task_path, api=api)
    assert len(uj.paths) == 2
    assert len(uj.resource_names) == 2
    assert uj.dataset_id == dataset_dict_with_id["id"]
    # attempt to upload the task
    uj.task_compress_resources()
    assert uj.state == "parcel"
    uj.task_upload_resources()
    assert uj.state == "error"
    assert "Access denied" in str(uj.traceback)
Code Example #21
def test_resource_path_is_relative():
    task_path = common.make_upload_task(resource_paths=["guess_my_name.rtdc"])
    new_data_path = pathlib.Path(task_path).parent / "guess_my_name.rtdc"
    shutil.copy2(dpath, new_data_path)
    uj = task.load_task(task_path, api=common.get_api())
    assert new_data_path.samefile(uj.paths[0])
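This test and the next imply that a relative resource path is resolved against the task file's directory, and that a missing file raises FileNotFoundError. A sketch of that resolution logic (illustrative only, not dcoraid's actual implementation):

import pathlib


def resolve_resource_path(task_path, resource_path):
    # relative paths are interpreted relative to the task file's directory
    rp = pathlib.Path(resource_path)
    if not rp.is_absolute():
        rp = pathlib.Path(task_path).parent / rp
    if not rp.exists():
        raise FileNotFoundError(f"Resource {rp} not found for task {task_path}")
    return rp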
Code Example #22
def test_resource_path_not_found():
    task_path = common.make_upload_task(resource_paths=["/home/unknown.rtdc"])
    with pytest.raises(FileNotFoundError, match="not found for task"):
        task.load_task(task_path, api=common.get_api())