Example #1
async def test_dsm_datcore(postgres_service_url, dsm_fixture,
                           datcore_structured_testbucket):
    if not has_datcore_tokens():
        return

    utils.create_tables(url=postgres_service_url)

    dsm = dsm_fixture
    user_id = "0"
    data = await dsm.list_files(user_id=user_id,
                                location=DATCORE_STR,
                                uuid_filter=BUCKET_NAME)
    # the fixture creates 3 files
    assert len(data) == 3

    # delete the first one
    fmd_to_delete = data[0].fmd
    print("Deleting", fmd_to_delete.bucket_name, fmd_to_delete.object_name)
    is_deleted = await dsm.delete_file(user_id, DATCORE_STR,
                                       fmd_to_delete.file_id)
    assert is_deleted

    import time

    time.sleep(1)  # FIXME: takes some time to delete!!

    data = await dsm.list_files(user_id=user_id,
                                location=DATCORE_STR,
                                uuid_filter=BUCKET_NAME)
    assert len(data) == 2
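
The fixed time.sleep(1) above (and again in Example #5) papers over the delay before Datcore reflects deletes and uploads. A minimal polling sketch, reusing only the dsm, DATCORE_STR, and BUCKET_NAME names from the test; the helper itself is hypothetical:

import asyncio

async def _wait_for_datcore_listing(dsm, user_id, expected_count,
                                    timeout=10.0, interval=0.5):
    # Poll dsm.list_files until the Datcore listing reaches the expected
    # length, instead of sleeping a fixed amount of time.
    loop = asyncio.get_event_loop()
    deadline = loop.time() + timeout
    while True:
        data = await dsm.list_files(user_id=user_id,
                                    location=DATCORE_STR,
                                    uuid_filter=BUCKET_NAME)
        if len(data) == expected_count:
            return data
        if loop.time() >= deadline:
            raise TimeoutError("expected {} files, found {}".format(
                expected_count, len(data)))
        await asyncio.sleep(interval)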
Example #2
async def test_datcore_list_files_raw(loop):
    if not utils.has_datcore_tokens():
        return

    api_token = os.environ.get("BF_API_KEY", "none")
    api_secret = os.environ.get("BF_API_SECRET", "none")
    pool = ThreadPoolExecutor(2)
    dcw = DatcoreWrapper(api_token, api_secret, loop, pool)
    f = await dcw.list_files_raw()
    assert len(f)
Example #3
async def test_datcore_ping(loop):
    if not utils.has_datcore_tokens():
        return

    api_token = os.environ.get("BF_API_KEY", "none")
    api_secret = os.environ.get("BF_API_SECRET", "none")
    pool = ThreadPoolExecutor(2)
    dcw = DatcoreWrapper(api_token, api_secret, loop, pool)
    responsive = await dcw.ping()
    assert responsive
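
Examples #2 and #3 construct the DatcoreWrapper identically. A sketch of a shared pytest fixture, assuming the loop fixture those tests already take; the dcw fixture name is hypothetical:

import os
from concurrent.futures import ThreadPoolExecutor

import pytest

@pytest.fixture
def dcw(loop):
    # Same environment variables the tests above read; "none" keeps
    # construction from failing when the Datcore tokens are absent
    # (the tests then skip themselves via has_datcore_tokens()).
    api_token = os.environ.get("BF_API_KEY", "none")
    api_secret = os.environ.get("BF_API_SECRET", "none")
    return DatcoreWrapper(api_token, api_secret, loop, ThreadPoolExecutor(2))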
Example #4
async def test_dsm_list_datasets_datcore(dsm_fixture,
                                         datcore_structured_testbucket):
    if not has_datcore_tokens():
        return

    datasets = await dsm_fixture.list_datasets(user_id=USER_ID,
                                               location=DATCORE_STR)

    assert len(datasets)
    assert any(BUCKET_NAME in d.display_name for d in datasets)
Example #5
async def test_dsm_s3_to_datcore(
    postgres_service_url,
    s3_client,
    mock_files_factory,
    dsm_fixture,
    datcore_structured_testbucket,
):
    if not has_datcore_tokens():
        return
    utils.create_tables(url=postgres_service_url)
    tmp_file = mock_files_factory(1)[0]

    fmd = _create_file_meta_for_s3(postgres_service_url, s3_client, tmp_file)

    dsm = dsm_fixture

    up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid)
    with io.open(tmp_file, "rb") as fp:
        d = fp.read()
        req = urllib.request.Request(up_url, data=d, method="PUT")
        with urllib.request.urlopen(req) as _f:
            pass

    # given the fmd, upload to datcore
    tmp_file2 = tmp_file + ".fordatcore"
    user_id = USER_ID
    down_url = await dsm.download_link_s3(fmd.file_uuid)
    urllib.request.urlretrieve(down_url, tmp_file2)
    assert filecmp.cmp(tmp_file2, tmp_file)
    # now we have the file locally, upload the file
    await dsm.upload_file_to_datcore(
        user_id=user_id,
        local_file_path=tmp_file2,
        destination_id=datcore_structured_testbucket["dataset_id"],
    )
    # and into a deeper structure
    await dsm.upload_file_to_datcore(
        user_id=user_id,
        local_file_path=tmp_file2,
        destination_id=datcore_structured_testbucket["coll2_id"],
    )

    # FIXME: upload takes some time
    import time

    time.sleep(1)

    data = await dsm.list_files(user_id=user_id,
                                location=DATCORE_STR,
                                uuid_filter=BUCKET_NAME)
    # there should now be 5 files
    assert len(data) == 5
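
The read-the-file-then-PUT idiom against the presigned upload link recurs in Examples #5 and #9. A hypothetical helper capturing it:

import urllib.request

def _put_file_to_link(up_url, file_path):
    # PUT the raw file body to the presigned S3 URL returned by
    # dsm.upload_link; urlopen raises an HTTPError on a non-2xx status.
    with open(file_path, "rb") as fp:
        req = urllib.request.Request(up_url, data=fp.read(), method="PUT")
        with urllib.request.urlopen(req):
            pass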
Example #6
async def test_datcore_nested_download_link(loop):
    if not utils.has_datcore_tokens():
        return

    api_token = os.environ.get("BF_API_KEY", "none")
    api_secret = os.environ.get("BF_API_SECRET", "none")
    pool = ThreadPoolExecutor(2)
    dcw = DatcoreWrapper(api_token, api_secret, loop, pool)
    destination = str(Path("Shared Data/ISAN/UCDavis use case 0D/inputs/"))
    filename = "initial_WTstates.txt"

    f = await dcw.download_link(destination, filename)
    assert f
Example #7
async def test_locations(client):
    user_id = USER_ID

    resp = await client.get("/v0/locations?user_id={}".format(user_id))

    payload = await resp.json()
    assert resp.status == 200, str(payload)

    data, error = tuple(payload.get(k) for k in ("data", "error"))

    _locs = 2 if has_datcore_tokens() else 1
    assert len(data) == _locs
    assert not error
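
The data/error tuple unpacking recurs in Examples #7 and #10. A hypothetical one-liner for the enveloped responses:

def _unwrap_envelope(payload):
    # Storage responses are enveloped as {"data": ..., "error": ...}.
    return payload.get("data"), payload.get("error")

# usage: data, error = _unwrap_envelope(await resp.json())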
Example #8
async def test_dsm_datcore_to_S3(
    postgres_service_url,
    s3_client,
    dsm_fixture,
    mock_files_factory,
    datcore_structured_testbucket,
):
    if not has_datcore_tokens():
        return
    utils.create_tables(url=postgres_service_url)
    # create temporary file
    tmp_file = mock_files_factory(1)[0]
    dest_fmd = _create_file_meta_for_s3(postgres_service_url, s3_client,
                                        tmp_file)
    user_id = dest_fmd.user_id
    dest_uuid = dest_fmd.file_uuid

    dsm = dsm_fixture

    s3_data = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR)
    assert len(s3_data) == 0

    dc_data = await dsm.list_files(user_id=user_id,
                                   location=DATCORE_STR,
                                   uuid_filter=BUCKET_NAME)
    assert len(dc_data) == 3
    src_fmd = dc_data[0]

    await dsm.copy_file(
        user_id=user_id,
        dest_location=SIMCORE_S3_STR,
        dest_uuid=dest_uuid,
        source_location=DATCORE_STR,
        source_uuid=datcore_structured_testbucket["file_id1"],
    )

    s3_data = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR)
    assert len(s3_data) == 1

    # now download the original file
    tmp_file1 = tmp_file + ".fromdatcore"
    down_url_dc, filename = await dsm.download_link_datcore(
        user_id, datcore_structured_testbucket["file_id1"])
    urllib.request.urlretrieve(down_url_dc, tmp_file1)

    # and the one on s3
    tmp_file2 = tmp_file + ".fromS3"
    down_url_s3 = await dsm.download_link_s3(dest_uuid)
    urllib.request.urlretrieve(down_url_s3, tmp_file2)

    assert filecmp.cmp(tmp_file1, tmp_file2)
Example #9
async def test_copy_datcore(
    postgres_service_url,
    s3_client,
    dsm_fixture,
    mock_files_factory,
    datcore_structured_testbucket,
):
    if not has_datcore_tokens():
        return
    utils.create_tables(url=postgres_service_url)

    # the fixture should provide 3 files
    dsm = dsm_fixture
    user_id = USER_ID
    data = await dsm.list_files(user_id=user_id,
                                location=DATCORE_STR,
                                uuid_filter=BUCKET_NAME)
    assert len(data) == 3

    # create temporary file and upload to s3
    tmp_file = mock_files_factory(1)[0]
    fmd = _create_file_meta_for_s3(postgres_service_url, s3_client, tmp_file)

    up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid)
    with io.open(tmp_file, "rb") as fp:
        d = fp.read()
        req = urllib.request.Request(up_url, data=d, method="PUT")
        with urllib.request.urlopen(req) as _f:
            pass

    # now copy to datcore
    dat_core_uuid = os.path.join(BUCKET_NAME, fmd.file_name)

    await dsm.copy_file(
        user_id=user_id,
        dest_location=DATCORE_STR,
        dest_uuid=datcore_structured_testbucket["coll2_id"],
        source_location=SIMCORE_S3_STR,
        source_uuid=fmd.file_uuid,
    )

    data = await dsm.list_files(user_id=user_id,
                                location=DATCORE_STR,
                                uuid_filter=BUCKET_NAME)

    # there should now be 4 files
    assert len(data) == 4
Example #10
async def test_copy(client, dsm_mockup_db, datcore_structured_testbucket):
    if not has_datcore_tokens():
        return
    # copy N files
    N = 2
    counter = 0
    for d in dsm_mockup_db.keys():
        fmd = dsm_mockup_db[d]
        source_uuid = fmd.file_uuid
        datcore_id = datcore_structured_testbucket["coll1_id"]
        resp = await client.put(
            "/v0/locations/1/files/{}?user_id={}&extra_location={}&extra_source={}"
            .format(
                quote(datcore_id, safe=""),
                fmd.user_id,
                SIMCORE_S3_ID,
                quote(source_uuid, safe=""),
            ))
        payload = await resp.json()
        assert resp.status == 200, str(payload)

        data, error = tuple(payload.get(k) for k in ("data", "error"))
        assert not error
        assert data

        counter = counter + 1
        if counter == N:
            break

    # list files for every user
    user_id = USER_ID
    resp = await client.get(
        "/v0/locations/1/files/metadata?user_id={}&uuid_filter={}".format(
            user_id, BUCKET_NAME))
    payload = await resp.json()
    assert resp.status == 200, str(payload)

    data, error = tuple(payload.get(k) for k in ("data", "error"))
    assert not error
    assert len(data) > N
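
Hand-formatting the copy URL's query string invites escaping mistakes. A sketch of the same request URL built with urllib.parse.urlencode; the helper is hypothetical, and SIMCORE_S3_ID is the constant used above:

from urllib.parse import quote, urlencode

def _copy_to_datcore_url(datcore_id, user_id, source_uuid):
    # Same /v0/locations/1/files/{file_id} endpoint as above, with the
    # query parameters escaped by urlencode instead of by hand.
    query = urlencode({
        "user_id": user_id,
        "extra_location": SIMCORE_S3_ID,
        "extra_source": source_uuid,
    })
    return "/v0/locations/1/files/{}?{}".format(quote(datcore_id, safe=""),
                                                query)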
Example #11
async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture,
                                    mock_files_factory,
                                    datcore_structured_testbucket):
    if not has_datcore_tokens():
        return
    utils.create_tables(url=postgres_service_url)
    dsm = dsm_fixture
    user_id = USER_ID
    data = await dsm.list_files(user_id=user_id,
                                location=DATCORE_STR,
                                uuid_filter=BUCKET_NAME)
    assert len(data)

    url, filename = await dsm.download_link_datcore(
        user_id, datcore_structured_testbucket["file_id1"])

    tmp_file = mock_files_factory(1)[0]
    tmp_file2 = tmp_file + ".fromdatcore"

    urllib.request.urlretrieve(url, tmp_file2)

    assert filecmp.cmp(tmp_file2, tmp_file)
Example #12
import os
import tempfile
import typing
from pathlib import Path

from blackfynn import Blackfynn
from blackfynn.models import Collection

import utils
from simcore_service_storage.datcore import DatcoreClient
from simcore_service_storage.models import FileMetaData

dir_path = os.path.dirname(os.path.realpath(__file__))
api_token = os.environ.get("BF_API_KEY")
api_secret = os.environ.get("BF_API_SECRET")

if utils.has_datcore_tokens():
    client = DatcoreClient(api_token=api_token, api_secret=api_secret)
    destination = str(Path("MaG/level1/level2"))
    fd, path = tempfile.mkstemp()

    try:
        with os.fdopen(fd, "w") as tmp:
            # do stuff with temp file
            tmp.write("stuff")

        f = client.upload_file(destination, path)
        f = client.delete_file(destination, Path(path).name)
    finally:
        os.remove(path)
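
The mkstemp/fdopen/finally sequence above can be compacted with NamedTemporaryFile. A behavior-equivalent sketch; the helper is hypothetical and assumes the local file only needs to exist until the upload returns:

import os
import tempfile
from pathlib import Path

def _roundtrip_temp_file(client, destination, contents="stuff"):
    # Write a throwaway payload, upload it to Datcore, delete the remote
    # copy, then clean up the local file.
    with tempfile.NamedTemporaryFile("w", delete=False) as tmp:
        tmp.write(contents)
        path = tmp.name
    try:
        client.upload_file(destination, path)
        client.delete_file(destination, Path(path).name)
    finally:
        os.remove(path)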
Example #13
async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client,
                                            postgres_service_url,
                                            datcore_structured_testbucket):
    if not has_datcore_tokens():
        return
    dsm = dsm_fixture
    utils.create_full_tables(url=postgres_service_url)

    path_in_datcore = datcore_structured_testbucket["file_id3"]
    file_name_in_datcore = Path(
        datcore_structured_testbucket["filename3"]).name
    user_id = USER_ID

    source_project = {
        "uuid": "de2578c5-431e-4d5e-b80e-401c8066782f",
        "name": "ISAN: 2D Plot",
        "description": "2D RawGraphs viewer with one input",
        "thumbnail": "",
        "prjOwner": "*****@*****.**",
        "creationDate": "2019-05-24T10:36:57.813Z",
        "lastChangeDate": "2019-05-24T11:36:12.015Z",
        "workbench": {
            "de2578c5-431e-48eb-a9d2-aaad6b72400a": {
                "key": "simcore/services/frontend/file-picker",
                "version": "1.0.0",
                "label": "File Picker",
                "inputs": {},
                "inputNodes": [],
                "outputs": {
                    "outFile": {
                        "store": 1,
                        "path":
                        "N:package:ab8c214d-a596-401f-a90c-9c50e3c048b0",
                    }
                },
                "progress": 100,
                "thumbnail": "",
                "position": {
                    "x": 100,
                    "y": 100
                },
            },
            "de2578c5-431e-4c63-a705-03a2c339646c": {
                "key": "simcore/services/dynamic/raw-graphs",
                "version": "2.8.0",
                "label": "2D plot",
                "inputs": {
                    "input_1": {
                        "nodeUuid": "de2578c5-431e-48eb-a9d2-aaad6b72400a",
                        "output": "outFile",
                    }
                },
                "inputNodes": ["de2578c5-431e-48eb-a9d2-aaad6b72400a"],
                "outputs": {},
                "progress": 0,
                "thumbnail": "",
                "position": {
                    "x": 400,
                    "y": 100
                },
            },
        },
    }

    bucket_name = BUCKET_NAME
    s3_client.create_bucket(bucket_name, delete_contents_if_exists=True)

    source_project["workbench"]["de2578c5-431e-48eb-a9d2-aaad6b72400a"][
        "outputs"]["outFile"]["path"] = path_in_datcore

    destination_project = copy.deepcopy(source_project)
    source_project_id = source_project["uuid"]
    destination_project["uuid"] = source_project_id.replace(
        "template", "deep-copy")
    destination_project["workbench"] = {}

    node_mapping = {}

    for node_id, node in source_project["workbench"].items():
        object_name = str(
            Path(source_project_id) / Path(node_id) / Path(node_id + ".dat"))
        f = utils.data_dir() / Path("notebooks.zip")
        s3_client.upload_file(bucket_name, object_name, f)
        key = node_id.replace("template", "deep-copy")
        destination_project["workbench"][key] = node
        node_mapping[node_id] = key

    status = await dsm.deep_copy_project_simcore_s3(user_id, source_project,
                                                    destination_project,
                                                    node_mapping)

    new_path = destination_project["workbench"][
        "deep-copy-uuid-48eb-a9d2-aaad6b72400a"]["outputs"]["outFile"]["path"]
    assert new_path != path_in_datcore
    assert Path(new_path).name == file_name_in_datcore
    files = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR)
    assert len(files) == 3
    # one of the files in s3 should be the downloaded one from datcore
    assert any(f.fmd.file_name == Path(
        datcore_structured_testbucket["filename3"]).name for f in files)

    response = await dsm.delete_project_simcore_s3(user_id,
                                                   destination_project["uuid"])

    files = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR)
    assert len(files) == 0
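
The test above relies on every "template" prefix in the project and node ids being rewritten before the deep copy. The remapping step, isolated as a hypothetical standalone function:

def _remap_node_ids(source_project):
    # Rewrite "template" ids to "deep-copy" ids, returning the new
    # workbench plus the old-to-new node mapping that
    # dsm.deep_copy_project_simcore_s3 consumes.
    workbench = {}
    node_mapping = {}
    for node_id, node in source_project["workbench"].items():
        new_id = node_id.replace("template", "deep-copy")
        workbench[new_id] = node
        node_mapping[node_id] = new_id
    return workbench, node_mapping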
Example #14
def test_datcore_fixture(datcore_structured_testbucket):
    if not has_datcore_tokens():
        return
    print(datcore_structured_testbucket)