Code Example #1
    def setUp(self):
        self.maxDiff = None
        conn = engine.connect()
        text = (
            f"INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            f"VALUES ('{TASK_ID}', '{NAME}', '{DESCRIPTION}', '{IMAGE}', '{COMMANDS_JSON}', '{ARGUMENTS_JSON}', '{TAGS_JSON}', '{EXPERIMENT_NOTEBOOK_PATH}', '{DEPLOYMENT_NOTEBOOK_PATH}', 0, '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)
        conn.close()

        try:
            MINIO_CLIENT.make_bucket(BUCKET_NAME)
        except BucketAlreadyOwnedByYou:
            pass

        file = BytesIO(
            b'{"cells":[{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]}],"metadata":{"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.6.9"}},"nbformat":4,"nbformat_minor":4}'
        )
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=EXPERIMENT_NOTEBOOK_PATH[len(f"minio://{BUCKET_NAME}/"):],
            data=file,
            length=file.getbuffer().nbytes,
        )
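Note that this setUp interpolates the test constants directly into the INSERT statement. Code Examples #16 and #19 below build the same kind of statement with %s placeholders and pass the values to conn.execute separately, which sidesteps quoting problems. A minimal sketch of this setUp rewritten in that style (same columns and constants as above, assuming the underlying DB driver accepts %s-style placeholders, as in Examples #16 and #19):

    def setUp(self):
        self.maxDiff = None
        conn = engine.connect()
        text = (
            "INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        )
        # the driver binds the values, so quotes inside NAME or DESCRIPTION cannot break the statement
        conn.execute(text, (TASK_ID, NAME, DESCRIPTION, IMAGE, COMMANDS_JSON, ARGUMENTS_JSON, TAGS_JSON,
                            EXPERIMENT_NOTEBOOK_PATH, DEPLOYMENT_NOTEBOOK_PATH, 0, CREATED_AT, UPDATED_AT))
        conn.close()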
Code Example #2
    def tearDown(self):
        prefix = f"tasks/{NAME}"
        for obj in MINIO_CLIENT.list_objects(BUCKET_NAME,
                                             prefix=prefix,
                                             recursive=True):
            MINIO_CLIENT.remove_object(BUCKET_NAME, obj.object_name)

        session = requests.Session()
        session.cookies.update(COOKIES)
        session.headers.update(HEADERS)
        session.hooks = {
            "response": lambda r, *args, **kwargs: r.raise_for_status(),
        }

        r = session.get(url=f"{JUPYTER_ENDPOINT}/api/contents/tasks")
        contents = r.json()["content"]
        for content in contents:
            session.delete(
                url=f"{JUPYTER_ENDPOINT}/api/contents/{content['path']}/Experiment.ipynb",
            )
            session.delete(
                url=f"{JUPYTER_ENDPOINT}/api/contents/{content['path']}/Deployment.ipynb",
            )
            session.delete(
                url=f"{JUPYTER_ENDPOINT}/api/contents/{content['path']}",
            )

        conn = engine.connect()
        text = f"DELETE FROM tasks WHERE 1 = 1"
        conn.execute(text)
        conn.close()
Code Example #3
def creates_eucalyptus_metadata(dataset_object_name):
    """Create eucalyptus metadata into which the model will import.

    Args:
        dataset_object_name (str): name of the dataset that the Model will search in Minio.
    """
    try:
        MINIO_CLIENT.make_bucket(BUCKET_NAME)
    except BucketAlreadyOwnedByYou:
        pass

    metadata = {
        "columns": [
            "Abbrev", "Rep", "Locality", "Map_Ref", "Latitude", "Altitude",
            "Rainfall", "Frosts", "Year", "Sp", "PMCno", "DBH", "Ht", "Surv",
            "Vig", "Ins_res", "Stem_Fm", "Crown_Fm", "Brnch_Fm", "Utility"
        ],
        "featuretypes": [
            "Categorical", "Numerical", "Categorical", "Categorical",
            "Categorical", "Numerical", "Numerical", "Numerical", "Numerical",
            "Categorical", "Numerical", "Numerical", "Numerical", "Numerical",
            "Numerical", "Numerical", "Numerical", "Numerical", "Numerical",
            "Categorical"
        ],
        "filename":
        "eucalyptus_mock.metadata"
    }
    buffer = BytesIO(dumps(metadata).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{dataset_object_name}/{dataset_object_name}.metadata",
        data=buffer,
        length=buffer.getbuffer().nbytes,
    )
Code Example #4
def creates_titanic_metadata(dataset_object_name):
    """Create titanic metadata into which the model will import.

    Args:
        dataset_object_name (str): name of the dataset that the Model will search in Minio.
    """
    try:
        MINIO_CLIENT.make_bucket(BUCKET_NAME)
    except BucketAlreadyOwnedByYou:
        pass

    metadata = {
        "columns": [
            "PassengerId", "Survived", "Pclass", "Name", "Sex", "Age", "SibSp",
            "Parch", "Ticket", "Fare", "Cabin", "Embarked"
        ],
        "featuretypes": [
            "Numerical", "Numerical", "Numerical", "Categorical",
            "Categorical", "Numerical", "Numerical", "Numerical",
            "Categorical", "Numerical", "Categorical", "Categorical"
        ],
        "filename":
        "iris_mock.metadata"
    }

    buffer = BytesIO(dumps(metadata).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{dataset_object_name}/{dataset_object_name}.metadata",
        data=buffer,
        length=buffer.getbuffer().nbytes,
    )
Code Example #5
def creates_iris_metadata(dataset_object_name):
    """Create iris metadata into which the model will import.

    Args:
        dataset_object_name (str): name of the dataset that the Model will search in Minio.
    """
    try:
        MINIO_CLIENT.make_bucket(BUCKET_NAME)
    except BucketAlreadyOwnedByYou:
        pass

    metadata = {
        "columns": [
            "SepalLengthCm", "SepalWidthCm", "PetalLengthCm", "PetalWidthCm",
            "Species"
        ],
        "featuretypes":
        ["Numerical", "Numerical", "Numerical", "Numerical", "Categorical"],
        "filename":
        "iris_mock.metadata"
    }

    buffer = BytesIO(dumps(metadata).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{dataset_object_name}/{dataset_object_name}.metadata",
        data=buffer,
        length=buffer.getbuffer().nbytes,
    )
Code Example #6
def creates_boston_metadata(dataset_object_name):
    """Create boston metadata into which the model will import.

    Args:
        dataset_object_name (str): name of the dataset that the Model will search in Minio.
    """
    try:
        MINIO_CLIENT.make_bucket(BUCKET_NAME)
    except BucketAlreadyOwnedByYou:
        pass

    metadata = {
        "columns": [
            "crim", "zn", "indus", "chas", "nox", "rm", "age", "dis", "rad",
            "tax", "ptratio", "black", "lstat", "medv"
        ],
        "featuretypes": [
            "Numerical", "Numerical", "Numerical", "Numerical", "Numerical",
            "Numerical", "Numerical", "Numerical", "Numerical", "Numerical",
            "Numerical", "Numerical", "Numerical", "Numerical"
        ],
        "filename":
        "boston.metadata"
    }

    buffer = BytesIO(dumps(metadata).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{dataset_object_name}/{dataset_object_name}.metadata",
        data=buffer,
        length=buffer.getbuffer().nbytes,
    )
Code Example #7
    def tearDown(self):
        prefix = f"components/{COMPONENT_ID}"
        for obj in MINIO_CLIENT.list_objects(BUCKET_NAME, prefix=prefix, recursive=True):
            MINIO_CLIENT.remove_object(BUCKET_NAME, obj.object_name)

        conn = engine.connect()
        text = f"DELETE FROM components WHERE 1 = 1"
        conn.execute(text)
        conn.close()
Code Example #8
    def tearDown(self):
        conn = engine.connect()
        text = f"DELETE FROM tasks WHERE uuid = '{TASK_ID}'"
        conn.execute(text)
        conn.close()

        prefix = f"tasks/{TASK_ID}"
        for obj in MINIO_CLIENT.list_objects(BUCKET_NAME,
                                             prefix=prefix,
                                             recursive=True):
            MINIO_CLIENT.remove_object(BUCKET_NAME, obj.object_name)
Code Example #9
def delete_mock_dataset(object_name):
    """Delete a mock dataset from MinIO.

    Args:
        object_name (str): dataset name.
    """
    try:
        for obj in MINIO_CLIENT.list_objects(BUCKET_NAME,
                                             prefix=f"datasets/{object_name}",
                                             recursive=True):
            MINIO_CLIENT.remove_object(BUCKET_NAME, obj.object_name)
    except ResponseError as err:
        print(err)
Code Example #10
    def setUp(self):
        self.maxDiff = None
        try:
            MINIO_CLIENT.make_bucket(BUCKET_NAME)
        except BucketAlreadyOwnedByYou:
            pass

        buffer = BytesIO(dumps(METRICS).encode())
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=METRICS_NAME,
            data=buffer,
            length=buffer.getbuffer().nbytes,
        )
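A test built on this setUp would typically read the object back and compare it with METRICS. A minimal sketch with a hypothetical test method, assuming MINIO_CLIENT is the standard minio.Minio client and METRICS/METRICS_NAME are the constants used above:

    def test_metrics_are_stored(self):
        # get_object returns a streaming response; read it fully, then release it
        response = MINIO_CLIENT.get_object(BUCKET_NAME, METRICS_NAME)
        try:
            stored = loads(response.read().decode("utf-8"))
        finally:
            response.close()
        self.assertEqual(stored, METRICS)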
Code Example #11
    def tearDown(self):
        MINIO_CLIENT.remove_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}.metadata",
        )
        MINIO_CLIENT.remove_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}",
        )
        MINIO_CLIENT.remove_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/{DATASET}.metadata",
        )
        MINIO_CLIENT.remove_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/{DATASET}",
        )

        conn = engine.connect()
        text = f"DELETE FROM operators WHERE experiment_id = '{EXPERIMENT_ID}'"
        conn.execute(text)

        text = f"DELETE FROM components WHERE uuid = '{COMPONENT_ID}'"
        conn.execute(text)

        text = f"DELETE FROM experiments WHERE project_id = '{PROJECT_ID}'"
        conn.execute(text)

        text = f"DELETE FROM projects WHERE uuid = '{PROJECT_ID}'"
        conn.execute(text)
        conn.close()
Code Example #12
    def setUp(self):
        self.maxDiff = None
        try:
            MINIO_CLIENT.make_bucket(BUCKET_NAME)
        except BucketAlreadyOwnedByYou:
            pass

        file = BytesIO(
            b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\xc0\x00\x00\x00\xba\x08\x02\x00\x00\x00w\x07\xd5\xf7\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00\tpHYs\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7o\xa8d\x00\x00\x01\xbaIDATx^\xed\xd21\x01\x00\x00\x0c\xc3\xa0\xf97\xdd\x89\xc8\x0b\x1a\xb8A \x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\x89@$\x02\x91\x08D"\x10\xc1\xf6\x1a:\xf5\xe1\x06\x89A\xdf\x00\x00\x00\x00IEND\xaeB`\x82'
        )
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=FIGURE_NAME,
            data=file,
            length=file.getbuffer().nbytes,
        )
Code Example #13
    def tearDown(self):
        prefix = f"experiments/{EXPERIMENT_ID}"
        for obj in MINIO_CLIENT.list_objects(BUCKET_NAME, prefix=prefix, recursive=True):
            MINIO_CLIENT.remove_object(BUCKET_NAME, obj.object_name)

        conn = engine.connect()
        text = f"DELETE FROM operators WHERE experiment_id = '{EXPERIMENT_ID}'"
        conn.execute(text)

        text = f"DELETE FROM tasks WHERE uuid = '{TASK_ID}'"
        conn.execute(text)

        text = f"DELETE FROM experiments WHERE project_id = '{PROJECT_ID}'"
        conn.execute(text)

        text = f"DELETE FROM projects WHERE uuid = '{PROJECT_ID}'"
        conn.execute(text)
        conn.close()
Code Example #14
def creates_mock_dataset(object_name, object_content):
    """Create a mock dataset to be used in unit test.

    Args:
        object_name (str): dataset name.
        object_content (bytes): data of the dataset.
    """
    try:
        MINIO_CLIENT.make_bucket(BUCKET_NAME)
    except BucketAlreadyOwnedByYou:
        pass

    file = BytesIO(object_content)
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{object_name}/{object_name}",
        data=file,
        length=file.getbuffer().nbytes,
    )
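Taken together with delete_mock_dataset (Code Example #9) and the metadata helpers above, a typical unit test would create the mock objects in setUp and remove them in tearDown. A minimal sketch, using a hypothetical test class and dataset name, and assuming the helpers are importable from the shared test utilities:

import unittest


class TestWithMockDataset(unittest.TestCase):
    def setUp(self):
        # "mock.csv" is a hypothetical object name used only for illustration;
        # the CSV columns match the iris metadata written by creates_iris_metadata
        creates_mock_dataset("mock.csv", b"SepalLengthCm,SepalWidthCm,PetalLengthCm,PetalWidthCm,Species\n5.1,3.5,1.4,0.2,Iris-setosa\n")
        creates_iris_metadata("mock.csv")

    def tearDown(self):
        delete_mock_dataset("mock.csv")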
Code Example #15
    def setUp(self):
        self.maxDiff = None
        conn = engine.connect()
        text = (
            f"INSERT INTO components (uuid, name, description, tags, training_notebook_path, inference_notebook_path, is_default, created_at, updated_at) "
            f"VALUES ('{COMPONENT_ID}', '{NAME}', '{DESCRIPTION}', '{TAGS_JSON}', '{TRAINING_NOTEBOOK_PATH}', '{INFERENCE_NOTEBOOK_PATH}', 0, '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)
        conn.close()

        try:
            MINIO_CLIENT.make_bucket(BUCKET_NAME)
        except BucketAlreadyOwnedByYou:
            pass

        file = BytesIO(SAMPLE_NOTEBOOK.encode("utf-8"))
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=TRAINING_NOTEBOOK_PATH[len(f"minio://{BUCKET_NAME}/"):],
            data=file,
            length=file.getbuffer().nbytes,
        )

        file = BytesIO(
            b'{"cells":[{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]}],"metadata":{"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.6.9"}},"nbformat":4,"nbformat_minor":4}'
        )
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=INFERENCE_NOTEBOOK_PATH[len(f"minio://{BUCKET_NAME}/"):],
            data=file,
            length=file.getbuffer().nbytes,
        )

        session = requests.Session()
        session.cookies.update(COOKIES)
        session.headers.update(HEADERS)
        session.hooks = {
            "response": lambda r, *args, **kwargs: r.raise_for_status(),
        }

        session.put(
            url=f"{JUPYTER_ENDPOINT}/api/contents/components",
            data=dumps({
                "type": "directory",
                "content": None
            }),
        )

        session.put(
            url=f"{JUPYTER_ENDPOINT}/api/contents/components/{COMPONENT_ID}",
            data=dumps({
                "type": "directory",
                "content": None
            }),
        )

        session.put(
            url=f"{JUPYTER_ENDPOINT}/api/contents/components/{COMPONENT_ID}/Inference.ipynb",
            data=dumps({
                "type": "notebook",
                "content": loads(SAMPLE_NOTEBOOK)
            }),
        )

        session.put(
            url=f"{JUPYTER_ENDPOINT}/api/contents/components/{COMPONENT_ID}/Training.ipynb",
            data=dumps({
                "type": "notebook",
                "content": loads(SAMPLE_NOTEBOOK)
            }),
        )
Code Example #16
File: test_tasks.py  Project: dnlcesilva/projects
    def setUp(self):
        self.maxDiff = None
        conn = engine.connect()
        text = (
            f"INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, parameters, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        )
        conn.execute(text, (TASK_ID, NAME, DESCRIPTION, IMAGE, COMMANDS_JSON, ARGUMENTS_JSON, TAGS_JSON,
                            dumps([]), EXPERIMENT_NOTEBOOK_PATH, DEPLOYMENT_NOTEBOOK_PATH, 0, CREATED_AT, UPDATED_AT,))

        text = (
            f"INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, parameters, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        )
        conn.execute(text, (TASK_ID_2, 'foo 2', DESCRIPTION, IMAGE, COMMANDS_JSON, ARGUMENTS_JSON, TAGS_JSON,
                            dumps([]), EXPERIMENT_NOTEBOOK_PATH_2, DEPLOYMENT_NOTEBOOK_PATH_2, 0, CREATED_AT, UPDATED_AT,))
        conn.close()

        try:
            MINIO_CLIENT.make_bucket(BUCKET_NAME)
        except BucketAlreadyOwnedByYou:
            pass

        file = BytesIO(SAMPLE_NOTEBOOK.encode("utf-8"))
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=EXPERIMENT_NOTEBOOK_PATH[len(f"minio://{BUCKET_NAME}/"):],
            data=file,
            length=file.getbuffer().nbytes,
        )

        file = BytesIO(b'{"cells":[{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]}],"metadata":{"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.6.9"}},"nbformat":4,"nbformat_minor":4}')
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=DEPLOYMENT_NOTEBOOK_PATH[len(f"minio://{BUCKET_NAME}/"):],
            data=file,
            length=file.getbuffer().nbytes,
        )

        session = requests.Session()
        session.cookies.update(COOKIES)
        session.headers.update(HEADERS)
        session.hooks = {
            "response": lambda r, *args, **kwargs: r.raise_for_status(),
        }

        session.put(
            url=f"{JUPYTER_ENDPOINT}/api/contents/tasks",
            data=dumps({"type": "directory", "content": None}),
        )

        session.put(
            url=f"{JUPYTER_ENDPOINT}/api/contents/tasks/{NAME}",
            data=dumps({"type": "directory", "content": None}),
        )

        session.put(
            url=f"{JUPYTER_ENDPOINT}/api/contents/tasks/{NAME}/Deployment.ipynb",
            data=dumps({"type": "notebook", "content": loads(SAMPLE_NOTEBOOK)}),
        )

        session.put(
            url=f"{JUPYTER_ENDPOINT}/api/contents/tasks/{NAME}/Experiment.ipynb",
            data=dumps({"type": "notebook", "content": loads(SAMPLE_NOTEBOOK)}),
        )
Code Example #17
    def setUp(self):
        self.maxDiff = None
        conn = engine.connect()
        text = (
            f"INSERT INTO projects (uuid, name, created_at, updated_at) "
            f"VALUES ('{PROJECT_ID}', '{NAME}', '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)

        text = (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EXPERIMENT_ID}', '{NAME}', '{PROJECT_ID}', '{POSITION}', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)

        text = (
            f"INSERT INTO components (uuid, name, description, commands, tags, experiment_notebook_path, deployment_notebook_path, created_at, updated_at) "
            f"VALUES ('{COMPONENT_ID}', '{NAME}', '{DESCRIPTION}', '{COMMANDS_JSON}', '{TAGS_JSON}', '{EXPERIMENT_NOTEBOOK_PATH}', '{DEPLOYMENT_NOTEBOOK_PATH}', '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)

        text = (
            f"INSERT INTO operators (uuid, experiment_id, component_id, parameters, created_at, updated_at) "
            f"VALUES ('{OPERATOR_ID}', '{EXPERIMENT_ID}', '{COMPONENT_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)

        text = (
            f"INSERT INTO operators (uuid, experiment_id, component_id, parameters, created_at, updated_at) "
            f"VALUES ('{OPERATOR_ID2}', '{EXPERIMENT_ID}', '{COMPONENT_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)
        conn.close()

        # uploads mock dataset
        try:
            MINIO_CLIENT.make_bucket(BUCKET_NAME)
        except BucketAlreadyOwnedByYou:
            pass

        file = BytesIO((
            b'col0,col1,col2,col3,col4,col5\n'
            b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
            b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
            b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
        ))
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/{DATASET}",
            data=file,
            length=file.getbuffer().nbytes,
        )
        metadata = {
            "columns": ["col0", "col1", "col2", "col3", "col4", "col5"],
            "featuretypes": [DATETIME, NUMERICAL, NUMERICAL, NUMERICAL, NUMERICAL, CATEGORICAL],
            "filename": DATASET,
            "run_id": RUN_ID,
        }
        buffer = BytesIO(dumps(metadata).encode())
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/{DATASET}.metadata",
            data=buffer,
            length=buffer.getbuffer().nbytes,
        )
        MINIO_CLIENT.copy_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}",
            object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}",
        )
        MINIO_CLIENT.copy_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}.metadata",
            object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}.metadata",
        )
Code Example #18
    def tearDown(self):
        prefix = f"experiments/{EXPERIMENT_ID}"
        for obj in MINIO_CLIENT.list_objects(BUCKET_NAME,
                                             prefix=prefix,
                                             recursive=True):
            MINIO_CLIENT.remove_object(BUCKET_NAME, obj.object_name)
Code Example #19
    def setUp(self):
        self.maxDiff = None
        conn = engine.connect()
        text = (f"INSERT INTO projects (uuid, name, created_at, updated_at) "
                f"VALUES (%s, %s, %s, %s)")
        conn.execute(text, (
            PROJECT_ID,
            NAME,
            CREATED_AT,
            UPDATED_AT,
        ))

        text = (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s, %s)")
        conn.execute(text, (
            EXPERIMENT_ID,
            NAME,
            PROJECT_ID,
            POSITION,
            1,
            CREATED_AT,
            UPDATED_AT,
        ))

        text = (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s, %s)")
        conn.execute(text, (
            EXPERIMENT_ID_2,
            NAME,
            PROJECT_ID,
            POSITION,
            1,
            CREATED_AT,
            UPDATED_AT,
        ))

        text = (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s, %s)")
        conn.execute(text, (
            EXPERIMENT_ID_3,
            NAME,
            PROJECT_ID,
            POSITION,
            1,
            CREATED_AT,
            UPDATED_AT,
        ))

        text = (
            f"INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, parameters, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        conn.execute(text, (
            TASK_ID,
            NAME,
            DESCRIPTION,
            IMAGE,
            COMMANDS_JSON,
            ARGUMENTS_JSON,
            TAGS_JSON,
            dumps([]),
            EXPERIMENT_NOTEBOOK_PATH,
            DEPLOYMENT_NOTEBOOK_PATH,
            0,
            CREATED_AT,
            UPDATED_AT,
        ))

        text = (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s)")
        conn.execute(text, (
            OPERATOR_ID,
            EXPERIMENT_ID,
            TASK_ID,
            PARAMETERS_JSON,
            CREATED_AT,
            UPDATED_AT,
        ))

        text = (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s)")
        conn.execute(text, (
            OPERATOR_ID_2,
            EXPERIMENT_ID_2,
            TASK_ID,
            PARAMETERS_JSON_2,
            CREATED_AT,
            UPDATED_AT,
        ))

        text = (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
            f"VALUES (%s, %s, %s, %s, %s, %s)")
        conn.execute(text, (
            OPERATOR_ID_3,
            EXPERIMENT_ID_3,
            TASK_ID,
            PARAMETERS_JSON_3,
            CREATED_AT,
            UPDATED_AT,
        ))
        conn.close()

        # uploads mock dataset
        try:
            MINIO_CLIENT.make_bucket(BUCKET_NAME)
        except BucketAlreadyOwnedByYou:
            pass

        file = BytesIO((b'col0,col1,col2,col3,col4,col5\n'
                        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
                        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
                        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'))
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/{DATASET}",
            data=file,
            length=file.getbuffer().nbytes,
        )
        metadata = {
            "columns": ["col0", "col1", "col2", "col3", "col4", "col5"],
            "featuretypes": [
                DATETIME, NUMERICAL, NUMERICAL, NUMERICAL, NUMERICAL,
                CATEGORICAL
            ],
            "filename":
            DATASET,
            "run_id":
            RUN_ID,
        }
        buffer = BytesIO(dumps(metadata).encode())
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/{DATASET}.metadata",
            data=buffer,
            length=buffer.getbuffer().nbytes,
        )

        file = BytesIO((b'foo,bar,baz,qux\n'
                        b'01/01/2000,foo,1.2,2.3\n'
                        b'01/01/2000,bar,2.3,3.4\n'
                        b'01/01/2000,baz,4.5,4.5\n'))
        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET_2}/{DATASET_2}",
            data=file,
            length=file.getbuffer().nbytes,
        )
        metadata = {
            "columns": ["foo", "bar", "baz", "qux"],
            "featuretypes": [DATETIME, CATEGORICAL, NUMERICAL, NUMERICAL],
            "filename": DATASET_2,
            "runId": None
        }
        buffer = BytesIO(dumps(metadata).encode())

        MINIO_CLIENT.put_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET_2}/{DATASET_2}.metadata",
            data=buffer,
            length=buffer.getbuffer().nbytes,
        )

        MINIO_CLIENT.copy_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}",
            object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}",
        )
        MINIO_CLIENT.copy_object(
            bucket_name=BUCKET_NAME,
            object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}.metadata",
            object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}.metadata",
        )