def tearDown(self):
    """Remove the database rows created for these tests.

    Deletion order respects foreign-key dependencies: operators first,
    then experiments, projects and tasks.
    """
    conn = engine.connect()
    try:
        # NOTE(review): values come from module-level test constants, so the
        # string interpolation is safe here; parameterized queries would
        # still be the safer habit.
        conn.execute("DELETE FROM operators WHERE 1 = 1")
        conn.execute(
            f"DELETE FROM experiments WHERE project_id in ('{PROJECT_ID}')"
        )
        conn.execute(f"DELETE FROM projects WHERE uuid = '{PROJECT_ID}'")
        conn.execute(f"DELETE FROM tasks WHERE uuid = '{TASK_ID}'")
    finally:
        # Release the connection even if one of the DELETEs fails.
        conn.close()
def setUp(self):
    """Insert the fixture rows used by these tests.

    Creates one task, one project, one experiment, two deployments and
    one operator attached to the first deployment.
    """
    self.maxDiff = None
    # Statements are executed in dependency order (task/project before
    # experiment, experiment before deployments, deployments before operator).
    statements = [
        (
            f"INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            f"VALUES ('{TASK_ID}', '{NAME}', '{DESCRIPTION}', '{IMAGE}', '{COMMANDS_JSON}', '{ARGUMENTS_JSON}', '{TAGS_JSON}', '{EXPERIMENT_NOTEBOOK_PATH}', '{DEPLOYMENT_NOTEBOOK_PATH}', 0, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO projects (uuid, name, created_at, updated_at) "
            f"VALUES ('{PROJECT_ID}', '{NAME}', '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EXPERIMENT_ID}', '{NAME}', '{PROJECT_ID}', '{POSITION}', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO deployments (uuid, name, experiment_id, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{DEPLOYMENT_ID}', '{NAME}', '{EXPERIMENT_ID}', '{PROJECT_ID}', '{POSITION}', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO deployments (uuid, name, experiment_id, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{DEPLOYMENT_ID_2}', '{NAME_2}', '{EXPERIMENT_ID}', '{PROJECT_ID}', '{POSITION}', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, deployment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OPERATOR_ID}', '{DEPLOYMENT_ID}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEPENDENCIES_EMPTY_JSON}')"
        ),
    ]
    conn = engine.connect()
    try:
        for text in statements:
            conn.execute(text)
    finally:
        # Release the connection even if an INSERT fails.
        conn.close()
def tearDown(self):
    """Remove the MinIO objects and database rows created by setUp."""
    # Objects uploaded for the mocked dataset and its run results.
    object_names = (
        f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OP_ID_1_1}/{DATASET}/{DATASET}.metadata",
        f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OP_ID_1_1}/{DATASET}/{DATASET}",
        f"datasets/{DATASET}/{DATASET}.metadata",
        f"datasets/{DATASET}/{DATASET}",
        f"datasets/{DATASET_RUN_ID_NONE}/{DATASET_RUN_ID_NONE}.metadata",
    )
    for object_name in object_names:
        MINIO_CLIENT.remove_object(
            bucket_name=BUCKET_NAME,
            object_name=object_name,
        )
    conn = engine.connect()
    try:
        conn.execute(
            f"DELETE FROM operators WHERE experiment_id in ('{EXP_ID_1}', '{EXP_ID_2}', '{EXP_ID_3}')"
        )
        conn.execute(f"DELETE FROM tasks WHERE uuid = '{TASK_ID}'")
        conn.execute(f"DELETE FROM experiments WHERE project_id = '{PROJECT_ID}'")
        conn.execute(f"DELETE FROM projects WHERE uuid = '{PROJECT_ID}'")
    finally:
        # Release the connection even if one of the DELETEs fails.
        conn.close()
def setUp(self):
    """Populate the database and MinIO with the fixtures used by these tests.

    Creates one task, one project, three experiments with their operators,
    then uploads a mock CSV dataset (plus metadata) to MinIO, including a
    copy under the run/operator prefix and a dataset whose metadata has no
    run id.
    """
    self.maxDiff = None
    # Statements in dependency order: task/project, then experiments,
    # then the operators that reference them.
    statements = [
        (
            f"INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            f"VALUES ('{TASK_ID}', '{NAME}', 'long foo', '{IMAGE}', '{dumps(['CMD'])}', '{dumps(['ARG'])}', '{dumps(['PREDICTOR'])}', '{EXPERIMENT_NOTEBOOK_PATH}', '{DEPLOYMENT_NOTEBOOK_PATH}', 0, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO projects (uuid, name, created_at, updated_at) "
            f"VALUES ('{PROJECT_ID}', '{NAME}', '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EXP_ID_1}', '{NAME}', '{PROJECT_ID}', 0, 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
            f"VALUES ('{OP_ID_1_1}', '{EXP_ID_1}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
            f"VALUES ('{OP_ID_1_2}', '{EXP_ID_1}', '{TASK_ID}', '{dumps({})}', '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EXP_ID_2}', '{NAME}', '{PROJECT_ID}', 1, 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
            f"VALUES ('{OP_ID_2_1}', '{EXP_ID_2}', '{TASK_ID}', '{dumps({})}', '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
            f"VALUES ('{OP_ID_2_2}', '{EXP_ID_2}', '{TASK_ID}', '{dumps({})}', '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EXP_ID_3}', '{NAME}', '{PROJECT_ID}', 2, 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
            f"VALUES ('{OP_ID_3_1}', '{EXP_ID_3}', '{TASK_ID}', '{dumps({'dataset': DATASET_RUN_ID_NONE})}', '{CREATED_AT}', '{UPDATED_AT}')"
        ),
    ]
    conn = engine.connect()
    try:
        for text in statements:
            conn.execute(text)
    finally:
        # Release the connection even if an INSERT fails.
        conn.close()

    # uploads mock dataset
    try:
        MINIO_CLIENT.make_bucket(BUCKET_NAME)
    except BucketAlreadyOwnedByYou:
        # Bucket already exists from a previous run; nothing to do.
        pass

    file = BytesIO(
        b'col0,col1,col2,col3,col4,col5\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
    )
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/{DATASET}",
        data=file,
        length=file.getbuffer().nbytes,
    )
    metadata = {
        "columns": ["col0", "col1", "col2", "col3", "col4", "col5"],
        "featuretypes": [
            DATETIME,
            NUMERICAL,
            NUMERICAL,
            NUMERICAL,
            NUMERICAL,
            CATEGORICAL,
        ],
        "filename": DATASET,
        "run_id": RUN_ID,
    }
    buffer = BytesIO(dumps(metadata).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/{DATASET}.metadata",
        data=buffer,
        length=buffer.getbuffer().nbytes,
    )
    # Copy the dataset (and metadata) under the run/operator prefix to
    # simulate the output of a completed run.
    MINIO_CLIENT.copy_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OP_ID_1_1}/{DATASET}/{DATASET}",
        object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}",
    )
    MINIO_CLIENT.copy_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OP_ID_1_1}/{DATASET}/{DATASET}.metadata",
        object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}.metadata",
    )
    # Dataset whose metadata carries no run id (empty JSON object).
    buffer = BytesIO(dumps({}).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET_RUN_ID_NONE}/{DATASET_RUN_ID_NONE}.metadata",
        data=buffer,
        length=buffer.getbuffer().nbytes,
    )
def setUp(self):
    """Run a default mocked pipeline and insert the fixture rows.

    Starts a pipeline run from ``tests/resources/mocked_training.yaml``,
    then creates one task, one project, and four experiments exercising
    different operator-dependency configurations (empty, valid, invalid,
    and circular dependencies).
    """
    # Run a default pipeline for tests
    client = init_pipeline_client()
    experiment = client.create_experiment(name=MOCKED_TRAINING_ID)
    client.run_pipeline(
        experiment.id,
        MOCKED_TRAINING_ID,
        "tests/resources/mocked_training.yaml",
    )

    # Statements in dependency order: task/project first, then each
    # experiment followed by its operators.
    statements = [
        (
            f"INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            f"VALUES ('{TASK_ID}', 'name', 'desc', '{IMAGE}', '{COMMANDS_JSON}', '{ARGUMENTS_JSON}', '{TAGS_JSON}', '{EX_NOTEBOOK_PATH}', '{DEPLOY_NOTEBOOK_PATH}', 0, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO projects (uuid, name, created_at, updated_at) "
            f"VALUES ('{PROJECT_ID}', 'name', '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        # Experiment 1: two operators, the second depends on the first.
        (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EX_ID_1}', 'ex1', '{PROJECT_ID}', '0', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_1_1}', '{EX_ID_1}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_EMPTY_JSON}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_1_2}', '{EX_ID_1}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_OP_ID_1_1_JSON}')"
        ),
        # Experiment 2: no operators.
        (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EX_ID_2}', 'ex2', '{PROJECT_ID}', '1', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        # Experiment 3: one operator with an invalid dependency.
        (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EX_ID_3}', 'ex3', '{PROJECT_ID}', '2', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_3_1}', '{EX_ID_3}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_OP_INVALID_JSON}')"
        ),
        # Experiment 4: two operators that depend on each other (a cycle).
        (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EX_ID_4}', 'ex4', '{PROJECT_ID}', '3', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_4_1}', '{EX_ID_4}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_OP_ID_4_2_JSON}')"
        ),
        (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_4_2}', '{EX_ID_4}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_OP_ID_4_1_JSON}')"
        ),
    ]
    conn = engine.connect()
    try:
        for text in statements:
            conn.execute(text)
    finally:
        # Release the connection even if an INSERT fails.
        conn.close()