def setUp(self):
    """Seeds the test fixture state.

    Inserts one project, one experiment, one component, and two operators
    into the database, then uploads a mock CSV dataset plus its metadata
    to MinIO and copies both under the run/operator prefix so tests that
    read run results find the same data there.
    """
    self.maxDiff = None
    conn = engine.connect()

    # Use parameterized queries instead of f-string interpolation so values
    # containing quotes cannot break (or inject into) the SQL statements.
    text = (
        "INSERT INTO projects (uuid, name, created_at, updated_at) "
        "VALUES (%s, %s, %s, %s)"
    )
    conn.execute(text, (PROJECT_ID, NAME, CREATED_AT, UPDATED_AT))

    text = (
        "INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s)"
    )
    conn.execute(text, (EXPERIMENT_ID, NAME, PROJECT_ID, POSITION, 1, CREATED_AT, UPDATED_AT))

    text = (
        "INSERT INTO components (uuid, name, description, commands, tags, experiment_notebook_path, deployment_notebook_path, created_at, updated_at) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)"
    )
    conn.execute(text, (
        COMPONENT_ID, NAME, DESCRIPTION, COMMANDS_JSON, TAGS_JSON,
        EXPERIMENT_NOTEBOOK_PATH, DEPLOYMENT_NOTEBOOK_PATH, CREATED_AT, UPDATED_AT,
    ))

    # Both operators share the same experiment/component/parameters fixture
    # data; only the uuid differs, so insert them in a loop.
    text = (
        "INSERT INTO operators (uuid, experiment_id, component_id, parameters, created_at, updated_at) "
        "VALUES (%s, %s, %s, %s, %s, %s)"
    )
    for operator_id in (OPERATOR_ID, OPERATOR_ID2):
        conn.execute(text, (operator_id, EXPERIMENT_ID, COMPONENT_ID, PARAMETERS_JSON, CREATED_AT, UPDATED_AT))
    conn.close()

    # uploads mock dataset
    try:
        MINIO_CLIENT.make_bucket(BUCKET_NAME)
    except BucketAlreadyOwnedByYou:
        # Bucket already exists from a previous run — safe to reuse.
        pass

    file = BytesIO((
        b'col0,col1,col2,col3,col4,col5\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
    ))
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/{DATASET}",
        data=file,
        length=file.getbuffer().nbytes,
    )
    metadata = {
        "columns": ["col0", "col1", "col2", "col3", "col4", "col5"],
        "featuretypes": [DATETIME, NUMERICAL, NUMERICAL, NUMERICAL, NUMERICAL, CATEGORICAL],
        "filename": DATASET,
        "run_id": RUN_ID,
    }
    buffer = BytesIO(dumps(metadata).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/{DATASET}.metadata",
        data=buffer,
        length=buffer.getbuffer().nbytes,
    )

    # Mirror the dataset (and its metadata) under the run/operator prefix.
    MINIO_CLIENT.copy_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}",
        object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}",
    )
    MINIO_CLIENT.copy_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}.metadata",
        object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}.metadata",
    )
def setUp(self):
    """Prepares fixture data for the tests.

    Database: one project, three experiments, one task, and one operator
    per experiment. Object storage: two mock CSV datasets with metadata,
    plus copies of the first dataset under its run/operator prefix.
    """
    self.maxDiff = None
    conn = engine.connect()

    conn.execute(
        "INSERT INTO projects (uuid, name, created_at, updated_at) "
        "VALUES (%s, %s, %s, %s)",
        (PROJECT_ID, NAME, CREATED_AT, UPDATED_AT),
    )

    # Three experiments that differ only by uuid.
    experiment_sql = (
        "INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s)"
    )
    for experiment_uuid in (EXPERIMENT_ID, EXPERIMENT_ID_2, EXPERIMENT_ID_3):
        conn.execute(experiment_sql, (experiment_uuid, NAME, PROJECT_ID, POSITION, 1, CREATED_AT, UPDATED_AT))

    conn.execute(
        "INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, parameters, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
        (
            TASK_ID, NAME, DESCRIPTION, IMAGE, COMMANDS_JSON, ARGUMENTS_JSON,
            TAGS_JSON, dumps([]), EXPERIMENT_NOTEBOOK_PATH,
            DEPLOYMENT_NOTEBOOK_PATH, 0, CREATED_AT, UPDATED_AT,
        ),
    )

    # One operator per experiment, each with its own parameters payload.
    operator_sql = (
        "INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at) "
        "VALUES (%s, %s, %s, %s, %s, %s)"
    )
    operator_rows = (
        (OPERATOR_ID, EXPERIMENT_ID, PARAMETERS_JSON),
        (OPERATOR_ID_2, EXPERIMENT_ID_2, PARAMETERS_JSON_2),
        (OPERATOR_ID_3, EXPERIMENT_ID_3, PARAMETERS_JSON_3),
    )
    for operator_uuid, experiment_uuid, parameters in operator_rows:
        conn.execute(operator_sql, (operator_uuid, experiment_uuid, TASK_ID, parameters, CREATED_AT, UPDATED_AT))
    conn.close()

    # uploads mock dataset
    try:
        MINIO_CLIENT.make_bucket(BUCKET_NAME)
    except BucketAlreadyOwnedByYou:
        # Bucket survives between runs; reusing it is fine.
        pass

    iris_csv = BytesIO(
        b'col0,col1,col2,col3,col4,col5\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
        b'01/01/2000,5.1,3.5,1.4,0.2,Iris-setosa\n'
    )
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/{DATASET}",
        data=iris_csv,
        length=iris_csv.getbuffer().nbytes,
    )
    iris_metadata = {
        "columns": ["col0", "col1", "col2", "col3", "col4", "col5"],
        "featuretypes": [
            DATETIME, NUMERICAL, NUMERICAL, NUMERICAL, NUMERICAL, CATEGORICAL
        ],
        "filename": DATASET,
        "run_id": RUN_ID,
    }
    iris_metadata_buf = BytesIO(dumps(iris_metadata).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/{DATASET}.metadata",
        data=iris_metadata_buf,
        length=iris_metadata_buf.getbuffer().nbytes,
    )

    second_csv = BytesIO(
        b'foo,bar,baz,qux\n'
        b'01/01/2000,foo,1.2,2.3\n'
        b'01/01/2000,bar,2.3,3.4\n'
        b'01/01/2000,baz,4.5,4.5\n'
    )
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET_2}/{DATASET_2}",
        data=second_csv,
        length=second_csv.getbuffer().nbytes,
    )
    second_metadata = {
        "columns": ["foo", "bar", "baz", "qux"],
        "featuretypes": [DATETIME, CATEGORICAL, NUMERICAL, NUMERICAL],
        "filename": DATASET_2,
        "runId": None
    }
    second_metadata_buf = BytesIO(dumps(second_metadata).encode())
    MINIO_CLIENT.put_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET_2}/{DATASET_2}.metadata",
        data=second_metadata_buf,
        length=second_metadata_buf.getbuffer().nbytes,
    )

    # Mirror the first dataset and its metadata under the run/operator prefix.
    MINIO_CLIENT.copy_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}",
        object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}",
    )
    MINIO_CLIENT.copy_object(
        bucket_name=BUCKET_NAME,
        object_name=f"datasets/{DATASET}/runs/{RUN_ID}/operators/{OPERATOR_ID}/{DATASET}/{DATASET}.metadata",
        object_source=f"/{BUCKET_NAME}/datasets/{DATASET}/{DATASET}.metadata",
    )