async def compute_output_data_schema(
    app: FastAPI,
    user_id: UserID,
    project_id: ProjectID,
    node_id: NodeID,
) -> TaskOutputDataSchema:
    ports = await _create_node_ports(
        db_engine=app.state.engine,
        user_id=user_id,
        project_id=project_id,
        node_id=node_id,
    )
    output_data_schema = {}
    for port in (await ports.outputs).values():
        # a port is only required if it has no default value
        output_data_schema[port.key] = {"required": port.default_value is None}
        if port_utils.is_file_type(port.property_type):
            # file-type ports additionally carry an upload link and the file mapping
            value_link = await port_utils.get_upload_link_from_storage(
                user_id=user_id,
                project_id=f"{project_id}",
                node_id=f"{node_id}",
                file_name=next(iter(port.file_to_key_map))
                if port.file_to_key_map
                else port.key,
            )
            output_data_schema[port.key].update(
                {
                    "mapping": next(iter(port.file_to_key_map))
                    if port.file_to_key_map
                    else None,
                    "url": value_link,
                }
            )
    return TaskOutputDataSchema.parse_obj(output_data_schema)
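
# For orientation, a node with one plain output and one file output yields a
# schema along these lines before validation (a sketch with made-up key names;
# the "url" value is whatever upload link get_upload_link_from_storage returns):
#
# {
#     "out_number": {"required": True},
#     "out_file": {
#         "required": True,
#         "mapping": "first_key_of_file_to_key_map",  # or None without a map
#         "url": "s3://bucket/.../out_file?signature=...",
#     },
# }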
def _create_fake_outputs(
    schema: TaskOutputDataSchema,
    output_folder: Path,
    set_optional_field: bool,
    faker: Faker,
) -> Optional[str]:
    jsonable_data = {}
    for key, value in schema.items():
        if not value.required and not set_optional_field:
            continue
        if isinstance(value, FilePortSchema):
            # a file shall be present
            a_file = output_folder / (value.mapping or key)
            a_file.write_text(faker.text(max_nb_chars=450))
            assert a_file.exists()
        else:
            jsonable_data[
                key
            ] = "some value just for testing, does not represent any kind of type"
    if jsonable_data:
        output_file = output_folder / faker.file_name()
        with output_file.open("wt") as fp:
            json.dump(jsonable_data, fp)
        assert output_file.exists()
        return output_file.name
    return None
@pytest.fixture
def mocked_node_ports(mocker: MockerFixture):
    # stub out all node-ports I/O so the tests never touch real storage
    mocker.patch(
        "simcore_service_director_v2.modules.dask_client.compute_input_data",
        return_value=TaskInputData.parse_obj({}),
    )
    mocker.patch(
        "simcore_service_director_v2.modules.dask_client.compute_output_data_schema",
        return_value=TaskOutputDataSchema.parse_obj({}),
    )
    mocker.patch(
        "simcore_service_director_v2.modules.dask_client.compute_service_log_file_upload_link",
        return_value=parse_obj_as(AnyUrl, "file://undefined"),
    )
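
# A minimal sketch of how the fixture above is meant to be used (hypothetical
# test name and dask_client fixture, just to illustrate the wiring): requesting
# mocked_node_ports by name is enough, since the patches stay active for the
# duration of the test.
#
# async def test_computation_runs_without_real_storage(
#     mocked_node_ports, dask_client
# ):
#     ...  # calls into dask_client now hit the patched node-ports helpers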
def test_create_task_output_from_task_does_not_throw_when_there_are_optional_entries(
    tmp_path: Path, faker: Faker
):
    task_output_schema = TaskOutputDataSchema.parse_obj(
        {
            "some_output": {
                "required": False,
            },
        }
    )
    task_output_data = TaskOutputData.from_task_output(
        schema=task_output_schema,
        output_folder=tmp_path,
        output_file_ext=faker.file_name(),
    )
    assert len(task_output_data) == 0
def test_create_task_output_from_task_throws_when_there_are_missing_entries(
    tmp_path: Path, faker: Faker
):
    task_output_schema = TaskOutputDataSchema.parse_obj(
        {
            "some_output": {
                "required": True,
            },
        }
    )
    with pytest.raises(ValueError):
        TaskOutputData.from_task_output(
            schema=task_output_schema,
            output_folder=tmp_path,
            output_file_ext=faker.file_name(),
        )
def test_create_task_output_from_task_throws_when_there_are_missing_files(
    tmp_path: Path, faker: Faker
):
    task_output_schema = TaskOutputDataSchema.parse_obj(
        {
            "required_file_output": {
                "required": True,
                "url": "s3://some_file_url",
                "mapping": "the_output_filename",
            },
        }
    )
    with pytest.raises(ValueError):
        TaskOutputData.from_task_output(
            schema=task_output_schema,
            output_folder=tmp_path,
            output_file_ext=faker.file_name(),
        )
def test_create_task_output_from_task_does_not_throw_when_there_are_optional_missing_files(
    tmp_path: Path, faker: Faker
):
    task_output_schema = TaskOutputDataSchema.parse_obj(
        {
            "optional_file_output": {
                "required": False,
                "url": "s3://some_file_url",
                "mapping": "the_output_filename",
            },
        }
    )
    task_output_data = TaskOutputData.from_task_output(
        schema=task_output_schema,
        output_folder=tmp_path,
        output_file_ext=faker.file_name(),
    )
    assert len(task_output_data) == 0
@pytest.mark.parametrize("optional_fields_set", [True, False])
def test_create_task_output_from_task_with_optional_fields_as_required(
    tmp_path: Path, optional_fields_set: bool, faker: Faker
):
    for schema_example in TaskOutputDataSchema.Config.schema_extra["examples"]:
        task_output_schema = TaskOutputDataSchema.parse_obj(schema_example)
        outputs_file_name = _create_fake_outputs(
            task_output_schema, tmp_path, optional_fields_set, faker
        )
        task_output_data = TaskOutputData.from_task_output(
            schema=task_output_schema,
            output_folder=tmp_path,
            output_file_ext=outputs_file_name,
        )
        assert task_output_data

        for key, value in task_output_schema.items():
            if not value.required and not optional_fields_set:
                assert task_output_data.get(key) is None
            else:
                assert task_output_data.get(key) is not None
@pytest.fixture
# NOTE: expects the integration version string via request.param, i.e. the
# fixture is parametrized (directly or indirectly) by the tests using it
def ubuntu_task(request: FixtureRequest, ftp_server: List[URL]) -> ServiceExampleParam:
    """Creates a console task in an ubuntu distro that checks for
    the expected files and errors if they are missing"""
    integration_version = version.Version(request.param)
    print("Using service integration:", integration_version)

    # defines the inputs of the task
    input_data = TaskInputData.parse_obj(
        {
            "input_1": 23,
            "input_23": "a string input",
            "the_input_43": 15.0,
            "the_bool_input_54": False,
            **{
                f"some_file_input_{index+1}": FileUrl(url=f"{file}")
                for index, file in enumerate(ftp_server)
            },
            **{
                f"some_file_input_with_mapping{index+1}": FileUrl(
                    url=f"{file}",
                    file_mapping=f"{index+1}/some_file_input",
                )
                for index, file in enumerate(ftp_server)
            },
        }
    )

    # check in the console that the expected files are present in the expected
    # INPUT folder (set as ${INPUT_FOLDER} in the service)
    file_names = [file.path for file in ftp_server]
    list_of_commands = [
        "echo User: $(id $(whoami))",
        "echo Inputs:",
        "ls -tlah -R ${INPUT_FOLDER}",
        "echo Outputs:",
        "ls -tlah -R ${OUTPUT_FOLDER}",
        "echo Logs:",
        "ls -tlah -R ${LOG_FOLDER}",
    ]
    list_of_commands += [
        f"(test -f ${{INPUT_FOLDER}}/{file} || (echo ${{INPUT_FOLDER}}/{file} does not exist && exit 1))"
        for file in file_names
    ] + [f"echo $(cat ${{INPUT_FOLDER}}/{file})" for file in file_names]

    input_json_file_name = (
        "inputs.json"
        if integration_version > LEGACY_INTEGRATION_VERSION
        else "input.json"
    )
    list_of_commands += [
        f"(test -f ${{INPUT_FOLDER}}/{input_json_file_name} || (echo ${{INPUT_FOLDER}}/{input_json_file_name} file does not exist && exit 1))",
        f"echo $(cat ${{INPUT_FOLDER}}/{input_json_file_name})",
        f"sleep {randint(1,4)}",
    ]

    # defines the expected outputs
    jsonable_outputs = {
        "pytest_string": "is quite an amazing feat",
        "pytest_integer": 432,
        "pytest_float": 3.2,
        "pytest_bool": False,
    }
    output_file_url = next(iter(ftp_server)).with_path("output_file")
    expected_output_keys = TaskOutputDataSchema.parse_obj(
        {
            **{k: {"required": True} for k in jsonable_outputs.keys()},
            **{
                "pytest_file": {
                    "required": True,
                    "mapping": "a_outputfile",
                    "url": f"{output_file_url}",
                },
                "pytest_file_with_mapping": {
                    "required": True,
                    "mapping": "subfolder/a_outputfile",
                    "url": f"{output_file_url}",
                },
            },
        }
    )
    expected_output_data = TaskOutputData.parse_obj(
        {
            **jsonable_outputs,
            **{
                "pytest_file": {
                    "url": f"{output_file_url}",
                    "file_mapping": "a_outputfile",
                },
                "pytest_file_with_mapping": {
                    "url": f"{output_file_url}",
                    "file_mapping": "subfolder/a_outputfile",
                },
            },
        }
    )
    jsonized_outputs = json.dumps(jsonable_outputs).replace('"', '\\"')
    output_json_file_name = (
        "outputs.json"
        if integration_version > LEGACY_INTEGRATION_VERSION
        else "output.json"
    )

    # check for the log file if legacy version
    list_of_commands += [
        "echo $(ls -tlah ${LOG_FOLDER})",
        f"(test {'!' if integration_version > LEGACY_INTEGRATION_VERSION else ''} -f ${{LOG_FOLDER}}/{LEGACY_SERVICE_LOG_FILE_NAME} || (echo ${{LOG_FOLDER}}/{LEGACY_SERVICE_LOG_FILE_NAME} file does {'' if integration_version > LEGACY_INTEGRATION_VERSION else 'not'} exist && exit 1))",
    ]
    if integration_version == LEGACY_INTEGRATION_VERSION:
        # legacy services are expected to redirect their output to the log file
        list_of_commands = [
            f"{c} >> ${{LOG_FOLDER}}/{LEGACY_SERVICE_LOG_FILE_NAME}"
            for c in list_of_commands
        ]

    # set the final command to generate the output file(s) (files and json output)
    list_of_commands += [
        f"echo {jsonized_outputs} > ${{OUTPUT_FOLDER}}/{output_json_file_name}",
        "echo 'some data for the output file' > ${OUTPUT_FOLDER}/a_outputfile",
        "mkdir -p ${OUTPUT_FOLDER}/subfolder",
        "echo 'some data for the output file' > ${OUTPUT_FOLDER}/subfolder/a_outputfile",
    ]

    log_file_url = parse_obj_as(
        AnyUrl, f"{next(iter(ftp_server)).with_path('log.dat')}"
    )

    return ServiceExampleParam(
        docker_basic_auth=DockerBasicAuth(
            server_address="docker.io", username="******", password=""
        ),
        #
        # NOTE: we use sleeper because it defines a user
        # that can write in outputs and the
        # sidecar can remove the outputs dirs
        #
        service_key="itisfoundation/sleeper",
        service_version="2.1.2",
        command=[
            "/bin/bash",
            "-c",
            " && ".join(list_of_commands),
        ],
        input_data=input_data,
        output_data_keys=expected_output_keys,
        log_file_url=log_file_url,
        expected_output_data=expected_output_data,
        expected_logs=[
            '{"input_1": 23, "input_23": "a string input", "the_input_43": 15.0, "the_bool_input_54": false}',
            "This is the file contents of 'file_1'",
            "This is the file contents of 'file_2'",
            "This is the file contents of 'file_3'",
        ],
        integration_version=integration_version,
    )