async def test_removing_ports(
    user_id: int,
    project_id: str,
    node_uuid: str,
    special_configuration: Callable,
    postgres_db: sa.engine.Engine,
):
    """Ports must stay in sync with the stored configuration after an
    input and an output are removed from the node."""
    config_dict, project_id, node_uuid = special_configuration(
        inputs=[("in_14", "integer", 15), ("in_17", "boolean", False)],
        outputs=[("out_123", "string", "blahblah"), ("out_2", "number", -12.3)],
    )  # pylint: disable=W0612
    ports = await node_ports_v2.ports(
        user_id=user_id, project_id=project_id, node_uuid=node_uuid
    )
    await check_config_valid(ports, config_dict)

    # drop the first input from both the schema and the value mapping
    config_dict["schema"]["inputs"].pop("in_14")
    config_dict["inputs"].pop("in_14")
    np_helpers.update_configuration(
        postgres_db, project_id, node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)

    # same exercise for the second output
    config_dict["schema"]["outputs"].pop("out_2")
    config_dict["outputs"].pop("out_2")
    np_helpers.update_configuration(
        postgres_db, project_id, node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)
async def test_adding_new_ports(special_configuration, postgres_session):
    """Inputs/outputs added to the stored configuration must show up in
    the ports object on the next validation."""
    config_dict, project_id, node_uuid = special_configuration()
    ports = await node_ports.ports()
    await check_config_valid(ports, config_dict)
    # the configuration starts out with no ports at all
    assert not (await ports.inputs)
    assert not (await ports.outputs)

    # extend the configuration with a new input port
    config_dict["schema"]["inputs"]["in_15"] = {
        "label": "additional data",
        "description": "here some additional data",
        "displayOrder": 2,
        "type": "integer",
    }
    config_dict["inputs"]["in_15"] = 15
    np_helpers.update_configuration(
        postgres_session, project_id, node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)

    # now extend it with a new output port as well
    config_dict["schema"]["outputs"]["out_15"] = {
        "label": "output data",
        "description": "a cool output",
        "displayOrder": 2,
        "type": "boolean",
    }
    np_helpers.update_configuration(
        postgres_session, project_id, node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)
async def test_file_mapping(
    special_configuration,
    project_id,
    node_uuid,
    filemanager_cfg,
    s3_simcore_location,
    bucket,
    store_link,
    postgres_session,
    item_type,
    item_value,
    item_alias,
    item_pytype,
):
    """fileToKeyMap entries must rename downloaded input files and allow
    setting output files by their mapped alias."""
    config_dict, project_id, node_uuid = special_configuration(
        inputs=[("in_1", item_type, store_link(item_value, project_id, node_uuid))],
        outputs=[("out_1", item_type, None)],
        project_id=project_id,
        node_id=node_uuid,
    )
    ports = await node_ports.ports()
    await check_config_valid(ports, config_dict)

    # map the alias onto both the input and the output port
    config_dict["schema"]["inputs"]["in_1"]["fileToKeyMap"] = {item_alias: "in_1"}
    config_dict["schema"]["outputs"]["out_1"]["fileToKeyMap"] = {item_alias: "out_1"}
    np_helpers.update_configuration(
        postgres_session, project_id, node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)

    expected_path = Path(tempfile.gettempdir(), "simcorefiles", "in_1", item_alias)
    # fetch twice: the second round checks that replacing an already
    # downloaded file works
    for _ in range(2):
        file_path = await (await ports.inputs)["in_1"].get()
        assert isinstance(file_path, item_pytype)
        assert file_path == expected_path

    # an alias that is mapped nowhere must be rejected
    with pytest.raises(exceptions.PortNotFound):
        await ports.set_file_by_keymap(Path("invalid_alias.fjfj"))

    await ports.set_file_by_keymap(file_path)
    file_id = np_helpers.file_uuid(file_path, project_id, node_uuid)
    assert (await ports.outputs)["out_1"].value == {
        "store": s3_simcore_location,
        "path": file_id,
    }
async def test_get_file_from_previous_node_with_mapping_of_same_key_name(
    special_2nodes_configuration: Callable,
    user_id: int,
    project_id: str,
    node_uuid: str,
    filemanager_cfg: None,
    node_link: Callable,
    store_link: Callable,
    postgres_db: sa.engine.Engine,
    item_type: str,
    item_value: str,
    item_alias: str,
    item_pytype: Type,
):
    """A linked input reusing the upstream port key must still be renamed
    through its fileToKeyMap when downloaded."""
    config_dict, _, this_node_uuid = special_2nodes_configuration(
        prev_node_inputs=None,
        prev_node_outputs=[("in_15", item_type, await store_link(item_value))],
        inputs=[("in_15", item_type, node_link("in_15"))],
        outputs=None,
        project_id=project_id,
        previous_node_id=f"{uuid4()}",
        node_id=node_uuid,
    )
    ports = await node_ports_v2.ports(
        user_id=user_id, project_id=project_id, node_uuid=node_uuid
    )
    await check_config_valid(ports, config_dict)

    # alias the downloaded file through fileToKeyMap
    config_dict["schema"]["inputs"]["in_15"]["fileToKeyMap"] = {item_alias: "in_15"}
    np_helpers.update_configuration(
        postgres_db, project_id, this_node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)

    file_path = await (await ports.inputs)["in_15"].get()
    assert isinstance(file_path, item_pytype)
    expected_path = Path(
        tempfile.gettempdir(),
        "simcorefiles",
        f"{threading.get_ident()}",
        "in_15",
        item_alias,
    )
    assert file_path == expected_path
    assert file_path.exists()
    # downloaded file must compare equal to the stored item
    filecmp.clear_cache()
    assert filecmp.cmp(file_path, item_value)
async def test_adding_new_ports(
    user_id: int,
    project_id: str,
    node_uuid: str,
    special_configuration: Callable,
    postgres_db: sa.engine.Engine,
):
    """Inputs/outputs added to the stored configuration must show up in
    the node_ports_v2 ports object on the next validation."""
    config_dict, project_id, node_uuid = special_configuration()
    ports = await node_ports_v2.ports(
        user_id=user_id, project_id=project_id, node_uuid=node_uuid
    )
    await check_config_valid(ports, config_dict)

    # extend the configuration with a new input port
    config_dict["schema"]["inputs"]["in_15"] = {
        "label": "additional data",
        "description": "here some additional data",
        "displayOrder": 2,
        "type": "integer",
    }
    config_dict["inputs"]["in_15"] = 15
    np_helpers.update_configuration(
        postgres_db, project_id, node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)

    # now extend it with a new output port as well
    config_dict["schema"]["outputs"]["out_15"] = {
        "label": "output data",
        "description": "a cool output",
        "displayOrder": 2,
        "type": "boolean",
    }
    np_helpers.update_configuration(
        postgres_db, project_id, node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)
async def test_get_file_from_previous_node_with_mapping_of_same_key_name(
    special_2nodes_configuration,
    project_id,
    node_uuid,
    filemanager_cfg,
    node_link,
    store_link,
    postgres_session,
    item_type,
    item_value,
    item_alias,
    item_pytype,
):
    """A linked input reusing the upstream port key must still be renamed
    through its fileToKeyMap when downloaded."""
    config_dict, _, this_node_uuid = special_2nodes_configuration(
        prev_node_outputs=[
            ("in_15", item_type, store_link(item_value, project_id, node_uuid))
        ],
        inputs=[("in_15", item_type, node_link("in_15"))],
        project_id=project_id,
        previous_node_id=node_uuid,
    )
    ports = await node_ports.ports()
    await check_config_valid(ports, config_dict)

    # alias the downloaded file through fileToKeyMap
    config_dict["schema"]["inputs"]["in_15"]["fileToKeyMap"] = {item_alias: "in_15"}
    np_helpers.update_configuration(
        postgres_session, project_id, this_node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)

    file_path = await (await ports.inputs)["in_15"].get()
    assert isinstance(file_path, item_pytype)
    expected_path = Path(tempfile.gettempdir(), "simcorefiles", "in_15", item_alias)
    assert file_path == expected_path
    assert file_path.exists()
    # downloaded file must compare equal to the stored item
    filecmp.clear_cache()
    assert filecmp.cmp(file_path, item_value)
async def test_file_mapping(
    special_configuration: Callable,
    user_id: int,
    project_id: str,
    node_uuid: str,
    filemanager_cfg: None,
    s3_simcore_location: str,
    bucket: str,
    store_link: Callable,
    postgres_db: sa.engine.Engine,
    item_type: str,
    item_value: str,
    item_alias: str,
    item_pytype: Type,
):
    """fileToKeyMap entries must rename downloaded input files and allow
    setting output files by their mapped alias."""
    config_dict, project_id, node_uuid = special_configuration(
        inputs=[("in_1", item_type, await store_link(item_value))],
        outputs=[("out_1", item_type, None)],
        project_id=project_id,
        node_id=node_uuid,
    )
    ports = await node_ports_v2.ports(
        user_id=user_id, project_id=project_id, node_uuid=node_uuid
    )
    await check_config_valid(ports, config_dict)

    # map the alias onto both the input and the output port
    config_dict["schema"]["inputs"]["in_1"]["fileToKeyMap"] = {item_alias: "in_1"}
    config_dict["schema"]["outputs"]["out_1"]["fileToKeyMap"] = {item_alias: "out_1"}
    np_helpers.update_configuration(
        postgres_db, project_id, node_uuid, config_dict
    )  # pylint: disable=E1101
    await check_config_valid(ports, config_dict)

    expected_path = Path(
        tempfile.gettempdir(),
        "simcorefiles",
        f"{threading.get_ident()}",
        "in_1",
        item_alias,
    )
    # fetch twice: the second round checks that replacing an already
    # downloaded file works
    for _ in range(2):
        file_path = await (await ports.inputs)["in_1"].get()
        assert isinstance(file_path, item_pytype)
        assert file_path == expected_path

    # an alias that is mapped nowhere must be rejected
    with pytest.raises(exceptions.PortNotFound):
        await ports.set_file_by_keymap(Path("invalid_alias.fjfj"))

    await ports.set_file_by_keymap(file_path)
    file_id = np_helpers.file_uuid(file_path, project_id, node_uuid)
    received_file_link = (await ports.outputs)["out_1"].value.dict(
        by_alias=True, exclude_unset=True
    )
    assert received_file_link["store"] == s3_simcore_location
    assert received_file_link["path"] == file_id
    # uploading through the keymap must have produced a fresh eTag
    assert received_file_link["eTag"]