def create_work_phase(pb_id):
    """Create work phase."""
    pb = workflow.ProcessingBlock(pb_id)
    in_buffer_res = pb.request_buffer(100e6, tags=["sdm"])
    out_buffer_res = pb.request_buffer(10 * 6e15 / 3600, tags=["visibilities"])
    work_phase = pb.create_phase("Work", [in_buffer_res, out_buffer_res])
    return work_phase

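# Illustrative usage sketch (not part of the original tests): how the helper
# above might be exercised. The PB id matches the one used throughout these
# tests; the expectation that entering the phase claims the requested buffers
# and that exiting it updates the processing block state is an assumption
# drawn from the tests below.
def example_use_create_work_phase():
    """Sketch: create and enter the work phase built by the helper above."""
    wipe_config_db()
    create_sbi_pbi()
    create_pb_states()

    work_phase = create_work_phase("pb-mvp01-20200425-00000")
    with work_phase:
        # Real workflows would deploy execution engines and publish receive
        # addresses here; the helper only sets up the buffer reservations.
        pass
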
def test_dns_name():
    """Test generating DNS names."""
    # Wipe the config DB
    wipe_config_db()

    # Create SBI and PB
    create_sbi_pbi()

    # Create processing block states
    create_pb_states()

    pb_id = "pb-mvp01-20200425-00000"
    pb = workflow.ProcessingBlock(pb_id)

    # Port and receive process configuration
    host_port, num_process = pb.configure_recv_processes_ports(
        pb.get_scan_types(), 10, 9000, 1)
    VALUES["replicas"] = num_process

    work_phase = pb.create_phase("Work", [])

    expected_dns_name = [
        "test-recv-0.receive.test-sdp.svc.cluster.local",
        "proc-pb-mvp01-20200425-00000-test-receive-0.receive.sdp.svc.cluster.local",
    ]

    with work_phase:
        for txn in CONFIG_DB_CLIENT.txn():
            for sbi_id in txn.list_scheduling_blocks():
                ee_receive = work_phase.ee_deploy_helm("test-receive")

                # Testing with just passing scan types
                pb.receive_addresses(chart_name=ee_receive.get_id(),
                                     configured_host_port=host_port)
                state = txn.get_processing_block_state(pb_id)
                pb_recv_addresses = state.get("receive_addresses")
                pb_science_host = pb_recv_addresses["science_A"].get("host")
                assert pb_science_host[0][1] == expected_dns_name[1]

                # Testing with statefulset name, service name and namespace
                host_port1, num_process1 = pb.configure_recv_processes_ports(
                    SCAN_TYPES, 10, 9000, 1)
                VALUES["replicas"] = num_process1
                pb.receive_addresses("test-recv", "receive", "test-sdp",
                                     host_port1)
                state = txn.get_processing_block_state(pb_id)
                pb_receive_addresses = state.get("receive_addresses")
                pb_cal_host = pb_receive_addresses["calibration_B"].get("host")
                assert pb_cal_host[0][1] == expected_dns_name[0]

                # Set scheduling block instance to FINISHED
                sbi_state = txn.get_scheduling_block(sbi_id)
                sbi_state.update({"subarray_id": None, "status": "FINISHED"})
                txn.update_scheduling_block(sbi_id, sbi_state)

    for txn in CONFIG_DB_CLIENT.txn():
        pb_state = txn.get_processing_block_state(pb_id)
        assert pb_state.get("status") == "FINISHED"

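# Illustrative sketch (not part of the test above): both expected names follow
# the standard Kubernetes StatefulSet pod naming scheme,
# <statefulset>-<ordinal>.<service>.<namespace>.svc.cluster.local. Reading the
# second name as using the Helm deployment id ("proc-<pb_id>-<chart name>") as
# the StatefulSet name is an assumption.
def example_recv_pod_dns_name(statefulset, service, namespace, ordinal=0):
    """Sketch: build a receive pod DNS name from its Kubernetes parts."""
    return f"{statefulset}-{ordinal}.{service}.{namespace}.svc.cluster.local"


def example_dns_name_parts():
    """Sketch: re-derive the two literals asserted in test_dns_name."""
    assert example_recv_pod_dns_name("test-recv", "receive", "test-sdp") == \
        "test-recv-0.receive.test-sdp.svc.cluster.local"
    assert example_recv_pod_dns_name(
        "proc-pb-mvp01-20200425-00000-test-receive", "receive", "sdp") == \
        "proc-pb-mvp01-20200425-00000-test-receive-0.receive.sdp.svc.cluster.local"
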
def test_port():
    """Test generating receive addresses with multiple ports per process."""
    # Wipe the config DB
    wipe_config_db()

    # Create SBI and PB
    create_sbi_pbi()

    # Create processing block states
    create_pb_states()

    pb_id = "pb-mvp01-20200425-00000"
    pb = workflow.ProcessingBlock(pb_id)

    # Port and receive process configuration
    host_port, num_process = pb.configure_recv_processes_ports(
        SCAN_TYPES, 10, 41000, 2)

    work_phase = pb.create_phase("Work", [])

    VALUES["num_process"] = num_process
    assert VALUES["num_process"] == 2

    # Get the expected receive addresses from the data file
    receive_addresses_expected = read_json_data(
        "receive_addresses_multiple_ports.json", decode=True)

    with work_phase:
        for txn in CONFIG_DB_CLIENT.txn():
            sbi_list = txn.list_scheduling_blocks()
            for sbi_id in sbi_list:
                work_phase.ee_deploy_helm("test-receive",
                                          pb.nested_parameters(VALUES))
                pb.receive_addresses(configured_host_port=host_port)
                state = txn.get_processing_block_state(pb_id)
                pb_receive_addresses = state.get("receive_addresses")
                assert pb_receive_addresses == receive_addresses_expected
                validate(SDP_RECVADDRS_PREFIX + SCHEMA_VERSION,
                         pb_receive_addresses, 2)

                # Set scheduling block instance to FINISHED
                sbi = {"subarray_id": None, "status": "FINISHED"}
                sbi_state = txn.get_scheduling_block(sbi_id)
                sbi_state.update(sbi)
                txn.update_scheduling_block(sbi_id, sbi_state)

    for txn in CONFIG_DB_CLIENT.txn():
        pb_state = txn.get_processing_block_state(pb_id)
        pb_status = pb_state.get("status")
        assert pb_status == "FINISHED"

def test_claim_processing_block():
    """Test claiming a processing block."""
    # Wipe the config DB
    wipe_config_db()

    # Create SBI and PB
    create_sbi_pbi()

    for txn in CONFIG_DB_CLIENT.txn():
        pb_list = txn.list_processing_blocks()
        for pb_id in pb_list:
            assert txn.get_processing_block(pb_id).id == pb_id
            workflow.ProcessingBlock(pb_id)
            assert txn.is_processing_block_owner(pb_id)

def test_buffer_request():
    """Test requesting input and output buffers."""
    # Wipe the config DB
    wipe_config_db()

    # Create SBI and PB
    create_sbi_pbi()

    for txn in CONFIG_DB_CLIENT.txn():
        pb_list = txn.list_processing_blocks()
        for pb_id in pb_list:
            pb = workflow.ProcessingBlock(pb_id)
            parameters = pb.get_parameters()
            assert parameters["length"] == 10

            in_buffer_res = pb.request_buffer(100e6, tags=["sdm"])
            out_buffer_res = pb.request_buffer(
                parameters["length"] * 6e15 / 3600, tags=["visibilities"])

            assert in_buffer_res is not None
            assert out_buffer_res is not None

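# Worked numbers for the buffer request above (sketch). Reading "length" as
# the observation length in seconds and 6e15 bytes/hour as a nominal
# visibility data rate is an assumption about the intent of the formula.
def example_buffer_sizes(length_s=10):
    """Sketch: the buffer sizes requested in test_buffer_request."""
    sdm_bytes = 100e6                   # fixed 100 MB for the SDM buffer
    vis_bytes = length_s * 6e15 / 3600  # 10 * 6e15 / 3600 ~= 1.67e13 bytes (~16.7 TB)
    return sdm_bytes, vis_bytes
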
address map on the appropriate attribute to complete the transition following
AssignResources. This workflow does not generate any deployments.
"""

import logging

import ska_ser_logging

from ska_sdp_workflow import workflow

ska_ser_logging.configure_logging()
LOG = logging.getLogger("test_receive_addresses")
LOG.setLevel(logging.DEBUG)

# Claim processing block
pb = workflow.ProcessingBlock()

# Default maximum number of channels per receive process
max_channels = 20

# Port configuration
port_start = 9000
num_ports = 1

# Get the channel link map from SBI
scan_types = pb.get_scan_types()

# Port and receive process configuration
host_port, num_process = pb.configure_recv_processes_ports(
    scan_types, max_channels, port_start, num_ports
)

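# Sketch of how the workflow might continue from here (an assumption; only
# calls already shown in the surrounding tests are used, and the
# wait-for-SBI-finished step is left as a comment because its API is not
# shown in this excerpt).

# Create the work phase. This workflow makes no deployments and reserves no
# buffers, so the resource list is empty.
work_phase = pb.create_phase("Work", [])

with work_phase:
    # Publish the receive addresses derived from the scan types and the
    # configured host/port map; the subarray reads these off the processing
    # block state to complete the transition following AssignResources.
    pb.receive_addresses(configured_host_port=host_port)

    # ... then wait until the scheduling block instance is finished and let
    # the phase exit mark the processing block as FINISHED.
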
def test_receive_addresses():
    """Test generating and updating receive addresses."""
    # Wipe the config DB
    wipe_config_db()

    # Create SBI and PB
    create_sbi_pbi()

    # Create processing block states
    create_pb_states()

    pb_id = "pb-mvp01-20200425-00000"
    pb = workflow.ProcessingBlock(pb_id)

    # Port and receive process configuration
    host_port, num_process = pb.configure_recv_processes_ports(
        pb.get_scan_types(), 4, 9000, 1)

    # Update values with the number of processes
    VALUES["replicas"] = num_process
    assert VALUES["replicas"] == 1

    work_phase = pb.create_phase("Work", [])

    # Get the expected receive addresses from the data file
    receive_addresses_expected = read_receive_addresses()

    with work_phase:
        for txn in CONFIG_DB_CLIENT.txn():
            sbi_list = txn.list_scheduling_blocks()
            for sbi_id in sbi_list:
                work_phase.ee_deploy_helm("test-receive",
                                          pb.nested_parameters(VALUES))
                pb.receive_addresses(configured_host_port=host_port)
                state = txn.get_processing_block_state(pb_id)
                pb_receive_addresses = state.get("receive_addresses")
                assert pb_receive_addresses == receive_addresses_expected
                validate(SDP_RECVADDRS_PREFIX + SCHEMA_VERSION,
                         pb_receive_addresses, 2)

                # Testing with two channels and the maximum number of
                # channels per receive process set to 10
                host_port1, num_process1 = pb.configure_recv_processes_ports(
                    SCAN_TYPES, 10, 9000, 1)
                VALUES["replicas"] = num_process1
                pb.receive_addresses(configured_host_port=host_port1)
                pb_state = txn.get_processing_block_state(pb_id)
                recv_address = pb_state.get("receive_addresses")
                assert recv_address == RECV_ADDRESS
                validate(SDP_RECVADDRS_PREFIX + SCHEMA_VERSION,
                         recv_address, 2)

                # Set scheduling block instance to FINISHED
                sbi = {"subarray_id": None, "status": "FINISHED"}
                sbi_state = txn.get_scheduling_block(sbi_id)
                sbi_state.update(sbi)
                txn.update_scheduling_block(sbi_id, sbi_state)

    for txn in CONFIG_DB_CLIENT.txn():
        pb_state = txn.get_processing_block_state(pb_id)
        pb_status = pb_state.get("status")
        assert pb_status == "FINISHED"

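# Illustrative shape of a receive-addresses map (an assumption inferred from
# the index accesses above, e.g. receive_addresses["science_A"]["host"][0][1]
# being a DNS name). The authoritative structure is the telescope-model schema
# validated with SDP_RECVADDRS_PREFIX + SCHEMA_VERSION; the values below are
# placeholders, not the data asserted by these tests.
EXAMPLE_RECEIVE_ADDRESSES = {
    "science_A": {
        # host entries read as [start_channel, dns_name]
        "host": [[0, "proc-pb-mvp01-20200425-00000-test-receive-0"
                     ".receive.sdp.svc.cluster.local"]],
        # port entries read as [start_channel, port, increment] (assumption)
        "port": [[0, 9000, 1]],
    },
}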