def setUp(self):
    """Build the unit-test fixture: CTD transforms with mocked-out streams.

    No container is started by this test, so a FileSystem singleton has to
    be hacked into existence by hand before any transform is instantiated.
    """
    FileSystem(DotDict())

    # Publisher that generates the CTD packets fed to the transforms.
    self.px_ctd = SimpleCtdPublisher()
    self.px_ctd.last_time = 0

    # L0 transform: mock the stream registry and each of its three
    # output publishers so published granules can be inspected.
    self.tx_L0 = ctd_L0_all()
    self.tx_L0.streams = defaultdict(Mock)
    for channel in ('conductivity', 'temperature', 'pressure'):
        setattr(self.tx_L0, channel, Mock())

    # L1/L2 transforms only need their stream registry mocked.
    for attr_name, transform_cls in (
            ('tx_L1_C', CTDL1ConductivityTransform),
            ('tx_L1_T', CTDL1TemperatureTransform),
            ('tx_L1_P', CTDL1PressureTransform),
            ('tx_L2_S', SalinityTransform),
            ('tx_L2_D', DensityTransform)):
        transform = transform_cls()
        transform.streams = defaultdict(Mock)
        setattr(self, attr_name, transform)
class ScienceObjectCodecIntTest(IonIntegrationTestCase):
    """Integration tests for the CTD transforms, run inside a full container."""

    def setUp(self):
        super(ScienceObjectCodecIntTest, self).setUp()

        # Bring up a container and deploy the standard r2 services.
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Publisher whose packets feed the transform chain.
        self.px_ctd = SimpleCtdPublisher()
        self.px_ctd.last_time = 0

        # Real (unmocked) transforms: the L0 splitter, three L1
        # science transforms and two L2 science transforms.
        self.tx_L0 = ctd_L0_all()
        self.cond_L1 = CTDL1ConductivityTransform()
        self.pres_L1 = CTDL1PressureTransform()
        self.temp_L1 = CTDL1TemperatureTransform()
        self.dens_L2 = DensityTransform()
        self.sal_L2 = SalinityTransform()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_process(self):
        '''
        Test that packets are processed by the ctd_L0_all transform
        '''
        packet = self.px_ctd._get_new_ctd_packet("STR_ID", 1)
        log.info("Packet: %s" % packet)
        self.tx_L0.process(packet)

    @unittest.skip('write it later')
    def test_execute(self):
        '''
        Test that the other transforms (temperature, press, density) execute correctly
        '''
        pass
def setUp(self):
    """Start a container, deploy the r2 services and build the transforms."""
    super(ScienceObjectCodecIntTest, self).setUp()
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Packet source for the transform chain.
    self.px_ctd = SimpleCtdPublisher()
    self.px_ctd.last_time = 0

    # L0 splitter plus the L1/L2 science transforms under test.
    # NOTE: these constructors are independent of one another.
    self.tx_L0 = ctd_L0_all()
    self.cond_L1, self.pres_L1, self.temp_L1 = (
        CTDL1ConductivityTransform(),
        CTDL1PressureTransform(),
        CTDL1TemperatureTransform(),
    )
    self.dens_L2, self.sal_L2 = DensityTransform(), SalinityTransform()
class TestScienceObjectCodec(IonUnitTestCase):
    """Unit tests for the CTD transform chain.

    Runs without a container: the transforms' stream publishers are replaced
    with Mocks so the granules they publish can be captured and re-fed to
    the downstream transforms.
    """
    # NOTE(review): a stray dead `pass` statement that preceded setUp() in the
    # original class body has been removed; it had no effect.

    def setUp(self):
        # This test does not start a container so we have to hack creating a
        # FileSystem singleton instance before the transforms are built.
        FileSystem(DotDict())

        # Publisher that generates the CTD packets fed to the transforms.
        self.px_ctd = SimpleCtdPublisher()
        self.px_ctd.last_time = 0

        # L0 transform: mock the stream registry and its three output
        # publishers so published granules can be read back via call_args.
        self.tx_L0 = ctd_L0_all()
        self.tx_L0.streams = defaultdict(Mock)
        self.tx_L0.conductivity = Mock()
        self.tx_L0.temperature = Mock()
        self.tx_L0.pressure = Mock()

        # L1 transforms: only the stream registry needs mocking.
        self.tx_L1_C = CTDL1ConductivityTransform()
        self.tx_L1_C.streams = defaultdict(Mock)
        self.tx_L1_T = CTDL1TemperatureTransform()
        self.tx_L1_T.streams = defaultdict(Mock)
        self.tx_L1_P = CTDL1PressureTransform()
        self.tx_L1_P.streams = defaultdict(Mock)

        # L2 transforms: likewise.
        self.tx_L2_S = SalinityTransform()
        self.tx_L2_S.streams = defaultdict(Mock)
        self.tx_L2_D = DensityTransform()
        self.tx_L2_D.streams = defaultdict(Mock)

    def test_transforms(self):
        """Run one packet through L0, then feed its products to L1/L2."""
        length = 1
        packet = self.px_ctd._get_ctd_packet("STR_ID", length)
        log.info("Packet: %s" % packet)

        self.tx_L0.process(packet)

        # Capture the granules L0 published on each mocked channel.
        L0_cond = self.tx_L0.conductivity.publish.call_args[0][0]
        L0_temp = self.tx_L0.temperature.publish.call_args[0][0]
        L0_pres = self.tx_L0.pressure.publish.call_args[0][0]
        log.info("L0 cond: %s" % L0_cond)
        log.info("L0 temp: %s" % L0_temp)
        log.info("L0 pres: %s" % L0_pres)

        # Each L1 transform consumes the matching L0 product.
        L1_cond = self.tx_L1_C.execute(L0_cond)
        log.info("L1 cond: %s" % L1_cond)
        L1_temp = self.tx_L1_T.execute(L0_temp)
        log.info("L1 temp: %s" % L1_temp)
        L1_pres = self.tx_L1_P.execute(L0_pres)
        log.info("L1 pres: %s" % L1_pres)

        # The L2 transforms consume the full packet, not an L0 product.
        L2_sal = self.tx_L2_S.execute(packet)
        log.info("L2 sal: %s" % L2_sal)
        L2_dens = self.tx_L2_D.execute(packet)
        log.info("L2 dens: %s" % L2_dens)
def test_event_triggered_transform_A(self):
    '''
    Test that packets are processed by the event triggered transform.

    Launches an EventTriggeredTransform_A process, wakes it up with a
    ResourceLifecycleEvent, publishes a CTD granule to its input queue,
    and asserts that a conductivity granule comes out the other side.
    '''
    #---------------------------------------------------------------------------------------------
    # Launch a ctd transform
    #---------------------------------------------------------------------------------------------
    # Create the process definition
    process_definition = ProcessDefinition(
        name='EventTriggeredTransform_A',
        description='For testing EventTriggeredTransform_A')
    process_definition.executable['module'] = 'ion.processes.data.transforms.event_triggered_transform'
    process_definition.executable['class'] = 'EventTriggeredTransform_A'
    event_transform_proc_def_id = self.process_dispatcher.create_process_definition(process_definition=process_definition)

    # Build the config: the transform consumes from this test's queue on the
    # shared exchange point.
    config = DotDict()
    config.process.queue_name = self.exchange_name
    config.process.exchange_point = self.exchange_point

    # Output stream the transform will publish conductivity granules on.
    pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
    stream_def_id = self.pubsub.create_stream_definition('cond_stream_def', parameter_dictionary_id=pdict_id)
    cond_stream_id, _ = self.pubsub.create_stream('test_conductivity',
        exchange_point='science_data',
        stream_definition_id=stream_def_id)
    config.process.publish_streams.conductivity = cond_stream_id
    # The transform stays dormant until an event of this type is published.
    config.process.event_type = 'ResourceLifecycleEvent'

    # Schedule the process
    self.process_dispatcher.schedule_process(
        process_definition_id=event_transform_proc_def_id,
        configuration=config)

    #---------------------------------------------------------------------------------------------
    # Publish an event to wake up the event triggered transform
    #---------------------------------------------------------------------------------------------
    event_publisher = EventPublisher("ResourceLifecycleEvent")
    event_publisher.publish_event(origin='fake_origin')

    #---------------------------------------------------------------------------------------------
    # Create subscribers that will receive the conductivity, temperature and pressure granules from
    # the ctd transform
    #---------------------------------------------------------------------------------------------
    # AsyncResult lets the test block until the subscriber callback fires.
    ar_cond = gevent.event.AsyncResult()
    def subscriber1(m, r, s):
        ar_cond.set(m)

    sub_event_transform = StandaloneStreamSubscriber('sub_event_transform', subscriber1)
    self.addCleanup(sub_event_transform.stop)
    sub_event_transform_id = self.pubsub.create_subscription('subscription_cond',
        stream_ids=[cond_stream_id],
        exchange_name='sub_event_transform')
    self.pubsub.activate_subscription(sub_event_transform_id)
    # queue_cleanup is presumably drained by tearDown — confirm against the
    # enclosing test class (not visible in this chunk).
    self.queue_cleanup.append(sub_event_transform.xn.queue)
    sub_event_transform.start()

    #------------------------------------------------------------------------------------------------------
    # Use a StandaloneStreamPublisher to publish a packet that can be then picked up by a ctd transform
    #------------------------------------------------------------------------------------------------------
    # Do all the routing stuff for the publishing: bind the transform's input
    # queue to the exchange point under the routing key the publisher uses.
    routing_key = 'stream_id.stream'
    stream_route = StreamRoute(self.exchange_point, routing_key)

    xn = self.container.ex_manager.create_xn_queue(self.exchange_name)
    xp = self.container.ex_manager.create_xp(self.exchange_point)
    xn.bind('stream_id.stream', xp)

    pub = StandaloneStreamPublisher('stream_id', stream_route)

    # Build a packet that can be published.
    # NOTE(review): self.px_ctd is assigned but the granule comes from
    # self._get_new_ctd_packet, a helper on the test class — the publisher
    # instance looks unused here; confirm before removing.
    self.px_ctd = SimpleCtdPublisher()
    publish_granule = self._get_new_ctd_packet(stream_definition_id=stream_def_id, length=5)

    # Publish the packet
    pub.publish(publish_granule)

    #------------------------------------------------------------------------------------------------------
    # Make assertions about whether the ctd transform executed its algorithm and published the correct
    # granules
    #------------------------------------------------------------------------------------------------------
    # Get the granule that is published by the ctd transform post processing
    # (blocks up to 10 s for the subscriber callback to deliver it).
    result_cond = ar_cond.get(timeout=10)
    self.assertTrue(isinstance(result_cond, Granule))

    rdt = RecordDictionaryTool.load_from_granule(result_cond)
    self.assertTrue(rdt.__contains__('conductivity'))

    # Verify the transform's algorithm output against the input granule.
    self.check_cond_algorithm_execution(publish_granule, result_cond)