def resolve_org_negotiation():
    try:
        payload = request.form['payload']
        json_params = simplejson.loads(str(payload))

        ion_actor_id, expiry = get_governance_info_from_request(
            'serviceRequest', json_params)
        ion_actor_id, expiry = validate_request(ion_actor_id, expiry)
        headers = build_message_headers(ion_actor_id, expiry)

        # Extract negotiation-specific data. Convert from unicode just in
        # case: these values are machine-generated, so unicode-specific
        # characters are unexpected.
        verb = str(json_params['verb'])
        originator = str(json_params['originator'])
        negotiation_id = str(json_params['negotiation_id'])
        reason = str(json_params.get('reason', ''))

        proposal_status = None
        if verb.lower() == "accept":
            proposal_status = ProposalStatusEnum.ACCEPTED
        elif verb.lower() == "reject":
            proposal_status = ProposalStatusEnum.REJECTED

        proposal_originator = None
        if originator.lower() == "consumer":
            proposal_originator = ProposalOriginatorEnum.CONSUMER
        elif originator.lower() == "provider":
            proposal_originator = ProposalOriginatorEnum.PROVIDER

        rr_client = ResourceRegistryServiceProcessClient(
            node=Container.instance.node, process=service_gateway_instance)
        negotiation = rr_client.read(negotiation_id, headers=headers)

        new_negotiation_sap = Negotiation.create_counter_proposal(
            negotiation, proposal_status, proposal_originator)

        org_client = OrgManagementServiceProcessClient(
            node=Container.instance.node, process=service_gateway_instance)
        resp = org_client.negotiate(new_negotiation_sap, headers=headers)

        # update reason if it exists
        if reason:
            # reload negotiation because it has changed
            negotiation = rr_client.read(negotiation_id, headers=headers)
            negotiation.reason = reason
            rr_client.update(negotiation)

        return gateway_json_response(resp)

    except Exception as e:
        return build_error_response(e)
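For reference, a sketch of the JSON 'payload' this endpoint parses, with every concrete value assumed for illustration (the exact shape of the serviceRequest block is consumed by get_governance_info_from_request and is not shown here):

# Hypothetical payload for resolve_org_negotiation (illustrative only):
example_payload = {
    "serviceRequest": {},                  # governance fields (shape assumed)
    "verb": "accept",                      # or "reject"
    "originator": "consumer",              # or "provider"
    "negotiation_id": "abc123",            # resource registry id (made up)
    "reason": "optional free-text note"
}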
Example #2
    def on_start(self):

        self.query = self.CFG.get_safe('process.query',{})

        self.delivery_format = self.CFG.get_safe('process.delivery_format',{})
        self.datastore_name = self.CFG.get_safe('process.datastore_name','dm_datastore')

        definition_id = self.delivery_format.get('definition_id')
        rrsc = ResourceRegistryServiceProcessClient(process=self, node=self.container.node)
        definition = rrsc.read(definition_id)
        self.definition = definition.container

        self.fields = self.delivery_format.get('fields',None)

        self.view_name = self.CFG.get_safe('process.view_name','datasets/dataset_by_id')
        self.key_id = self.CFG.get_safe('process.key_id')
        self.stream_id = self.CFG.get_safe('process.publish_streams.output')

        if not self.stream_id:
            raise Inconsistent('The replay process requires a stream id. Invalid configuration!')

        self.data_stream_id = self.definition.data_stream_id
        self.encoding_id = self.definition.identifiables[self.data_stream_id].encoding_id
        self.element_type_id = self.definition.identifiables[self.data_stream_id].element_type_id
        self.element_count_id = self.definition.identifiables[self.data_stream_id].element_count_id
        self.data_record_id = self.definition.identifiables[self.element_type_id].data_record_id
        self.field_ids = self.definition.identifiables[self.data_record_id].field_ids
        self.domain_ids = self.definition.identifiables[self.data_record_id].domain_ids
        self.time_id = self.definition.identifiables[self.domain_ids[0]].temporal_coordinate_vector_id
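A minimal sketch of the process configuration this on_start expects, reconstructed from the get_safe calls above; every concrete value is assumed for illustration:

# Assumed shape of self.CFG for this replay process (illustrative values):
config = {
    'process': {
        'query': {},
        'delivery_format': {
            'definition_id': '<stream-definition-resource-id>',
            'fields': None,
        },
        'datastore_name': 'dm_datastore',
        'view_name': 'datasets/dataset_by_id',
        'key_id': '<dataset-key-id>',
        'publish_streams': {'output': '<output-stream-id>'},
    }
}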
def resolve_org_negotiation():
    try:
        payload              = request.form['payload']
        json_params          = json_loads(str(payload))

        ion_actor_id, expiry = get_governance_info_from_request('serviceRequest', json_params)
        ion_actor_id, expiry = validate_request(ion_actor_id, expiry)
        headers              = build_message_headers(ion_actor_id, expiry)

        # Extract negotiation-specific data. Convert from unicode just in
        # case: these values are machine-generated, so unicode-specific
        # characters are unexpected.
        verb                 = str(json_params['verb'])
        originator           = str(json_params['originator'])
        negotiation_id       = str(json_params['negotiation_id'])
        reason               = str(json_params.get('reason', ''))

        proposal_status = None
        if verb.lower() == "accept":
            proposal_status = ProposalStatusEnum.ACCEPTED
        elif verb.lower() == "reject":
            proposal_status = ProposalStatusEnum.REJECTED

        proposal_originator = None
        if originator.lower() == "consumer":
            proposal_originator = ProposalOriginatorEnum.CONSUMER
        elif originator.lower() == "provider":
            proposal_originator = ProposalOriginatorEnum.PROVIDER

        rr_client = ResourceRegistryServiceProcessClient(process=service_gateway_instance)
        negotiation = rr_client.read(negotiation_id, headers=headers)

        new_negotiation_sap = Negotiation.create_counter_proposal(negotiation, proposal_status, proposal_originator)

        org_client = OrgManagementServiceProcessClient(process=service_gateway_instance)
        resp = org_client.negotiate(new_negotiation_sap, headers=headers)

        # update reason if it exists
        if reason:
            # reload negotiation because it has changed
            negotiation = rr_client.read(negotiation_id, headers=headers)
            negotiation.reason = reason
            rr_client.update(negotiation)

        return gateway_json_response(resp)

    except Exception as e:
        return build_error_response(e)
def get_resource(resource_id):

    ret = None
    client = ResourceRegistryServiceProcessClient(node=Container.instance.node, process=service_gateway_instance)
    if resource_id != '':
        try:
            result = client.read(resource_id)
            if not result:
                raise NotFound("No resource found for id: %s " % resource_id)

            ret = simplejson.dumps(result, default=ion_object_encoder)

        except Exception as e:
            ret = "Error: %s" % e

    return ret
def get_resource(resource_id):

    result = None
    client = ResourceRegistryServiceProcessClient(node=Container.instance.node, process=service_gateway_instance)
    if resource_id != '':
        try:
            #Database object IDs are not unicode
            result = client.read(convert_unicode(resource_id))
            if not result:
                raise NotFound("No resource found for id: %s " % resource_id)

            return json_response({GATEWAY_RESPONSE: result})

        except Exception as e:
            return build_error_response(e)
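convert_unicode is not defined in these examples; a minimal sketch of what such a helper typically does in this Python 2 codebase (recursively encoding unicode to str, since database object IDs are plain str) might be:

def convert_unicode(data):
    # Assumed implementation sketch, not the actual gateway helper.
    if isinstance(data, unicode):
        return data.encode('utf-8')
    if isinstance(data, dict):
        return dict((convert_unicode(k), convert_unicode(v))
                    for k, v in data.iteritems())
    if isinstance(data, list):
        return [convert_unicode(v) for v in data]
    return data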
def get_resource(resource_id):
    try:
        client = ResourceRegistryServiceProcessClient(node=Container.instance.node, process=service_gateway_instance)

        # Validate requesting user and expiry and add governance headers
        ion_actor_id, expiry = get_governance_info_from_request()
        ion_actor_id, expiry = validate_request(ion_actor_id, expiry)

        # Database object IDs are not unicode
        result = client.read(convert_unicode(resource_id))
        if not result:
            raise NotFound("No resource found for id: %s " % resource_id)

        return gateway_json_response(result)

    except Exception as e:
        return build_error_response(e)
def get_resource(resource_id):

    try:
        client = ResourceRegistryServiceProcessClient(
            node=Container.instance.node, process=service_gateway_instance)

        #Validate requesting user and expiry and add governance headers
        ion_actor_id, expiry = get_governance_info_from_request()
        ion_actor_id, expiry = validate_request(ion_actor_id, expiry)

        #Database object IDs are not unicode
        result = client.read(str(resource_id))
        if not result:
            raise NotFound("No resource found for id: %s " % resource_id)

        return gateway_json_response(result)

    except Exception as e:
        return build_error_response(e)
Example #8
    def on_start(self):

        self.query = self.CFG.get_safe('process.query', {})

        self.delivery_format = self.CFG.get_safe('process.delivery_format', {})
        self.datastore_name = self.CFG.get_safe('process.datastore_name',
                                                'dm_datastore')

        definition_id = self.delivery_format.get('definition_id')
        rrsc = ResourceRegistryServiceProcessClient(process=self,
                                                    node=self.container.node)
        definition = rrsc.read(definition_id)
        self.definition = definition.container

        self.fields = self.delivery_format.get('fields', None)

        self.view_name = self.CFG.get_safe('process.view_name',
                                           'datasets/dataset_by_id')
        self.key_id = self.CFG.get_safe('process.key_id')
        self.stream_id = self.CFG.get_safe('process.publish_streams.output')

        if not self.stream_id:
            raise Inconsistent(
                'The replay process requires a stream id. Invalid configuration!'
            )

        self.data_stream_id = self.definition.data_stream_id
        self.encoding_id = self.definition.identifiables[
            self.data_stream_id].encoding_id
        self.element_type_id = self.definition.identifiables[
            self.data_stream_id].element_type_id
        self.element_count_id = self.definition.identifiables[
            self.data_stream_id].element_count_id
        self.data_record_id = self.definition.identifiables[
            self.element_type_id].data_record_id
        self.field_ids = self.definition.identifiables[
            self.data_record_id].field_ids
        self.domain_ids = self.definition.identifiables[
            self.data_record_id].domain_ids
        self.time_id = self.definition.identifiables[
            self.domain_ids[0]].temporal_coordinate_vector_id
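The chain of lookups above walks the stream definition's identifiables graph; schematically:

# data_stream_id   -> encoding_id, element_type_id, element_count_id
# element_type_id  -> data_record_id
# data_record_id   -> field_ids, domain_ids
# domain_ids[0]    -> temporal_coordinate_vector_id (time_id)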
class TestSchedulerService(IonIntegrationTestCase):

    def setUp(self):
        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        process = FakeProcess()
        self.ssclient = SchedulerServiceProcessClient(node=self.container.node, process=process)
        self.rrclient = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)

    def tearDown(self):
        pass

    def now_utc(self):
        return time.mktime(datetime.datetime.utcnow().timetuple())

    def test_create_interval_timer(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # cancel the timer
        # wait until after next interval to verify that timer was correctly cancelled

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval_Timer_233"
        sub = EventSubscriber(event_type="TimerEvent", callback=self.interval_timer_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        start_time = self.now_utc()
        self.interval_timer_end_time = start_time + 10
        id = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval,
            end_time=self.interval_timer_end_time,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Validate the timer is stored in RR
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until two events are published
        gevent.sleep((self.interval_timer_interval * 2) + 1)

        time_diff = (datetime.datetime.utcnow() - self.interval_timer_sent_time).seconds
        timer_counts =  math.floor(time_diff/self.interval_timer_interval)

        # Cancel the timer
        ss = self.ssclient.cancel_timer(id)

        # wait until after next interval to verify that timer was correctly cancelled
        gevent.sleep(self.interval_timer_interval)

        # Validate the timer correctly cancelled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

        # Validate the timer is removed from resource registry
        with self.assertRaises(NotFound):
            self.rrclient.read(id)

        # Validate the number of timer counts
        self.assertEqual(self.interval_timer_count, timer_counts, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: %d Timer id: %s " %(self.interval_timer_count, timer_counts, id))
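        # Note on the arithmetic: the expected count is floor(elapsed / interval);
        # e.g. with interval = 3 s and roughly 7 s elapsed after the sleep above,
        # math.floor(7 / 3.0) == 2 timer events are expected.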


    def test_system_restart(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # cancel the timer
        # wait until after next interval to verify that timer was correctly cancelled

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval_Timer_4444"
        sub = EventSubscriber(event_type="TimerEvent", callback=self.on_restart_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        start_time = self.now_utc()
        self.interval_timer_end_time = start_time + 20
        id = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval,
            end_time=self.interval_timer_end_time,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Validate the timer is stored in RR
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until 1 event is published
        gevent.sleep((self.interval_timer_interval) + 1)
        time_diff = (datetime.datetime.utcnow() - self.interval_timer_sent_time).seconds
        timer_counts =  math.floor(time_diff/self.interval_timer_interval)

        # Validate the number of events generated
        self.assertEqual(self.interval_timer_count, timer_counts, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: %d Timer id: %s " %(self.interval_timer_count, timer_counts, id))

        self.ssclient.on_system_restart()

        # after system restart, validate the timer is restored
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until another event is published
        start_time = datetime.datetime.utcnow()
        gevent.sleep((self.interval_timer_interval * 2) + 1)
        time_diff = (datetime.datetime.utcnow() - start_time).seconds
        timer_counts =  math.floor(time_diff/self.interval_timer_interval)

        # Validate the number of events generated
        self.assertGreater(self.interval_timer_count, timer_counts)

        # Cancel the timer
        ss = self.ssclient.cancel_timer(id)

        # wait until after next interval to verify that timer was correctly cancelled
        gevent.sleep(self.interval_timer_interval)

        # Validate the timer correctly cancelled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

        # Validate the timer is removed from resource registry
        with self.assertRaises(NotFound):
            self.rrclient.read(id)

    def on_restart_callback(self, *args, **kwargs):
        self.interval_timer_count += 1
        log.debug("test_scheduler: on_restart_callback: time: " + str(self.now_utc()) + " count: " + str(self.interval_timer_count))

    def test_create_interval_timer_with_end_time(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # Validate no more events are published after end_time expires
        # Validate the timer was canceled after the end_time expires

        self.interval_timer_count_2 = 0
        self.interval_timer_sent_time_2 = 0
        self.interval_timer_received_time_2 = 0
        self.interval_timer_interval_2 = 3

        event_origin = "Interval_Timer_2"
        sub = EventSubscriber(event_type="TimerEvent", callback=self.interval_timer_callback_with_end_time, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        start_time = self.now_utc()
        self.interval_timer_end_time_2 = start_time + 7
        id = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval_2,
            end_time=self.interval_timer_end_time_2,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time_2 = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait until all events are published
        gevent.sleep((self.interval_timer_end_time_2 - start_time) + self.interval_timer_interval_2 + 1)

        # Validate the number of events generated
        self.assertEqual(self.interval_timer_count_2, 2, "Invalid number of timeouts generated. Number of event: %d Expected: 2 Timer id: %s " %(self.interval_timer_count_2, id))

        # Validate the timer was canceled after the end_time is expired
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

    def interval_timer_callback_with_end_time(self, *args, **kwargs):
        self.interval_timer_received_time_2 = datetime.datetime.utcnow()
        self.interval_timer_count_2 += 1
        time_diff = math.fabs( ((self.interval_timer_received_time_2 - self.interval_timer_sent_time_2).total_seconds())
                               - (self.interval_timer_interval_2 * self.interval_timer_count_2) )
        # Assert expire time is within +-10 seconds
        self.assertTrue(time_diff <= 10)
        log.debug("test_scheduler: interval_timer_callback_with_end_time: time:" + str(self.interval_timer_received_time_2) + " count:" + str(self.interval_timer_count_2))


    def interval_timer_callback(self, *args, **kwargs):
        self.interval_timer_received_time = datetime.datetime.utcnow()
        self.interval_timer_count += 1
        time_diff = math.fabs( ((self.interval_timer_received_time - self.interval_timer_sent_time).total_seconds())
                               - (self.interval_timer_interval * self.interval_timer_count) )
        # Assert expire time is within +-10 seconds
        self.assertTrue(time_diff <= 10)
        log.debug("test_scheduler: interval_timer_callback: time:" + str(self.interval_timer_received_time) + " count:" + str(self.interval_timer_count))

    def test_cancel_single_timer(self):
        # test creating a new timer that is one-time-only

        # create the timer resource
        # create the event listener
        # call scheduler to set the timer

        # create then cancel the timer, verify that event is not received

        # create the timer resource
        # create the event listener
        # call scheduler to set the timer
        # call scheduler to cancel the timer
        # wait until after expiry to verify that event is not sent
        self.single_timer_count = 0
        event_origin = "Time_of_Day"

        sub = EventSubscriber(event_type="TimerEvent", callback=self.single_timer_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        now = datetime.datetime.utcnow() + timedelta(seconds=3)
        times_of_day = [{'hour': str(now.hour), 'minute': str(now.minute), 'second': str(now.second)}]
        id = self.ssclient.create_time_of_day_timer(times_of_day=times_of_day, expires=self.now_utc() + 3, event_origin=event_origin, event_subtype="test")
        self.assertEqual(type(id), str)
        self.ssclient.cancel_timer(id)
        gevent.sleep(3)

        # Validate the event is not generated
        self.assertEqual(self.single_timer_count, 0, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: 0 Timer id: %s " %(self.single_timer_count, id))

    def single_timer_callback (self, *args, **kwargs):
        self.single_timer_count += 1
        log.debug("test_scheduler: single_timer_call_back: time:" + str(self.now_utc()) + " count:" + str(self.single_timer_count))

    def test_create_forever_interval_timer(self):
        # Test creating interval timer that runs forever

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval Timer Forever"
        sub = EventSubscriber(event_type="TimerEvent", callback=self.interval_timer_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        id = self.ssclient.create_interval_timer(start_time=self.now_utc(), interval=self.interval_timer_interval,
            end_time=-1,
            event_origin=event_origin, event_subtype=event_origin)
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait for 4 events to be published
        gevent.sleep((self.interval_timer_interval * 4) + 1)
        self.ssclient.cancel_timer(id)
        time_diff = (datetime.datetime.utcnow() - self.interval_timer_sent_time).seconds
        timer_counts =  math.floor(time_diff/self.interval_timer_interval)


        # Validate the timer id is invalid once it has been canceled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

        # Validate events are not generated after canceling the timer
        self.assertEqual(self.interval_timer_count, timer_counts, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: %d Timer id: %s " %(self.interval_timer_count, timer_counts, id))

    def test_timeoffday_timer(self):
        # test creating a new timer that is one-time-only
        # create the timer resource
        # get the current time, set the timer to several seconds from current time
        # create the event listener
        # call scheduler to set the timer
        # verify that event arrival is within one or two seconds of current time

        event_origin = "Time Of Day2"
        self.expire_sec_1 = 4
        self.expire_sec_2 = 5
        self.tod_count = 0
        expire1 = datetime.datetime.utcnow() + timedelta(seconds=self.expire_sec_1)
        expire2 = datetime.datetime.utcnow() + timedelta(seconds=self.expire_sec_2)
        # Create two timers
        times_of_day = [{'hour': str(expire1.hour), 'minute': str(expire1.minute), 'second': str(expire1.second)},
                        {'hour': str(expire2.hour), 'minute': str(expire2.minute), 'second': str(expire2.second)}]

        sub = EventSubscriber(event_type="TimerEvent", callback=self.tod_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        # Expires in two days
        expires = time.mktime((datetime.datetime.utcnow() + timedelta(days=2)).timetuple())
        self.tod_sent_time = datetime.datetime.utcnow()
        id = self.ssclient.create_time_of_day_timer(times_of_day=times_of_day, expires=expires, event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait until all events are generated
        gevent.sleep(9)
        time_diff = (datetime.datetime.utcnow() - self.interval_timer_sent_time).seconds
        timer_counts =  math.floor(time_diff/self.expire_sec_1) + math.floor(time_diff/self.expire_sec_2)

        # After waiting, validate only 2 events are generated.
        self.assertEqual(self.tod_count, 2, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: %d Timer id: %s " %(self.tod_count, timer_counts, id))

        # Cancel the timer
        self.ssclient.cancel_timer(id)



    def tod_callback(self, *args, **kwargs):
        tod_receive_time = datetime.datetime.utcnow()
        self.tod_count += 1
        if self.tod_count == 1:
            time_diff = math.fabs((tod_receive_time - self.tod_sent_time).total_seconds() - self.expire_sec_1)
            self.assertTrue(time_diff <= 2)
        elif self.tod_count == 2:
            time_diff = math.fabs((tod_receive_time - self.tod_sent_time).total_seconds() - self.expire_sec_2)
            self.assertTrue(time_diff <= 2)
        log.debug("test_scheduler: tod_callback: time:" + str(tod_receive_time) + " count:" + str(self.tod_count))

    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_quit_stops_timers(self):

        ar = AsyncResult()
        def cb(*args, **kwargs):
            ar.set(args)

            self.interval_timer_count += 1

        event_origin = "test_quitter"
        sub = EventSubscriber(event_type="TimerEvent", callback=cb, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        tid = self.ssclient.create_interval_timer(start_time="now",
                                                  interval=1,
                                                  event_origin=event_origin)

        # wait until at least one scheduled message
        ar.get(timeout=5)

        # shut it down!
        p = self.container.proc_manager.procs_by_name['scheduler']
        self.container.terminate_process(p.id)

        # assert empty
        self.assertEquals(p.schedule_entries, {})
class TestWorkflowManagementIntegration(VisualizationIntegrationTestHelper):

    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        #Instantiate a process to represent the test
        process = WorkflowServiceTestProcess()

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)
        self.damsclient = DataAcquisitionManagementServiceProcessClient(node=self.container.node, process=process)
        self.pubsubclient =  PubsubManagementServiceProcessClient(node=self.container.node, process=process)
        self.ingestclient = IngestionManagementServiceProcessClient(node=self.container.node, process=process)
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)
        self.dataproductclient = DataProductManagementServiceProcessClient(node=self.container.node, process=process)
        self.dataprocessclient = DataProcessManagementServiceProcessClient(node=self.container.node, process=process)
        self.datasetclient =  DatasetManagementServiceProcessClient(node=self.container.node, process=process)
        self.workflowclient = WorkflowManagementServiceProcessClient(node=self.container.node, process=process)
        self.process_dispatcher = ProcessDispatcherServiceProcessClient(node=self.container.node, process=process)
        self.data_retriever = DataRetrieverServiceProcessClient(node=self.container.node, process=process)

        self.ctd_stream_def = SBE37_CDM_stream_definition()



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_SA_transform_components(self):

        assertions = self.assertTrue

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)


        ###
        ###  Setup the first transformation
        ###

        # Salinity: Data Process Definition
        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()

        l2_salinity_all_data_process_id, ctd_l2_salinity_output_dp_id = self.create_transform_process(ctd_L2_salinity_dprocdef_id,ctd_parsed_data_product_id, 'salinity' )

        ## get the stream id for the transform outputs
        stream_ids, _ = self.rrclient.find_objects(ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        sal_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_stream_id)


        ###
        ###  Setup the second transformation
        ###

        # Salinity Doubler: Data Process Definition
        salinity_doubler_dprocdef_id = self.create_salinity_doubler_data_process_definition()

        salinity_double_data_process_id, salinity_doubler_output_dp_id = self.create_transform_process(salinity_doubler_dprocdef_id, ctd_l2_salinity_output_dp_id, 'salinity' )

        stream_ids, _ = self.rrclient.find_objects(salinity_doubler_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0 )
        sal_dbl_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_dbl_stream_id)


        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)


        #Stop the transform processes
        self.dataprocessclient.deactivate_data_process(salinity_double_data_process_id)
        self.dataprocessclient.deactivate_data_process(l2_salinity_all_data_process_id)

        #Validate the data from each of the messages along the way
        self.validate_messages(results)


    @attr('LOCOINT')
    @attr('SMOKE')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_transform_workflow(self):

        assertions = self.assertTrue

        log.debug("Building the workflow definition")

        workflow_def_obj = IonObject(RT.WorkflowDefinition,
                                     name='Salinity_Test_Workflow',
                                     description='tests a workflow of multiple transform data processes')

        workflow_data_product_name = 'TEST-Workflow_Output_Product' #Set a specific output product name

        #-------------------------------------------------------------------------------------------------------------------------
        log.debug( "Adding a transformation process definition for salinity")
        #-------------------------------------------------------------------------------------------------------------------------

        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep',
                                      data_process_definition_id=ctd_L2_salinity_dprocdef_id,
                                      persist_process_output_data=False)  #Don't persist the intermediate data product
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #-------------------------------------------------------------------------------------------------------------------------
        log.debug( "Adding a transformation process definition for salinity doubler")
        #-------------------------------------------------------------------------------------------------------------------------

        salinity_doubler_dprocdef_id = self.create_salinity_doubler_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep',
                                      data_process_definition_id=salinity_doubler_dprocdef_id, )
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        log.debug( "Creating workflow def in the resource registry")
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        aids = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aids) == 2 )

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        log.debug( "Creating the input data product")
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        log.debug( "Creating and starting the workflow")
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id,
                                                                                            ctd_parsed_data_product_id,
                persist_workflow_data_product=True, output_data_product_name=workflow_data_product_name, timeout=300)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        log.debug( "persisting the output product")
        #self.dataproductclient.activate_data_product_persistence(workflow_product_id)
        dataset_ids,_ = self.rrclient.find_objects(workflow_product_id, PRED.hasDataset, RT.Dataset, True)
        assertions(len(dataset_ids) == 1 )
        dataset_id = dataset_ids[0]

        log.debug( "Verifying the output data product name matches what was specified in the workflow definition")
        workflow_product = self.rrclient.read(workflow_product_id)
        assertions(workflow_product.name.startswith(workflow_data_product_name), 'Nope: %s != %s' % (workflow_product.name, workflow_data_product_name))

        log.debug( "Walking the associations to find the appropriate output data streams to validate the messages")

        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 2 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        log.debug( "data_product_stream_ids: %s" % data_product_stream_ids)

        log.debug( "Starting the output stream listener to monitor to collect messages")
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        log.debug( "results::: %s" % results)

        log.debug( "Stopping the workflow processes")
        self.workflowclient.terminate_data_process_workflow(workflow_id, False, timeout=250)  # Should test true at some point

        log.debug( "Making sure the Workflow object was removed")
        objs, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(objs) == 0)

        log.debug( "Validating the data from each of the messages along the way")
        self.validate_messages(results)

        log.debug( "Checking to see if dataset id = %s, was persisted, and that it can be retrieved...." % dataset_id)
        self.validate_data_ingest_retrieve(dataset_id)

        log.debug( "Cleaning up to make sure delete is correct.")
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_google_dt_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='GoogleDT_Test_Workflow',description='Tests the workflow of converting stream data to Google DT')

        #Add a transformation process definition
        google_dt_procdef_id = self.create_google_dt_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id, persist_process_output_data=False)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id, timeout=60)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id=workflow_id,delete_data_products=False, timeout=60)  # Should test true at some point

        #Validate the data from each of the messages along the way
        self.validate_google_dt_transform_results(results)

        """
        # Check to see if ingestion worked. Extract the granules from data_retrieval.
        # First find the dataset associated with the output dp product
        ds_ids,_ = self.rrclient.find_objects(workflow_dp_ids[len(workflow_dp_ids) - 1], PRED.hasDataset, RT.Dataset, True)
        retrieved_granule = self.data_retriever.retrieve(ds_ids[0])

        #Validate the data from each of the messages along the way
        self.validate_google_dt_transform_results(retrieved_granule)
        """

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )



    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_mpl_graphs_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Mpl_Graphs_Test_Workflow',description='Tests the workflow of converting stream data to Matplotlib graphs')

        #Add a transformation process definition
        mpl_graphs_procdef_id = self.create_mpl_graphs_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=mpl_graphs_procdef_id, persist_process_output_data=False)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id,
            persist_workflow_data_product=True, timeout=60)

        workflow_output_ids,_ = self.rrclient.find_subjects(RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1 )

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids,_ = self.rrclient.find_objects(workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1 )

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, None, True)
            assertions(len(stream_ids) == 1 )
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id, data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id=workflow_id,delete_data_products=False, timeout=60)  # Should test true at some point

        #Validate the data from each of the messages along the way
        self.validate_mpl_graphs_transform_results(results)

        # Check to see if ingestion worked. Extract the granules from data_retrieval.
        # First find the dataset associated with the output dp product
        ds_ids,_ = self.rrclient.find_objects(workflow_dp_ids[len(workflow_dp_ids) - 1], PRED.hasDataset, RT.Dataset, True)
        retrieved_granule = self.data_retriever.retrieve_last_data_points(ds_ids[0], 10)

        #Validate the data from each of the messages along the way
        self.validate_mpl_graphs_transform_results(retrieved_granule)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )


    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),'Not integrated for CEI')
    def test_multiple_workflow_instances(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(RT.WorkflowDefinition, name='Multiple_Test_Workflow',description='Tests the workflow of converting stream data')

        #Add a transformation process definition
        google_dt_procdef_id = self.create_google_dt_data_process_definition()
        workflow_step_obj = IonObject('DataProcessWorkflowStep', data_process_definition_id=google_dt_procdef_id, persist_process_output_data=False)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the first input data product
        ctd_stream_id1, ctd_parsed_data_product_id1 = self.create_ctd_input_stream_and_data_product('ctd_parsed1')
        data_product_stream_ids.append(ctd_stream_id1)

        #Create and start the first workflow
        workflow_id1, workflow_product_id1 = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id1, timeout=60)

        #Create the second input data product
        ctd_stream_id2, ctd_parsed_data_product_id2 = self.create_ctd_input_stream_and_data_product('ctd_parsed2')
        data_product_stream_ids.append(ctd_stream_id2)

        #Create and start the second workflow
        workflow_id2, workflow_product_id2 = self.workflowclient.create_data_process_workflow(workflow_def_id, ctd_parsed_data_product_id2, timeout=60)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_ids,_ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 2 )


        #Start the first input stream process
        ctd_sim_pid1 = self.start_sinusoidal_input_stream_process(ctd_stream_id1)

        #Start the second input stream process
        ctd_sim_pid2 = self.start_simple_input_stream_process(ctd_stream_id2)

        #Start the output stream listener to monitor a set number of messages being sent through the workflows
        results = self.start_output_stream_and_listen(None, data_product_stream_ids, message_count_per_stream=5)

        # stop the flow of messages...
        self.process_dispatcher.cancel_process(ctd_sim_pid1) # kill the ctd simulator process - that is enough data
        self.process_dispatcher.cancel_process(ctd_sim_pid2)

        #Stop the first workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id=workflow_id1,delete_data_products=False, timeout=60)  # Should test true at some point

        #Stop the second workflow processes
        self.workflowclient.terminate_data_process_workflow(workflow_id=workflow_id2,delete_data_products=False, timeout=60)  # Should test true at some point

        workflow_ids,_ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 0 )

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)

        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )

        aid_list = self.rrclient.find_associations(workflow_def_id, PRED.hasDataProcessDefinition)
        assertions(len(aid_list) == 0 )
class VizTransformProcForMatplotlibGraphs(TransformDataProcess):
    """
    This class is used for instantiating worker processes that subscribe to data
    streams and convert incoming data from CDM format to Matplotlib graphs.
    """
    def on_start(self):
        super(VizTransformProcForMatplotlibGraphs, self).on_start()
        #assert len(self.streams)==1
        self.initDataFlag = True
        self.graph_data = {
        }  # Stores a dictionary of variables : [List of values]

        # Need some clients
        self.rr_cli = ResourceRegistryServiceProcessClient(
            process=self, node=self.container.node)
        self.pubsub_cli = PubsubManagementServiceClient(
            node=self.container.node)

        # extract the various parameters passed to the transform process
        self.out_stream_id = self.CFG.get('process').get(
            'publish_streams').get('visualization_service_submit_stream_id')

        # Create a publisher on the output stream
        #stream_route = self.pubsub_cli.register_producer(stream_id=self.out_stream_id)
        out_stream_pub_registrar = StreamPublisherRegistrar(
            process=self.container, node=self.container.node)
        self.out_stream_pub = out_stream_pub_registrar.create_publisher(
            stream_id=self.out_stream_id)

        self.data_product_id = self.CFG.get('data_product_id')
        self.stream_def_id = self.CFG.get("stream_def_id")
        self.stream_def = self.rr_cli.read(self.stream_def_id)

        # Start the thread responsible for keeping track of time and generating graphs
        # Mutex for ensuring proper concurrent communications between threads
        self.lock = RLock()
        self.rendering_proc = Greenlet(self.rendering_thread)
        self.rendering_proc.start()

    def process(self, packet):
        log.debug('(%s): Received Viz Data Packet' % self.name)
        #log.debug('(%s):   - Processing: %s' % (self.name,packet))

        # parse the incoming data
        psd = PointSupplementStreamParser(
            stream_definition=self.stream_def.container, stream_granule=packet)

        # re-arrange incoming data into an easy to parse dictionary
        vardict = {}
        arrLen = None
        for varname in psd.list_field_names():
            vardict[varname] = psd.get_values(varname)
            arrLen = len(vardict[varname])

        if self.initDataFlag:
            # Look at the incoming packet and initialize storage per variable
            with self.lock:
                for varname in psd.list_field_names():
                    self.graph_data[varname] = []

            self.initDataFlag = False

        # If code reached here, the graph data storage has been initialized. Just add values
        # to the list
        with self.lock:
            for varname in psd.list_field_names():
                self.graph_data[varname].extend(vardict[varname])

    def rendering_thread(self):
        from copy import deepcopy
        # Service Client

        # init Matplotlib
        fig = Figure()
        ax = fig.add_subplot(111)
        canvas = FigureCanvas(fig)
        imgInMem = StringIO.StringIO()
        while True:

            # Sleep for a pre-decided interval. Should be specifiable in a YAML file
            gevent.sleep(20)

            # If there's no data, wait
            # Lock is used here to make sure the entire vector exists start to
            # finish; this assures that the data won't change while it is copied
            working_set = None
            with self.lock:
                if len(self.graph_data) == 0:
                    continue
                else:
                    working_set = deepcopy(self.graph_data)

            # For the simple case of testing, lets plot all time variant variables one at a time
            xAxisVar = 'time'
            xAxisFloatData = working_set[xAxisVar]

            for varName, varData in working_set.iteritems():
                if varName == 'time' or varName == 'height' or varName == 'longitude' or varName == 'latitude':
                    continue

                yAxisVar = varName
                yAxisFloatData = working_set[varName]

                # Generate the plot

                ax.plot(xAxisFloatData, yAxisFloatData, 'ro')
                ax.set_xlabel(xAxisVar)
                ax.set_ylabel(yAxisVar)
                ax.set_title(yAxisVar + ' vs ' + xAxisVar)
                ax.set_autoscale_on(False)

                # generate filename for the output image
                fileName = yAxisVar + '_vs_' + xAxisVar + '.png'
                # Save the figure to the in memory file
                canvas.print_figure(imgInMem, format="png")
                imgInMem.seek(0)

                # submit resulting table back using the out stream publisher
                msg = {
                    "viz_product_type": "matplotlib_graphs",
                    "data_product_id": self.data_product_id,
                    "image_obj": imgInMem.getvalue(),
                    "image_name": fileName
                }
                self.out_stream_pub.publish(msg)

                #clear the canvas for the next image
                ax.clear()
class VizTransformProcForGoogleDT(TransformDataProcess):
    """
    This class is used for instantiating worker processes that subscribe to data
    streams and convert incoming data from CDM format to JSON-style Google DataTables.
    """
    def on_start(self):
        super(VizTransformProcForGoogleDT, self).on_start()
        self.initDataTableFlag = True

        # need some clients
        self.rr_cli = ResourceRegistryServiceProcessClient(
            process=self, node=self.container.node)
        self.pubsub_cli = PubsubManagementServiceClient(
            node=self.container.node)

        # extract the various parameters passed
        self.out_stream_id = self.CFG.get('process').get(
            'publish_streams').get('visualization_service_submit_stream_id')

        # Create a publisher on the output stream
        out_stream_pub_registrar = StreamPublisherRegistrar(
            process=self.container, node=self.container.node)
        self.out_stream_pub = out_stream_pub_registrar.create_publisher(
            stream_id=self.out_stream_id)

        self.data_product_id = self.CFG.get('data_product_id')
        self.stream_def_id = self.CFG.get("stream_def_id")
        stream_def_resource = self.rr_cli.read(self.stream_def_id)
        self.stream_def = stream_def_resource.container
        self.realtime_flag = False
        if self.CFG.get("realtime_flag") == "True":
            self.realtime_flag = True
        else:
            self.data_product_id_token = self.CFG.get('data_product_id_token')

        # extract the stream_id associated with the DP. Needed later
        stream_ids, _ = self.rr_cli.find_objects(self.data_product_id,
                                                 PRED.hasStream, None, True)
        self.stream_id = stream_ids[0]

        self.dataDescription = []
        self.dataTableContent = []
        self.varTuple = []
        self.total_num_of_records_recvd = 0

    def process(self, packet):

        log.debug('(%s): Received Viz Data Packet' % (self.name))

        element_count_id = 0
        expected_range = []

        psd = PointSupplementStreamParser(stream_definition=self.stream_def,
                                          stream_granule=packet)
        vardict = {}
        arrLen = None
        for varname in psd.list_field_names():
            vardict[varname] = psd.get_values(varname)
            arrLen = len(vardict[varname])

        # If it's the first time, init the dataTable
        if self.initDataTableFlag:
            # create data description from the variables in the message
            self.dataDescription = [('time', 'datetime', 'time')]

            # split the data string to extract variable names
            for varname in psd.list_field_names():
                if varname == 'time':
                    continue

                self.dataDescription.append((varname, 'number', varname))

            self.initDataTableFlag = False

        # Add the records to the datatable
        for i in xrange(arrLen):
            varTuple = []

            for varname, _, _ in self.dataDescription:
                val = float(vardict[varname][i])
                if varname == 'time':
                    varTuple.append(datetime.fromtimestamp(val))
                else:
                    varTuple.append(val)

            # Append the tuples to the data table
            self.dataTableContent.append(varTuple)

            if self.realtime_flag:
                # Maintain a sliding window for realtime transform processes:
                # always pop the first element till the window size is what we want
                realtime_window_size = 100
                while len(self.dataTableContent) > realtime_window_size:
                    self.dataTableContent.pop(0)

        if not self.realtime_flag:
            # This is the historical view part. Make a note of how many records were received
            data_stream_id = self.stream_def.data_stream_id
            element_count_id = self.stream_def.identifiables[
                data_stream_id].element_count_id
            # From each granule you can check the constraint on the number of records
            expected_range = packet.identifiables[
                element_count_id].constraint.intervals[0]

            # The number of records in a given packet is:
            self.total_num_of_records_recvd += packet.identifiables[
                element_count_id].value

        # submit the Json version of the datatable to the viz service
        if self.realtime_flag:
            # create the google viz data table
            data_table = gviz_api.DataTable(self.dataDescription)
            data_table.LoadData(self.dataTableContent)

            # submit resulting table back using the out stream publisher
            msg = {
                "viz_product_type": "google_realtime_dt",
                "data_product_id": self.data_product_id,
                "data_table": data_table.ToJSonResponse()
            }
            self.out_stream_pub.publish(msg)
        else:
            # Submit table back to the service if we received all the replay data
            if self.total_num_of_records_recvd == (expected_range[1] + 1):
                # If the datatable received was too big, decimate on the fly to a fixed size
                max_google_dt_len = 1024
                if len(self.dataTableContent) > max_google_dt_len:
                    decimation_factor = int(
                        math.ceil(len(self.dataTableContent) /
                                  float(max_google_dt_len)))

                    tempDataTableContent = []
                    for i in xrange(0, len(self.dataTableContent),
                                    decimation_factor):
                        tempDataTableContent.append(self.dataTableContent[i])

                    self.dataTableContent = tempDataTableContent

                data_table = gviz_api.DataTable(self.dataDescription)
                data_table.LoadData(self.dataTableContent)

                # submit resulting table back using the out stream publisher
                msg = {
                    "viz_product_type": "google_dt",
                    "data_product_id_token": self.data_product_id_token,
                    "data_table": data_table.ToJSonResponse()
                }
                self.out_stream_pub.publish(msg)
                return

        # clear the row buffer (a list, despite the name) for future use
        self.varTuple[:] = []
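
# A minimal, self-contained sketch of the decimation technique used above:
# shrink an oversized table by keeping every Nth row. Note the stride must be
# computed with float division, since in Python 2 int/int truncates and
# math.ceil() would then be a no-op. The helper name is hypothetical.
import math

def decimate_rows(rows, max_len=1024):
    """Return at most roughly max_len rows, sampled at a fixed stride."""
    if len(rows) <= max_len:
        return rows
    stride = int(math.ceil(len(rows) / float(max_len)))
    return [rows[i] for i in xrange(0, len(rows), stride)]

# e.g. decimate_rows(range(5000)) keeps every 5th row (1000 rows total).
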
Example #13
0
class TestSchedulerService(IonIntegrationTestCase):

    def setUp(self):
        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        process = FakeProcess()
        self.ssclient = SchedulerServiceProcessClient(node=self.container.node, process=process)
        self.rrclient = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)

    def tearDown(self):
        pass

    def now_utc(self):
        # time.mktime() expects a local-time tuple, so pairing it with
        # utcnow().timetuple() skews the result by the UTC offset;
        # time.time() already returns the current epoch timestamp
        return time.time()

    def test_create_interval_timer(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # cancel the timer
        # wait until after next interval to verify that timer was correctly cancelled

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval_Timer_233"
        sub = EventSubscriber(event_type="ResourceEvent", callback=self.interval_timer_callback, origin=event_origin)
        sub.start()

        start_time = self.now_utc()
        self.interval_timer_end_time = start_time + 10
        id = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval,
            end_time=self.interval_timer_end_time,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Validate the timer is stored in RR
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until two events are published
        gevent.sleep((self.interval_timer_interval * 2) + 1)

        # Cancel the timer
        ss = self.ssclient.cancel_timer(id)

        # wait until after next interval to verify that timer was correctly cancelled
        gevent.sleep(self.interval_timer_interval)

        # Validate the timer correctly cancelled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)


        # Validate the timer is removed from the resource registry
        with self.assertRaises(NotFound):
            self.rrclient.read(id)

        # Validate only 2 events are published
        self.assertEqual(self.interval_timer_count, 2)

    def test_system_restart(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # cancel the timer
        # wait until after next interval to verify that timer was correctly cancelled

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval_Timer_4444"
        sub = EventSubscriber(event_type="ResourceEvent", callback=self.on_restart_callback, origin=event_origin)
        sub.start()

        start_time = self.now_utc()
        self.interval_timer_end_time = start_time + 20
        id = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval,
            end_time=self.interval_timer_end_time,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Validate the timer is stored in RR
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until 1 event is published
        gevent.sleep((self.interval_timer_interval) + 1)
        # Validate 1 event is published
        self.assertEqual(self.interval_timer_count, 1)

        self.ssclient.on_system_restart()

        # after system restart, validate the timer is restored
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until another event is published
        gevent.sleep((self.interval_timer_interval * 2) + 1)

        # Validate that additional events were published after the restart
        self.assertGreater(self.interval_timer_count, 2)

        # Cancel the timer
        ss = self.ssclient.cancel_timer(id)

        # wait until after next interval to verify that timer was correctly cancelled
        gevent.sleep(self.interval_timer_interval)

        # Validate the timer correctly cancelled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

        # Validate the timer is removed from the resource registry
        with self.assertRaises(NotFound):
            self.rrclient.read(id)

    def on_restart_callback(self, *args, **kwargs):
        self.interval_timer_count += 1

    def test_create_interval_timer_with_end_time(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # Validate no more events are published after end_time expires
        # Validate the timer was canceled after the end_time expires

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 2

        event_origin = "Interval Timer"
        sub = EventSubscriber(event_type="ResourceEvent", callback=self.interval_timer_callback, origin=event_origin)
        sub.start()

        start_time = self.now_utc()
        self.interval_timer_end_time = start_time + 5
        id = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval,
            end_time=self.interval_timer_end_time,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait until all events are published
        gevent.sleep((self.interval_timer_end_time - start_time) + self.interval_timer_interval + 1)

        # Validate only 2 events are published
        self.assertEqual(self.interval_timer_count, 2)

        # Validate the timer was canceled after the end_time is expired
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

    def interval_timer_callback(self, *args, **kwargs):
        self.interval_timer_received_time = datetime.datetime.utcnow()
        self.interval_timer_count += 1
        time_diff = math.fabs( ((self.interval_timer_received_time - self.interval_timer_sent_time).total_seconds())
                               - (self.interval_timer_interval * self.interval_timer_count) )
        # Assert expire time is within +-10 seconds
        self.assertTrue(time_diff <= 10)

    def test_cancel_single_timer(self):
        # test creating a new timer that is one-time-only

        # create the timer resource
        # create the event listener
        # call scheduler to set the timer

        # create then cancel the timer, verify that event is not received

        # create the timer resource
        # create the event listener
        # call scheduler to set the timer
        # call scheduler to cancel the timer
        # wait until after expiry to verify that event is not sent
        self.single_timer_count = 0
        event_origin = "Time_of_Day"

        sub = EventSubscriber(event_type="ResourceEvent", callback=self.single_timer_call_back, origin=event_origin)
        sub.start()

        now = datetime.datetime.utcnow() + timedelta(seconds=3)
        times_of_day =[{'hour': str(now.hour),'minute' : str(now.minute), 'second':str(now.second) }]
        id = self.ssclient.create_time_of_day_timer(times_of_day=times_of_day,  expires=self.now_utc()+3, event_origin=event_origin, event_subtype="test")
        self.assertEqual(type(id), str)
        self.ssclient.cancel_timer(id)
        gevent.sleep(3)

        # Validate the event is not generated
        self.assertEqual(self.single_timer_count, 0)


    def single_timer_call_back (self, *args, **kwargs):
        self.single_timer_count += 1

    def test_create_forever_interval_timer(self):
        # Test creating interval timer that runs forever

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval Timer Forever"
        sub = EventSubscriber(event_type="ResourceEvent", callback=self.interval_timer_callback, origin=event_origin)
        sub.start()

        id = self.ssclient.create_interval_timer(start_time= self.now_utc(), interval=self.interval_timer_interval,
            end_time=-1,
            event_origin=event_origin, event_subtype=event_origin)
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait for 4 events to be published
        gevent.sleep((self.interval_timer_interval * 4) + 1)
        self.ssclient.cancel_timer(id)

        # Validate the timer id is invalid once it has been canceled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

        # Validate events are not generated after canceling the timer
        self.assertEqual(self.interval_timer_count, 4)

    def test_timeoffday_timer(self):
        # test creating a new timer that is one-time-only
        # create the timer resource
        # get the current time, set the timer to several seconds from current time
        # create the event listener
        # call scheduler to set the timer
        # verify that  event arrival is within one/two seconds of current time

        event_origin = "Time Of Day2"
        self.expire_sec_1 = 4
        self.expire_sec_2 = 5
        self.tod_count = 0
        expire1 = datetime.datetime.utcnow() + timedelta(seconds=self.expire_sec_1)
        expire2 = datetime.datetime.utcnow() + timedelta(seconds=self.expire_sec_2)
        # Create two timers
        times_of_day =[{'hour': str(expire1.hour),'minute' : str(expire1.minute), 'second':str(expire1.second) },
                       {'hour': str(expire2.hour),'minute' : str(expire2.minute), 'second':str(expire2.second)}]

        sub = EventSubscriber(event_type="ResourceEvent", callback=self.tod_callback, origin=event_origin)
        sub.start()

        # Expires in two days
        expires = time.mktime((datetime.datetime.utcnow() + timedelta(days=2)).timetuple())
        self.tod_sent_time = datetime.datetime.utcnow()
        id = self.ssclient.create_time_of_day_timer(times_of_day=times_of_day, expires=expires, event_origin=event_origin, event_subtype="")
        self.assertEqual(type(id), str)

        # Wait until all events are generated
        gevent.sleep(9)

        # After waiting, validate that only 2 events were generated.
        self.assertTrue(self.tod_count == 2)

        # Cancel the timer
        self.ssclient.cancel_timer(id)


    def tod_callback(self, *args, **kwargs):
        tod_receive_time = datetime.datetime.utcnow()
        self.tod_count += 1
        if self.tod_count == 1:
            time_diff = math.fabs((tod_receive_time - self.tod_sent_time).total_seconds() - self.expire_sec_1)
            self.assertTrue(time_diff <= 2)
        elif self.tod_count == 2:
            time_diff = math.fabs((tod_receive_time - self.tod_sent_time).total_seconds() - self.expire_sec_2)
            self.assertTrue(time_diff <= 2)
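
# The interval-timer assertions above reduce to simple arithmetic: after
# sleeping for `elapsed` seconds, a timer firing every `interval` seconds
# should have produced floor(elapsed / interval) events. A minimal sketch,
# assuming nothing about the scheduler beyond that contract:
import math

def expected_event_count(elapsed_seconds, interval_seconds):
    return int(math.floor(elapsed_seconds / float(interval_seconds)))

assert expected_event_count(7, 3) == 2    # events fire at t=3 and t=6
assert expected_event_count(10, 3) == 3   # events fire at t=3, t=6, t=9
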
class VizTransformProcForGoogleDT(TransformDataProcess):

    """
    This class is used for instantiating worker processes that have subscriptions to data streams and convert
    incoming data from CDM format to JSON style Google DataTables

    """
    def on_start(self):
        super(VizTransformProcForGoogleDT,self).on_start()
        self.initDataTableFlag = True

        # need some clients
        self.rr_cli = ResourceRegistryServiceProcessClient(process = self, node = self.container.node)
        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)

        # extract the various parameters passed
        self.out_stream_id = self.CFG.get('process').get('publish_streams').get('visualization_service_submit_stream_id')

        # Create a publisher on the output stream
        out_stream_pub_registrar = StreamPublisherRegistrar(process=self.container, node=self.container.node)
        self.out_stream_pub = out_stream_pub_registrar.create_publisher(stream_id=self.out_stream_id)

        self.data_product_id = self.CFG.get('data_product_id')
        self.stream_def_id = self.CFG.get("stream_def_id")
        stream_def_resource = self.rr_cli.read(self.stream_def_id)
        self.stream_def = stream_def_resource.container
        self.realtime_flag = False
        if self.CFG.get("realtime_flag") == "True":
            self.realtime_flag = True
        else:
            self.data_product_id_token = self.CFG.get('data_product_id_token')


        # extract the stream_id associated with the DP. Needed later
        stream_ids,_ = self.rr_cli.find_objects(self.data_product_id, PRED.hasStream, None, True)
        self.stream_id = stream_ids[0]

        self.dataDescription = []
        self.dataTableContent = []
        self.varTuple = []
        self.total_num_of_records_recvd = 0


    def process(self, packet):

        log.debug('(%s): Received Viz Data Packet' % (self.name) )

        element_count_id = 0
        expected_range = []

        psd = PointSupplementStreamParser(stream_definition=self.stream_def, stream_granule=packet)
        vardict = {}
        arrLen = None
        for varname in psd.list_field_names():
            vardict[varname] = psd.get_values(varname)
            arrLen = len(vardict[varname])


        # if it's the first time, init the dataTable
        if self.initDataTableFlag:
            # create data description from the variables in the message
            self.dataDescription = [('time', 'datetime', 'time')]

            # split the data string to extract variable names
            for varname in psd.list_field_names():
                if varname == 'time':
                    continue

                self.dataDescription.append((varname, 'number', varname))

            self.initDataTableFlag = False


        # Add the records to the datatable
        for i in xrange(arrLen):
            varTuple = []

            for varname,_,_ in self.dataDescription:
                val = float(vardict[varname][i])
                if varname == 'time':
                    varTuple.append(datetime.fromtimestamp(val))
                else:
                    varTuple.append(val)

            # Append the tuples to the data table
            self.dataTableContent.append(varTuple)

            if self.realtime_flag:
                # Maintain a sliding window for realtime transform processes
                realtime_window_size = 100
                if len(self.dataTableContent) > realtime_window_size:
                    # always pop the first element till window size is what we want
                    while len(self.dataTableContent) > realtime_window_size:
                        self.dataTableContent.pop(0)


        if not self.realtime_flag:
            # This is the historical view part. Make a note of how many records were received
            data_stream_id = self.stream_def.data_stream_id
            element_count_id = self.stream_def.identifiables[data_stream_id].element_count_id
            # From each granule you can check the constraint on the number of records
            expected_range = packet.identifiables[element_count_id].constraint.intervals[0]

            # The number of records in a given packet is:
            self.total_num_of_records_recvd += packet.identifiables[element_count_id].value


        # submit the Json version of the datatable to the viz service
        if self.realtime_flag:
            # create the google viz data table
            data_table = gviz_api.DataTable(self.dataDescription)
            data_table.LoadData(self.dataTableContent)

            # submit resulting table back using the out stream publisher
            msg = {"viz_product_type": "google_realtime_dt",
                   "data_product_id": self.data_product_id,
                   "data_table": data_table.ToJSonResponse() }
            self.out_stream_pub.publish(msg)
        else:
            # Submit table back to the service if we received all the replay data
            if self.total_num_of_records_recvd == (expected_range[1] + 1):
                # If the datatable received was too big, decimate on the fly to a fixed size
                max_google_dt_len = 1024
                if len(self.dataTableContent) > max_google_dt_len:
                    decimation_factor = int(math.ceil(len(self.dataTableContent) / float(max_google_dt_len)))

                    # walk backwards so pop() never shifts an index we still need;
                    # keep every decimation_factor-th row
                    for i in xrange(len(self.dataTableContent) - 1, 0, -1):
                        if i % decimation_factor == 0:
                            continue
                        self.dataTableContent.pop(i)

                data_table = gviz_api.DataTable(self.dataDescription)
                data_table.LoadData(self.dataTableContent)

                # submit resulting table back using the out stream publisher
                msg = {"viz_product_type": "google_dt",
                       "data_product_id_token": self.data_product_id_token,
                       "data_table": data_table.ToJSonResponse() }
                self.out_stream_pub.publish(msg)
                return


        # clear the row buffer (a list, despite the name) for future use
        self.varTuple[:] = []
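
# A standalone sketch of the gviz_api pattern the transform above follows:
# describe the columns, load row tuples, then serialize to the Google
# Visualization JSON response format. The sample values are made up.
from datetime import datetime
import gviz_api

description = [('time', 'datetime', 'time'),
               ('salinity', 'number', 'salinity')]
data_table = gviz_api.DataTable(description)
data_table.LoadData([(datetime(2012, 1, 1, 0, 0, 0), 33.1),
                     (datetime(2012, 1, 1, 0, 0, 10), 33.4)])
json_response = data_table.ToJSonResponse()   # ready to publish in a message
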
class VizTransformProcForMatplotlibGraphs(TransformDataProcess):

    """
    This class is used for instantiating worker processes that have subscriptions to data streams and convert
    incoming data from CDM format to Matplotlib graphs

    """
    def on_start(self):
        super(VizTransformProcForMatplotlibGraphs,self).on_start()
        #assert len(self.streams)==1
        self.initDataFlag = True
        self.graph_data = {} # Stores a dictionary of variables : [List of values]

        # Need some clients
        self.rr_cli = ResourceRegistryServiceProcessClient(process = self, node = self.container.node)
        self.pubsub_cli = PubsubManagementServiceClient(node=self.container.node)

        # extract the various parameters passed to the transform process
        self.out_stream_id = self.CFG.get('process').get('publish_streams').get('visualization_service_submit_stream_id')

        # Create a publisher on the output stream
        #stream_route = self.pubsub_cli.register_producer(stream_id=self.out_stream_id)
        out_stream_pub_registrar = StreamPublisherRegistrar(process=self.container, node=self.container.node)
        self.out_stream_pub = out_stream_pub_registrar.create_publisher(stream_id=self.out_stream_id)

        self.data_product_id = self.CFG.get('data_product_id')
        self.stream_def_id = self.CFG.get("stream_def_id")
        self.stream_def = self.rr_cli.read(self.stream_def_id)

        # Start the thread responsible for keeping track of time and generating graphs
        # Mutex for ensuring proper concurrent communications between threads
        self.lock = RLock()
        self.rendering_proc = Greenlet(self.rendering_thread)
        self.rendering_proc.start()




    def process(self, packet):
        log.debug('(%s): Received Viz Data Packet' % self.name )
        #log.debug('(%s):   - Processing: %s' % (self.name,packet))

        # parse the incoming data
        psd = PointSupplementStreamParser(stream_definition=self.stream_def.container, stream_granule=packet)

        # re-arrange incoming data into an easy to parse dictionary
        vardict = {}
        arrLen = None
        for varname in psd.list_field_names():
            vardict[varname] = psd.get_values(varname)
            arrLen = len(vardict[varname])

        if self.initDataFlag:
            # look at the incoming packet and store
            for varname in psd.list_field_names():
                with self.lock:
                    self.graph_data[varname] = []

            self.initDataFlag = False

        # If code reached here, the graph data storage has been initialized. Just add values
        # to the list
        with self.lock:
            for varname in psd.list_field_names():
                self.graph_data[varname].extend(vardict[varname])


    def rendering_thread(self):
        from copy import deepcopy
        # Service Client

        # init Matplotlib
        fig = Figure()
        ax = fig.add_subplot(111)
        canvas = FigureCanvas(fig)
        imgInMem = StringIO.StringIO()
        while True:

            # Sleep for a pre-decided interval. Should be specifiable in a YAML file
            gevent.sleep(20)

            # If there's no data, wait
            # Lock is used here to make sure the entire vector exists start to finish;
            # this assures that the data won't change while it is being copied
            working_set=None
            with self.lock:
                if len(self.graph_data) == 0:
                    continue
                else:
                    working_set = deepcopy(self.graph_data)


            # For the simple case of testing, lets plot all time variant variables one at a time
            xAxisVar = 'time'
            xAxisFloatData = working_set[xAxisVar]

            for varName, varData in working_set.iteritems():
                if varName in ('time', 'height', 'longitude', 'latitude'):
                    continue

                yAxisVar = varName
                yAxisFloatData = working_set[varName]

                # Generate the plot

                ax.plot(xAxisFloatData, yAxisFloatData, 'ro')
                ax.set_xlabel(xAxisVar)
                ax.set_ylabel(yAxisVar)
                ax.set_title(yAxisVar + ' vs ' + xAxisVar)
                ax.set_autoscale_on(False)

                # generate filename for the output image
                fileName = yAxisVar + '_vs_' + xAxisVar + '.png'
                # Save the figure to the in memory file
                canvas.print_figure(imgInMem, format="png")
                imgInMem.seek(0)

                # submit resulting table back using the out stream publisher
                msg = {"viz_product_type": "matplotlib_graphs",
                       "data_product_id": self.data_product_id,
                       "image_obj": imgInMem.getvalue(),
                       "image_name": fileName}
                self.out_stream_pub.publish(msg)

                #clear the canvas for the next image
                ax.clear()
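
# A self-contained sketch of the rendering pattern used above: the
# object-oriented matplotlib API with an Agg canvas needs no GUI backend,
# and the PNG bytes land in an in-memory buffer instead of a file.
# The sample data is made up.
import StringIO
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas

fig = Figure()
ax = fig.add_subplot(111)
canvas = FigureCanvas(fig)
ax.plot([0, 10, 20], [33.1, 33.4, 33.2], 'ro')
ax.set_xlabel('time')
ax.set_ylabel('salinity')
ax.set_title('salinity vs time')

imgInMem = StringIO.StringIO()
canvas.print_figure(imgInMem, format='png')
png_bytes = imgInMem.getvalue()   # e.g. publish in a message, as above
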
def upload_data(dataproduct_id):
    upload_folder = FileSystem.get_url(FS.TEMP, 'uploads')
    try:

        rr_client = ResourceRegistryServiceProcessClient(
            node=Container.instance.node, process=service_gateway_instance)
        object_store = Container.instance.object_store

        try:
            rr_client.read(str(dataproduct_id))
        except BadRequest:
            raise BadRequest('Unknown DataProduct ID %s' % dataproduct_id)

        # required fields
        upload = request.files['file']  # <input type=file name="file">

        # determine filetype
        filetype = _check_magic(upload)
        upload.seek(0)  # return to beginning for save

        if upload and filetype is not None:

            # upload file - run filename through werkzeug.secure_filename
            filename = secure_filename(upload.filename)
            path = os.path.join(upload_folder, filename)
            upload_time = time.time()
            upload.save(path)

            # register upload
            file_upload_context = {
                # TODO add dataproduct_id
                'name': 'User uploaded file %s' % filename,
                'filename': filename,
                'filetype': filetype,
                'path': path,
                'upload_time': upload_time,
                'status': 'File uploaded to server'
            }
            fuc_id, _ = object_store.create_doc(file_upload_context)

            # client to process dispatch
            pd_client = ProcessDispatcherServiceClient()

            # create process definition
            process_definition = ProcessDefinition(
                name='upload_data_processor',
                executable={
                    'module':
                    'ion.processes.data.upload.upload_data_processing',
                    'class': 'UploadDataProcessing'
                })
            process_definition_id = pd_client.create_process_definition(
                process_definition)
            # create process
            process_id = pd_client.create_process(process_definition_id)
            #schedule process
            config = DotDict()
            config.process.fuc_id = fuc_id
            config.process.dp_id = dataproduct_id
            pid = pd_client.schedule_process(process_definition_id,
                                             process_id=process_id,
                                             configuration=config)
            log.info('UploadDataProcessing process created %s' % pid)
            # response - only FileUploadContext ID and determined filetype for UX display
            resp = {'fuc_id': fuc_id}
            return gateway_json_response(resp)

        raise BadRequest('Invalid Upload')

    except Exception as e:
        return build_error_response(e)
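
# _check_magic() is not shown in this listing; a plausible sketch reads the
# first bytes of the upload and matches them against known file signatures.
# The signature table and helper name below are hypothetical.
MAGIC_SIGNATURES = {
    '\x89PNG\r\n\x1a\n': 'png',
    '%PDF': 'pdf',
    'PK\x03\x04': 'zip',
}

def _check_magic_sketch(fileobj):
    header = fileobj.read(8)
    for magic, filetype in MAGIC_SIGNATURES.iteritems():
        if header.startswith(magic):
            return filetype
    return None   # unknown filetype; the caller rejects the upload

# Callers must seek(0) afterwards, as upload_data() does above.
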
Example #17
0
class TestSchedulerService(IonIntegrationTestCase):

    def setUp(self):
        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        process = FakeProcess()
        self.ssclient = SchedulerServiceProcessClient(node=self.container.node, process=process)
        self.rrclient = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)

    def tearDown(self):
        pass

    def now_utc(self):
        return time.time()

    def test_create_interval_timer(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # cancel the timer
        # wait until after next interval to verify that timer was correctly cancelled

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval_Timer_233"
        sub = EventSubscriber(event_type="TimerEvent", callback=self.interval_timer_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        start_time = self.now_utc()
        self.interval_timer_end_time = start_time + 10
        id = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval,
            end_time=self.interval_timer_end_time,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Validate the timer is stored in RR
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until two events are published
        gevent.sleep((self.interval_timer_interval * 2) + 1)

        time_diff = (datetime.datetime.utcnow() - self.interval_timer_sent_time).seconds
        timer_counts =  math.floor(time_diff/self.interval_timer_interval)

        # Cancel the timer
        ss = self.ssclient.cancel_timer(id)

        # wait until after next interval to verify that timer was correctly cancelled
        gevent.sleep(self.interval_timer_interval)

        # Validate the timer correctly cancelled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

        # Validate the timer is removed from the resource registry
        with self.assertRaises(NotFound):
            self.rrclient.read(id)

        # Validate the number of timer counts
        self.assertEqual(self.interval_timer_count, timer_counts, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: %d Timer id: %s " %(self.interval_timer_count, timer_counts, id))


    def test_system_restart(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # cancel the timer
        # wait until after next interval to verify that timer was correctly cancelled

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval_Timer_4444"
        sub = EventSubscriber(event_type="TimerEvent", callback=self.on_restart_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        start_time = self.now_utc()
        self.interval_timer_end_time = start_time + 20
        id = self.ssclient.create_interval_timer(start_time="now", interval=self.interval_timer_interval,
            end_time=self.interval_timer_end_time,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Validate the timer is stored in RR
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until 1 event is published
        gevent.sleep((self.interval_timer_interval) + 1)
        time_diff = (datetime.datetime.utcnow() - self.interval_timer_sent_time).seconds
        timer_counts =  math.floor(time_diff/self.interval_timer_interval)

        # Validate the number of events generated
        self.assertEqual(self.interval_timer_count, timer_counts, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: %d Timer id: %s " %(self.interval_timer_count, timer_counts, id))

        self.ssclient.on_system_restart()

        # after system restart, validate the timer is restored
        ss = self.rrclient.read(id)
        self.assertEqual(ss.entry.event_origin, event_origin)

        # Wait until another event is published
        start_time = datetime.datetime.utcnow()
        gevent.sleep((self.interval_timer_interval * 2) + 1)
        time_diff = (datetime.datetime.utcnow() - start_time).seconds
        timer_counts =  math.floor(time_diff/self.interval_timer_interval)

        # Validate the number of events generated
        self.assertGreater(self.interval_timer_count, timer_counts)

        # Cancel the timer
        ss = self.ssclient.cancel_timer(id)

        # wait until after next interval to verify that timer was correctly cancelled
        gevent.sleep(self.interval_timer_interval)

        # Validate the timer correctly cancelled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

        # Validate the timer is removed from the resource registry
        with self.assertRaises(NotFound):
            self.rrclient.read(id)

    def on_restart_callback(self, *args, **kwargs):
        self.interval_timer_count += 1
        log.debug("test_scheduler: on_restart_callback: time: " + str(self.now_utc()) + " count: " + str(self.interval_timer_count))

    def test_create_interval_timer_with_end_time(self):
        # create the interval timer resource
        # create the event listener
        # call scheduler to set the timer
        # receive a few intervals, validate that arrival time is as expected
        # Validate no more events are published after end_time expires
        # Validate the timer was canceled after the end_time expires

        self.interval_timer_count_2 = 0
        self.interval_timer_sent_time_2 = 0
        self.interval_timer_received_time_2 = 0
        self.interval_timer_interval_2 = 3

        event_origin = "Interval_Timer_2"
        sub = EventSubscriber(event_type="TimerEvent", callback=self.interval_timer_callback_with_end_time, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        start_time = self.now_utc()
        self.interval_timer_end_time_2 = start_time + 7
        id = self.ssclient.create_interval_timer(start_time="now" , interval=self.interval_timer_interval_2,
            end_time=self.interval_timer_end_time_2,
            event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time_2 = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait until all events are published
        gevent.sleep((self.interval_timer_end_time_2 - start_time) + self.interval_timer_interval_2 + 1)

        # Validate the number of events generated
        self.assertEqual(self.interval_timer_count_2, 2, "Invalid number of timeouts generated. Number of event: %d Expected: 2 Timer id: %s " %(self.interval_timer_count_2, id))

        # Validate the timer was canceled after the end_time is expired
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

    def interval_timer_callback_with_end_time(self, *args, **kwargs):
        self.interval_timer_received_time_2 = datetime.datetime.utcnow()
        self.interval_timer_count_2 += 1
        time_diff = math.fabs( ((self.interval_timer_received_time_2 - self.interval_timer_sent_time_2).total_seconds())
                               - (self.interval_timer_interval_2 * self.interval_timer_count_2) )
        # Assert expire time is within +-10 seconds
        self.assertTrue(time_diff <= 10)
        log.debug("test_scheduler: interval_timer_callback_with_end_time: time:" + str(self.interval_timer_received_time_2) + " count:" + str(self.interval_timer_count_2))


    def interval_timer_callback(self, *args, **kwargs):
        self.interval_timer_received_time = datetime.datetime.utcnow()
        self.interval_timer_count += 1
        time_diff = math.fabs( ((self.interval_timer_received_time - self.interval_timer_sent_time).total_seconds())
                               - (self.interval_timer_interval * self.interval_timer_count) )
        # Assert expire time is within +-10 seconds
        self.assertTrue(time_diff <= 10)
        log.debug("test_scheduler: interval_timer_callback: time:" + str(self.interval_timer_received_time) + " count:" + str(self.interval_timer_count))

    def test_cancel_single_timer(self):
        # test creating a new timer that is one-time-only

        # create the timer resource
        # create the event listener
        # call scheduler to set the timer

        # create then cancel the timer, verify that event is not received

        # create the timer resource
        # create the event listener
        # call scheduler to set the timer
        # call scheduler to cancel the timer
        # wait until after expiry to verify that event is not sent
        self.single_timer_count = 0
        event_origin = "Time_of_Day"

        sub = EventSubscriber(event_type="TimerEvent", callback=self.single_timer_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        now = datetime.datetime.utcnow() + timedelta(seconds=3)
        times_of_day =[{'hour': str(now.hour),'minute' : str(now.minute), 'second':str(now.second) }]
        id = self.ssclient.create_time_of_day_timer(times_of_day=times_of_day,  expires=self.now_utc()+3, event_origin=event_origin, event_subtype="test")
        self.assertEqual(type(id), str)
        self.ssclient.cancel_timer(id)
        gevent.sleep(3)

        # Validate the event is not generated
        self.assertEqual(self.single_timer_count, 0, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: 0 Timer id: %s " %(self.single_timer_count, id))

    def single_timer_callback (self, *args, **kwargs):
        self.single_timer_count += 1
        log.debug("test_scheduler: single_timer_call_back: time:" + str(self.now_utc()) + " count:" + str(self.single_timer_count))

    def test_create_forever_interval_timer(self):
        # Test creating interval timer that runs forever

        self.interval_timer_count = 0
        self.interval_timer_sent_time = 0
        self.interval_timer_received_time = 0
        self.interval_timer_interval = 3

        event_origin = "Interval Timer Forever"
        sub = EventSubscriber(event_type="TimerEvent", callback=self.interval_timer_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        id = self.ssclient.create_interval_timer(start_time=str(self.now_utc()), interval=self.interval_timer_interval,
            end_time="-1",
            event_origin=event_origin, event_subtype=event_origin)
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait for 4 events to be published
        gevent.sleep((self.interval_timer_interval * 4) + 1)
        self.ssclient.cancel_timer(id)
        time_diff = (datetime.datetime.utcnow() - self.interval_timer_sent_time).seconds
        timer_counts =  math.floor(time_diff/self.interval_timer_interval)


        # Validate the timer id is invalid once it has been canceled
        with self.assertRaises(BadRequest):
            self.ssclient.cancel_timer(id)

        # Validate events are not generated after canceling the timer
        self.assertEqual(self.interval_timer_count, timer_counts, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: %d Timer id: %s " %(self.interval_timer_count, timer_counts, id))

    def test_timeoffday_timer(self):
        # test creating a new timer that is one-time-only
        # create the timer resource
        # get the current time, set the timer to several seconds from current time
        # create the event listener
        # call scheduler to set the timer
        # verify that  event arrival is within one/two seconds of current time

        event_origin = "Time Of Day2"
        self.expire_sec_1 = 4
        self.expire_sec_2 = 5
        self.tod_count = 0
        expire1 = datetime.datetime.utcnow() + timedelta(seconds=self.expire_sec_1)
        expire2 = datetime.datetime.utcnow() + timedelta(seconds=self.expire_sec_2)
        # Create two timers
        times_of_day =[{'hour': str(expire1.hour),'minute' : str(expire1.minute), 'second':str(expire1.second) },
                       {'hour': str(expire2.hour),'minute' : str(expire2.minute), 'second':str(expire2.second)}]

        sub = EventSubscriber(event_type="TimerEvent", callback=self.tod_callback, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        # Expires in two days
        expires = calendar.timegm((datetime.datetime.utcnow() + timedelta(days=2)).timetuple())
        self.tod_sent_time = datetime.datetime.utcnow()
        id = self.ssclient.create_time_of_day_timer(times_of_day=times_of_day, expires=expires, event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait until all events are generated
        gevent.sleep(9)
        time_diff = (datetime.datetime.utcnow() - self.interval_timer_sent_time).seconds
        timer_counts =  math.floor(time_diff/self.expire_sec_1) + math.floor(time_diff/self.expire_sec_2)

        # After waiting, validate only 2 events are generated.
        self.assertEqual(self.tod_count, 2, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: %d Timer id: %s " %(self.tod_count, timer_counts, id))

        # Cancel the timer
        self.ssclient.cancel_timer(id)

    def tod_callback(self, *args, **kwargs):
        tod_receive_time = datetime.datetime.utcnow()
        self.tod_count += 1
        if self.tod_count == 1:
            time_diff = math.fabs((tod_receive_time - self.tod_sent_time).total_seconds() - self.expire_sec_1)
            self.assertTrue(time_diff <= 2)
        elif self.tod_count == 2:
            time_diff = math.fabs((tod_receive_time - self.tod_sent_time).total_seconds() - self.expire_sec_2)
            self.assertTrue(time_diff <= 2)
        log.debug("test_scheduler: tod_callback: time:" + str(tod_receive_time) + " count:" + str(self.tod_count))

    def test_timeoffday_timer_in_past_seconds(self):
        # test creating a new timer that is one-time-only
        # create the timer resource
        # get the current time, set the timer to several seconds from current time
        # create the event listener
        # call scheduler to set the timer
        # verify that  event arrival is within one/two seconds of current time

        event_origin = "Time_Of_Day3"
        expire_sec = -4
        self.tod_count2 = 0
        now = datetime.datetime.utcnow()
        expire1 = now + timedelta(seconds=expire_sec)
        # Create two timers
        times_of_day = [{'hour': str(expire1.hour), 'minute': str(expire1.minute), 'second': str(expire1.second)}]

        sub = EventSubscriber(event_type="TimerEvent", callback=self.tod_callback2, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        # Expires in 3 days
        expires = calendar.timegm((datetime.datetime.utcnow() + timedelta(days=3)).timetuple())
        self.tod_sent_time = datetime.datetime.utcnow()
        id = self.ssclient.create_time_of_day_timer(times_of_day=times_of_day, expires=expires, event_origin=event_origin, event_subtype="")
        self.interval_timer_sent_time = datetime.datetime.utcnow()
        self.assertEqual(type(id), str)

        # Wait and see if the any events are generated
        gevent.sleep(5)

        # After waiting, validate no event is generated
        self.assertEqual(self.tod_count2, 0, "Invalid number of timeouts generated. Number of timeout: %d Expected timeout: 0 Timer id: %s " %(self.tod_count2, id))

        # Cancel the timer
        self.ssclient.cancel_timer(id)

        # This is an example of the following case:
        # Current time is 8:00AM and the user sets up a timer for 6:00AM. Since it is
        #   already 8AM, the timer is scheduled for 6AM tomorrow, but the expire time
        #   is set to 5AM tomorrow
        event_origin = "Time_Of_Day4"
        expire_sec = -4
        self.tod_count2 = 0
        now = datetime.datetime.utcnow()
        expire1 = now + timedelta(seconds=expire_sec)
        times_of_day = [{'hour': str(expire1.hour), 'minute': str(expire1.minute), 'second': str(expire1.second)}]

        sub = EventSubscriber(event_type="TimerEvent", callback=self.tod_callback2, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        # Expires before the first event
        time_delta = timedelta(days=1) + timedelta(seconds=-(abs(expire_sec*2)))   # Notice the minus sign. It expires before the first event
        expires = calendar.timegm((now + time_delta).timetuple())
        self.tod_sent_time = datetime.datetime.utcnow()
        with self.assertRaises(BadRequest):
            id = self.ssclient.create_time_of_day_timer(times_of_day=times_of_day, expires=expires, event_origin=event_origin, event_subtype="")

    def tod_callback2(self, *args, **kwargs):
        tod_receive_time = datetime.datetime.utcnow()
        self.tod_count2 += 1
        log.debug("test_scheduler: tod_callback2: time:")

    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False), 'Skip test while in CEI LAUNCH mode')
    def test_quit_stops_timers(self):

        ar = AsyncResult()
        def cb(*args, **kwargs):
            ar.set(args)

            self.interval_timer_count += 1

        event_origin = "test_quitter"
        sub = EventSubscriber(event_type="TimerEvent", callback=cb, origin=event_origin)
        sub.start()
        self.addCleanup(sub.stop)

        tid = self.ssclient.create_interval_timer(start_time="now",
                                                  end_time="-1",
                                                  interval=1,
                                                  event_origin=event_origin)

        # wait until at least one scheduled message
        ar.get(timeout=5)

        # shut it down!
        p = self.container.proc_manager.procs_by_name['scheduler']
        self.container.terminate_process(p.id)

        # assert empty
        self.assertEquals(p.schedule_entries, {})
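
# Note the timestamp fix relative to the earlier copy of this test class:
# time.mktime() interprets a struct_time as *local* time, so feeding it
# utcnow().timetuple() skews the epoch by the UTC offset. calendar.timegm()
# is the UTC-correct inverse of time.gmtime(). A minimal illustration:
import time
import calendar
import datetime

now = datetime.datetime.utcnow()
utc_epoch = calendar.timegm(now.timetuple())   # correct for a UTC tuple
local_epoch = time.mktime(now.timetuple())     # wrong unless local TZ is UTC
# utc_epoch - local_epoch equals the local UTC offset, in seconds.
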
def upload_data(dataproduct_id):
    upload_folder = FileSystem.get_url(FS.TEMP,'uploads')
    try:

        rr_client = ResourceRegistryServiceProcessClient(process=service_gateway_instance)
        object_store = Container.instance.object_store

        try:
            rr_client.read(str(dataproduct_id))
        except BadRequest:
            raise BadRequest('Unknown DataProduct ID %s' % dataproduct_id)

        # required fields
        upload = request.files['file']  # <input type=file name="file">

        # determine filetype
        filetype = _check_magic(upload)
        upload.seek(0)  # return to beginning for save

        if upload and filetype is not None:

            # upload file - run filename through werkzeug.secure_filename
            filename = secure_filename(upload.filename)
            path = os.path.join(upload_folder, filename)
            upload_time = time.time()
            upload.save(path)

            # register upload
            file_upload_context = {
                # TODO add dataproduct_id
                'name':'User uploaded file %s' % filename,
                'filename':filename,
                'filetype':filetype,
                'path':path,
                'upload_time':upload_time,
                'status':'File uploaded to server'
            }
            fuc_id, _ = object_store.create_doc(file_upload_context)

            # client to process dispatch
            pd_client = ProcessDispatcherServiceClient()

            # create process definition
            process_definition = ProcessDefinition(
                name='upload_data_processor',
                executable={
                    'module':'ion.processes.data.upload.upload_data_processing',
                    'class':'UploadDataProcessing'
                }
            )
            process_definition_id = pd_client.create_process_definition(process_definition)
            # create process
            process_id = pd_client.create_process(process_definition_id)
            #schedule process
            config = DotDict()
            config.process.fuc_id = fuc_id
            config.process.dp_id = dataproduct_id
            pid = pd_client.schedule_process(process_definition_id, process_id=process_id, configuration=config)
            log.info('UploadDataProcessing process created %s' % pid)
            # response - only FileUploadContext ID and determined filetype for UX display
            resp = {'fuc_id': fuc_id}
            return gateway_json_response(resp)

        raise BadRequest('Invalid Upload')

    except Exception as e:
        return build_error_response(e)
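
# DotDict (from pyon) lets callers build nested process configuration with
# attribute access, as in config.process.fuc_id above. A minimal stand-in
# with the same auto-vivifying behavior, for illustration only:
class DotDictSketch(dict):
    def __getattr__(self, key):
        if key not in self:
            self[key] = DotDictSketch()
        return self[key]

    def __setattr__(self, key, value):
        self[key] = value

config = DotDictSketch()
config.process.fuc_id = 'some_fuc_id'    # creates config['process'] on the fly
config.process.dp_id = 'some_dp_id'
assert config['process']['fuc_id'] == 'some_fuc_id'
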
Example #19
0
class TestWorkflowManagementIntegration(VisualizationIntegrationTestHelper):
    def setUp(self):
        # Start container

        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # simulate preloading
        preload_ion_params(self.container)

        #Instantiate a process to represent the test
        process = WorkflowServiceTestProcess()

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceProcessClient(
            node=self.container.node, process=process)
        self.damsclient = DataAcquisitionManagementServiceProcessClient(
            node=self.container.node, process=process)
        self.pubsubclient = PubsubManagementServiceProcessClient(
            node=self.container.node, process=process)
        self.ingestclient = IngestionManagementServiceProcessClient(
            node=self.container.node, process=process)
        self.imsclient = InstrumentManagementServiceProcessClient(
            node=self.container.node, process=process)
        self.dataproductclient = DataProductManagementServiceProcessClient(
            node=self.container.node, process=process)
        self.dataprocessclient = DataProcessManagementServiceProcessClient(
            node=self.container.node, process=process)
        self.datasetclient = DatasetManagementServiceProcessClient(
            node=self.container.node, process=process)
        self.workflowclient = WorkflowManagementServiceProcessClient(
            node=self.container.node, process=process)
        self.process_dispatcher = ProcessDispatcherServiceProcessClient(
            node=self.container.node, process=process)
        self.data_retriever = DataRetrieverServiceProcessClient(
            node=self.container.node, process=process)

        self.ctd_stream_def = SBE37_CDM_stream_definition()

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    def test_SA_transform_components(self):

        assertions = self.assertTrue

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        ###
        ###  Setup the first transformation
        ###

        # Salinity: Data Process Definition
        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()

        l2_salinity_all_data_process_id, ctd_l2_salinity_output_dp_id = self.create_transform_process(
            ctd_L2_salinity_dprocdef_id, ctd_parsed_data_product_id,
            'salinity')

        ## get the stream id for the transform outputs
        stream_ids, _ = self.rrclient.find_objects(
            ctd_l2_salinity_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        sal_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_stream_id)

        ###
        ###  Setup the second transformation
        ###

        # Salinity Doubler: Data Process Definition
        salinity_doubler_dprocdef_id = self.create_salinity_doubler_data_process_definition()

        salinity_double_data_process_id, salinity_doubler_output_dp_id = self.create_transform_process(
            salinity_doubler_dprocdef_id, ctd_l2_salinity_output_dp_id,
            'salinity')

        stream_ids, _ = self.rrclient.find_objects(
            salinity_doubler_output_dp_id, PRED.hasStream, None, True)
        assertions(len(stream_ids) > 0)
        sal_dbl_stream_id = stream_ids[0]
        data_product_stream_ids.append(sal_dbl_stream_id)

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id,
                                                      data_product_stream_ids)

        #Stop the transform processes
        self.dataprocessclient.deactivate_data_process(
            salinity_double_data_process_id)
        self.dataprocessclient.deactivate_data_process(
            l2_salinity_all_data_process_id)

        #Validate the data from each of the messages along the way
        self.validate_messages(results)

    @attr('LOCOINT')
    @attr('SMOKE')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    def test_transform_workflow(self):

        assertions = self.assertTrue

        log.debug("Building the workflow definition")

        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Salinity_Test_Workflow',
            description='tests a workflow of multiple transform data processes'
        )

        workflow_data_product_name = 'TEST-Workflow_Output_Product'  #Set a specific output product name

        #-------------------------------------------------------------------------------------------------------------------------
        log.debug("Adding a transformation process definition for salinity")
        #-------------------------------------------------------------------------------------------------------------------------

        ctd_L2_salinity_dprocdef_id = self.create_salinity_data_process_definition()
        # Don't persist the intermediate data product
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=ctd_L2_salinity_dprocdef_id,
            persist_process_output_data=False)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #-------------------------------------------------------------------------------------------------------------------------
        log.debug(
            "Adding a transformation process definition for salinity doubler")
        #-------------------------------------------------------------------------------------------------------------------------

        salinity_doubler_dprocdef_id = self.create_salinity_doubler_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=salinity_doubler_dprocdef_id,
        )
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        log.debug("Creating workflow def in the resource registry")
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        aids = self.rrclient.find_associations(workflow_def_id,
                                               PRED.hasDataProcessDefinition)
        assertions(len(aids) == 2)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        log.debug("Creating the input data product")
        ctd_stream_id, ctd_parsed_data_product_id = self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        log.debug("Creating and starting the workflow")
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id,
            ctd_parsed_data_product_id,
            persist_workflow_data_product=True,
            output_data_product_name=workflow_data_product_name,
            timeout=300)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        log.debug("persisting the output product")
        #self.dataproductclient.activate_data_product_persistence(workflow_product_id)
        dataset_ids, _ = self.rrclient.find_objects(workflow_product_id,
                                                    PRED.hasDataset,
                                                    RT.Dataset, True)
        assertions(len(dataset_ids) == 1)
        dataset_id = dataset_ids[0]
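        # Because the workflow was created with
        # persist_workflow_data_product=True, the output product is expected
        # to have exactly one associated Dataset holding the ingested granules.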

        log.debug(
            "Verifying the output data product name matches what was specified when the workflow was created"
        )
        workflow_product = self.rrclient.read(workflow_product_id)
        assertions(
            workflow_product.name.startswith(workflow_data_product_name),
            'Output product name %s does not start with %s' %
            (workflow_product.name, workflow_data_product_name))

        log.debug(
            "Walking the associations to find the appropriate output data streams to validate the messages"
        )

        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 2)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        log.debug("data_product_stream_ids: %s" % data_product_stream_ids)

        log.debug(
            "Starting the output stream listener to monitor and collect messages"
        )
        results = self.start_output_stream_and_listen(ctd_stream_id,
                                                      data_product_stream_ids)

        log.debug("results::: %s" % results)

        log.debug("Stopping the workflow processes")
        self.workflowclient.terminate_data_process_workflow(
            workflow_id, delete_data_products=False,
            timeout=250)  # TODO: also exercise delete_data_products=True

        log.debug("Making sure the Workflow object was removed")
        objs, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(objs) == 0)

        log.debug(
            "Validating the data from each of the messages along the way")
        self.validate_messages(results)

        log.debug(
            "Checking that dataset id %s was persisted and can be retrieved" %
            dataset_id)
        self.validate_data_ingest_retrieve(dataset_id)

        log.debug("Cleaning up to make sure delete is correct.")
        self.workflowclient.delete_workflow_definition(workflow_def_id)
        """
        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )
        """

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    def test_highcharts_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='HighCharts_Test_Workflow',
            description='Tests the workflow of converting stream data to HighCharts')

        #Add a transformation process definition
        highcharts_procdef_id = self.create_highcharts_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=highcharts_procdef_id,
            persist_process_output_data=False)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = \
            self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id, timeout=60)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id,
                                                      data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id=workflow_id, delete_data_products=False,
            timeout=60)  # TODO: also exercise delete_data_products=True

        #Validate the data from each of the messages along the way
        self.validate_highcharts_transform_results(results)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)
        """
        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)

        assertions(len(workflow_def_ids) == 0 )
        """

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    def test_mpl_graphs_transform_workflow(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Mpl_Graphs_Test_Workflow',
            description='Tests the workflow of converting stream data to Matplotlib graphs')

        #Add a transformation process definition
        mpl_graphs_procdef_id = self.create_mpl_graphs_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=mpl_graphs_procdef_id,
            persist_process_output_data=False)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the input data product
        ctd_stream_id, ctd_parsed_data_product_id = \
            self.create_ctd_input_stream_and_data_product()
        data_product_stream_ids.append(ctd_stream_id)

        #Create and start the workflow
        workflow_id, workflow_product_id = self.workflowclient.create_data_process_workflow(
            workflow_def_id,
            ctd_parsed_data_product_id,
            persist_workflow_data_product=True,
            timeout=60)

        workflow_output_ids, _ = self.rrclient.find_subjects(
            RT.Workflow, PRED.hasOutputProduct, workflow_product_id, True)
        assertions(len(workflow_output_ids) == 1)

        #Walk the associations to find the appropriate output data streams to validate the messages
        workflow_dp_ids, _ = self.rrclient.find_objects(
            workflow_id, PRED.hasDataProduct, RT.DataProduct, True)
        assertions(len(workflow_dp_ids) == 1)

        for dp_id in workflow_dp_ids:
            stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream,
                                                       None, True)
            assertions(len(stream_ids) == 1)
            data_product_stream_ids.append(stream_ids[0])

        #Start the output stream listener to monitor and collect messages
        results = self.start_output_stream_and_listen(ctd_stream_id,
                                                      data_product_stream_ids)

        #Stop the workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id=workflow_id, delete_data_products=False,
            timeout=60)  # TODO: also exercise delete_data_products=True

        #Validate the data from each of the messages along the way
        self.validate_mpl_graphs_transform_results(results)

        # Check that ingestion worked by retrieving granules back via the data
        # retriever. First find the dataset associated with the final output
        # data product.
        ds_ids, _ = self.rrclient.find_objects(
            workflow_dp_ids[-1], PRED.hasDataset, RT.Dataset, True)
        retrieved_granule = self.data_retriever.retrieve_last_data_points(
            ds_ids[0], 10)
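        # retrieve_last_data_points() presumably replays the most recent
        # records (here, up to 10) from the persisted dataset as a granule, so
        # the same validator used for the live messages can be applied to it.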

        #Validate the retrieved (persisted) data the same way as the live messages
        self.validate_mpl_graphs_transform_results(retrieved_granule)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)
        """
        workflow_def_ids,_ = self.rrclient.find_resources(restype=RT.WorkflowDefinition)
        assertions(len(workflow_def_ids) == 0 )
        """

    @attr('LOCOINT')
    @unittest.skipIf(os.getenv('CEI_LAUNCH_TEST', False),
                     'Not integrated for CEI')
    def test_multiple_workflow_instances(self):

        assertions = self.assertTrue

        # Build the workflow definition
        workflow_def_obj = IonObject(
            RT.WorkflowDefinition,
            name='Multiple_Test_Workflow',
            description='Tests the workflow of converting stream data')

        #Add a transformation process definition
        highcharts_procdef_id = self.create_highcharts_data_process_definition()
        workflow_step_obj = IonObject(
            'DataProcessWorkflowStep',
            data_process_definition_id=highcharts_procdef_id,
            persist_process_output_data=False)
        workflow_def_obj.workflow_steps.append(workflow_step_obj)

        #Create it in the resource registry
        workflow_def_id = self.workflowclient.create_workflow_definition(
            workflow_def_obj)

        #The list of data product streams to monitor
        data_product_stream_ids = list()

        #Create the first input data product
        ctd_stream_id1, ctd_parsed_data_product_id1 = self.create_ctd_input_stream_and_data_product(
            'ctd_parsed1')
        data_product_stream_ids.append(ctd_stream_id1)

        #Create and start the first workflow
        workflow_id1, workflow_product_id1 = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id1, timeout=60)

        #Create the second input data product
        ctd_stream_id2, ctd_parsed_data_product_id2 = self.create_ctd_input_stream_and_data_product(
            'ctd_parsed2')
        data_product_stream_ids.append(ctd_stream_id2)

        #Create and start the second workflow
        workflow_id2, workflow_product_id2 = self.workflowclient.create_data_process_workflow(
            workflow_def_id, ctd_parsed_data_product_id2, timeout=60)

        #Verify that both workflow instances were created
        workflow_ids, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 2)
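        # A single WorkflowDefinition evidently backs both Workflow instances;
        # each instance gets its own transform processes and output products.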

        #Start the first input stream process
        ctd_sim_pid1 = self.start_sinusoidal_input_stream_process(
            ctd_stream_id1)

        #Start the second input stream process
        ctd_sim_pid2 = self.start_simple_input_stream_process(ctd_stream_id2)

        #Start the output stream listener to monitor a set number of messages being sent through the workflows
        results = self.start_output_stream_and_listen(
            None, data_product_stream_ids, message_count_per_stream=5)
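        # With no input stream argument, the listener presumably just blocks
        # until each monitored stream has delivered message_count_per_stream
        # messages, then returns the collected results.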

        # Stop the flow of messages - enough data has been collected, so kill
        # both ctd simulator processes.
        self.process_dispatcher.cancel_process(ctd_sim_pid1)
        self.process_dispatcher.cancel_process(ctd_sim_pid2)

        #Stop the first workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id=workflow_id1, delete_data_products=False,
            timeout=60)  # TODO: also exercise delete_data_products=True

        #Stop the second workflow processes
        self.workflowclient.terminate_data_process_workflow(
            workflow_id=workflow_id2, delete_data_products=False,
            timeout=60)  # TODO: also exercise delete_data_products=True

        #Verify that both Workflow resources were removed
        workflow_ids, _ = self.rrclient.find_resources(restype=RT.Workflow)
        assertions(len(workflow_ids) == 0)

        #Cleanup to make sure delete is correct.
        self.workflowclient.delete_workflow_definition(workflow_def_id)
        """