Example #1
    def test_create_deployment(self):

        #create a deployment with metadata and an initial site and device
        platform_site_obj = IonObject(RT.PlatformSite,
                                      name='PlatformSite1',
                                      description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site_obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device_obj)

        start = str(int(time.mktime(datetime.datetime(2013, 1, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2014, 1, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start,
                                    end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.assign_site_to_deployment(site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ",
                  str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite,
                                                  PRED.hasDeployment,
                                                  deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice,
                                                    PRED.hasDeployment,
                                                    deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to read the deleted deployment object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")
Example #2
    def test_get_resource_extension(self):

        #Testing multiple resource owners
        subject1 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"

        actor_identity_obj1 = IonObject(RT.ActorIdentity, {"name": subject1})
        actor_id1, _ = self.resource_registry_service.create(
            actor_identity_obj1)

        user_info_obj1 = IonObject(RT.UserInfo, {"name": "Foo"})
        user_info_id1, _ = self.resource_registry_service.create(
            user_info_obj1)
        self.resource_registry_service.create_association(
            actor_id1, PRED.hasInfo, user_info_id1)

        subject2 = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Bob Cumbers A256"

        actor_identity_obj2 = IonObject(RT.ActorIdentity, {"name": subject2})
        actor_id2, _ = self.resource_registry_service.create(
            actor_identity_obj2)

        user_info_obj2 = IonObject(RT.UserInfo, {"name": "Foo2"})
        user_info_id2, _ = self.resource_registry_service.create(
            user_info_obj2)
        self.resource_registry_service.create_association(
            actor_id2, PRED.hasInfo, user_info_id2)

        test_obj = IonObject(RT.InformationResource, {"name": "TestResource"})
        test_obj_id, _ = self.resource_registry_service.create(test_obj)
        self.resource_registry_service.create_association(
            test_obj_id, PRED.hasOwner, actor_id1)
        self.resource_registry_service.create_association(
            test_obj_id, PRED.hasOwner, actor_id2)

        extended_resource = self.resource_registry_service.get_resource_extension(
            test_obj_id, OT.ExtendedInformationResource)

        self.assertEqual(test_obj_id, extended_resource._id)
        self.assertEqual(len(extended_resource.owners), 2)

        extended_resource_list = self.resource_registry_service.get_resource_extension(
            str([user_info_id1, user_info_id2]),
            OT.ExtendedInformationResource)
        self.assertEqual(len(extended_resource_list), 2)

        optional_args = {'user_id': user_info_id1}
        extended_resource = self.resource_registry_service.get_resource_extension(
            test_obj_id,
            OT.TestExtendedInformationResource,
            optional_args=optional_args)

        self.assertEqual(test_obj_id, extended_resource._id)
        self.assertEqual(len(extended_resource.owners), 2)
        self.assertEqual(extended_resource.user_id, user_info_id1)
Example #3
    def test_org_crud(self):

        with self.assertRaises(BadRequest) as br:
            self.org_management_service.create_org(IonObject("Org", {"name": "Test Facility", "org_governance_name": "Test Facility" }))
        self.assertTrue("can only contain alphanumeric and underscore characters" in br.exception.message)

        with self.assertRaises(BadRequest):
            self.org_management_service.create_org()

        org_obj = IonObject("Org", {"name": "Test Facility"})
        org_id = self.org_management_service.create_org(org_obj)
        self.assertNotEqual(org_id, None)


        org = None
        org = self.org_management_service.read_org(org_id)
        self.assertNotEqual(org, None)
        self.assertEqual(org.org_governance_name, 'Test_Facility')

        #Check that the roles got associated to the org
        role_list = self.org_management_service.find_org_roles(org_id)
        self.assertEqual(len(role_list), 2)

        with self.assertRaises(BadRequest):
            self.org_management_service.update_org()
        org.name = 'Updated Test Facility'
        self.org_management_service.update_org(org)

        org = None
        org = self.org_management_service.read_org(org_id)
        self.assertNotEqual(org, None)
        self.assertEqual(org.name, 'Updated Test Facility')
        self.assertEqual(org.org_governance_name, 'Test_Facility')

        user_role = self.org_management_service.find_org_role_by_name(org_id, ORG_MANAGER_ROLE)
        self.assertNotEqual(user_role, None)

        self.org_management_service.remove_user_role(org_id, ORG_MANAGER_ROLE)
        with self.assertRaises(BadRequest) as cm:
            user_role = self.org_management_service.find_org_role_by_name(org_id, ORG_MANAGER_ROLE)
        self.assertIn("The User Role 'ORG_MANAGER' does not exist for this Org", cm.exception.message)


        with self.assertRaises(BadRequest):
            self.org_management_service.delete_org()
        self.org_management_service.delete_org(org_id)

        with self.assertRaises(NotFound) as cm:
            self.org_management_service.read_org(org_id)
        self.assertIn("does not exist", cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.org_management_service.delete_org(org_id)
        self.assertIn("does not exist", cm.exception.message)
Example #4
    def _create_l1_out_data_products(self):

        ctd_l1_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Conductivity',
            description='transform output L1 conductivity',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l1_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_conductivity_output_dp_obj,
            self.outgoing_stream_l1_conductivity_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l1_conductivity_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(
            self.ctd_l1_conductivity_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_conductivity', stream_ids[0])
        self.loggerpids.append(pid)

        ctd_l1_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Pressure',
            description='transform output L1 pressure',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l1_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_pressure_output_dp_obj, self.outgoing_stream_l1_pressure_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l1_pressure_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(
            self.ctd_l1_pressure_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_pressure', stream_ids[0])
        self.loggerpids.append(pid)

        ctd_l1_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L1_Temperature',
            description='transform output L1 temperature',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l1_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l1_temperature_output_dp_obj,
            self.outgoing_stream_l1_temperature_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l1_temperature_output_dp_id)
        # Retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger
        stream_ids, _ = self.rrclient.find_objects(
            self.ctd_l1_temperature_output_dp_id, PRED.hasStream, None, True)
        pid = self.create_logger('ctd_l1_temperature', stream_ids[0])
        self.loggerpids.append(pid)
Example #5
def parse_phones(text):
    if ':' in text:
        out = []
        for phone_type, number in parse_dict(text).iteritems():
            out.append(IonObject("Phone", phone_number=number,
                                 phone_type=phone_type))
        return out
    elif text:
        return [
            IonObject("Phone", phone_number=text.strip(), phone_type='office')
        ]
    else:
        return []
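A brief usage sketch of the two branches above (illustration only; parse_dict is assumed to map "work: 555-1234" to {'work': '555-1234'}):

parse_phones("work: 555-1234")   # -> [Phone(phone_type='work', phone_number='555-1234')]
parse_phones("555-1234")         # -> [Phone(phone_type='office', phone_number='555-1234')]
parse_phones("")                 # -> []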
Example #6
    def test_register_instrument(self):
        # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object

        # set up initial instrument to register
        instrument_obj = IonObject(
            RT.InstrumentDevice,
            name='Inst1',
            description='an instrument that is creating the data product')
        instrument_id, rev = self.rrclient.create(instrument_obj)

        dataproduct_obj = IonObject(RT.DataProduct,
                                    name='DataProduct1',
                                    description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new data producer
        try:
            ds_id = self.client.register_instrument(instrument_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" % ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an instrument, creating the stream for the product
        try:
            self.client.assign_data_product(instrument_id, dataproduct_id,
                                            True)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %
                      ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %
                      ex)

        # test UNassigning a data product from instrument, deleting the stream for the product
        try:
            self.client.unassign_data_product(instrument_id, dataproduct_id,
                                              True)
        except BadRequest as ex:
            self.fail(
                "failed to unassign data product from data producer: %s" % ex)
        except NotFound as ex:
            self.fail(
                "failed to unassign data product from data producer: %s" % ex)

        # test UNregistering a new data producer
        try:
            ds_id = self.client.unregister_instrument(instrument_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" % ex)
Example #7
    def create_basic_deployment(self, name='', description=''):
        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name=name,
                                   description=description,
                                   context=IonObject(
                                       OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        return self.OMS.create_deployment(deployment_obj)
Example #8
    def _create_l0_output_data_products(self,
                                        outgoing_stream_l0_conductivity_id,
                                        outgoing_stream_l0_pressure_id,
                                        outgoing_stream_l0_temperature_id):

        out_data_prods = []

        ctd_l0_conductivity_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Conductivity',
            description='transform output conductivity',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_conductivity_output_dp_obj,
            outgoing_stream_l0_conductivity_id)
        out_data_prods.append(self.ctd_l0_conductivity_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l0_conductivity_output_dp_id)

        ctd_l0_pressure_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_pressure_output_dp_obj, outgoing_stream_l0_pressure_id)
        out_data_prods.append(self.ctd_l0_pressure_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l0_pressure_output_dp_id)

        ctd_l0_temperature_output_dp_obj = IonObject(
            RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain=self.tdom,
            spatial_domain=self.sdom)

        self.ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(
            ctd_l0_temperature_output_dp_obj,
            outgoing_stream_l0_temperature_id)
        out_data_prods.append(self.ctd_l0_temperature_output_dp_id)
        self.dataproductclient.activate_data_product_persistence(
            data_product_id=self.ctd_l0_temperature_output_dp_id)

        return out_data_prods
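The three blocks above repeat the same create/activate/collect steps. A more compact equivalent is sketched below; this is a refactoring suggestion only, assuming the same client attributes as the method above, with setattr reproducing names such as ctd_l0_conductivity_output_dp_id:

        out_data_prods = []
        for name, stream_id in [('L0_Conductivity', outgoing_stream_l0_conductivity_id),
                                ('L0_Pressure', outgoing_stream_l0_pressure_id),
                                ('L0_Temperature', outgoing_stream_l0_temperature_id)]:
            dp_obj = IonObject(RT.DataProduct,
                               name=name,
                               description='transform output ' + name.split('_')[1].lower(),
                               temporal_domain=self.tdom,
                               spatial_domain=self.sdom)
            dp_id = self.dataproductclient.create_data_product(dp_obj, stream_id)
            setattr(self, 'ctd_' + name.lower() + '_output_dp_id', dp_id)
            out_data_prods.append(dp_id)
            self.dataproductclient.activate_data_product_persistence(data_product_id=dp_id)
        return out_data_prods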
Example #9
    def setUp(self):
        mock_clients = self._create_service_mock('workflow_management')

        self.workflow_management_service = WorkflowManagementService()
        self.workflow_management_service.clients = mock_clients

        # Rename to save some typing
        self.mock_create = mock_clients.resource_registry.create
        self.mock_read = mock_clients.resource_registry.read
        self.mock_update = mock_clients.resource_registry.update
        self.mock_delete = mock_clients.resource_registry.delete
        self.mock_create_association = mock_clients.resource_registry.create_association
        self.mock_delete_association = mock_clients.resource_registry.delete_association
        self.mock_find_objects = mock_clients.resource_registry.find_objects
        self.mock_find_resources = mock_clients.resource_registry.find_resources
        self.mock_find_subjects = mock_clients.resource_registry.find_subjects
        self.mock_find_associations = mock_clients.resource_registry.find_associations

        # workflow definition
        self.workflow_definition = Mock()
        self.workflow_definition.name = "Foo"
        self.workflow_definition.description = "This is a test workflow definition"
        self.workflow_definition.workflow_steps = []

        workflow_step_obj = IonObject('DataProcessWorkflowStep',
                                      data_process_definition_id='123')
        self.workflow_definition.workflow_steps.append(workflow_step_obj)

        workflow_step_obj = IonObject('DataProcessWorkflowStep',
                                      data_process_definition_id='456')
        self.workflow_definition.workflow_steps.append(workflow_step_obj)

        # WorkflowDefinition to DataProcessDefinition associations
        self.workflow_definition_to_dataprocess_definition_association = Mock()
        self.workflow_definition_to_dataprocess_definition_association._id = 'abc'
        self.workflow_definition_to_dataprocess_definition_association.s = "111"
        self.workflow_definition_to_dataprocess_definition_association.st = RT.WorkflowDefinition
        self.workflow_definition_to_dataprocess_definition_association.p = PRED.hasDataProcessDefinition
        self.workflow_definition_to_dataprocess_definition_association.o = "123"
        self.workflow_definition_to_dataprocess_definition_association.ot = RT.DataProcessDefinition

        self.workflow_definition_to_dataprocess_definition_association2 = Mock()
        self.workflow_definition_to_dataprocess_definition_association2._id = 'def'
        self.workflow_definition_to_dataprocess_definition_association2.s = "111"
        self.workflow_definition_to_dataprocess_definition_association2.st = RT.WorkflowDefinition
        self.workflow_definition_to_dataprocess_definition_association2.p = PRED.hasDataProcessDefinition
        self.workflow_definition_to_dataprocess_definition_association2.o = "456"
        self.workflow_definition_to_dataprocess_definition_association2.ot = RT.DataProcessDefinition
Example #10
    def _create_subsequent_deployment(self, prior_dep_info):
        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice2',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(
            platform_device_obj)

        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice2',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(
            instrument_device_obj)
        self.rrclient.create_association(platform_device_id, PRED.hasDevice,
                                         instrument_device_id)

        self.imsclient.assign_platform_model_to_platform_device(
            prior_dep_info.platform_model_id, platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(
            prior_dep_info.instrument_model_id, instrument_device_id)

        start = str(int(time.mktime(datetime.datetime(2013, 6, 1).timetuple())))
        end = str(int(time.mktime(datetime.datetime(2020, 6, 1).timetuple())))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start,
                                    end_datetime=end)
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment2',
                                   description='some new deployment',
                                   context=IonObject(
                                       OT.CabledNodeDeploymentContext),
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        self.omsclient.assign_site_to_deployment(
            prior_dep_info.platform_site_id, deployment_id)
        self.omsclient.assign_device_to_deployment(
            prior_dep_info.platform_device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ",
                  str(deployment_id))

        ret = DotDict(instrument_device_id=instrument_device_id,
                      platform_device_id=platform_device_id,
                      deployment_id=deployment_id)

        return ret
Example #11
    def test_register_external_data_set(self):
        # Register an external data set as a data producer in coordination with DM PubSub: create stream, register and create producer object

        # set up initial instrument to register
        ext_dataset_obj = IonObject(RT.ExternalDataset,
                                    name='DataSet1',
                                    description='an external data feed')
        ext_dataset_id, rev = self.rrclient.create(ext_dataset_obj)

        dataproduct_obj = IonObject(RT.DataProduct,
                                    name='DataProduct1',
                                    description='sample data product')
        dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

        # test registering a new external data set
        try:
            ds_id = self.client.register_external_data_set(ext_dataset_id)
        except BadRequest as ex:
            self.fail("failed to create new data producer: %s" % ex)
        print 'new data producer id = ', ds_id

        # test assigning a data product to an ext_dataset_id, creating the stream for the product
        try:
            self.client.assign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail("failed to assign data product to data producer: %s" %
                      ex)
        except NotFound as ex:
            self.fail("failed to assign data product to data producer: %s" %
                      ex)

        # test UNassigning a data product from ext_dataset_id, deleting the stream for the product
        try:
            self.client.unassign_data_product(ext_dataset_id, dataproduct_id)
        except BadRequest as ex:
            self.fail(
                "failed to unassign data product from data producer: %s" % ex)
        except NotFound as ex:
            self.fail(
                "failed to unassign data product from data producer: %s" % ex)

        # test UNregistering a external data set
        try:
            ds_id = self.client.unregister_external_data_set(ext_dataset_id)
        except NotFound as ex:
            self.fail("failed to unregister instrument producer: %s" % ex)
Example #12
    def trigger_container_snapshot(self,
                                   snapshot_id='',
                                   include_snapshots=None,
                                   exclude_snapshots=None,
                                   take_at_time='',
                                   clear_all=False,
                                   persist_snapshot=True,
                                   snapshot_kwargs=None):

        if not snapshot_id:
            snapshot_id = get_ion_ts()
        if not snapshot_kwargs:
            snapshot_kwargs = {}

        self.perform_action(
            ALL_CONTAINERS_INSTANCE,
            IonObject(OT.TriggerContainerSnapshot,
                      snapshot_id=snapshot_id,
                      include_snapshots=include_snapshots,
                      exclude_snapshots=exclude_snapshots,
                      take_at_time=take_at_time,
                      clear_all=clear_all,
                      persist_snapshot=persist_snapshot,
                      snapshot_kwargs=snapshot_kwargs))
        log.info("Event to trigger container snapshots sent. snapshot_id=%s" %
                 snapshot_id)
Example #13
    def trigger_garbage_collection(self):
        """Triggers a garbage collection in all containers

        @throws BadRequest    None
        """
        self.perform_action(ALL_CONTAINERS_INSTANCE,
                            IonObject(OT.TriggerGarbageCollection))
Example #14
    def reset_policy_cache(self, headers=None, timeout=None):
        """Clears and reloads the policy caches in all of the containers.

        @throws BadRequest    None
        """
        self.perform_action(ALL_CONTAINERS_INSTANCE,
                            IonObject(OT.ResetPolicyCache))
Example #15
    def set_log_level(self, logger='', level='', recursive=False):
        self.perform_action(
            ALL_CONTAINERS_INSTANCE,
            IonObject(OT.ChangeLogLevel,
                      logger=logger,
                      level=level,
                      recursive=recursive))
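The four container commands above (trigger_container_snapshot, trigger_garbage_collection, reset_policy_cache, set_log_level) share one pattern: build an IonObject directive and broadcast it to every container with perform_action(ALL_CONTAINERS_INSTANCE, ...). A hedged usage sketch, where client stands for whatever object exposes these methods and the argument values are chosen only for illustration:

client.set_log_level(logger='pyon.net', level='DEBUG', recursive=True)
client.trigger_container_snapshot(snapshot_id='debug_snapshot', persist_snapshot=True)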
Example #16
    def _do_test_lifecycle(self):
        # Lifecycle tests
        att = IonObject("InstrumentDevice", name='mine', description='desc')

        rid, rev = self.resource_registry_service.create(att)

        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.name, att.name)
        self.assertEquals(att1.lcstate, LCS.DRAFT)
        self.assertEquals(att1.availability, AS.PRIVATE)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.PLAN)
        self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE))

        att2 = self.resource_registry_service.read(rid)
        self.assertEquals(att2.lcstate, LCS.PLANNED)
        self.assertEquals(att2.availability, AS.PRIVATE)

        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.execute_lifecycle_transition(rid, LCE.UNANNOUNCE)
        self.assertTrue("type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce" in cm.exception.message)

        new_state = self.resource_registry_service.execute_lifecycle_transition(rid, LCE.DEVELOP)
        self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE))

        with self.assertRaises(BadRequest):
            self.resource_registry_service.execute_lifecycle_transition(
                    resource_id=rid, transition_event='NONE##')

        self.resource_registry_service.set_lifecycle_state(rid, lcstate(LCS.INTEGRATED, AS.PRIVATE))
        att1 = self.resource_registry_service.read(rid)
        self.assertEquals(att1.lcstate, LCS.INTEGRATED)
        self.assertEquals(att1.availability, AS.PRIVATE)
Example #17
    def set_object_field(self, obj, field, field_val):
        """Recursively set sub object field values.
        TODO: This may be an expensive operation. May also be redundant with object code
        """
        if isinstance(field_val, dict) and field != "kwargs":
            sub_obj = getattr(obj, field)

            if isinstance(sub_obj, IonObjectBase):

                if "type_" in field_val and field_val["type_"] != sub_obj.type_:
                    if issubtype(field_val["type_"], sub_obj.type_):
                        sub_obj = IonObject(field_val["type_"])
                        setattr(obj, field, sub_obj)
                    else:
                        raise Inconsistent(
                            "Unable to walk the field %s - types don't match: %s %s"
                            % (field, sub_obj.type_, field_val["type_"]))

                for sub_field in field_val:
                    self.set_object_field(sub_obj, sub_field,
                                          field_val.get(sub_field))

            elif isinstance(sub_obj, dict):
                setattr(obj, field, field_val)

            else:
                for sub_field in field_val:
                    self.set_object_field(sub_obj, sub_field,
                                          field_val.get(sub_field))
        else:
            # skip type_; it is already set on the object
            if field != "type_":
                setattr(obj, field, field_val)
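A minimal, self-contained sketch of the recursive idea above, using plain Python objects in place of IonObjects (the names here are hypothetical, not part of the service):

class _Node(object):
    pass

def _set_field(obj, field, value):
    # dict values descend into the nested object; anything else is set directly
    if isinstance(value, dict) and not isinstance(getattr(obj, field), dict):
        sub = getattr(obj, field)
        for key, val in value.items():
            _set_field(sub, key, val)
    else:
        setattr(obj, field, value)

root = _Node()
root.contact = _Node()
root.contact.email = ''
_set_field(root, 'contact', {'email': 'ops@example.org'})
assert root.contact.email == 'ops@example.org'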
Example #18
    def run_reverse_transform(self):
        ''' Runs a reverse transform example and displays the results of performing the transform
        '''
        tms_cli = TransformManagementServiceClient(node=self.container.node)
        procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

        #-------------------------------
        # Process Definition
        #-------------------------------
        process_definition = IonObject(RT.ProcessDefinition,
                                       name='transform_process_definition')
        process_definition.executable = {
            'module': 'ion.processes.data.transforms.transform_example',
            'class': 'ReverseTransform'
        }

        process_definition_id = procd_cli.create_process_definition(
            process_definition)

        #-------------------------------
        # Execute Transform
        #-------------------------------
        input_data = [1, 2, 3, 4]
        retval = tms_cli.execute_transform(
            process_definition_id=process_definition_id,
            data=input_data,
            configuration={})
        log.debug('Transform Input: %s', input_data)
        log.debug('Transform Output: %s', retval)
Example #19
    def get_recent_events(self, resource_id='', limit=100):
        """
        Get recent events for use in extended resource computed attribute
        @param resource_id str
        @param limit int
        @retval ComputedEventListValue with the list of Event objects and their computed attributes
        """

        now = get_ion_ts()
        events = self.find_events(origin=resource_id,
                                  limit=limit,
                                  max_datetime=now,
                                  descending=True)

        ret = IonObject(OT.ComputedEventListValue)
        if events:
            ret.value = events
            ret.computed_list = [
                get_event_computed_attributes(event) for event in events
            ]
            ret.status = ComputedValueAvailability.PROVIDED
        else:
            ret.status = ComputedValueAvailability.NOTAVAILABLE

        return ret
Example #20
    def setUp(self):
        # Start container
        #print 'instantiating container'
        self._start_container()

        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        # Now create client to DataProductManagementService
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
        self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
        self.dpmsclient = DataProductManagementServiceClient(node=self.container.node)
        self.dataprocessclient = DataProcessManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.process_dispatcher = ProcessDispatcherServiceClient()

        self.dataset_management = DatasetManagementServiceClient()

        # create missing data process definition
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dataprocessclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dataprocessclient.deactivate_data_process(proc_id)
                self.dataprocessclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)
Example #21
    def create_salinity_doubler_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='salinity_doubler')
        if len(dpd) > 0:
            return dpd[0]

        # Salinity Doubler: Data Process Definition
        log.debug("Create data process definition SalinityDoublerTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='salinity_doubler',
            description='create a salinity doubler data product',
            module='ion.processes.data.transforms.example_double_salinity',
            class_name='SalinityDoubler')
        try:
            salinity_doubler_dprocdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new SalinityDoubler data process definition: %s"
                % ex)

        # create a stream definition for the data from the salinity Transform
        ctd_pdict_id = self.datasetclient.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict', id_only=True)
        salinity_double_stream_def_id = self.pubsubclient.create_stream_definition(
            name='SalinityDoubler', parameter_dictionary_id=ctd_pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            salinity_double_stream_def_id,
            salinity_doubler_dprocdef_id,
            binding='salinity')

        return salinity_doubler_dprocdef_id
Example #22
def any_old(resource_type, extra_fields=None):
    """
    Create any old resource... a generic and unique object of a given type
    @param resource_type the resource type
    @param extra_fields dict of any extra fields to set
    """
    if not extra_fields:
        extra_fields = {}

    if resource_type not in _sa_test_helpers_ionobj_count:
        _sa_test_helpers_ionobj_count[resource_type] = 0

    _sa_test_helpers_ionobj_count[resource_type] += 1

    name = "%s_%d" % (resource_type,
                      _sa_test_helpers_ionobj_count[resource_type])
    desc = "My %s #%d" % (resource_type,
                          _sa_test_helpers_ionobj_count[resource_type])
    log.debug("Creating any old %s IonObject (#%d)" %
              (resource_type, _sa_test_helpers_ionobj_count[resource_type]))

    ret = IonObject(resource_type, name=name, description=desc)

    #add any extra fields
    for k, v in extra_fields.iteritems():
        setattr(ret, k, v)

    return ret
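Example #25 below uses this helper to stamp out uniquely named resources with extra custom_attributes. On the first call for a given type, a call like the following (resource type chosen only for illustration) returns an object named "PlatformSite_1" with description "My PlatformSite #1":

site_obj = any_old(RT.PlatformSite)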
Example #23
    def acquire_resource(self, org_id='', user_id='', resource_id=''):
        """Acquire the specified resource for a specified user within the specified Org. Once shared, the resource is
        committed to the user. Throws a NotFound exception if any of the ids do not exist.

        @param org_id    str
        @param user_id    str
        @param resource_id    str
        @retval success    bool
        @throws NotFound    object with specified id does not exist
        """
        param_objects = self._validate_parameters(org_id=org_id,
                                                  user_id=user_id,
                                                  resource_id=resource_id)

        commitment = IonObject(RT.ResourceCommitment,
                               name='',
                               org_id=org_id,
                               user_id=user_id,
                               resource_id=resource_id,
                               description='Resource Commitment')

        commitment_id, commitment_rev = self.clients.resource_registry.create(
            commitment)
        commitment._id = commitment_id
        commitment._rev = commitment_rev
        self.clients.resource_registry.create_association(
            user_id, PRED.hasCommitment, commitment)
        self.clients.resource_registry.create_association(
            resource_id, PRED.hasCommitment, commitment)

        return True
Example #24
    def create_mpl_graphs_data_process_definition(self):

        #First look to see if it exists and if not, then create it
        dpd, _ = self.rrclient.find_resources(restype=RT.DataProcessDefinition,
                                              name='mpl_graphs_transform')
        if len(dpd) > 0:
            return dpd[0]

        #Data Process Definition
        log.debug("Create data process definition MatplotlibGraphsTransform")
        dpd_obj = IonObject(
            RT.DataProcessDefinition,
            name='mpl_graphs_transform',
            description='Convert data streams to Matplotlib graphs',
            module='ion.processes.data.transforms.viz.matplotlib_graphs',
            class_name='VizTransformMatplotlibGraphs')
        try:
            procdef_id = self.dataprocessclient.create_data_process_definition(
                dpd_obj)
        except Exception as ex:
            self.fail(
                "failed to create new VizTransformMatplotlibGraphs data process definition: %s"
                % ex)

        pdict_id = self.datasetclient.read_parameter_dictionary_by_name(
            'graph_image_param_dict', id_only=True)
        # create a stream definition for the data
        stream_def_id = self.pubsubclient.create_stream_definition(
            name='VizTransformMatplotlibGraphs',
            parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(
            stream_def_id, procdef_id, binding='graph_image_param_dict')

        return procdef_id
Example #25
    def test_custom_attributes(self):
        """
        Test assignment of custom attributes
        """

        instModel_obj = IonObject(OT.CustomAttribute,
                                  name='SBE37IMModelAttr',
                                  description="model custom attr")

        instrument_model_id, _ = self.RR.create(
            any_old(RT.InstrumentModel,
                    {"custom_attributes": [instModel_obj]}))
        instrument_device_id, _ = self.RR.create(
            any_old(
                RT.InstrumentDevice, {
                    "custom_attributes": {
                        "favorite_color": "red",
                        "bogus_attr": "should raise warning"
                    }
                }))

        self.IMS.assign_instrument_model_to_instrument_device(
            instrument_model_id, instrument_device_id)

        # cleanup
        self.IMS.force_delete_instrument_device(instrument_device_id)
        self.IMS.force_delete_instrument_model(instrument_model_id)
Example #26
    def buy_bonds(self, account_id='', cash_amount=0.0):
        """
        Purchase the specified amount of bonds.  Check is first made
        that the cash account has sufficient funds.
        """
        account_obj = self.clients.resource_registry.read(account_id)
        if not account_obj:
            raise NotFound("Account %s does not exist" % account_id)
        if account_obj.cash_balance < cash_amount:
            raise BadRequest("Insufficient funds")

        owner_obj = self.clients.resource_registry.find_subjects(
            "BankCustomer", PRED.hasAccount, account_obj, False)[0][0]
        # Create order object and call trade service
        order_obj = IonObject("Order",
                              type="buy",
                              on_behalf=owner_obj.name,
                              cash_amount=cash_amount)

        # Make the call to trade service
        confirmation_obj = self.clients.trade.exercise(order_obj)

        if confirmation_obj.status == "complete":
            account_obj.cash_balance -= cash_amount
            account_obj.bond_balance += confirmation_obj.proceeds
            self.clients.resource_registry.update(account_obj)
            return "Balances after bond purchase: cash %f, bonds: %s" % (
                account_obj.cash_balance, account_obj.bond_balance)
        return "Bond purchase status is: %s" % confirmation_obj.status
Example #27
    def test_createDataProduct_and_DataProducer_success(self):
        # setup
        self.clients.resource_registry.find_resources.return_value = (
            [], 'do not care')
        self.clients.resource_registry.find_associations.return_value = []
        self.clients.resource_registry.create.return_value = ('SOME_RR_ID1',
                                                              'Version_1')
        self.clients.data_acquisition_management.assign_data_product.return_value = None
        self.clients.pubsub_management.create_stream.return_value = "stream_id", "route_id"

        # Construct temporal and spatial Coordinate Reference System objects
        tcrs = CRS([AxisTypeEnum.TIME])
        scrs = CRS([AxisTypeEnum.LON, AxisTypeEnum.LAT])

        # Construct temporal and spatial Domain objects
        tdom = GridDomain(GridShape('temporal', [0]), tcrs,
                          MutabilityEnum.EXTENSIBLE)  # 1d (timeline)
        sdom = GridDomain(GridShape('spatial', [0]), scrs,
                          MutabilityEnum.IMMUTABLE)  # 1d spatial topology (station/trajectory)

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)

        # test call
        dp_id = self.data_product_management_service.create_data_product(
            data_product=dp_obj, stream_definition_id='a stream def id')
Example #28
    def create_inst_agent_instance(self, agent_id, device_id):

        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }


        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)


        instAgentInstance_id = self.client.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                                agent_id,
                                                                                device_id)

        return instAgentInstance_id
Example #29
    def _get_type_interface(self, res_type):
        """
        Creates a merge of params and commands up the type inheritance chain.
        Note: If a subtype redefines a param or command entry, its entire entry replaces the supertype's definition.
        """
        res_interface = dict(params={}, commands={})

        base_types = IonObject(res_type)._get_extends()
        base_types.insert(0, res_type)

        for rt in reversed(base_types):
            type_interface = self.resource_interface.get(rt, None)
            if not type_interface:
                continue
            for tpar, tval in type_interface.iteritems():
                if tpar in res_interface:
                    rval = res_interface[tpar]
                    if isinstance(rval, dict):
                        rval.update(tval)
                    else:
                        res_interface[tpar] = tval
                else:
                    res_interface[tpar] = dict(tval) if isinstance(
                        tval, dict) else tval

        return res_interface
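A self-contained sketch of the merge order above, with hypothetical type names and entries: base types are walked from most general to most specific, so a subtype's whole entry replaces its supertype's entry key by key.

resource_interface = {
    'Resource': {'params': {'name': {'type': 'str'}}},
    'Device': {'params': {'serial': {'type': 'str'}}, 'commands': {'reset': {}}},
    'InstrumentDevice': {'params': {'serial': {'type': 'int'}}},
}
base_types = ['InstrumentDevice', 'Device', 'Resource']   # most specific first, as in the method above

res_interface = dict(params={}, commands={})
for rt in reversed(base_types):                            # general -> specific
    for tpar, tval in resource_interface.get(rt, {}).items():
        if isinstance(res_interface.get(tpar), dict):
            res_interface[tpar].update(tval)               # subtype entry wins per key
        else:
            res_interface[tpar] = tval

assert res_interface['params']['serial'] == {'type': 'int'}
assert 'reset' in res_interface['commands']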
Example #30
    def _do_test_find_resources(self):
        with self.assertRaises(BadRequest) as cm:
            self.resource_registry_service.find_resources(
                RT.UserInfo, LCS.DRAFT, "name", False)
        self.assertTrue(
            cm.exception.message == "find by name does not support lcstate")

        ret = self.resource_registry_service.find_resources(
            RT.UserInfo, None, "name", False)
        self.assertEquals(len(ret[0]), 0)

        # Instantiate an object
        obj = IonObject("InstrumentAgentInstance", name="name")

        # Persist object and read it back
        obj_id, obj_rev = self.resource_registry_service.create(obj)
        read_obj = self.resource_registry_service.read(obj_id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentAgentInstance, None, "name", False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)

        ret = self.resource_registry_service.find_resources(
            RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False)
        self.assertEquals(len(ret[0]), 1)
        self.assertEquals(ret[0][0]._id, read_obj._id)