Example #1
    def parse_constraints(self, environ):
#        print ">> Start parse_constraints"
#        import pprint
#        pprint.pprint("")
#        pprint.pprint(environ)

        ds_name, ds_id, ds_url, buf_size = get_dataset_info(self)
#        print "DS Info:  name=%s ds_id=%s ds_url=%s buf_size=%s" % (ds_name, ds_id, ds_url, buf_size)

        # TODO: Call the "damsP" module to retrieve a BaseDatasetHandler based on the ds_id
        dsh = self.damsP.get_data_handlers(ds_id=ds_id)

        #DSH WAY
        dataset_type = DatasetType(name=ds_name, attributes={'NC_GLOBAL': dsh.get_attributes()})
        fields, queries = environ['pydap.ce']
        fields = fields or [[(name, ())] for name in dsh.ds.variables]

#        print "CE Fields: %s" % fields
#        print "CE Queries: %s" % queries

        pdr_obj = IonObject("PydapVarDataRequest", name="p_req")

        for fvar in fields:
            target = dataset_type
            while fvar:
                name, slice_ = fvar.pop(0)
                pdr_obj.name = name
                pdr_obj.slice = slice_
                if (name in dsh.ds.dimensions or not dsh.ds.variables[name].dimensions or target is not dataset_type):
#                    print "==> if"
                    nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj)
                    target[name] = BaseType(name=nm, data=dat, shape=dat.shape, type=tc, dimensions=di, attributes=at)
                elif fvar:
#                    print "==> elif"
                    attrs = dsh.get_attributes(name)
                    target.setdefault(name, StructureType(name=name, attributes=attrs))
                    target = target[name]
                else:
#                    print "==> else"
                    attrs = dsh.get_attributes(name)
                    grid = target[name] = GridType(name=name, attributes=attrs)
                    nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj)
                    grid[name] = BaseType(name=nm, data=dat, shape=dat.shape, type=tc, dimensions=di, attributes=at)
                    slice_ = list(slice_) + [slice(None)] * (len(grid.array.shape) - len(slice_))
                    for dim, dimslice in zip(dsh.ds.variables[name].dimensions, slice_):
                        pdr_obj.name = dim
                        pdr_obj.slice = dimslice
                        nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj)
                        grid[dim] = BaseType(name=nm, data=dat, shape=dat.shape, type=tc, dimensions=di, attributes=at)

        dataset_type._set_id()
        dataset_type.close = dsh.ds.close

#        print ">> End parse_constraints"
        return dataset_type
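A minimal usage sketch for parse_constraints above, assuming a handler instance of this class (here called handler) and a hypothetical variable name; in a real deployment the Pydap server middleware builds environ['pydap.ce'] from the client's constraint expression:

    # Hypothetical driver: request a single variable with a slice, the way a
    # DAP constraint expression like "?temperature[0:1:9]" would, then build
    # the DatasetType through the DataHandler.
    environ = {'pydap.ce': ([[('temperature', (slice(0, 10),))]], [])}
    dataset = handler.parse_constraints(environ)
    variable_names = dataset.keys()   # children materialized via dsh.acquire_data()
    dataset.close()                   # bound to dsh.ds.close in parse_constraints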
    def _convert_negotiations_to_requests(self, negotiations=None, user_info_id='', org_id=''):
        assert isinstance(negotiations, list)

        orgs,_ = self.clients.resource_registry.find_resources(restype=RT.Org)

        ret_list = []
        for neg in negotiations:

            request = IonObject(OT.OrgUserNegotiationRequest, ts_updated=neg.ts_updated, negotiation_id=neg._id,
                negotiation_type=NegotiationTypeEnum._str_map[neg.negotiation_type],
                negotiation_status=NegotiationStatusEnum._str_map[neg.negotiation_status],
                originator=ProposalOriginatorEnum._str_map[neg.proposals[-1].originator],
                request_type=neg.proposals[-1].type_,
                description=neg.description, reason=neg.reason,
                user_id=user_info_id)

            # since this is a proxy for the Negotiation object, simulate its id to help the UI deal with it
            request._id = neg._id

            org_request = [ o for o in orgs if o._id == neg.proposals[-1].provider ]
            if org_request:
                request.org_id = org_request[0]._id
                request.name = org_request[0].name

            ret_list.append(request)

        return ret_list
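A hypothetical call site for _convert_negotiations_to_requests above; the resource-registry lookup and the id values are assumptions, not part of the original example:

    # Hypothetical: turn a user's open negotiations into UI-friendly request proxies.
    negotiations, _ = self.clients.resource_registry.find_resources(restype=RT.Negotiation)
    requests = self._convert_negotiations_to_requests(list(negotiations),
                                                      user_info_id='some_user_info_id',
                                                      org_id='some_org_id')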
Example #3
 def fun():
     #ret = Mock()
     log.debug("Creating sample %s", iontype)
     ret = IonObject(iontype)
     ret.name = "sample %s" % iontype
     ret.description = "description of sample %s" % iontype
     for k, v in resource_params.iteritems():
         setattr(ret, k, v)
     return ret
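The fun helper above closes over iontype, resource_params and log from its enclosing test fixture; a hypothetical enclosing scope might look like:

    # Hypothetical values supplied by the enclosing test helper;
    # log is assumed to be the module-level logger.
    iontype = RT.InstrumentDevice                 # any registered resource type
    resource_params = {'serial_number': '12345'}  # extra attributes to set on the sample
    sample_resource = fun()                       # returns a populated IonObject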
Example #4
 def fun():
     #ret = Mock()
     log.debug("Creating sample %s", iontype)
     ret = IonObject(iontype)
     ret.name = "sample %s" % iontype
     ret.description = "description of sample %s" % iontype
     for k, v in resource_params.iteritems():
         setattr(ret, k, v)
     return ret
Example #5
    def test_user_info(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})
        self.identity_management_service.register_user_credentials(user_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        user_info = self.identity_management_service.create_user_info(user_id, user_info_obj)

        with self.assertRaises(Conflict) as cm:
            self.identity_management_service.create_user_info(user_id, user_info_obj)
        self.assertTrue("UserInfo already exists for user id" in cm.exception.message)

        user_info_obj = self.identity_management_service.find_user_info_by_id(user_id)

        user_info_obj = self.identity_management_service.find_user_info_by_name("Foo")

        user_info_obj = self.identity_management_service.find_user_info_by_subject(self.subject)

        user_info_obj = self.identity_management_service.read_user_info(user_info)

        user_info_obj.name = 'Jane Doe'

        self.identity_management_service.update_user_info(user_info_obj)

        self.identity_management_service.delete_user_info(user_info)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_name("John Doe")
        self.assertEqual(cm.exception.message, 'UserInfo with name John Doe does not exist')

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_subject("Bogus subject")
        self.assertEqual(cm.exception.message, "UserCredentials with subject Bogus subject does not exist")

        self.identity_management_service.unregister_user_credentials(user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)
    def test_user_info(self):
        actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
        user_id = self.identity_management_service.create_actor_identity(actor_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})
        self.identity_management_service.register_user_credentials(user_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})
        user_info = self.identity_management_service.create_user_info(user_id, user_info_obj)

        with self.assertRaises(Conflict) as cm:
            self.identity_management_service.create_user_info(user_id, user_info_obj)
        self.assertTrue("UserInfo already exists for user id" in cm.exception.message)

        user_info_obj = self.identity_management_service.find_user_info_by_id(user_id)

        user_info_obj = self.identity_management_service.find_user_info_by_name("Foo")

        user_info_obj = self.identity_management_service.find_user_info_by_subject(self.subject)

        user_info_obj = self.identity_management_service.read_user_info(user_info)

        user_info_obj.name = 'Jane Doe'

        self.identity_management_service.update_user_info(user_info_obj)

        self.identity_management_service.delete_user_info(user_info)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.read_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.delete_user_info(user_info)
        self.assertTrue('does not exist' in cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_name("John Doe")
        self.assertEqual(cm.exception.message, 'UserInfo with name John Doe does not exist')

        with self.assertRaises(NotFound) as cm:
            self.identity_management_service.find_user_info_by_subject("Bogus subject")
        self.assertEqual(cm.exception.message, "UserCredentials with subject Bogus subject does not exist")

        self.identity_management_service.unregister_user_credentials(user_id, self.subject)

        self.identity_management_service.delete_actor_identity(user_id)
Example #7
    def _build_ui_resource(self, row, objtype, mapping, auto_add=True):
        refid = None
        obj_fields = {}
        for obj_attr, row_attr in mapping.iteritems():
            row_val = row[row_attr]

            obj_fields[obj_attr] = row_val
            if obj_attr == "uirefid":
                refid = row_val

        obj = IonObject(objtype, **obj_fields)
        if not obj.name:
            obj.name = 'TBD'

        if auto_add:
            self._add_ui_object(refid, obj)

        return refid, obj
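A hypothetical row/mapping pair for _build_ui_resource above, just to illustrate the expected shapes (the column names, object type and values are made up):

    # Hypothetical: one spreadsheet row plus the attribute -> column mapping for it.
    row = {'ID': 'UI12345', 'Label': 'Dashboard', 'Notes': 'Landing page spec'}
    mapping = {'uirefid': 'ID', 'name': 'Label', 'description': 'Notes'}
    refid, ui_obj = self._build_ui_resource(row, 'UISpec', mapping, auto_add=False)
    # refid == 'UI12345'; a blank 'Label' column would make obj.name fall back to 'TBD'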
Example #8
    def _build_ui_resource(self, row, objtype, mapping, auto_add=True):
        refid = None
        obj_fields = {}
        for obj_attr, row_attr in mapping.iteritems():
            row_val = row[row_attr]

            obj_fields[obj_attr] = row_val
            if obj_attr == "uirefid":
                refid = row_val

        obj_fields['ordinal'] = self.counter
        obj = IonObject(objtype, **obj_fields)
        if not obj.name:
            obj.name = 'TBD'

        if auto_add:
            self._add_ui_object(refid, obj)

        return refid, obj
    def _convert_negotiations_to_requests(self,
                                          negotiations=None,
                                          user_info_id='',
                                          org_id=''):
        assert isinstance(negotiations, list)

        orgs, _ = self.clients.resource_registry.find_resources(restype=RT.Org)

        ret_list = []
        for neg in negotiations:

            request = IonObject(
                OT.OrgUserNegotiationRequest,
                ts_updated=neg.ts_updated,
                negotiation_id=neg._id,
                negotiation_type=NegotiationTypeEnum._str_map[
                    neg.negotiation_type],
                negotiation_status=NegotiationStatusEnum._str_map[
                    neg.negotiation_status],
                originator=ProposalOriginatorEnum._str_map[
                    neg.proposals[-1].originator],
                request_type=neg.proposals[-1].type_,
                description=neg.description,
                reason=neg.reason,
                user_id=user_info_id)

            # since this is a proxy for the Negotiation object, simulate its id to help the UI deal with it
            request._id = neg._id

            org_request = [
                o for o in orgs if o._id == neg.proposals[-1].provider
            ]
            if org_request:
                request.org_id = org_request[0]._id
                request.name = org_request[0].name

            ret_list.append(request)

        return ret_list
    def _do_test_policy_crud(self):
        policy_rule = '<Rule id="{rule_id}"> <description>{description}</description></Rule>'
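        # e.g. policy_rule.format(rule_id='Test_Rule', description='test rule') would render a concrete rule string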

        policy_obj = IonObject(RT.Policy, name='Test_Policy', description='This is a test policy',
                               policy_type=PolicyTypeEnum.RESOURCE_ACCESS,
                               definition=policy_rule)

        policy_obj.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_policy(policy_obj)

        policy_obj.name = 'Test_Policy'
        policy_id = self.policy_management_service.create_policy(policy_obj)
        self.assertNotEqual(policy_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.read_policy()
        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)

        policy.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.update_policy(policy)
        policy.name = 'Updated_Test_Policy'
        self.policy_management_service.update_policy(policy)

        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)
        self.assertEqual(policy.name, 'Updated_Test_Policy')

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id, policy.name)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id, policy.name, "description")
        #p_id =  self.policy_management_service.create_resource_access_policy(policy_id, "Resource_access_name", "Policy Description", "Test_Rule")
        #self.assertNotEqual(p_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name", policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name", policy_name="policy_name", description="description")
        #p_obj = self.policy_management_service.create_service_access_policy("service_name", "policy_name", "description", "policy_rule")
        #self.assertNotEqual(p_obj, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(policy_name="policy_name",description="description")
        #p_id = self.policy_management_service.create_common_service_access_policy(policy_name="policy_name",description="description", policy_rule="test_rule")
        #self.assertNotEqual(p_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(process_id="process_id")
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(process_id="process_id", op="op")

        self.policy_management_service.enable_policy(policy_id)
        self.policy_management_service.enable_policy(policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.delete_policy()
        self.policy_management_service.delete_policy(policy_id)

        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.read_policy(policy_id)

        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.delete_policy(policy_id)
Example #11
    def parse_constraints(self, environ):
        #        print ">> Start parse_constraints"
        #        import pprint
        #        pprint.pprint("")
        #        pprint.pprint(environ)

        ds_name, ds_id, ds_url, buf_size = get_dataset_info(self)
        #        print "DS Info:  name=%s ds_id=%s ds_url=%s buf_size=%s" % (ds_name, ds_id, ds_url, buf_size)

        # TODO: Call the "damsP" module to retrieve a BaseDatasetHandler based on the ds_id
        dsh = self.damsP.get_data_handlers(ds_id=ds_id)

        #DSH WAY
        dataset_type = DatasetType(
            name=ds_name, attributes={'NC_GLOBAL': dsh.get_attributes()})
        fields, queries = environ['pydap.ce']
        fields = fields or [[(name, ())] for name in dsh.ds.variables]

        #        print "CE Fields: %s" % fields
        #        print "CE Queries: %s" % queries

        pdr_obj = IonObject("PydapVarDataRequest", name="p_req")

        for fvar in fields:
            target = dataset_type
            while fvar:
                name, slice_ = fvar.pop(0)
                pdr_obj.name = name
                pdr_obj.slice = slice_
                if (name in dsh.ds.dimensions
                        or not dsh.ds.variables[name].dimensions
                        or target is not dataset_type):
                    #                    print "==> if"
                    nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj)
                    target[name] = BaseType(name=nm,
                                            data=dat,
                                            shape=dat.shape,
                                            type=tc,
                                            dimensions=di,
                                            attributes=at)
                elif fvar:
                    #                    print "==> elif"
                    attrs = dsh.get_attributes(name)
                    target.setdefault(
                        name, StructureType(name=name, attributes=attrs))
                    target = target[name]
                else:
                    #                    print "==> else"
                    attrs = dsh.get_attributes(name)
                    grid = target[name] = GridType(name=name, attributes=attrs)
                    nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj)
                    grid[name] = BaseType(name=nm,
                                          data=dat,
                                          shape=dat.shape,
                                          type=tc,
                                          dimensions=di,
                                          attributes=at)
                    slice_ = list(slice_) + [slice(None)] * (
                        len(grid.array.shape) - len(slice_))
                    for dim, dimslice in zip(dsh.ds.variables[name].dimensions,
                                             slice_):
                        pdr_obj.name = dim
                        pdr_obj.slice = dimslice
                        nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj)
                        grid[dim] = BaseType(name=nm,
                                             data=dat,
                                             shape=dat.shape,
                                             type=tc,
                                             dimensions=di,
                                             attributes=at)

        dataset_type._set_id()
        dataset_type.close = dsh.ds.close

        #        print ">> End parse_constraints"
        return dataset_type
Example #12
    def test_policy_crud(self):

        res_policy_obj = IonObject(
            OT.ResourceAccessPolicy,
            policy_rule='<Rule id="%s"> <description>%s</description></Rule>')

        policy_obj = IonObject(RT.Policy,
                               name='Test_Policy',
                               description='This is a test policy',
                               policy_type=res_policy_obj)

        policy_obj.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_policy(policy_obj)

        policy_obj.name = 'Test_Policy'
        policy_id = self.policy_management_service.create_policy(policy_obj)
        self.assertNotEqual(policy_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.read_policy()
        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)

        policy.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.update_policy(policy)
        policy.name = 'Updated_Test_Policy'
        self.policy_management_service.update_policy(policy)

        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)
        self.assertEqual(policy.name, 'Updated_Test_Policy')

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(
                policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(
                policy_id, policy.name)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(
                policy_id, policy.name, "description")
        #p_id =  self.policy_management_service.create_resource_access_policy(policy_id, "Resource_access_name", "Policy Description", "Test_Rule")
        #self.assertNotEqual(p_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(
                service_name="service_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(
                service_name="service_name", policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(
                service_name="service_name",
                policy_name="policy_name",
                description="description")
        #p_obj = self.policy_management_service.create_service_access_policy("service_name", "policy_name", "description", "policy_rule")
        #self.assertNotEqual(p_obj, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(
            )
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(
                policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(
                policy_name="policy_name", description="description")
        #p_id = self.policy_management_service.create_common_service_access_policy(policy_name="policy_name",description="description", policy_rule="test_rule")
        #self.assertNotEqual(p_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(
            )
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(
                process_name="process_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(
                process_name="process_name", op="op")

        self.policy_management_service.enable_policy(policy_id)
        self.policy_management_service.enable_policy(policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.delete_policy()
        self.policy_management_service.delete_policy(policy_id)

        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.read_policy(policy_id)
        self.assertIn("does not exist", cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.delete_policy(policy_id)
        self.assertIn("does not exist", cm.exception.message)
Example #13
    def test_CRUD_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        parameter_dictionary = self.DSMS.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
        ctd_stream_def_id = self.PSMS.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=parameter_dictionary._id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product w/o a stream definition
        #------------------------------------------------------------------------------------------------

        # Generic time-series data domain creation
        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain = tdom.dump(),
                           spatial_domain = sdom.dump())

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
        #------------------------------------------------------------------------------------------------

        log.debug("create dataset")
        dataset_id = self.RR2.create(any_old(RT.Dataset))
        log.debug("dataset_id = %s", dataset_id)

        log.debug("create data product 1")
        dp_id = self.DPMS.create_data_product( data_product= dp_obj,
                                                   stream_definition_id=ctd_stream_def_id,
                                                   dataset_id=dataset_id)
        log.debug("dp_id = %s", dp_id)
        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = self.RR.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
        self.assertNotEquals(len(stream_ids), 0)

        log.debug("read data product")
        dp_obj = self.DPMS.read_data_product(dp_id)

        log.debug("find data products")
        self.assertIn(dp_id, [r._id for r in self.DPMS.find_data_products()])

        log.debug("update data product")
        dp_obj.name = "brand new"
        self.DPMS.update_data_product(dp_obj)
        self.assertEqual("brand new", self.DPMS.read_data_product(dp_id).name)

        log.debug("activate/suspend persistence")
        self.assertFalse(self.DPMS.is_persisted(dp_id))
        self.DPMS.activate_data_product_persistence(dp_id)
        self.addCleanup(self.DPMS.suspend_data_product_persistence, dp_id) # delete op will do this for us
        self.assertTrue(self.DPMS.is_persisted(dp_id))

        log.debug("check error on checking persistence of nonexistent stream")
        #with self.assertRaises(NotFound):
        if True:
            self.DPMS.is_persisted(self.RR2.create(any_old(RT.DataProduct)))

        #log.debug("get extension")
        #self.DPMS.get_data_product_extension(dp_id)

        log.debug("getting last update")
        lastupdate = self.DPMS.get_last_update(dp_id)
        self.assertEqual({}, lastupdate) # should be no updates to a new data product

        log.debug("prepare resource support")
        support = self.DPMS.prepare_data_product_support(dp_id)
        self.assertIsNotNone(support)

        log.debug("delete data product")
        self.DPMS.delete_data_product(dp_id)

        log.debug("try to suspend again")
        self.DPMS.suspend_data_product_persistence(dp_id)

        # try basic create
        log.debug("create without a dataset")
        dp_id2 = self.DPMS.create_data_product_(any_old(RT.DataProduct))
        self.assertIsNotNone(dp_id2)
        log.debug("activate product %s", dp_id2)
        self.assertRaises(BadRequest, self.DPMS.activate_data_product_persistence, dp_id2)

        #self.assertNotEqual(0, len(self.RR2.find_dataset_ids_of_data_product_using_has_dataset(dp_id2)))

        log.debug("force delete data product")
        self.DPMS.force_delete_data_product(dp_id2)

        log.debug("test complete")
    def test_user_info(self):
        user_identity_obj = IonObject("UserIdentity", {"name": self.subject})        
        user_id = self.identity_management_service.create_user_identity(user_identity_obj)

        user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})        
        self.identity_management_service.register_user_credentials(user_id, user_credentials_obj)

        user_info_obj = IonObject("UserInfo", {"name": "Foo"})        
        user_info = self.identity_management_service.create_user_info(user_id, user_info_obj)

        dup_create_failed = False
        try:
            user_info2 = self.identity_management_service.create_user_info(user_id, user_info_obj)
        except Conflict as ex:
            self.assertTrue("UserInfo already exists for user id" in ex.message)
            dup_create_failed = True

        self.assertTrue(dup_create_failed)

        user_info_obj = self.identity_management_service.find_user_info_by_id(user_id)

        user_info_obj = self.identity_management_service.find_user_info_by_name("Foo")

        user_info_obj = self.identity_management_service.find_user_info_by_subject(self.subject)

        user_info_obj = self.identity_management_service.read_user_info(user_info)
        
        user_info_obj.name = 'Jane Doe'
        
        self.identity_management_service.update_user_info(user_info_obj)
        
        self.identity_management_service.delete_user_info(user_info)

        read_failed = False
        try:
            self.identity_management_service.read_user_info(user_info)
        except NotFound as ex:
            self.assertTrue('does not exist' in ex.message)
            read_failed = True

        self.assertTrue(read_failed)

        delete_failed = False
        try:
            self.identity_management_service.delete_user_info(user_info)
        except NotFound as ex:
            self.assertTrue('does not exist' in ex.message)
            delete_failed = True

        self.assertTrue(delete_failed)

        find_failed = False
        try:
            self.identity_management_service.find_user_info_by_name("John Doe")
        except NotFound as ex:
            self.assertEqual(ex.message, 'UserInfo with name John Doe does not exist')
            find_failed = True

        self.assertTrue(find_failed)

        find_failed = False
        try:
            self.identity_management_service.find_user_info_by_subject("Bogus subject")
        except NotFound as ex:
            self.assertEqual(ex.message, "UserCredentials with subject Bogus subject does not exist")
            find_failed = True

        self.assertTrue(find_failed)

        self.identity_management_service.unregister_user_credentials(user_id, self.subject)

        self.identity_management_service.delete_user_identity(user_id)
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg")
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------


        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4001,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'command_port': 4002,
            'data_port': 4003,
            'log_level': 5,
        }
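        # NOTE: this hard-coded simulator config is immediately replaced by the CFG-driven config below.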


        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add this validate for Req: L4-CI-SA-RQ-367  Data processing shall notify registered data product consumers about data processing workflow life cycle events
        #todo (contd) ... I believe the capability does not exist yet. ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------

        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
        data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        process_id = data_process.process_id
        self.addCleanup(self.process_dispatcher.cancel_process, process_id)

        #-------------------------------
        # Wait until the process launched in the create_data_process() method is actually running, before proceeding further in this test
        #-------------------------------

        gate = ProcessStateGate(self.process_dispatcher.read_process, process_id, ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The data process (%s) did not spawn in 30 seconds" % process_id)

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: Req: L4-CI-SA-RQ-366  Data processing shall manage data topic definitions
        # todo: data topics are being handled by pub sub at the level of streams
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        #todo: check that activate event is received L4-CI-SA-RQ-367
        #todo... (it looks like no event is being published when the data process is activated... so below, we just check for now
        # todo... that the subscription is indeed activated) (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI; it will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        ################################ Test the removal of data processes ##################################

        #-------------------------------------------------------------------
        # Test the deleting of the data process
        #-------------------------------------------------------------------

        # Before deleting, get the input streams, output streams and the subscriptions so that they can be checked after deleting
#        dp_obj_1 = self.rrclient.read(ctd_l0_all_data_process_id)
#        input_subscription_id = dp_obj_1.input_subscription_id
#        out_prods, _ = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
#        in_prods, _ = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasInputProduct, id_only=True)
#        in_streams = []
#        for in_prod in in_prods:
#            streams, _ = self.rrclient.find_objects(in_prod, PRED.hasStream, id_only=True)
#            in_streams.extend(streams)
#        out_streams = []
#        for out_prod in out_prods:
#            streams, _ = self.rrclient.find_objects(out_prod, PRED.hasStream, id_only=True)
#            out_streams.extend(streams)

        # Deleting the data process
        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)

        # Check that the data process got removed. Check the lcs state. It should be retired
        dp_obj = self.rrclient.read(ctd_l0_all_data_process_id)
        self.assertEquals(dp_obj.lcstate, LCS.RETIRED)

        # Check for process defs still attached to the data process
        dpd_assn_ids = self.rrclient.find_associations(subject=ctd_l0_all_data_process_id,  predicate=PRED.hasProcessDefinition, id_only=True)
        self.assertEquals(len(dpd_assn_ids), 0)

        # Check for output data product still attached to the data process
        out_products, assocs = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
        self.assertEquals(len(out_products), 0)
        self.assertEquals(len(assocs), 0)

        # Check for input data products still attached to the data process
        inprod_associations = self.rrclient.find_associations(ctd_l0_all_data_process_id, PRED.hasInputProduct)
        self.assertEquals(len(inprod_associations), 0)

        # Check for input data products still attached to the data process
        inprod_associations = self.rrclient.find_associations(ctd_l0_all_data_process_id, PRED.hasInputProduct)
        self.assertEquals(len(inprod_associations), 0)

        # Check that the data process has been deactivated
        self.assertIsNone(dp_obj.input_subscription_id)

        # Read the original subscription id of the data process and check that it has been deactivated
        with self.assertRaises(NotFound):
            self.pubsubclient.read_subscription(input_subscription_id)

        #-------------------------------------------------------------------
        # Delete the data process definition
        #-------------------------------------------------------------------

        # before deleting, get the associated process definition so that we can later check it is deleted as expected
        proc_def_ids, proc_def_asocs = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasProcessDefinition)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # check that the data process definition has been retired
        dp_proc_def = self.rrclient.read(ctd_L0_all_dprocdef_id)
        self.assertEquals(dp_proc_def.lcstate, LCS.RETIRED)

        # Check for old associations of this data process definition
        proc_defs, proc_def_asocs = self.rrclient.find_objects(ctd_L0_all_dprocdef_id, PRED.hasProcessDefinition)
        self.assertEquals(len(proc_defs), 0)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject= ctd_L0_all_dprocdef_id, id_only=True)
        self.assertEquals(len(obj_assns), 0)

        ################################ Test the removal of data processes ##################################
        # Try force delete... This should simply delete the associations and the data process object
        # from the resource registry

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)
        
        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process definition
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)

        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri="http://sddevrepo.oceanobservatories.org/releases/seabird_sbe37smb_ooicore-0.0.1-py2.7.egg")
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------


        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4001,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'command_port': 4002,
            'data_port': 4003,
            'log_level': 5,
        }


        port_agent_config = {
            'device_addr':  CFG.device.sbe37.host,
            'device_port':  CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance",
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the out Data Product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id
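
        # The keys of self.output_products ('conductivity', 'pressure', 'temperature')
        # match the binding names assigned to the data process definition above;
        # presumably create_data_process() uses them to route each output product
        # to its bound output stream definition.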


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add validation for Req L4-CI-SA-RQ-367: Data processing shall notify registered data product consumers about data processing workflow life cycle events
        # todo (contd): the capability does not exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------

        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)
        data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        process_id = data_process.process_id
        self.addCleanup(self.process_dispatcher.cancel_process, process_id)

        #-------------------------------
        # Wait until the process launched in the create_data_process() method is actually running, before proceeding further in this test
        #-------------------------------

        gate = ProcessStateGate(self.process_dispatcher.read_process, process_id, ProcessStateEnum.RUNNING)
        self.assertTrue(gate.await(30), "The data process (%s) did not spawn in 30 seconds" % process_id)
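
        # A minimal sketch (not part of the original test) of the kind of polling a
        # state gate performs; the predicate/timeout/interval shape is an assumption
        # for illustration, not the actual ProcessStateGate internals.
        def _poll_until(predicate, timeout, interval=1.0):
            import time
            deadline = time.time() + timeout
            while time.time() < deadline:
                if predicate():   # e.g. a check that the process has reached RUNNING
                    return True
                time.sleep(interval)
            return False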

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: Req: L4-CI-SA-RQ-366  Data processing shall manage data topic definitions
        # todo: data topics are being handled by pub sub at the level of streams
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        # todo: check that an activate event is received (L4-CI-SA-RQ-367)
        # todo (contd): no event appears to be published when the data process is activated,
        # todo (contd): so for now we only check that the subscription is indeed activated (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI; it will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        ################################ Test the removal of data processes ##################################

        #-------------------------------------------------------------------
        # Test the deleting of the data process
        #-------------------------------------------------------------------

        # Before deleting, get the input streams, output streams and the subscriptions so that they can be checked after deleting
#        dp_obj_1 = self.rrclient.read(ctd_l0_all_data_process_id)
#        input_subscription_id = dp_obj_1.input_subscription_id
#        out_prods, _ = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
#        in_prods, _ = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasInputProduct, id_only=True)
#        in_streams = []
#        for in_prod in in_prods:
#            streams, _ = self.rrclient.find_objects(in_prod, PRED.hasStream, id_only=True)
#            in_streams.extend(streams)
#        out_streams = []
#        for out_prod in out_prods:
#            streams, _ = self.rrclient.find_objects(out_prod, PRED.hasStream, id_only=True)
#            out_streams.extend(streams)

        # Deleting the data process
        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)

        # Check that the data process was removed: its lifecycle state should be RETIRED
        dp_obj = self.rrclient.read(ctd_l0_all_data_process_id)
        self.assertEquals(dp_obj.lcstate, LCS.RETIRED)

        # Check for process defs still attached to the data process
        dpd_assn_ids = self.rrclient.find_associations(subject=ctd_l0_all_data_process_id,  predicate=PRED.hasProcessDefinition, id_only=True)
        self.assertEquals(len(dpd_assn_ids), 0)

        # Check for output data product still attached to the data process
        out_products, assocs = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, predicate=PRED.hasOutputProduct, id_only=True)
        self.assertEquals(len(out_products), 0)
        self.assertEquals(len(assocs), 0)

        # Check for input data products still attached to the data process
        inprod_associations = self.rrclient.find_associations(ctd_l0_all_data_process_id, PRED.hasInputProduct)
        self.assertEquals(len(inprod_associations), 0)


        # Check that the data process has been deactivated
        self.assertIsNone(dp_obj.input_subscription_id)

        # The original subscription of the data process should have been removed as well
        with self.assertRaises(NotFound):
            self.pubsubclient.read_subscription(input_subscription_id)
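
        # Sketch (illustrative only, not from the original test): the retire checks
        # above could be wrapped in a small helper; it relies only on the read() and
        # find_associations() signatures already used in this test.
        def _assert_retired_and_unlinked(resource_id, predicate):
            self.assertEquals(self.rrclient.read(resource_id).lcstate, LCS.RETIRED)
            assns = self.rrclient.find_associations(subject=resource_id, predicate=predicate, id_only=True)
            self.assertEquals(len(assns), 0)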

        #-------------------------------------------------------------------
        # Delete the data process definition
        #-------------------------------------------------------------------

        # Before deleting, fetch the associated process definition so that we can later verify it is cleaned up as expected
        proc_def_ids, proc_def_asocs = self.rrclient.find_objects(ctd_l0_all_data_process_id, PRED.hasProcessDefinition)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # check that the data process definition has been retired
        dp_proc_def = self.rrclient.read(ctd_L0_all_dprocdef_id)
        self.assertEquals(dp_proc_def.lcstate, LCS.RETIRED)

        # Check for old associations of this data process definition
        proc_defs, proc_def_asocs = self.rrclient.find_objects(ctd_L0_all_dprocdef_id, PRED.hasProcessDefinition)
        self.assertEquals(len(proc_defs), 0)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject= ctd_L0_all_dprocdef_id, id_only=True)
        self.assertEquals(len(obj_assns), 0)

        ################################ Test the force removal of data processes ##################################
        # Try force delete... This should simply delete the associations and the data process object
        # from the resource registry

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)

        # find all associations where this is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_l0_all_data_process_id, id_only=True)

        # find all associations where this is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_l0_all_data_process_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)
        
        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_l0_all_data_process_id)

        #---------------------------------------------------------------------------------------------------------------
        # Force deleting a data process definition
        #---------------------------------------------------------------------------------------------------------------
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)

        # find all associations where the data process definition is the subject
        _, obj_assns = self.rrclient.find_objects(subject=ctd_L0_all_dprocdef_id, id_only=True)

        # find all associations where the data process definition is the object
        _, sbj_assns = self.rrclient.find_subjects(object=ctd_L0_all_dprocdef_id, id_only=True)

        self.assertEquals(len(obj_assns), 0)
        self.assertEquals(len(sbj_assns), 0)

        with self.assertRaises(NotFound):
            self.rrclient.read(ctd_L0_all_dprocdef_id)
    def test_policy_crud(self):

        res_policy_obj = IonObject(OT.ResourceAccessPolicy, policy_rule='<Rule id="%s"> <description>%s</description></Rule>')
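
        # The rule XML above is an unformatted template; its '%s' placeholders are
        # presumably filled in with a concrete rule id and description by the policy
        # framework when the policy is applied, not at creation time here.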

        policy_obj = IonObject(RT.Policy, name='Test_Policy',
            description='This is a test policy',
            policy_type=res_policy_obj)

        policy_obj.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_policy(policy_obj)

        policy_obj.name = 'Test_Policy'
        policy_id = self.policy_management_service.create_policy(policy_obj)
        self.assertNotEqual(policy_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.read_policy()
        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)

        policy.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.update_policy(policy)
        policy.name = 'Updated_Test_Policy'
        self.policy_management_service.update_policy(policy)

        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)
        self.assertEqual(policy.name, 'Updated_Test_Policy')

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id, policy.name)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(policy_id, policy.name, "description")
        #p_id =  self.policy_management_service.create_resource_access_policy(policy_id, "Resource_access_name", "Policy Description", "Test_Rule")
        #self.assertNotEqual(p_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name", policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(service_name="service_name", policy_name="policy_name", description="description")
        #p_obj = self.policy_management_service.create_service_access_policy("service_name", "policy_name", "description", "policy_rule")
        #self.assertNotEqual(p_obj, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(policy_name="policy_name",description="description")
        #p_id = self.policy_management_service.create_common_service_access_policy(policy_name="policy_name",description="description", policy_rule="test_rule")
        #self.assertNotEqual(p_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(process_name="process_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(process_name="process_name", op="op")

        self.policy_management_service.enable_policy(policy_id)
        self.policy_management_service.enable_policy(policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.delete_policy()
        self.policy_management_service.delete_policy(policy_id)

        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.read_policy(policy_id)
        self.assertIn("does not exist", cm.exception.message)

        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.delete_policy(policy_id)
        self.assertIn("does not exist", cm.exception.message)
Example #18
    def test_CRUD_data_product(self):

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        parameter_dictionary = self.DSMS.read_parameter_dictionary_by_name(
            'ctd_parsed_param_dict')
        ctd_stream_def_id = self.PSMS.create_stream_definition(
            name='Simulated CTD data',
            parameter_dictionary_id=parameter_dictionary._id)
        log.debug("Created stream def id %s" % ctd_stream_def_id)

        #------------------------------------------------------------------------------------------------
        # test creating a new data product (a stream definition and dataset are supplied below)
        #------------------------------------------------------------------------------------------------

        # Generic time-series data domain creation
        tdom, sdom = time_series_domain()

        dp_obj = IonObject(RT.DataProduct,
                           name='DP1',
                           description='some new dp',
                           temporal_domain=tdom.dump(),
                           spatial_domain=sdom.dump())

        dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
        dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
        dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
        dp_obj.ooi_product_name = "PRODNAME"

        #------------------------------------------------------------------------------------------------
        # Create a dataset to associate with the data product
        #------------------------------------------------------------------------------------------------

        log.debug("create dataset")
        dataset_id = self.RR2.create(any_old(RT.Dataset))
        log.debug("dataset_id = %s", dataset_id)

        log.debug("create data product 1")
        dp_id = self.DPMS.create_data_product(
            data_product=dp_obj,
            stream_definition_id=ctd_stream_def_id,
            dataset_id=dataset_id)
        log.debug("dp_id = %s", dp_id)
        # Assert that the data product has an associated stream at this stage
        stream_ids, _ = self.RR.find_objects(dp_id, PRED.hasStream, RT.Stream,
                                             True)
        self.assertNotEquals(len(stream_ids), 0)

        log.debug("read data product")
        dp_obj = self.DPMS.read_data_product(dp_id)

        log.debug("find data products")
        self.assertIn(dp_id, [r._id for r in self.DPMS.find_data_products()])

        log.debug("update data product")
        dp_obj.name = "brand new"
        self.DPMS.update_data_product(dp_obj)
        self.assertEqual("brand new", self.DPMS.read_data_product(dp_id).name)

        log.debug("activate/suspend persistence")
        self.assertFalse(self.DPMS.is_persisted(dp_id))
        self.DPMS.activate_data_product_persistence(dp_id)
        self.addCleanup(self.DPMS.suspend_data_product_persistence,
                        dp_id)  # delete op will do this for us
        self.assertTrue(self.DPMS.is_persisted(dp_id))

        log.debug("check error on checking persistence of nonexistent stream")
        #with self.assertRaises(NotFound):
        if True:
            self.DPMS.is_persisted(self.RR2.create(any_old(RT.DataProduct)))

        #log.debug("get extension")
        #self.DPMS.get_data_product_extension(dp_id)

        log.debug("getting last update")
        lastupdate = self.DPMS.get_last_update(dp_id)
        self.assertEqual(
            {}, lastupdate)  # should be no updates to a new data product

        log.debug("prepare resource support")
        support = self.DPMS.prepare_data_product_support(dp_id)
        self.assertIsNotNone(support)

        log.debug("delete data product")
        self.DPMS.delete_data_product(dp_id)

        log.debug("try to suspend again")
        self.DPMS.suspend_data_product_persistence(dp_id)

        # try basic create
        log.debug("create without a dataset")
        dp_id2 = self.DPMS.create_data_product_(any_old(RT.DataProduct))
        self.assertIsNotNone(dp_id2)
        log.debug("activate product %s", dp_id2)
        self.assertRaises(BadRequest,
                          self.DPMS.activate_data_product_persistence, dp_id2)

        #self.assertNotEqual(0, len(self.RR2.find_dataset_ids_of_data_product_using_has_dataset(dp_id2)))

        log.debug("force delete data product")
        self.DPMS.force_delete_data_product(dp_id2)

        log.debug("test complete")
    def test_createDataProcessUsingSim(self):
        #-------------------------------
        # Create InstrumentModel
        #-------------------------------
        instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel" )
        instModel_id = self.imsclient.create_instrument_model(instModel_obj)

        #-------------------------------
        # Create InstrumentAgent
        #-------------------------------
        instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_module="mi.instrument.seabird.sbe37smb.ooicore.driver", driver_class="SBE37Driver" )
        instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj)

        self.imsclient.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id)

        #-------------------------------
        # Create InstrumentDevice
        #-------------------------------
        instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" )
        instDevice_id = self.imsclient.create_instrument_device(instrument_device=instDevice_obj)
        self.imsclient.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id)

        #-------------------------------
        # Create InstrumentAgentInstance to hold configuration information
        #-------------------------------


        port_agent_config = {
            'device_addr': 'sbe37-simulator.oceanobservatories.org',
            'device_port': 4001,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'command_port': 4002,
            'data_port': 4003,
            'log_level': 5,
        }
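        # The port agent configuration above targets the SBE37 instrument simulator
        # at oceanobservatories.org; the addresses and ports are test fixtures, and
        # log_level=5 is assumed here to mean maximally verbose port agent logging.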

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", svr_addr="localhost",
                                          comms_device_address=CFG.device.sbe37.host, comms_device_port=CFG.device.sbe37.port,
                                          port_agent_config = port_agent_config)
        instAgentInstance_id = self.imsclient.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id)


        #-------------------------------
        # Create CTD Parsed as the first data product
        #-------------------------------
        # create a stream definition for the data from the ctd simulator
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE32_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()
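
        # dump() presumably serializes the domain objects returned by time_series_domain()
        # into plain dicts, matching how the temporal and spatial domains are passed to
        # IonObject data products elsewhere in these tests.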



        dp_obj = IonObject(RT.DataProduct,
            name='ctd_parsed',
            description='ctd stream test',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_parsed_data_product = self.dataproductclient.create_data_product(dp_obj, ctd_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_parsed_data_product)

        # Retrieve the id of the OUTPUT stream from the output data product
        stream_ids, _ = self.rrclient.find_objects(ctd_parsed_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # Create CTD Raw as the second data product
        #-------------------------------
        raw_stream_def_id = self.pubsubclient.create_stream_definition(name='SBE37_RAW', parameter_dictionary_id=pdict_id)

        dp_obj.name = 'ctd_raw'
        ctd_raw_data_product = self.dataproductclient.create_data_product(dp_obj, raw_stream_def_id)

        self.damsclient.assign_data_product(input_resource_id=instDevice_id, data_product_id=ctd_raw_data_product)

        # Retrieve the id of the OUTPUT stream from the output data product
        stream_ids, _ = self.rrclient.find_objects(ctd_raw_data_product, PRED.hasStream, None, True)

        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Data Process Definition
        #-------------------------------
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name='ctd_L0_all',
                            description='transform ctd package into three separate L0 streams',
                            module='ion.processes.data.transforms.ctd.ctd_L0_all',
                            class_name='ctd_L0_all')
        ctd_L0_all_dprocdef_id = self.dataprocessclient.create_data_process_definition(dpd_obj)
            
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Output Data Products
        #-------------------------------

        outgoing_stream_l0_conductivity_id = self.pubsubclient.create_stream_definition(name='L0_Conductivity', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_conductivity_id, ctd_L0_all_dprocdef_id, binding='conductivity' )

        outgoing_stream_l0_pressure_id = self.pubsubclient.create_stream_definition(name='L0_Pressure', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_pressure_id, ctd_L0_all_dprocdef_id, binding='pressure' )

        outgoing_stream_l0_temperature_id = self.pubsubclient.create_stream_definition(name='L0_Temperature', parameter_dictionary_id=pdict_id)
        self.dataprocessclient.assign_stream_definition_to_data_process_definition(outgoing_stream_l0_temperature_id, ctd_L0_all_dprocdef_id, binding='temperature' )


        self.output_products={}

        ctd_l0_conductivity_output_dp_obj = IonObject(  RT.DataProduct,
                                                        name='L0_Conductivity',
                                                        description='transform output conductivity',
                                                        temporal_domain = tdom,
                                                        spatial_domain = sdom)


        ctd_l0_conductivity_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_conductivity_output_dp_obj,
                                                                                outgoing_stream_l0_conductivity_id)
        self.output_products['conductivity'] = ctd_l0_conductivity_output_dp_id

        ctd_l0_pressure_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Pressure',
            description='transform output pressure',
            temporal_domain = tdom,
            spatial_domain = sdom)

        ctd_l0_pressure_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_pressure_output_dp_obj,
                                                                                    outgoing_stream_l0_pressure_id)
        self.output_products['pressure'] = ctd_l0_pressure_output_dp_id

        ctd_l0_temperature_output_dp_obj = IonObject(RT.DataProduct,
            name='L0_Temperature',
            description='transform output temperature',
            temporal_domain = tdom,
            spatial_domain = sdom)


        ctd_l0_temperature_output_dp_id = self.dataproductclient.create_data_product(ctd_l0_temperature_output_dp_obj,
                                                                                    outgoing_stream_l0_temperature_id)
        self.output_products['temperature'] = ctd_l0_temperature_output_dp_id


        #-------------------------------
        # Create listener for data process events and verify that events are received.
        #-------------------------------

        # todo: add validation for Req L4-CI-SA-RQ-367: Data processing shall notify registered data product consumers about data processing workflow life cycle events
        # todo (contd): the capability does not exist yet; ANS and SA are not yet publishing any workflow life cycle events (Swarbhanu)
        
        #-------------------------------
        # L0 Conductivity - Temperature - Pressure: Create the data process
        #-------------------------------
        ctd_l0_all_data_process_id = self.dataprocessclient.create_data_process(ctd_L0_all_dprocdef_id, [ctd_parsed_data_product], self.output_products)

        #-------------------------------
        # Retrieve a list of all data process definitions in RR and validate that the DPD is listed
        #-------------------------------

        # todo: add validation for Req L4-CI-SA-RQ-366: Data processing shall manage data topic definitions
        # todo: This capability is not yet completed (Swarbhanu)
        self.dataprocessclient.activate_data_process(ctd_l0_all_data_process_id)
        

        # todo: check that an activate event is received (L4-CI-SA-RQ-367)
        # todo (contd): no event appears to be published when the data process is activated,
        # todo (contd): so for now we only check that the subscription is indeed activated (Swarbhanu)


        # todo: monitor process to see if it is active (sa-rq-182)
        ctd_l0_all_data_process = self.rrclient.read(ctd_l0_all_data_process_id)
        input_subscription_id = ctd_l0_all_data_process.input_subscription_id
        subs = self.rrclient.read(input_subscription_id)
        self.assertTrue(subs.activated)

        # todo: This has not yet been completed by CEI; it will probably surface through a DPMS call
        self.dataprocessclient.deactivate_data_process(ctd_l0_all_data_process_id)


        #-------------------------------
        # Retrieve the extended resources for data process definition and for data process
        #-------------------------------
        extended_process_definition = self.dataprocessclient.get_data_process_definition_extension(ctd_L0_all_dprocdef_id)
        self.assertEqual(1, len(extended_process_definition.data_processes))
        log.debug("test_createDataProcess: extended_process_definition  %s", str(extended_process_definition))

        extended_process = self.dataprocessclient.get_data_process_extension(ctd_l0_all_data_process_id)
        self.assertEqual(1, len(extended_process.input_data_products))
        log.debug("test_createDataProcess: extended_process  %s", str(extended_process))

        #-------------------------------
        # Cleanup
        #-------------------------------

        self.dataprocessclient.delete_data_process(ctd_l0_all_data_process_id)
        self.dataprocessclient.delete_data_process_definition(ctd_L0_all_dprocdef_id)

        self.dataprocessclient.force_delete_data_process(ctd_l0_all_data_process_id)
        self.dataprocessclient.force_delete_data_process_definition(ctd_L0_all_dprocdef_id)
Example #20
    def _do_test_policy_crud(self):
        policy_rule = '<Rule id="{rule_id}"> <description>{description}</description></Rule>'

        policy_obj = IonObject(RT.Policy,
                               name='Test_Policy',
                               description='This is a test policy',
                               policy_type=PolicyTypeEnum.RESOURCE_ACCESS,
                               definition=policy_rule)

        policy_obj.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_policy(policy_obj)

        policy_obj.name = 'Test_Policy'
        policy_id = self.policy_management_service.create_policy(policy_obj)
        self.assertNotEqual(policy_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.read_policy()
        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)

        policy.name = ' '
        with self.assertRaises(BadRequest):
            self.policy_management_service.update_policy(policy)
        policy.name = 'Updated_Test_Policy'
        self.policy_management_service.update_policy(policy)

        policy = None
        policy = self.policy_management_service.read_policy(policy_id)
        self.assertNotEqual(policy, None)
        self.assertEqual(policy.name, 'Updated_Test_Policy')

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(
                policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(
                policy_id, policy.name)
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_resource_access_policy(
                policy_id, policy.name, "description")
        #p_id =  self.policy_management_service.create_resource_access_policy(policy_id, "Resource_access_name", "Policy Description", "Test_Rule")
        #self.assertNotEqual(p_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(
                service_name="service_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(
                service_name="service_name", policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_service_access_policy(
                service_name="service_name",
                policy_name="policy_name",
                description="description")
        #p_obj = self.policy_management_service.create_service_access_policy("service_name", "policy_name", "description", "policy_rule")
        #self.assertNotEqual(p_obj, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(
                policy_name="policy_name")
        with self.assertRaises(BadRequest):
            self.policy_management_service.create_common_service_access_policy(
                policy_name="policy_name", description="description")
        #p_id = self.policy_management_service.create_common_service_access_policy(policy_name="policy_name",description="description", policy_rule="test_rule")
        #self.assertNotEqual(p_id, None)

        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy()
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(
                process_id="process_id")
        with self.assertRaises(BadRequest):
            self.policy_management_service.add_process_operation_precondition_policy(
                process_id="process_id", op="op")

        self.policy_management_service.enable_policy(policy_id)
        self.policy_management_service.enable_policy(policy_id)
        with self.assertRaises(BadRequest):
            self.policy_management_service.delete_policy()
        self.policy_management_service.delete_policy(policy_id)

        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.read_policy(policy_id)

        with self.assertRaises(NotFound) as cm:
            self.policy_management_service.delete_policy(policy_id)