def _do_test_create_geospatial_point_center(self, resources):
    platformsite_obj = IonObject(RT.PlatformSite,
                                 name='TestPlatformSite',
                                 description='some new TestPlatformSite')
    geo_index_obj = IonObject(OT.GeospatialBounds)
    geo_index_obj.geospatial_latitude_limit_north = 20.0
    geo_index_obj.geospatial_latitude_limit_south = 10.0
    geo_index_obj.geospatial_longitude_limit_east = 15.0
    geo_index_obj.geospatial_longitude_limit_west = 20.0
    platformsite_obj.constraint_list = [geo_index_obj]

    platformsite_id = self.OMS.create_platform_site(platformsite_obj)

    # now get the platform site back to see if it was created correctly
    platformsite_obj = self.OMS.read_platform_site(platformsite_id)
    self.assertEquals('some new TestPlatformSite', platformsite_obj.description)
    self.assertAlmostEqual(15.0, platformsite_obj.geospatial_point_center.lat, places=1)

    # now adjust a few params
    platformsite_obj.description = 'some old TestPlatformSite'
    geo_index_obj = IonObject(OT.GeospatialBounds)
    geo_index_obj.geospatial_latitude_limit_north = 30.0
    geo_index_obj.geospatial_latitude_limit_south = 20.0
    platformsite_obj.constraint_list = [geo_index_obj]
    update_result = self.OMS.update_platform_site(platformsite_obj)

    # now get the platform site back to see if it was updated
    platformsite_obj = self.OMS.read_platform_site(platformsite_id)
    self.assertEquals('some old TestPlatformSite', platformsite_obj.description)
    self.assertAlmostEqual(25.0, platformsite_obj.geospatial_point_center.lat, places=1)

    self.OMS.force_delete_platform_site(platformsite_id)
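# Hedged sketch (an assumption, not from the source): the assertAlmostEqual
# checks above are consistent with geospatial_point_center.lat being computed
# as the simple midpoint of the latitude limits in the constraint list.
def mid_lat(bounds):
    return (bounds.geospatial_latitude_limit_north +
            bounds.geospatial_latitude_limit_south) / 2.0

# (20.0 + 10.0) / 2.0 == 15.0 before the update; (30.0 + 20.0) / 2.0 == 25.0 after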
def fun():
    #ret = Mock()
    log.debug("Creating sample %s", iontype)
    ret = IonObject(iontype)
    ret.name = "sample %s" % iontype
    ret.description = "description of sample %s" % iontype
    for k, v in resource_params.iteritems():
        setattr(ret, k, v)
    return ret
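# Hedged, self-contained variant of the factory above (illustrative only): it
# takes `iontype` and `resource_params` as arguments instead of closing over
# enclosing-scope names, which makes the setattr-override behavior easy to test.
def make_sample(iontype, resource_params):
    ret = IonObject(iontype)
    ret.name = "sample %s" % iontype
    ret.description = "description of sample %s" % iontype
    for k, v in resource_params.iteritems():
        setattr(ret, k, v)
    return ret

# e.g. make_sample(RT.DataProduct, {'description': 'overridden description'})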
def create_negotiation(self, sap=None):
    if sap is None or (sap.type_ != OT.ServiceAgreementProposal
                       and not issubtype(sap.type_, OT.ServiceAgreementProposal)):
        raise BadRequest('The sap parameter must be a valid Service Agreement Proposal object')

    if sap.proposal_status != ProposalStatusEnum.INITIAL or sap.sequence_num != 0:
        raise Inconsistent('The specified Service Agreement Proposal has inconsistent status fields')

    if sap.negotiation_id != '':
        raise Inconsistent('The specified Service Agreement Proposal cannot have a negotiation_id for an initial proposal')

    if sap.type_ in self.negotiation_rules:
        # validate preconditions before creating
        for pc in self.negotiation_rules[sap.type_]['pre_conditions']:
            if pc.startswith('not '):
                # strip the 'not ' prefix with a slice; str.lstrip('not ') would
                # strip a character *set* and mangle predicates starting with n/o/t
                pre_condition_met = not eval("self.service_provider." + pc[len('not '):])
            else:
                pre_condition_met = eval("self.service_provider." + pc)

            if not pre_condition_met:
                raise BadRequest("A precondition for this request has not been satisfied: %s" % pc)

    # Should be able to determine the negotiation type based on the initial originator
    neg_type = NegotiationTypeEnum.REQUEST
    if sap.originator == ProposalOriginatorEnum.PROVIDER:
        neg_type = NegotiationTypeEnum.INVITATION
    elif sap.originator == ProposalOriginatorEnum.BROKER:
        neg_type = NegotiationTypeEnum.BROKERED

    neg_obj = IonObject(RT.Negotiation, negotiation_type=neg_type)

    # If there is a description in the initial proposal, then set the negotiation description with it.
    if sap.description != '':
        neg_obj.description = sap.description

    neg_id, _ = self.service_provider.clients.resource_registry.create(neg_obj)

    # Create associations between the parties
    self.service_provider.clients.resource_registry.create_association(sap.consumer, PRED.hasNegotiation, neg_id)
    self.service_provider.clients.resource_registry.create_association(sap.provider, PRED.hasNegotiation, neg_id)
    if sap.broker != "":
        self.service_provider.clients.resource_registry.create_association(sap.broker, PRED.hasNegotiation, neg_id)

    return neg_id
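# Standalone illustration (not from the original module) of why the 'not '
# prefix above is removed with a slice rather than str.lstrip: lstrip treats
# its argument as a character set, so a predicate name beginning with 'n',
# 'o', 't' or ' ' would lose leading characters.
assert 'not org_active'.lstrip('not ') == 'rg_active'    # character-set stripping mangles the name
assert 'not org_active'[len('not '):] == 'org_active'    # prefix slice preserves it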
def test_data_source_ops(self):
    # test creating a new data source
    print 'Creating new data source'
    datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='instrument based new source',
                               type='sbe37')
    try:
        ds_id = self.client.create_data_source(datasource_obj)
    except BadRequest as ex:
        self.fail("failed to create new data source: %s" % ex)
    print 'new data source id = ', ds_id

    # test reading a non-existent data source
    print 'reading non-existent data source'
    try:
        dp_obj = self.client.read_data_source('some_fake_id')
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data source was found during read: %s" % dp_obj)

    # update a data source (tests read also)
    print 'Updating data source'
    # first get the existing data source object
    try:
        datasource_obj = self.client.read_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during read")
    else:
        pass

    # now tweak the object
    datasource_obj.description = 'the very first data source'
    # now write the data source back to the registry
    try:
        update_result = self.client.update_data_source(datasource_obj)
    except NotFound as ex:
        self.fail("existing data source was not found during update")
    except Conflict as ex:
        self.fail("revision conflict exception during data source update")
    #else:
    #    self.assertTrue(update_result == True)

    # now get the data source back to see if it was updated
    try:
        datasource_obj = self.client.read_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during read")
    else:
        pass
    self.assertTrue(datasource_obj.description == 'the very first data source')

    # now 'delete' the data source
    print "deleting data source"
    try:
        delete_result = self.client.delete_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during delete")
    #self.assertTrue(delete_result == True)

    # now try to get the deleted data source object
    try:
        dp_obj = self.client.read_data_source(ds_id)
    except NotFound as ex:
        pass
    else:
        self.fail("deleted data source was found during read")

    # now try to delete the already deleted data source object
    print "deleting non-existing data source"
    try:
        delete_result = self.client.delete_data_source(ds_id)
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data source was found during delete")
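# Hedged sketch (not part of the suite): the try/except/else idiom above can be
# condensed with unittest's assertRaises context manager, which fails the test
# automatically when the expected exception is not raised. The NotFound import
# path is an assumption based on the pyon-style exceptions used throughout.
from pyon.core.exception import NotFound

def read_missing_data_source_sketch(test_case):
    # equivalent to the try / except NotFound / else: self.fail(...) pattern above
    with test_case.assertRaises(NotFound):
        test_case.client.read_data_source('some_fake_id')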
def test_createDataProduct(self):
    client = self.client

    # create a stream definition for the data from the ctd simulator
    ctd_stream_def = ctd_stream_definition()
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def,
                                                                name='Simulated CTD data')

    # test creating a new data product w/o a stream definition
    print 'test_createDataProduct: Creating new data product w/o a stream definition (L4-CI-SA-RQ-308)'
    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp')
    try:
        dp_id = client.create_data_product(dp_obj, '')
        dp_obj = client.read_data_product(dp_id)
    except BadRequest as ex:
        self.fail("failed to create new data product: %s" % ex)
    print 'new dp_id = ', dp_id
    log.debug("test_createDataProduct: Data product info from registry %s (L4-CI-SA-RQ-308)", str(dp_obj))

    # test creating a new data product with a stream definition
    print 'Creating new data product with a stream definition'
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='some new dp')
    try:
        dp_id2 = client.create_data_product(dp_obj, ctd_stream_def_id)
    except BadRequest as ex:
        self.fail("failed to create new data product: %s" % ex)
    print 'new dp_id = ', dp_id2

    # test activate and suspend data product persistence
    try:
        client.activate_data_product_persistence(dp_id2, persist_data=True, persist_metadata=True)
        time.sleep(3)
        client.suspend_data_product_persistence(dp_id2)
    except BadRequest as ex:
        self.fail("failed to activate / deactivate data product persistence : %s" % ex)

    pid = self.container.spawn_process(name='dummy_process_for_test',
                                       module='pyon.ion.process',
                                       cls='SimpleProcess',
                                       config={})
    dummy_process = self.container.proc_manager.procs[pid]

    '''
    publisher_registrar = StreamPublisherRegistrar(process=dummy_process, node=self.container.node)
    self.ctd_stream1_publisher = publisher_registrar.create_publisher(stream_id=self.in_stream_id)

    msg = {'num': '3'}
    self.ctd_stream1_publisher.publish(msg)
    time.sleep(1)

    msg = {'num': '5'}
    self.ctd_stream1_publisher.publish(msg)
    time.sleep(1)

    msg = {'num': '9'}
    self.ctd_stream1_publisher.publish(msg)
    '''

    # test creating a duplicate data product
    print 'Creating the same data product a second time (duplicate)'
    dp_obj.description = 'the first dp'
    try:
        dp_id = client.create_data_product(dp_obj, ctd_stream_def_id)
    except BadRequest as ex:
        print ex
    else:
        self.fail("duplicate data product was created with the same name")

    # test reading a non-existent data product
    print 'reading non-existent data product'
    try:
        dp_obj = client.read_data_product('some_fake_id')
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data product was found during read: %s" % dp_obj)

    # update a data product (tests read also)
    print 'Updating data product'
    # first get the existing dp object
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during read")
    else:
        pass
    #print 'dp_obj = ', dp_obj

    # now tweak the object
    dp_obj.description = 'the very first dp'
    # now write the dp back to the registry
    try:
        update_result = client.update_data_product(dp_obj)
    except NotFound as ex:
        self.fail("existing data product was not found during update")
    except Conflict as ex:
        self.fail("revision conflict exception during data product update")

    # now get the dp back to see if it was updated
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during read")
    else:
        pass
    #print 'dp_obj = ', dp_obj
    self.assertTrue(dp_obj.description == 'the very first dp')

    # now 'delete' the data product
    print "deleting data product: ", dp_id
    try:
        client.delete_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during delete")

    # now try to get the deleted dp object
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        pass
    else:
        self.fail("deleted data product was found during read")

    # now try to delete the already deleted dp object
    print "deleting non-existing data product"
    try:
        client.delete_data_product(dp_id)
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data product was found during delete")
def test_createDataProduct(self):
    # Start container
    #print 'instantiating container'
    self._start_container()
    #container = Container()
    #print 'starting container'
    #container.start()
    #print 'started container'

    # Establish endpoint with container
    container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
    #print 'got CC client'
    container_client.start_rel_from_url('res/deploy/r2sa.yml')
    print 'started services'

    # Now create client to DataProductManagementService
    client = DataProductManagementServiceClient(node=self.container.node)

    # test creating a new data product w/o a data producer
    print 'Creating new data product w/o a data producer'
    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp')
    try:
        dp_id = client.create_data_product(dp_obj)
    except BadRequest as ex:
        self.fail("failed to create new data product: %s" % ex)
    print 'new dp_id = ', dp_id

    # test creating a duplicate data product
    print 'Creating the same data product a second time (duplicate)'
    dp_obj.description = 'the first dp'
    try:
        dp_id = client.create_data_product(dp_obj)
    except BadRequest as ex:
        print ex
    else:
        self.fail("duplicate data product was created with the same name")

    """
    # This is broken until the interceptor handles lists properly (w/o converting them to constants)
    # and DAMS works with pubsub_management.register_producer() correctly
    # test creating a new data product with a data producer
    print 'Creating new data product with a data producer'
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='another new dp')
    data_producer_obj = IonObject(RT.DataProducer,
                                  name='DataProducer1',
                                  description='a new data producer')
    try:
        dp_id = client.create_data_product(dp_obj, data_producer_obj)
    except BadRequest as ex:
        self.fail("failed to create new data product")
    print 'new dp_id = ', dp_id
    """

    # test reading a non-existent data product
    print 'reading non-existent data product'
    try:
        dp_obj = client.read_data_product('some_fake_id')
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data product was found during read: %s" % dp_obj)

    # update a data product (tests read also)
    print 'Updating data product'
    # first get the existing dp object
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during read")
    else:
        pass
    #print 'dp_obj = ', dp_obj

    # now tweak the object
    dp_obj.description = 'the very first dp'
    # now write the dp back to the registry
    try:
        update_result = client.update_data_product(dp_obj)
    except NotFound as ex:
        self.fail("existing data product was not found during update")
    except Conflict as ex:
        self.fail("revision conflict exception during data product update")
    else:
        self.assertTrue(update_result == True)

    # now get the dp back to see if it was updated
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during read")
    else:
        pass
    #print 'dp_obj = ', dp_obj
    self.assertTrue(dp_obj.description == 'the very first dp')

    # now 'delete' the data product
    print "deleting data product"
    try:
        delete_result = client.delete_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during delete")
    self.assertTrue(delete_result == True)

    # now try to get the deleted dp object
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        pass
    else:
        self.fail("deleted data product was found during read")

    # now try to delete the already deleted dp object
    print "deleting non-existing data product"
    try:
        delete_result = client.delete_data_product(dp_id)
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data product was found during delete")
def test_createDataProduct(self):
    client = self.client
    rrclient = self.rrclient

    # Not sure we want to mix in DAMS tests here
    # set up initial data source and its associated data producer
    # instrument_obj = IonObject(RT.InstrumentDevice, name='Inst1', description='an instrument that is creating the data product')
    # instrument_id, rev = rrclient.create(instrument_obj)
    # self.damsclient.register_instrument(instrument_id)

    # test creating a new data product w/o a data producer
    print "Creating new data product w/o a data producer"
    dp_obj = IonObject(RT.DataProduct,
                       name="DP1",
                       description="some new dp")
    try:
        dp_id = client.create_data_product(dp_obj, "")
    except BadRequest as ex:
        self.fail("failed to create new data product: %s" % ex)
    print "new dp_id = ", dp_id

    # test creating a duplicate data product
    print "Creating the same data product a second time (duplicate)"
    dp_obj.description = "the first dp"
    try:
        dp_id = client.create_data_product(dp_obj, "source_resource_id")
    except BadRequest as ex:
        print ex
    else:
        self.fail("duplicate data product was created with the same name")

    """
    # This is broken until the interceptor handles lists properly (w/o converting them to constants)
    # and DAMS works with pubsub_management.register_producer() correctly
    # test creating a new data product with a data producer
    print 'Creating new data product with a data producer'
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='another new dp')
    data_producer_obj = IonObject(RT.DataProducer,
                                  name='DataProducer1',
                                  description='a new data producer')
    try:
        dp_id = client.create_data_product(dp_obj, data_producer_obj)
    except BadRequest as ex:
        self.fail("failed to create new data product")
    print 'new dp_id = ', dp_id
    """

    # test reading a non-existent data product
    print "reading non-existent data product"
    try:
        dp_obj = client.read_data_product("some_fake_id")
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data product was found during read: %s" % dp_obj)

    # update a data product (tests read also)
    print "Updating data product"
    # first get the existing dp object
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during read")
    else:
        pass
    # print 'dp_obj = ', dp_obj

    # now tweak the object
    dp_obj.description = "the very first dp"
    # now write the dp back to the registry
    try:
        update_result = client.update_data_product(dp_obj)
    except NotFound as ex:
        self.fail("existing data product was not found during update")
    except Conflict as ex:
        self.fail("revision conflict exception during data product update")

    # now get the dp back to see if it was updated
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during read")
    else:
        pass
    # print 'dp_obj = ', dp_obj
    self.assertTrue(dp_obj.description == "the very first dp")

    # now 'delete' the data product
    print "deleting data product: ", dp_id
    try:
        client.delete_data_product(dp_id)
    except NotFound as ex:
        self.fail("existing data product was not found during delete")

    # now try to get the deleted dp object
    try:
        dp_obj = client.read_data_product(dp_id)
    except NotFound as ex:
        pass
    else:
        self.fail("deleted data product was found during read")

    # now try to delete the already deleted dp object
    print "deleting non-existing data product"
    try:
        client.delete_data_product(dp_id)
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data product was found during delete")
def test_data_source_ops(self):
    # Register an instrument in coordination with DM PubSub: create stream, register and create producer object

    # Start container
    #print 'instantiating container'
    self._start_container()

    # Establish endpoint with container
    container_client = ContainerAgentClient(node=self.container.node, name=self.container.name)
    #print 'got CC client'
    container_client.start_rel_from_url('res/deploy/r2sa.yml')
    print 'started services'

    # Now create client to DataAcquisitionManagementService
    client = DataAcquisitionManagementServiceClient(node=self.container.node)

    # test creating a new data source
    print 'Creating new data source'
    datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='instrument based new source',
                               type='sbe37')
    try:
        # call create_data_source directly on the client; the original chained it
        # through a stray read_data_source() call, which could never work
        ds_id = client.create_data_source(datasource_obj)
    except BadRequest as ex:
        self.fail("failed to create new data source: %s" % ex)
    print 'new data source id = ', ds_id

    # test reading a non-existent data source
    print 'reading non-existent data source'
    try:
        dp_obj = client.read_data_source('some_fake_id')
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data source was found during read: %s" % dp_obj)

    # update a data source (tests read also)
    print 'Updating data source'
    # first get the existing data source object
    try:
        datasource_obj = client.read_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during read")
    else:
        pass

    # now tweak the object
    datasource_obj.description = 'the very first data source'
    # now write the data source back to the registry
    try:
        update_result = client.update_data_source(datasource_obj)
    except NotFound as ex:
        self.fail("existing data source was not found during update")
    except Conflict as ex:
        self.fail("revision conflict exception during data source update")
    else:
        self.assertTrue(update_result == True)

    # now get the data source back to see if it was updated
    try:
        datasource_obj = client.read_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during read")
    else:
        pass
    self.assertTrue(datasource_obj.description == 'the very first data source')

    # now 'delete' the data source
    print "deleting data source"
    try:
        delete_result = client.delete_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during delete")
    self.assertTrue(delete_result == True)

    # now try to get the deleted data source object
    try:
        dp_obj = client.read_data_source(ds_id)
    except NotFound as ex:
        pass
    else:
        self.fail("deleted data source was found during read")

    # now try to delete the already deleted data source object
    print "deleting non-existing data source"
    try:
        delete_result = client.delete_data_source(ds_id)
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data source was found during delete")
def test_create_data_product(self):
    #------------------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #------------------------------------------------------------------------------------------------
    parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data',
                                                                parameter_dictionary_id=parameter_dictionary._id)
    log.debug("Created stream def id %s" % ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product w/o a stream definition
    #------------------------------------------------------------------------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp')
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
    dp_obj.ooi_product_name = "PRODNAME"

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------
    dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                              stream_definition_id=ctd_stream_def_id)

    # Assert that the data product has an associated stream at this stage
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertNotEquals(len(stream_ids), 0)

    # Assert that the data product has an associated stream def at this stage
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
    self.assertNotEquals(len(stream_ids), 0)

    self.dpsc_cli.activate_data_product_persistence(dp_id)

    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Created data product %s', dp_obj)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product with a stream definition
    #------------------------------------------------------------------------------------------------
    log.debug('Creating new data product with a stream definition')
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='some new dp')
    dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id2)
    log.debug('new dp_id = %s' % dp_id2)

    #------------------------------------------------------------------------------------------------
    # make sure data product is associated with stream def
    #------------------------------------------------------------------------------------------------
    streamdefs = []
    streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
    for s in streams:
        log.debug("Checking stream %s" % s)
        sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        for sd in sdefs:
            log.debug("Checking streamdef %s" % sd)
            streamdefs.append(sd)
    self.assertIn(ctd_stream_def_id, streamdefs)

    group_names = self.dpsc_cli.get_data_product_group_list()
    self.assertIn("PRODNAME", group_names)

    # test reading a non-existent data product
    log.debug('reading non-existent data product')
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

    # update a data product (tests read also)
    log.debug('Updating data product')
    # first get the existing dp object
    dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # now tweak the object
    dp_obj.description = 'the very first dp'
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
    # now write the dp back to the registry
    update_result = self.dpsc_cli.update_data_product(dp_obj)

    # now get the dp back to see if it was updated
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertEquals(dp_obj.description, 'the very first dp')
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Updated data product %s', dp_obj)

    # test extension
    extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
    self.assertEqual(dp_id, extended_product._id)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.product_download_size_estimated.status)
    self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.parameters.status)
    #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

    def ion_object_encoder(obj):
        return obj.__dict__

    # test prepare for create
    data_product_data = self.dpsc_cli.prepare_data_product_support()
    #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

    self.assertEqual(data_product_data._id, "")
    self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
    self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 0)
    self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0)

    # test prepare for update
    data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)
    #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

    self.assertEqual(data_product_data._id, dp_id)
    self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
    self.assertEqual(len(data_product_data.associations['Dataset'].resources), 1)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1)
    self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, dp_id)
    self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 1)
    self.assertEqual(data_product_data.associations['Dataset'].associated_resources[0].s, dp_id)

    # now 'delete' the data product
    log.debug("deleting data product: %s" % dp_id)
    self.dpsc_cli.delete_data_product(dp_id)

    # Assert that there are no associated streams leftover after deleting the data product
    stream_ids, assoc_ids = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertEquals(len(stream_ids), 0)
    self.assertEquals(len(assoc_ids), 0)

    self.dpsc_cli.force_delete_data_product(dp_id)

    # now try to get the deleted dp object
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # Get the events corresponding to the data product
    ret = self.unsc.get_recent_events(resource_id=dp_id)
    events = ret.value
    for event in events:
        log.debug("event time: %s" % event.ts_created)
    self.assertTrue(len(events) > 0)
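# Hedged helper sketch (not in the original suite) generalizing the
# "no leftover associations" check above; find_objects(..., True) is assumed
# to return (object_ids, association_ids) as it is used throughout this file.
def assert_no_leftover_objects(test_case, subject_id, predicate, object_type):
    obj_ids, assoc_ids = test_case.rrclient.find_objects(subject_id, predicate, object_type, True)
    test_case.assertEquals(len(obj_ids), 0)
    test_case.assertEquals(len(assoc_ids), 0)

# e.g. assert_no_leftover_objects(self, dp_id, PRED.hasStream, RT.Stream)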
def test_data_source_ops(self):
    # test creating a new data source
    print 'Creating new data source'
    datasource_obj = IonObject(RT.DataSource,
                               name='DataSource1',
                               description='instrument based new source',
                               data_source_type='sbe37')
    try:
        ds_id = self.client.create_data_source(datasource_obj)
    except BadRequest as ex:
        self.fail("failed to create new data source: %s" % ex)
    print 'new data source id = ', ds_id

    # test reading a non-existent data source
    print 'reading non-existent data source'
    try:
        dp_obj = self.client.read_data_source('some_fake_id')
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data source was found during read: %s" % dp_obj)

    # update a data source (tests read also)
    print 'Updating data source'
    # first get the existing data source object
    try:
        datasource_obj = self.client.read_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during read")
    else:
        pass

    # now tweak the object
    datasource_obj.description = 'the very first data source'
    # now write the data source back to the registry
    try:
        update_result = self.client.update_data_source(datasource_obj)
    except NotFound as ex:
        self.fail("existing data source was not found during update")
    except Conflict as ex:
        self.fail("revision conflict exception during data source update")
    #else:
    #    self.assertTrue(update_result == True)

    # now get the data source back to see if it was updated
    try:
        datasource_obj = self.client.read_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during read")
    else:
        pass
    self.assertTrue(datasource_obj.description == 'the very first data source')

    # now 'delete' the data source
    print "deleting data source"
    try:
        delete_result = self.client.force_delete_data_source(ds_id)
    except NotFound as ex:
        self.fail("existing data source was not found during delete")
    #self.assertTrue(delete_result == True)

    # now try to get the deleted data source object
    try:
        dp_obj = self.client.read_data_source(ds_id)
    except NotFound as ex:
        pass
    else:
        self.fail("deleted data source was found during read")

    # now try to delete the already deleted data source object
    print "deleting non-existing data source"
    try:
        delete_result = self.client.delete_data_source(ds_id)
    except NotFound as ex:
        pass
    else:
        self.fail("non-existing data source was found during delete")
def test_create_data_product(self):
    #------------------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #------------------------------------------------------------------------------------------------
    ctd_stream_def = ctd_stream_definition()
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(container=ctd_stream_def,
                                                                name='Simulated CTD data')
    log.debug("Created stream def id %s" % ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product w/o a stream definition
    #------------------------------------------------------------------------------------------------
    log.debug('test_createDataProduct: Creating new data product w/o a stream definition (L4-CI-SA-RQ-308)')

    craft = CoverageCraft
    sdom, tdom = craft.create_domains()
    sdom = sdom.dump()
    tdom = tdom.dump()
    parameter_dictionary = craft.create_parameters()
    parameter_dictionary = parameter_dictionary.dump()

    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp',
                       temporal_domain=tdom,
                       spatial_domain=sdom)
    log.debug("Created an IonObject for a data product: %s" % dp_obj)

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------
    log.debug("parameter dictionary: %s" % parameter_dictionary)

    dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                              stream_definition_id=ctd_stream_def_id,
                                              parameter_dictionary=parameter_dictionary)
    dp_obj = self.dpsc_cli.read_data_product(dp_id)

    log.debug('new dp_id = %s' % dp_id)
    log.debug("test_createDataProduct: Data product info from registry %s (L4-CI-SA-RQ-308)", str(dp_obj))

    #------------------------------------------------------------------------------------------------
    # test creating a new data product with a stream definition
    #------------------------------------------------------------------------------------------------
    log.debug('Creating new data product with a stream definition')
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='some new dp',
                       temporal_domain=tdom,
                       spatial_domain=sdom)
    dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id, parameter_dictionary)
    log.debug('new dp_id = %s' % dp_id2)

    #------------------------------------------------------------------------------------------------
    # make sure data product is associated with stream def
    #------------------------------------------------------------------------------------------------
    streamdefs = []
    streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
    for s in streams:
        log.debug("Checking stream %s" % s)
        sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        for sd in sdefs:
            log.debug("Checking streamdef %s" % sd)
            streamdefs.append(sd)
    self.assertIn(ctd_stream_def_id, streamdefs)

    # test reading a non-existent data product
    log.debug('reading non-existent data product')
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

    # update a data product (tests read also)
    log.debug('Updating data product')
    # first get the existing dp object
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    # now tweak the object
    dp_obj.description = 'the very first dp'
    # now write the dp back to the registry
    update_result = self.dpsc_cli.update_data_product(dp_obj)
    # now get the dp back to see if it was updated
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertEquals(dp_obj.description, 'the very first dp')

    # now 'delete' the data product
    log.debug("deleting data product: %s" % dp_id)
    self.dpsc_cli.delete_data_product(dp_id)
def test_create_data_product(self):
    #------------------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #------------------------------------------------------------------------------------------------
    parameter_dictionary_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data',
                                                                parameter_dictionary_id=parameter_dictionary_id)
    log.debug("Created stream def id %s" % ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product w/o a stream definition
    #------------------------------------------------------------------------------------------------

    # Generic time-series data domain creation
    tdom, sdom = time_series_domain()

    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp',
                       temporal_domain=tdom.dump(),
                       spatial_domain=sdom.dump())
    log.debug("Created an IonObject for a data product: %s" % dp_obj)

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------
    dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                              stream_definition_id=ctd_stream_def_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id)

    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product with a stream definition
    #------------------------------------------------------------------------------------------------
    log.debug('Creating new data product with a stream definition')
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='some new dp',
                       temporal_domain=tdom.dump(),
                       spatial_domain=sdom.dump())
    dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id2)
    log.debug('new dp_id = %s' % dp_id2)

    #------------------------------------------------------------------------------------------------
    # make sure data product is associated with stream def
    #------------------------------------------------------------------------------------------------
    streamdefs = []
    streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
    for s in streams:
        log.debug("Checking stream %s" % s)
        sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        for sd in sdefs:
            log.debug("Checking streamdef %s" % sd)
            streamdefs.append(sd)
    self.assertIn(ctd_stream_def_id, streamdefs)

    # test reading a non-existent data product
    log.debug('reading non-existent data product')
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

    # update a data product (tests read also)
    log.debug('Updating data product')
    # first get the existing dp object
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    # now tweak the object
    dp_obj.description = 'the very first dp'
    # now write the dp back to the registry
    update_result = self.dpsc_cli.update_data_product(dp_obj)
    # now get the dp back to see if it was updated
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertEquals(dp_obj.description, 'the very first dp')

    # test extension
    extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
    self.assertEqual(dp_id, extended_product._id)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.product_download_size_estimated.status)
    self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.parameters.status)
    #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

    # now 'delete' the data product
    log.debug("deleting data product: %s" % dp_id)
    self.dpsc_cli.delete_data_product(dp_id)
    self.dpsc_cli.force_delete_data_product(dp_id)

    # now try to get the deleted dp object
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # Get the events corresponding to the data product
    ret = self.unsc.get_recent_events(resource_id=dp_id)
    events = ret.value
    for event in events:
        log.debug("event time: %s" % event.ts_created)
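# Hedged sketch of the computed-attribute pattern asserted above: each entry on
# extended_product.computed appears to carry a status/value pair, where value
# is only meaningful when status is ComputedValueAvailability.PROVIDED.
def computed_value_or_none(computed_attr):
    if computed_attr.status == ComputedValueAvailability.PROVIDED:
        return computed_attr.value
    return None

# e.g. computed_value_or_none(extended_product.computed.product_download_size_estimated) == 0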
def test_create_data_product(self):
    #------------------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #------------------------------------------------------------------------------------------------
    parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data',
                                                                parameter_dictionary_id=parameter_dictionary._id)
    log.debug("Created stream def id %s" % ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product w/o a stream definition
    #------------------------------------------------------------------------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp')
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
    dp_obj.ooi_product_name = "PRODNAME"

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------
    dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                              stream_definition_id=ctd_stream_def_id)

    # Assert that the data product has an associated stream at this stage
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertNotEquals(len(stream_ids), 0)

    # Assert that the data product has an associated stream def at this stage
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
    self.assertNotEquals(len(stream_ids), 0)

    self.dpsc_cli.activate_data_product_persistence(dp_id)

    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Created data product %s', dp_obj)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product with a stream definition
    #------------------------------------------------------------------------------------------------
    log.debug('Creating new data product with a stream definition')
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='some new dp')
    dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id2)
    log.debug('new dp_id = %s' % dp_id2)

    #------------------------------------------------------------------------------------------------
    # make sure data product is associated with stream def
    #------------------------------------------------------------------------------------------------
    streamdefs = []
    streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
    for s in streams:
        log.debug("Checking stream %s" % s)
        sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        for sd in sdefs:
            log.debug("Checking streamdef %s" % sd)
            streamdefs.append(sd)
    self.assertIn(ctd_stream_def_id, streamdefs)

    group_names = self.dpsc_cli.get_data_product_group_list()
    self.assertIn("PRODNAME", group_names)

    #----------------------------------------------------------------------------------------
    # Create users then notifications to this data product for each user
    #----------------------------------------------------------------------------------------

    # user_1
    user_1 = UserInfo()
    user_1.name = 'user_1'
    user_1.contact.email = '*****@*****.**'

    # user_2
    user_2 = UserInfo()
    user_2.name = 'user_2'
    user_2.contact.email = '*****@*****.**'

    # user_1 is a complete user
    self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"
    actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
    actor_id = self.identcli.create_actor_identity(actor_identity_obj)

    user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})
    self.identcli.register_user_credentials(actor_id, user_credentials_obj)
    user_id_1 = self.identcli.create_user_info(actor_id, user_1)

    user_id_2, _ = self.rrclient.create(user_2)

    delivery_config1a = IonObject(OT.DeliveryConfiguration,
                                  email='*****@*****.**',
                                  mode=DeliveryModeEnum.EMAIL,
                                  frequency=NotificationFrequencyEnum.BATCH)
    delivery_config1b = IonObject(OT.DeliveryConfiguration,
                                  email='*****@*****.**',
                                  mode=DeliveryModeEnum.EMAIL,
                                  frequency=NotificationFrequencyEnum.BATCH)
    notification_request_1 = NotificationRequest(name="notification_1",
                                                 origin=dp_id,
                                                 origin_type="type_1",
                                                 event_type=OT.ResourceLifecycleEvent,
                                                 disabled_by_system=False,
                                                 delivery_configurations=[delivery_config1a, delivery_config1b])

    delivery_config2a = IonObject(OT.DeliveryConfiguration,
                                  email='*****@*****.**',
                                  mode=DeliveryModeEnum.EMAIL,
                                  frequency=NotificationFrequencyEnum.BATCH)
    delivery_config2b = IonObject(OT.DeliveryConfiguration,
                                  email='*****@*****.**',
                                  mode=DeliveryModeEnum.EMAIL,
                                  frequency=NotificationFrequencyEnum.BATCH)
    notification_request_2 = NotificationRequest(name="notification_2",
                                                 origin=dp_id,
                                                 origin_type="type_2",
                                                 disabled_by_system=False,
                                                 event_type=OT.DetectionEvent,
                                                 delivery_configurations=[delivery_config2a, delivery_config2b])

    notification_request_1_id = self.unsc.create_notification(notification=notification_request_1, user_id=user_id_1)
    notification_request_2_id = self.unsc.create_notification(notification=notification_request_2, user_id=user_id_2)
    self.unsc.delete_notification(notification_request_1_id)

    # test reading a non-existent data product
    log.debug('reading non-existent data product')
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

    # update a data product (tests read also)
    log.debug('Updating data product')
    # first get the existing dp object
    dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # now tweak the object
    dp_obj.description = 'the very first dp'
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
    # now write the dp back to the registry
    update_result = self.dpsc_cli.update_data_product(dp_obj)

    # now get the dp back to see if it was updated
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertEquals(dp_obj.description, 'the very first dp')
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Updated data product %s', dp_obj)

    # test extension
    extended_product = self.dpsc_cli.get_data_product_extension(dp_id)

    # validate that there is one active and one retired user notification for this data product
    self.assertEqual(1, len(extended_product.computed.active_user_subscriptions.value))
    self.assertEqual(1, len(extended_product.computed.past_user_subscriptions.value))

    self.assertEqual(dp_id, extended_product._id)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.product_download_size_estimated.status)
    self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.parameters.status)
    #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

    def ion_object_encoder(obj):
        return obj.__dict__

    # test prepare for create
    data_product_data = self.dpsc_cli.prepare_data_product_support()
    #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

    self.assertEqual(data_product_data._id, "")
    self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
    self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 0)
    self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0)

    # test prepare for update
    data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)
    #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

    self.assertEqual(data_product_data._id, dp_id)
    self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
    self.assertEqual(len(data_product_data.associations['Dataset'].resources), 1)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1)
    self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, dp_id)
    self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 1)
    self.assertEqual(data_product_data.associations['Dataset'].associated_resources[0].s, dp_id)

    # now 'delete' the data product
    log.debug("deleting data product: %s" % dp_id)
    self.dpsc_cli.delete_data_product(dp_id)

    # Assert that there are no associated streams leftover after deleting the data product
    stream_ids, assoc_ids = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertEquals(len(stream_ids), 0)
    self.assertEquals(len(assoc_ids), 0)

    self.dpsc_cli.force_delete_data_product(dp_id)

    # now try to get the deleted dp object
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # Get the events corresponding to the data product
    ret = self.unsc.get_recent_events(resource_id=dp_id)
    events = ret.value
    for event in events:
        log.debug("event time: %s" % event.ts_created)
    self.assertTrue(len(events) > 0)
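# Hedged convenience sketch for the prepare-support assertions above: the
# associations map appears to key resource-type names to entries carrying
# `resources` (candidates) and `associated_resources` (existing links).
def association_counts(prepare_data, type_name):
    entry = prepare_data.associations[type_name]
    return len(entry.resources), len(entry.associated_resources)

# e.g. association_counts(data_product_data, 'StreamDefinition') == (2, 1)
# after the prepare-for-update call above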
def test_create_data_product(self):
    #------------------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #------------------------------------------------------------------------------------------------
    parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data',
                                                                parameter_dictionary_id=parameter_dictionary._id)
    log.debug("Created stream def id %s" % ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product w/o a stream definition
    #------------------------------------------------------------------------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp')
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
    dp_obj.ooi_product_name = "PRODNAME"

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------
    dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                              stream_definition_id=ctd_stream_def_id)

    # Assert that the data product has an associated stream at this stage
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertNotEquals(len(stream_ids), 0)

    # Assert that the data product has an associated stream def at this stage
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
    self.assertNotEquals(len(stream_ids), 0)

    self.dpsc_cli.activate_data_product_persistence(dp_id)

    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Created data product %s', dp_obj)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product with a stream definition
    #------------------------------------------------------------------------------------------------
    log.debug('Creating new data product with a stream definition')
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='some new dp')
    dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id2)
    log.debug('new dp_id = %s' % dp_id2)

    #------------------------------------------------------------------------------------------------
    # make sure data product is associated with stream def
    #------------------------------------------------------------------------------------------------
    streamdefs = []
    streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
    for s in streams:
        log.debug("Checking stream %s" % s)
        sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        for sd in sdefs:
            log.debug("Checking streamdef %s" % sd)
            streamdefs.append(sd)
    self.assertIn(ctd_stream_def_id, streamdefs)

    group_names = self.dpsc_cli.get_data_product_group_list()
    self.assertIn("PRODNAME", group_names)

    # test reading a non-existent data product
    log.debug('reading non-existent data product')
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

    # update a data product (tests read also)
    log.debug('Updating data product')
    # first get the existing dp object
    dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # now tweak the object
    dp_obj.description = 'the very first dp'
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
    # now write the dp back to the registry
    update_result = self.dpsc_cli.update_data_product(dp_obj)

    # now get the dp back to see if it was updated
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertEquals(dp_obj.description, 'the very first dp')
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Updated data product %s', dp_obj)

    # test extension
    extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
    self.assertEqual(dp_id, extended_product._id)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.product_download_size_estimated.status)
    self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.parameters.status)
    #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

    def ion_object_encoder(obj):
        return obj.__dict__

    # test prepare for create
    data_product_data = self.dpsc_cli.prepare_data_product_support()
    #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

    self.assertEqual(data_product_data._id, "")
    self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
    self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 0)
    self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0)

    # test prepare for update
    data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)
    #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

    self.assertEqual(data_product_data._id, dp_id)
    self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
    self.assertEqual(len(data_product_data.associations['Dataset'].resources), 1)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1)
    self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, dp_id)
    self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 1)
    self.assertEqual(data_product_data.associations['Dataset'].associated_resources[0].s, dp_id)

    # now 'delete' the data product
    log.debug("deleting data product: %s" % dp_id)
    self.dpsc_cli.delete_data_product(dp_id)

    # Assert that there are no associated streams leftover after deleting the data product
    stream_ids, assoc_ids = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertEquals(len(stream_ids), 0)
    self.assertEquals(len(assoc_ids), 0)

    self.dpsc_cli.force_delete_data_product(dp_id)

    # now try to get the deleted dp object
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # Get the events corresponding to the data product
    ret = self.unsc.get_recent_events(resource_id=dp_id)
    events = ret.value
    for event in events:
        log.debug("event time: %s" % event.ts_created)
    self.assertTrue(len(events) > 0)
def test_create_data_product(self):
    #------------------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #------------------------------------------------------------------------------------------------
    parameter_dictionary_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data',
                                                                parameter_dictionary_id=parameter_dictionary_id)
    log.debug("Created stream def id %s" % ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product w/o a stream definition
    #------------------------------------------------------------------------------------------------
    # Generic time-series data domain creation
    tdom, sdom = time_series_domain()

    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp',
                       temporal_domain=tdom.dump(),
                       spatial_domain=sdom.dump())

    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 200.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = 100.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 50.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = 100.0

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------
    dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                              stream_definition_id=ctd_stream_def_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id)

    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)
    self.assertEquals(dp_obj.geospatial_point_center.lat, 150.0)
    log.debug('Created data product %s', dp_obj)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product with a stream definition
    #------------------------------------------------------------------------------------------------
    log.debug('Creating new data product with a stream definition')
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='some new dp',
                       temporal_domain=tdom.dump(),
                       spatial_domain=sdom.dump())

    dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id2)
    log.debug('new dp_id = %s' % dp_id2)

    #------------------------------------------------------------------------------------------------
    # make sure data product is associated with stream def
    #------------------------------------------------------------------------------------------------
    streamdefs = []
    streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
    for s in streams:
        log.debug("Checking stream %s" % s)
        sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        for sd in sdefs:
            log.debug("Checking streamdef %s" % sd)
            streamdefs.append(sd)
    self.assertIn(ctd_stream_def_id, streamdefs)

    # test reading a non-existent data product
    log.debug('reading non-existent data product')
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

    # update a data product (tests read also)
    log.debug('Updating data product')
    # first get the existing dp object
    dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # now tweak the object
    dp_obj.description = 'the very first dp'
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 300.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = 200.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 150.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = 200.0
    # now write the dp back to the registry
    update_result = self.dpsc_cli.update_data_product(dp_obj)

    # now get the dp back to see if it was updated
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertEquals(dp_obj.description, 'the very first dp')
    self.assertEquals(dp_obj.geospatial_point_center.lat, 250.0)
    log.debug('Updated data product %s', dp_obj)

    # test extension
    extended_product = self.dpsc_cli.get_data_product_extension(dp_id)
    self.assertEqual(dp_id, extended_product._id)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.product_download_size_estimated.status)
    self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)

    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.parameters.status)
    #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

    # now 'delete' the data product
    log.debug("deleting data product: %s" % dp_id)
    self.dpsc_cli.delete_data_product(dp_id)
    self.dpsc_cli.force_delete_data_product(dp_id)

    # now try to get the deleted dp object
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # Get the events corresponding to the data product
    ret = self.unsc.get_recent_events(resource_id=dp_id)
    events = ret.value
    for event in events:
        log.debug("event time: %s" % event.ts_created)

    self.assertTrue(len(events) > 0)
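# A minimal sketch of the relationship the assertions above rely on: the point
# center latitude reads as the arithmetic midpoint of the north and south
# bounds ((100 + 200) / 2 == 150 on create, (200 + 300) / 2 == 250 after the
# update). The real computation happens inside the data product management
# service; this standalone function only mirrors the expected arithmetic for
# illustration.
def expected_point_center_lat(north, south):
    return (north + south) / 2.0

# e.g. expected_point_center_lat(300.0, 200.0) == 250.0, matching the
# post-update assertion above.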
def test_create_data_product(self):
    #------------------------------------------------------------------------------------------------
    # create a stream definition for the data from the ctd simulator
    #------------------------------------------------------------------------------------------------
    parameter_dictionary = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict')
    ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data',
                                                                parameter_dictionary_id=parameter_dictionary._id)
    log.debug("Created stream def id %s" % ctd_stream_def_id)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product w/o a stream definition
    #------------------------------------------------------------------------------------------------
    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp')

    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
    dp_obj.ooi_product_name = "PRODNAME"

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------
    dp_id = self.dpsc_cli.create_data_product(data_product=dp_obj,
                                              stream_definition_id=ctd_stream_def_id)

    # Assert that the data product has an associated stream at this stage
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertNotEquals(len(stream_ids), 0)

    # Assert that the data product has an associated stream def at this stage
    stream_ids, _ = self.rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
    self.assertNotEquals(len(stream_ids), 0)

    self.dpsc_cli.activate_data_product_persistence(dp_id)

    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Created data product %s', dp_obj)

    #------------------------------------------------------------------------------------------------
    # test creating a new data product with a stream definition
    #------------------------------------------------------------------------------------------------
    log.debug('Creating new data product with a stream definition')
    dp_obj = IonObject(RT.DataProduct,
                       name='DP2',
                       description='some new dp')

    dp_id2 = self.dpsc_cli.create_data_product(dp_obj, ctd_stream_def_id)
    self.dpsc_cli.activate_data_product_persistence(dp_id2)
    log.debug('new dp_id = %s' % dp_id2)

    #------------------------------------------------------------------------------------------------
    # make sure data product is associated with stream def
    #------------------------------------------------------------------------------------------------
    streamdefs = []
    streams, _ = self.rrclient.find_objects(dp_id2, PRED.hasStream, RT.Stream, True)
    for s in streams:
        log.debug("Checking stream %s" % s)
        sdefs, _ = self.rrclient.find_objects(s, PRED.hasStreamDefinition, RT.StreamDefinition, True)
        for sd in sdefs:
            log.debug("Checking streamdef %s" % sd)
            streamdefs.append(sd)
    self.assertIn(ctd_stream_def_id, streamdefs)

    group_names = self.dpsc_cli.get_data_product_group_list()
    self.assertIn("PRODNAME", group_names)

    #----------------------------------------------------------------------------------------
    # Create users then notifications to this data product for each user
    #----------------------------------------------------------------------------------------

    # user_1
    user_1 = UserInfo()
    user_1.name = 'user_1'
    user_1.contact.email = '*****@*****.**'

    # user_2
    user_2 = UserInfo()
    user_2.name = 'user_2'
    user_2.contact.email = '*****@*****.**'

    # user_1 is a complete user
    self.subject = "/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254"
    actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject})
    actor_id = self.identcli.create_actor_identity(actor_identity_obj)

    user_credentials_obj = IonObject("UserCredentials", {"name": self.subject})
    self.identcli.register_user_credentials(actor_id, user_credentials_obj)
    user_id_1 = self.identcli.create_user_info(actor_id, user_1)

    user_id_2, _ = self.rrclient.create(user_2)

    delivery_config1a = IonObject(OT.DeliveryConfiguration,
                                  email='*****@*****.**',
                                  mode=DeliveryModeEnum.EMAIL,
                                  frequency=NotificationFrequencyEnum.BATCH)
    delivery_config1b = IonObject(OT.DeliveryConfiguration,
                                  email='*****@*****.**',
                                  mode=DeliveryModeEnum.EMAIL,
                                  frequency=NotificationFrequencyEnum.BATCH)
    notification_request_1 = NotificationRequest(name="notification_1",
                                                 origin=dp_id,
                                                 origin_type="type_1",
                                                 event_type=OT.ResourceLifecycleEvent,
                                                 disabled_by_system=False,
                                                 delivery_configurations=[delivery_config1a, delivery_config1b])

    delivery_config2a = IonObject(OT.DeliveryConfiguration,
                                  email='*****@*****.**',
                                  mode=DeliveryModeEnum.EMAIL,
                                  frequency=NotificationFrequencyEnum.BATCH)
    delivery_config2b = IonObject(OT.DeliveryConfiguration,
                                  email='*****@*****.**',
                                  mode=DeliveryModeEnum.EMAIL,
                                  frequency=NotificationFrequencyEnum.BATCH)
    notification_request_2 = NotificationRequest(name="notification_2",
                                                 origin=dp_id,
                                                 origin_type="type_2",
                                                 disabled_by_system=False,
                                                 event_type=OT.DetectionEvent,
                                                 delivery_configurations=[delivery_config2a, delivery_config2b])

    notification_request_1_id = self.unsc.create_notification(notification=notification_request_1,
                                                              user_id=user_id_1)
    notification_request_2_id = self.unsc.create_notification(notification=notification_request_2,
                                                              user_id=user_id_2)
    self.unsc.delete_notification(notification_request_1_id)

    # test reading a non-existent data product
    log.debug('reading non-existent data product')
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product('some_fake_id')

    # update a data product (tests read also)
    log.debug('Updating data product')
    # first get the existing dp object
    dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # now tweak the object
    dp_obj.description = 'the very first dp'
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 20.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -20.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 20.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -20.0
    # now write the dp back to the registry
    update_result = self.dpsc_cli.update_data_product(dp_obj)

    # now get the dp back to see if it was updated
    dp_obj = self.dpsc_cli.read_data_product(dp_id)
    self.assertEquals(dp_obj.description, 'the very first dp')
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Updated data product %s', dp_obj)

    # test extension
    extended_product = self.dpsc_cli.get_data_product_extension(dp_id)

    # validate that there is one active and one retired user notification for this data product
    self.assertEqual(1, len(extended_product.computed.active_user_subscriptions.value))
    self.assertEqual(1, len(extended_product.computed.past_user_subscriptions.value))

    self.assertEqual(dp_id, extended_product._id)
    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.product_download_size_estimated.status)
    self.assertEqual(0, extended_product.computed.product_download_size_estimated.value)

    self.assertEqual(ComputedValueAvailability.PROVIDED,
                     extended_product.computed.parameters.status)
    #log.debug("test_create_data_product: parameters %s" % extended_product.computed.parameters.value)

    def ion_object_encoder(obj):
        return obj.__dict__

    # test prepare for create
    data_product_data = self.dpsc_cli.prepare_data_product_support()
    #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

    self.assertEqual(data_product_data._id, "")
    self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
    self.assertEqual(len(data_product_data.associations['Dataset'].resources), 0)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 0)
    self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 0)

    # test prepare for update
    data_product_data = self.dpsc_cli.prepare_data_product_support(dp_id)
    #print simplejson.dumps(data_product_data, default=ion_object_encoder, indent=2)

    self.assertEqual(data_product_data._id, dp_id)
    self.assertEqual(data_product_data.type_, OT.DataProductPrepareSupport)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].resources), 2)
    self.assertEqual(len(data_product_data.associations['Dataset'].resources), 1)
    self.assertEqual(len(data_product_data.associations['StreamDefinition'].associated_resources), 1)
    self.assertEqual(data_product_data.associations['StreamDefinition'].associated_resources[0].s, dp_id)
    self.assertEqual(len(data_product_data.associations['Dataset'].associated_resources), 1)
    self.assertEqual(data_product_data.associations['Dataset'].associated_resources[0].s, dp_id)

    # now 'delete' the data product
    log.debug("deleting data product: %s" % dp_id)
    self.dpsc_cli.delete_data_product(dp_id)

    # Assert that there are no associated streams leftover after deleting the data product
    stream_ids, assoc_ids = self.rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertEquals(len(stream_ids), 0)
    self.assertEquals(len(assoc_ids), 0)

    self.dpsc_cli.force_delete_data_product(dp_id)

    # now try to get the deleted dp object
    with self.assertRaises(NotFound):
        dp_obj = self.dpsc_cli.read_data_product(dp_id)

    # Get the events corresponding to the data product
    ret = self.unsc.get_recent_events(resource_id=dp_id)
    events = ret.value
    for event in events:
        log.debug("event time: %s" % event.ts_created)

    self.assertTrue(len(events) > 0)
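# The commented-out simplejson.dumps calls above hint at a debugging aid for
# inspecting the prepare_* support objects. A self-contained sketch, assuming
# simplejson is importable in this test environment; the function name is
# hypothetical:
import simplejson

def dump_ion_object(obj):
    # Serialize an IonObject graph via __dict__ for inspection, mirroring the
    # ion_object_encoder defined inside the test above.
    return simplejson.dumps(obj, default=lambda o: o.__dict__, indent=2)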