def create_resource_commitment(self, org_id="", actor_id="", resource_id="", exclusive=False, expiration=0): """Creates a Commitment for the specified resource for a specified actor within the specified Org. Once shared, the resource is committed to the actor. """ org_obj = self._validate_resource_id("org_id", org_id, RT.Org, optional=True) actor_obj = self._validate_resource_id("actor_id", actor_id, RT.ActorIdentity) resource_obj = self._validate_resource_id("resource_id", resource_id) if org_id: # Check that resource is shared in Org? pass res_commitment = IonObject(OT.ResourceCommitment, resource_id=resource_id, exclusive=exclusive) commitment = IonObject(RT.Commitment, name="", provider=org_id, consumer=actor_id, commitment=res_commitment, description="Resource Commitment", expiration=str(expiration)) commitment._id, commitment._rev = self.rr.create(commitment) # Creating associations to all related objects self.rr.create_association(actor_id, PRED.hasCommitment, commitment._id) self.rr.create_association(commitment._id, PRED.hasTarget, resource_id) if org_id: self.rr.create_association(org_id, PRED.hasCommitment, commitment._id) self.event_pub.publish_event(event_type=OT.ResourceCommitmentCreatedEvent, origin=org_id, origin_type="Org", sub_type=resource_obj.type_, description="The resource has been committed by the Org", resource_id=resource_id, org_name=org_obj.name, commitment_id=commitment._id, commitment_type=commitment.commitment.type_) return commitment._id
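# Illustrative only: a minimal sketch of how a caller might use create_resource_commitment()
# above. The org/actor/resource ids and the org_service handle are placeholders, and the
# governance checks a real Org service performs around this call are omitted.
def commit_instrument_to_actor(org_service, org_id, actor_id, instrument_id):
    # Non-exclusive commitment with no expiration; the service publishes a
    # ResourceCommitmentCreatedEvent and returns the new Commitment resource id.
    commitment_id = org_service.create_resource_commitment(org_id=org_id,
                                                           actor_id=actor_id,
                                                           resource_id=instrument_id,
                                                           exclusive=False,
                                                           expiration=0)
    return commitment_id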
def get_is_persisted(self, data_product_id=''): # Returns True if data product is currently being persisted ret = IonObject(OT.ComputedIntValue) ret.value = self.is_persisted(data_product_id) ret.status = ComputedValueAvailability.PROVIDED return ret
def run_reverse_transform(self):
    ''' Runs a reverse transform example and displays the results of performing the transform '''
    tms_cli = TransformManagementServiceClient(node=self.container.node)
    procd_cli = ProcessDispatcherServiceClient(node=self.container.node)

    #-------------------------------
    # Process Definition
    #-------------------------------
    process_definition = IonObject(RT.ProcessDefinition, name='transform_process_definition')
    process_definition.executable = {
        'module': 'ion.processes.data.transforms.transform_example',
        'class': 'ReverseTransform'
    }
    process_definition_id = procd_cli.create_process_definition(process_definition)

    #-------------------------------
    # Execute Transform
    #-------------------------------
    input = [1, 2, 3, 4]
    retval = tms_cli.execute_transform(process_definition_id=process_definition_id,
                                       data=input,
                                       configuration={})

    log.debug('Transform Input: %s', input)
    log.debug('Transform Output: %s', retval)
def _get_type_interface(self, res_type):
    """
    Creates a merge of params and commands up the type inheritance chain.
    Note: entire param and command entries in subtypes replace their supertypes' definitions.
    """
    res_interface = dict(params={}, commands={})

    base_types = IonObject(res_type)._get_extends()
    base_types.insert(0, res_type)
    for rt in reversed(base_types):
        type_interface = self.resource_interface.get(rt, None)
        if not type_interface:
            continue
        for tpar, tval in type_interface.iteritems():
            if tpar in res_interface:
                rval = res_interface[tpar]
                if isinstance(rval, dict):
                    rval.update(tval)
                else:
                    res_interface[tpar] = tval
            else:
                res_interface[tpar] = dict(tval) if isinstance(tval, dict) else tval

    return res_interface
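# Illustrative only: a self-contained sketch of the merge behaviour implemented above, using
# made-up type names rather than the real resource_interface registry. Walking the inheritance
# chain from the most general type down means a subtype's 'params'/'commands' entries
# overwrite the ones inherited from its supertypes.
def _merge_type_interfaces_example():
    resource_interface = {
        "Resource": {"params": {"name": {"type": "str"}}, "commands": {}},
        "Device":   {"params": {"name": {"type": "unicode"}, "serial": {"type": "str"}}},
    }
    merged = dict(params={}, commands={})
    for rt in ["Resource", "Device"]:          # most general type first
        for key, val in resource_interface.get(rt, {}).iteritems():
            if key in merged and isinstance(merged[key], dict):
                merged[key].update(val)        # subtype entries win
            else:
                merged[key] = dict(val) if isinstance(val, dict) else val
    return merged                              # params['name'] comes from Device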
def acquire_resource(self, sap=None):
    """Creates a Commitment resource for the specified resource for a specified user within the
    specified Org, as defined in the proposal. Once shared, the resource is committed to the user.
    Throws a NotFound exception if any of the ids are not found.

    @param sap          AcquireResourceProposal
    @retval commitment_id str
    @throws NotFound    object with specified id does not exist
    """
    param_objects = self._validate_parameters(org_id=sap.provider, user_id=sap.consumer, resource_id=sap.resource)

    if sap.type_ == OT.AcquireResourceExclusiveProposal:
        exclusive = True
    else:
        exclusive = False

    res_commitment = IonObject(OT.ResourceCommitment, resource_id=sap.resource, exclusive=exclusive)

    commitment = IonObject(RT.Commitment, name='', provider=sap.provider, consumer=sap.consumer,
                           commitment=res_commitment, description='Resource Commitment',
                           expiration=sap.expiration)

    commitment_id, commitment_rev = self.clients.resource_registry.create(commitment)
    commitment._id = commitment_id
    commitment._rev = commitment_rev

    # Creating associations to all objects
    self.clients.resource_registry.create_association(sap.provider, PRED.hasCommitment, commitment_id)
    self.clients.resource_registry.create_association(sap.consumer, PRED.hasCommitment, commitment_id)
    self.clients.resource_registry.create_association(sap.resource, PRED.hasCommitment, commitment_id)
    self.clients.resource_registry.create_association(sap.negotiation_id, PRED.hasContract, commitment_id)

    # TODO - publish some kind of event for creating a commitment

    return commitment_id
def _convert_negotiations_to_requests(self, negotiations=None, user_info_id='', org_id=''): assert isinstance(negotiations, list) orgs,_ = self.clients.resource_registry.find_resources(restype=RT.Org) ret_list = [] for neg in negotiations: request = IonObject(OT.OrgUserNegotiationRequest, ts_updated=neg.ts_updated, negotiation_id=neg._id, negotiation_type=NegotiationTypeEnum._str_map[neg.negotiation_type], negotiation_status=NegotiationStatusEnum._str_map[neg.negotiation_status], originator=ProposalOriginatorEnum._str_map[neg.proposals[-1].originator], request_type=neg.proposals[-1].type_, description=neg.description, reason=neg.reason, user_id=user_info_id) # since this is a proxy for the Negotiation object, simulate its id to help the UI deal with it request._id = neg._id org_request = [ o for o in orgs if o._id == neg.proposals[-1].provider ] if org_request: request.org_id = org_request[0]._id request.name = org_request[0].name ret_list.append(request) return ret_list
def _get_computed_events(self, events, add_usernames=True, include_events=False):
    """
    Get events for use in extended resource computed attribute
    @retval ComputedEventListValue with a value list of Event objects and a computed_list of
            EventComputedAttributes, one per event
    """
    events = events or []

    ret = IonObject(OT.ComputedEventListValue)
    ret.value = events
    ret.computed_list = [get_event_computed_attributes(event, include_event=include_events) for event in events]
    ret.status = ComputedValueAvailability.PROVIDED

    if add_usernames:
        try:
            actor_ids = {evt.actor_id for evt in events if evt.actor_id}
            log.debug("Looking up UserInfo for actors: %s" % actor_ids)
            if actor_ids:
                userinfo_list, assoc_list = self.clients.resource_registry.find_objects_mult(actor_ids,
                                                                                             predicate=PRED.hasInfo,
                                                                                             id_only=False)
                actor_map = {assoc.s: uinfo for uinfo, assoc in zip(userinfo_list, assoc_list)}

                for evt, evt_cmp in zip(events, ret.computed_list):
                    ui = actor_map.get(evt.actor_id, None)
                    if ui:
                        evt_cmp["event_summary"] += " [%s %s]" % (ui.contact.individual_names_given, ui.contact.individual_name_family)

        except Exception as ex:
            log.exception("Cannot find user names for event actor_ids")

    return ret
def test_createDataProduct_and_DataProducer_with_id_BadRequest(self): # setup self.resource_registry.find_resources.return_value = ([], 'do not care') self.resource_registry.create.return_value = ('SOME_RR_ID1', 'Version_1') self.data_acquisition_management.create_data_producer.side_effect = BadRequest("Create cannot create document with ID: ") # Data Product dpt_obj = IonObject(RT.DataProduct, name='DPT_X', description='some new data product') # Data Producer dpr_obj = IonObject(RT.DataProducer, name='DP_X', description='some new data producer') dpr_obj._id = "SOME_OTHER_RR_ID" # test call with self.assertRaises(BadRequest) as cm: dp_id = self.data_product_management_service.create_data_product(dpt_obj, dpr_obj) # check results self.resource_registry.find_resources.assert_called_once_with(RT.DataProduct, None, dpt_obj.name, True) self.resource_registry.create.assert_called_once_with(dpt_obj) self.data_acquisition_management.create_data_producer.assert_called_once_with(dpr_obj) ex = cm.exception self.assertEqual(ex.message, "Create cannot create document with ID: ")
def _create_association(self, subject=None, predicate=None, obj=None, support_bulk=False): """ Create an association between two IonObjects with a given predicate. Supports bulk mode """ if self.bulk and support_bulk: if not subject or not predicate or not obj: raise BadRequest("Association must have all elements set: %s/%s/%s" % (subject, predicate, obj)) if isinstance(subject, basestring): subject = self._get_resource_obj(subject) if "_id" not in subject: raise BadRequest("Subject id not available") subject_id = subject._id st = subject.type_ if isinstance(obj, basestring): obj = self._get_resource_obj(obj) if "_id" not in obj: raise BadRequest("Object id not available") object_id = obj._id ot = obj.type_ assoc_id = create_unique_association_id() assoc_obj = IonObject("Association", s=subject_id, st=st, p=predicate, o=object_id, ot=ot, ts=get_ion_ts()) assoc_obj._id = assoc_id self.bulk_associations[assoc_id] = assoc_obj return assoc_id, '1-norev' else: return self.rr.create_association(subject, predicate, obj)
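# Illustrative only: how the support_bulk flag above changes behaviour. In bulk mode the
# association is buffered in self.bulk_associations for a later mass insert; otherwise it is
# written to the resource registry immediately. The loader handle and ids are placeholders.
def add_device_to_site(loader, site_id, device_id):
    # support_bulk=True lets the loader defer the write when loader.bulk is enabled
    assoc_id, _ = loader._create_association(site_id, PRED.hasDevice, device_id, support_bulk=True)
    return assoc_id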
def get_user_notifications(self, user_id=''): ''' Get the notification request objects that are subscribed to by the user @param user_id str @retval notifications list of NotificationRequest objects ''' user = self.clients.resource_registry.read(user_id) if not user: return None if not user.name: raise BadRequest("Please assign a name to the resource. Example: resource.name = \'Irene\' for UNS to " "be able to fetch the related notifications") if self.event_processor.user_info.has_key(user.name): notifications = self.event_processor.user_info[user.name]['notifications'] ret = IonObject(OT.ComputedListValue) if notifications: ret.value = notifications ret.status = ComputedValueAvailability.PROVIDED else: ret.status = ComputedValueAvailability.NOTAVAILABLE return ret else: return None
def get_data_datetime(self, data_product_id=''):
    # Returns the temporal bounds of the data product's data (the product may exist without having received a granule)
    ret = IonObject(OT.ComputedStringValue)
    ret.value = ""
    ret.status = ComputedValueAvailability.NOTAVAILABLE
    ret.reason = "FIXME. also, should datetime be stored as a string?"

    return ret

def get_data_ingestion_datetime(self, data_product_id=''):
    # Returns the temporal bounds of the earliest/most recent values ingested into the data product
    ret = IonObject(OT.ComputedStringValue)
    ret.value = ""
    ret.status = ComputedValueAvailability.NOTAVAILABLE
    ret.reason = "FIXME. also, should datetime be stored as a string?"

    return ret
def fun(): #ret = Mock() log.debug("Creating sample %s", iontype) ret = IonObject(iontype) ret.name = "sample %s" % iontype ret.description = "description of sample %s" % iontype for k, v in resource_params.iteritems(): setattr(ret, k, v) return ret
def get_data_url(self, data_product_id=''): # The unique pointer to this set of data ret = IonObject(OT.ComputedStringValue) ret.value = "" ret.status = ComputedValueAvailability.NOTAVAILABLE ret.reason = "FIXME." return ret
def get_data_contents_updated(self, data_product_id=''): # the datetime when the contents of the data were last modified in any way. # This is distinct from modifications to the data product attributes ret = IonObject(OT.ComputedStringValue) ret.value = "" ret.status = ComputedValueAvailability.NOTAVAILABLE ret.reason = "FIXME. also, should datetime be stored as a string?" return ret
def create_ion_object(object_params): new_obj = IonObject(object_params["type_"]) #Iterate over the parameters to add to object; have to do this instead #of passing a dict to get around restrictions in object creation on setting _id, _rev params for param in object_params: set_object_field(new_obj, param, object_params.get(param)) new_obj._validate() # verify that all of the object fields were set with proper types return new_obj
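# Illustrative only: the kind of parameter dict the helper above accepts. "type_" selects the
# IonObject class; any "_id"/"_rev" entries are applied via set_object_field because the
# IonObject constructor refuses them as keyword arguments. Field names here are examples.
example_params = {
    "type_": "InstrumentDevice",
    "name": "SBE37IMDevice",
    "description": "created from an HTTP payload",
    "_id": "abc123",          # accepted only because it is set after construction
}
# obj = create_ion_object(example_params)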
def parse_constraints(self, environ): # print ">> Start parse_constraints" # import pprint # pprint.pprint("") # pprint.pprint(environ) ds_name, ds_id, ds_url, buf_size = get_dataset_info(self) # print "DS Info: name=%s ds_id=%s ds_url=%s buf_size=%s" % (ds_name, ds_id, ds_url, buf_size) # TODO: Call the "damsP" module to retrieve a BaseDatasetHandler based on the ds_id dsh = self.damsP.get_data_handlers(ds_id=ds_id) #DSH WAY dataset_type = DatasetType(name=ds_name, attributes={'NC_GLOBAL': dsh.get_attributes()}) fields, queries = environ['pydap.ce'] fields = fields or [[(name, ())] for name in dsh.ds.variables] # print "CE Fields: %s" % fields # print "CE Queries: %s" % queries pdr_obj = IonObject("PydapVarDataRequest", name="p_req") for fvar in fields: target = dataset_type while fvar: name, slice_ = fvar.pop(0) pdr_obj.name = name pdr_obj.slice = slice_ if (name in dsh.ds.dimensions or not dsh.ds.variables[name].dimensions or target is not dataset_type): # print "==> if" nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj) target[name] = BaseType(name=nm, data=dat, shape=dat.shape, type=tc, dimensions=di, attributes=at) elif fvar: # print "==> elif" attrs = dsh.get_attributes(name) target.setdefault(name, StructureType(name=name, attributes=attrs)) target = target[name] else: # print "==> else" attrs = dsh.get_attributes(name) grid = target[name] = GridType(name=name, attributes=attrs) nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj) grid[name] = BaseType(name=nm, data=dat, shape=dat.shape, type=tc, dimensions=di, attributes=at) slice_ = list(slice_) + [slice(None)] * (len(grid.array.shape) - len(slice_)) for dim, dimslice in zip(dsh.ds.variables[name].dimensions, slice_): pdr_obj.name=dim pdr_obj.slice=dimslice nm, dat, tc, di, at = dsh.acquire_data(request=pdr_obj) grid[dim] = BaseType(name=nm, data=dat, shape=dat.shape, type=tc, dimensions=di, attributes=at) dataset_type._set_id() dataset_type.close = dsh.ds.close # print ">> End parse_constraints" return dataset_type
def test_tuple_in_dict(self): # create a resource with a tuple saved in a dict transform_obj = IonObject(RT.Transform) transform_obj.configuration = {} transform_obj.configuration["tuple"] = ('STRING',) transform_id, _ = self.resource_registry_service.create(transform_obj) # read the resource back returned_transform_obj = self.resource_registry_service.read(transform_id) self.assertEqual(transform_obj.configuration["tuple"], returned_transform_obj.configuration["tuple"])
def test_greater_than_interval(self):
    """
    test_greater_than_interval
    Test interval alarm and alarm event publishing for a greater than interval.
    """

    kwargs = {
        'name' : 'current_warning_interval',
        'stream_name' : 'fakestreamname',
        'value_id' : 'port_current',
        'message' : 'Current is above normal range.',
        'type' : StreamAlarmType.WARNING,
        'lower_bound' : 10.5,
        'lower_rel_op' : '<'
    }

    if TEST_ION_OBJECTS:
        # Create alarm object.
        alarm = IonObject('IntervalAlarmDef', **kwargs)
        alarm = construct_alarm_expression(alarm)
    else:
        alarm = IntervalAlarm(**kwargs)

    # This sequence will produce 5 alarms:
    # All clear on the first value,
    # Warning on the first 30,
    # All clear on the following 5.5,
    # Warning on the 15.1,
    # All clear on the following 3.3.
    self._event_count = 5
    test_vals = [5.5, 5.4, 5.5, 5.6, 30, 30.4, 5.5, 5.6, 15.1, 15.2, 15.3, 3.3, 3.4]

    pub = EventPublisher(event_type="StreamAlarmEvent", node=self.container.node)

    for x in test_vals:
        if TEST_ION_OBJECTS:
            (alarm, event_data) = eval_alarm(alarm, x)
        else:
            event_data = alarm.eval_alarm(x)
        if event_data:
            pub.publish_event(origin=self._resource_id, **event_data)

    self._async_event_result.get(timeout=30)
def create_ion_object(self, object_params): """Create and initialize an ION object from a dictionary of parameters coming via HTTP, ready to be passed on to services/messaging. The object is validated after creation. Note: This is not called for service operation argument signatures """ new_obj = IonObject(object_params["type_"]) # Iterate over the parameters to add to object; have to do this instead # of passing a dict to get around restrictions in object creation on setting _id, _rev params for param in object_params: self.set_object_field(new_obj, param, object_params.get(param)) new_obj._validate() # verify that all of the object fields were set with proper types return new_obj
def get_past_user_subscriptions(self, data_product_id=''): # Provides information for users who have in the past acquired this data product, but for which that acquisition was terminated ret = IonObject(OT.ComputedListValue) ret.value = [] try: ret.status = ComputedValueAvailability.PROVIDED raise NotFound #todo: ret.value = ??? except NotFound: ret.status = ComputedValueAvailability.NOTAVAILABLE ret.reason = "FIXME: this message should say why the calculation couldn't be done" except Exception as e: raise e return ret
def get_active_user_subscriptions(self, data_product_id=''): # The UserSubscription objects for this data product ret = IonObject(OT.ComputedListValue) ret.value = [] try: ret.status = ComputedValueAvailability.PROVIDED raise NotFound #todo: ret.value = ??? except NotFound: ret.status = ComputedValueAvailability.NOTAVAILABLE ret.reason = "FIXME: this message should say why the calculation couldn't be done" except Exception as e: raise e return ret
def test_two_sided_interval(self):
    """
    test_two_sided_interval
    Test interval alarm and alarm event publishing for a closed interval.
    """

    kwargs = {
        'name' : 'temp_high_warning',
        'stream_name' : 'fakestreamname',
        'value_id' : 'temp',
        'message' : 'Temperature is above normal range.',
        'type' : StreamAlarmType.WARNING,
        'lower_bound' : 10.0,
        'lower_rel_op' : '<',
        'upper_bound' : 20.0,
        'upper_rel_op' : '<'
    }

    if TEST_ION_OBJECTS:
        # Create alarm object.
        alarm = IonObject('IntervalAlarmDef', **kwargs)
        alarm = construct_alarm_expression(alarm)
    else:
        alarm = IntervalAlarm(**kwargs)

    # This sequence will produce 5 alarms.
    self._event_count = 5
    test_vals = [5.5, 5.5, 5.4, 4.6, 4.5, 10.2, 10.3, 10.5, 15.5, 23.3, 23.3, 24.8, 17.5, 16.5, 12.5, 8.8, 7.7]

    pub = EventPublisher(event_type="StreamAlarmEvent", node=self.container.node)

    for x in test_vals:
        if TEST_ION_OBJECTS:
            (alarm, event_data) = eval_alarm(alarm, x)
        else:
            event_data = alarm.eval_alarm(x)
        if event_data:
            pub.publish_event(origin=self._resource_id, **event_data)

    self._async_event_result.get(timeout=30)
def get_provenance(self, data_product_id=''):
    # Provides an audit trail for modifications to the original data
    ret = IonObject(OT.ComputedDictValue)

    try:
        ret.value = self.get_data_product_provenance(data_product_id)
        ret.status = ComputedValueAvailability.PROVIDED
    except NotFound:
        ret.status = ComputedValueAvailability.NOTAVAILABLE
        ret.reason = "Error in DataProductMgmtService:get_data_product_provenance"
    except Exception as e:
        raise e

    return ret

def get_number_active_subscriptions(self, data_product_id=''):
    # The number of current subscriptions to the data
    ret = IonObject(OT.ComputedIntValue)
    ret.value = 0
    try:
        ret.status = ComputedValueAvailability.PROVIDED
        raise NotFound   #todo: ret.value = ???
    except NotFound:
        ret.status = ComputedValueAvailability.NOTAVAILABLE
        ret.reason = "FIXME: this message should say why the calculation couldn't be done"
    except Exception as e:
        raise e

    return ret
def test_user_info(self): actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject}) user_id = self.identity_management_service.create_actor_identity(actor_identity_obj) user_credentials_obj = IonObject("UserCredentials", {"name": self.subject}) self.identity_management_service.register_user_credentials(user_id, user_credentials_obj) user_info_obj = IonObject("UserInfo", {"name": "Foo"}) user_info = self.identity_management_service.create_user_info(user_id, user_info_obj) with self.assertRaises(Conflict) as cm: self.identity_management_service.create_user_info(user_id, user_info_obj) self.assertTrue("UserInfo already exists for user id" in cm.exception.message) user_info_obj = self.identity_management_service.find_user_info_by_id(user_id) user_info_obj = self.identity_management_service.find_user_info_by_name("Foo") user_info_obj = self.identity_management_service.find_user_info_by_subject(self.subject) user_info_obj = self.identity_management_service.read_user_info(user_info) user_info_obj.name = 'Jane Doe' self.identity_management_service.update_user_info(user_info_obj) self.identity_management_service.delete_user_info(user_info) with self.assertRaises(NotFound) as cm: self.identity_management_service.read_user_info(user_info) self.assertTrue('does not exist' in cm.exception.message) with self.assertRaises(NotFound) as cm: self.identity_management_service.delete_user_info(user_info) self.assertTrue('does not exist' in cm.exception.message) with self.assertRaises(NotFound) as cm: self.identity_management_service.find_user_info_by_name("John Doe") self.assertEqual(cm.exception.message, 'UserInfo with name John Doe does not exist') with self.assertRaises(NotFound) as cm: self.identity_management_service.find_user_info_by_subject("Bogus subject") self.assertEqual(cm.exception.message, "UserCredentials with subject Bogus subject does not exist") self.identity_management_service.unregister_user_credentials(user_id, self.subject) self.identity_management_service.delete_actor_identity(user_id)
def load_mock_events(self, event_list): is_first = len(self.events) == 0 for cnt, event_entry in enumerate(event_list): origin = event_entry.get('o', None) origin_type = event_entry.get('ot', None) sub_type = event_entry.get('st', None) attr = event_entry.get('attr', {}) evt_obj = IonObject(event_entry['et'], origin=origin, origin_type=origin_type, sub_type=sub_type, ts_created=get_ion_ts(), **attr) evt_obj._id = str(cnt) self.events.append(evt_obj) if is_first: self.container_mock.event_repository.find_events = Mock() def side_effect(event_type=None, **kwargs): return [(evt._id, None, evt) for evt in reversed(self.events) if evt.type_ == event_type] self.container_mock.event_repository.find_events.side_effect = side_effect
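# Illustrative only: the shape of an event_list entry consumed by load_mock_events() above.
# 'et' is the event type name; 'o', 'ot' and 'st' map to origin, origin_type and sub_type;
# 'attr' carries any extra event attributes. The values here are made up.
example_event_list = [
    {'et': 'ResourceLifecycleEvent', 'o': 'instrument_1', 'ot': 'InstrumentDevice',
     'st': 'DEPLOYED', 'attr': {'description': 'lifecycle change'}},
    {'et': 'ResourceModifiedEvent', 'o': 'instrument_1', 'ot': 'InstrumentDevice'},
]
# self.load_mock_events(example_event_list)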
def create_negotiation(self, sap=None):

    if sap is None or (sap.type_ != OT.ServiceAgreementProposal and not issubtype(sap.type_, OT.ServiceAgreementProposal)):
        raise BadRequest('The sap parameter must be a valid Service Agreement Proposal object')

    if sap.proposal_status != ProposalStatusEnum.INITIAL or sap.sequence_num != 0:
        raise Inconsistent('The specified Service Agreement Proposal has inconsistent status fields')

    if sap.negotiation_id != '':
        raise Inconsistent('The specified Service Agreement Proposal cannot have a negotiation_id for an initial proposal')

    if sap.type_ in self.negotiation_rules:
        # Validate preconditions before creating
        for pc in self.negotiation_rules[sap.type_]['pre_conditions']:
            if pc.startswith('not '):
                # Strip off the leading 'not ' prefix (str.lstrip would strip characters, not the prefix)
                pre_condition_met = not eval("self.service_provider." + pc[len('not '):])
            else:
                pre_condition_met = eval("self.service_provider." + pc)

            if not pre_condition_met:
                raise BadRequest("A precondition for this request has not been satisfied: %s" % pc)

    # Should be able to determine the negotiation type based on the initial originator
    neg_type = NegotiationTypeEnum.REQUEST
    if sap.originator == ProposalOriginatorEnum.PROVIDER:
        neg_type = NegotiationTypeEnum.INVITATION
    elif sap.originator == ProposalOriginatorEnum.BROKER:
        neg_type = NegotiationTypeEnum.BROKERED

    neg_obj = IonObject(RT.Negotiation, negotiation_type=neg_type)

    # If there is a description in the initial proposal, then set the negotiation description with it.
    if sap.description != '':
        neg_obj.description = sap.description

    neg_id, _ = self.service_provider.clients.resource_registry.create(neg_obj)

    # Create associations between the parties
    self.service_provider.clients.resource_registry.create_association(sap.consumer, PRED.hasNegotiation, neg_id)
    self.service_provider.clients.resource_registry.create_association(sap.provider, PRED.hasNegotiation, neg_id)
    if sap.broker != "":
        self.service_provider.clients.resource_registry.create_association(sap.broker, PRED.hasNegotiation, neg_id)

    return neg_id
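# Illustrative only: the style of precondition expressions evaluated above. Each entry names a
# boolean method call (optionally negated with a leading 'not ') that is evaluated against the
# service provider. These particular method names are placeholders, not the real
# negotiation_rules table.
example_negotiation_rules = {
    'EnrollmentProposal': {
        'pre_conditions': ['is_registered(sap.consumer)',
                           'not is_enrolled(sap.provider, sap.consumer)'],
    }
}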
def load_mock_associations(self, assoc_list): for assoc_entry in assoc_list: sid = assoc_entry[0] oid = assoc_entry[2] st = self.res_objs[sid]._get_type() ot = self.res_objs[oid]._get_type() ass_obj = IonObject('Association', s=sid, st=st, o=oid, ot=ot, p=assoc_entry[1], ts=get_ion_ts()) ass_obj._id = "%s_%s_%s" % (sid, assoc_entry[1], oid) self.associations.append(ass_obj) self.container_mock.resource_registry.find_associations = Mock() def side_effect(subject=None, predicate=None, obj=None, **kwargs): if predicate: assocs = [assoc for assoc in self.associations if assoc.p == predicate] else: assocs = self.associations return assocs self.container_mock.resource_registry.find_associations.side_effect = side_effect
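# Illustrative only: an assoc_list entry for load_mock_associations() above is a
# (subject_id, predicate, object_id) triple; the ids must already exist in self.res_objs.
# The ids and predicates below are placeholders.
example_assoc_list = [
    ('org_1', 'hasMembership', 'actor_1'),
    ('platform_1', 'hasDevice', 'instrument_1'),
]
# self.load_mock_associations(example_assoc_list)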
def bootstrap_viz_svc(self, config):

    # Create process definitions which will be used to spawn off the transform processes
    matplotlib_proc_def = IonObject(RT.ProcessDefinition, name='viz_matplotlib_transform_process')
    matplotlib_proc_def.executable = {
        'module': 'ion.services.ans.visualization_service',
        'class': 'VizTransformProcForMatplotlibGraphs'
    }
    matplotlib_proc_def_id, _ = self.clients.resource_registry.create(matplotlib_proc_def)

    google_dt_proc_def = IonObject(RT.ProcessDefinition, name='viz_google_dt_transform_process')
    google_dt_proc_def.executable = {
        'module': 'ion.services.ans.visualization_service',
        'class': 'VizTransformProcForGoogleDT'
    }
    google_dt_proc_def_id, _ = self.clients.resource_registry.create(google_dt_proc_def)

    return
def test_update_bad_wrongtype_fun(self): """ self is an instance of the tester class """ log.debug("test_update_bad_wrongtype_fun") # get objects svc = self._utg_getservice() testfun = self._utg_getcrudmethod(resource_label, "update") bad_sample_resource = IonObject(RT.Resource, name="Generic Name") setattr(bad_sample_resource, "_id", "111") if all_in_one: svc.clients.resource_registry.update.reset_mock() self.assertRaisesRegexp(BadRequest, "type", testfun, bad_sample_resource) self.assertEqual(0, svc.clients.resource_registry.update.call_count)
def create_transform_process(self, data_process_definition_id, data_process_input_dp_id, stream_name):

    data_process_definition = self.rrclient.read(data_process_definition_id)

    # Find the link between the output Stream Definition resource and the Data Process Definition resource
    stream_ids, _ = self.rrclient.find_objects(data_process_definition._id, PRED.hasStreamDefinition, RT.StreamDefinition, id_only=True)
    if not stream_ids:
        raise Inconsistent("The data process definition %s is missing an association to an output stream definition" % data_process_definition._id)
    process_output_stream_def_id = stream_ids[0]

    # Name the output data product after the data process definition
    data_process_name = data_process_definition.name

    # Create the output data product of the transform
    tdom, sdom = time_series_domain()
    transform_dp_obj = IonObject(RT.DataProduct,
                                 name=data_process_name,
                                 description=data_process_definition.description,
                                 temporal_domain=tdom.dump(),
                                 spatial_domain=sdom.dump())

    transform_dp_id = self.dataproductclient.create_data_product(transform_dp_obj, process_output_stream_def_id)
    self.dataproductclient.activate_data_product_persistence(data_product_id=transform_dp_id)

    # The newly created data product is the output product of this transform
    output_data_product_id = transform_dp_id

    # Create the transform data process
    log.debug("create data_process and start it")
    data_process_id = self.dataprocessclient.create_data_process(
        data_process_definition_id=data_process_definition._id,
        in_data_product_ids=[data_process_input_dp_id],
        out_data_product_ids=[transform_dp_id])
    self.dataprocessclient.activate_data_process(data_process_id)

    # Find the id of the output data stream
    stream_ids, _ = self.rrclient.find_objects(transform_dp_id, PRED.hasStream, None, True)
    if not stream_ids:
        raise Inconsistent("The data process %s is missing an association to an output stream" % data_process_id)

    return data_process_id, output_data_product_id
def trigger_container_snapshot(self, snapshot_id='', include_snapshots=None, exclude_snapshots=None, take_at_time='', clear_all=False, persist_snapshot=True, snapshot_kwargs=None): if not snapshot_id: snapshot_id = get_ion_ts() if not snapshot_kwargs: snapshot_kwargs = {} self.perform_action(ALL_CONTAINERS_INSTANCE, IonObject(OT.TriggerContainerSnapshot, snapshot_id=snapshot_id, include_snapshots=include_snapshots, exclude_snapshots=exclude_snapshots, take_at_time=take_at_time, clear_all=clear_all, persist_snapshot=persist_snapshot, snapshot_kwargs=snapshot_kwargs)) log.info("Event to trigger container snapshots sent. snapshot_id=%s" % snapshot_id)
def _create_instrument_device(self, instModel_id): instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345") instDevice_id = self.imsclient.create_instrument_device( instrument_device=instDevice_obj) self.imsclient.assign_instrument_model_to_instrument_device( instModel_id, instDevice_id) log.debug( "test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) return instDevice_id
def test_find_event_types_for_resource(self):
    # create a dataset object in the RR to pass into the UNS method
    dataset_object = IonObject(RT.DataSet, name="dataset1")
    dataset_id, version = self.rrc.create(dataset_object)

    # get the list of event types for the dataset
    events = self.unsc.find_event_types_for_resource(dataset_id)
    log.debug("dataset events = " + str(events))
    if not events == ['dataset_supplement_added', 'dataset_change']:
        self.fail("failed to return correct list of event types")

    # try to pass in an id of a resource that doesn't exist (should fail)
    # Catch NotFound specifically: a bare except here would also swallow the AssertionError
    # raised by self.fail() and hide a test failure.
    try:
        events = self.unsc.find_event_types_for_resource("bogus_id")
        self.fail("failed to detect non-existent resource")
    except NotFound:
        pass
def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2cei.yml') #self.pd_cli = ProcessDispatcherServiceClient(node=self.container.node) self.pd_cli = ProcessDispatcherServiceClient( to_name="process_dispatcher") self.process_definition_id = uuid4().hex self.process_definition_name = 'test_haagent_%s' % self.process_definition_id self.process_definition = ProcessDefinition( name=self.process_definition_name, executable={ 'module': 'ion.agents.cei.test.test_haagent', 'class': 'TestProcess' }) self.pd_cli.create_process_definition(self.process_definition, self.process_definition_id) service_definition = SERVICE_DEFINITION_TMPL % self.process_definition_name sd = IonObject(RT.ServiceDefinition, { "name": self.process_definition_name, "definition": service_definition }) self.service_def_id, _ = self.container.resource_registry.create(sd) self.resource_id = "haagent_1234" self._haa_name = "high_availability_agent" self._haa_dashi_name = "dashi_haa_" + uuid4().hex self._haa_dashi_uri = get_dashi_uri_from_cfg() self._haa_dashi_exchange = "hatests" self._haa_config = self._get_haagent_config() self._base_services, _ = self.container.resource_registry.find_resources( restype="Service", name=self.process_definition_name) self._base_procs = self.pd_cli.list_processes() self.waiter = ProcessStateWaiter() self.waiter.start() self.container_client = ContainerAgentClient(node=self.container.node, name=self.container.name) self._spawn_haagent() self.addCleanup(self._stop_haagent) self._setup_haa_client()
def _process_cmd_update(resource_id, res_obj=None): if resource_id == "NEW": restype = get_arg("restype") res_obj = IonObject(restype) schema = res_obj._schema set_fields = [] for field,value in request.values.iteritems(): value = str(value) nested_fields = field.split('.') local_field = nested_fields[0] if field in EDIT_IGNORE_FIELDS or local_field not in schema: continue if len(nested_fields) > 1: obj = res_obj skip_field = False for sub_field in nested_fields: local_obj = getattr(obj, sub_field, None) if skip_field or local_obj is None: skip_field = True continue elif isinstance(local_obj, IonObjectBase): obj = local_obj else: value = get_typed_value(value, obj._schema[sub_field]) setattr(obj, sub_field, value) set_fields.append(field) skip_field = True elif schema[field]['type'] in EDIT_IGNORE_TYPES: pass else: value = get_typed_value(value, res_obj._schema[field]) setattr(res_obj, field, value) set_fields.append(field) #res_obj._validate() if resource_id == "NEW": Container.instance.resource_registry.create(res_obj) else: Container.instance.resource_registry.update(res_obj) return "OK. Set fields:\n%s" % pprint.pformat(sorted(set_fields))
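# Illustrative only: a stripped-down version of the nested-field walk above, assuming plain
# Python objects instead of IonObjects and skipping schema-based type coercion. A form key
# like "contact.email" descends one level before setting the leaf attribute; flat keys such
# as "name" are set directly. Field names are examples, not a fixed schema.
def apply_form_values(res_obj, form_values):
    for field, value in form_values.iteritems():
        parts = field.split('.')
        target = res_obj
        for sub_field in parts[:-1]:
            target = getattr(target, sub_field)   # descend into nested sub-objects
        setattr(target, parts[-1], value)
    return res_obj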
def test_read_and_update_service(self): service_definition = ''' name: datastore_testing2 docstring: Service used to create, read, update and delete persistent Objects dependencies: [] methods: create_datastore: docstring: Create a new datastore namespace. in: datastore_name: "" out: success: True ''' # Create ServiceDefinition sd = IonObject(RT.ServiceDefinition, {"definition": service_definition}) service_id = self.sms.create_service_definition(sd) self.assertTrue(type(service_id) == str) # Read ServiceDefinition and validate service = self.sms.read_service_definition(service_id) self.assertEqual(service.definition, service_definition) #Update ServiceDefinition service_definition2 = ''' name: datastore_testing2 docstring: Service used to create, read, update and delete persistent Objects dependencies: [] methods: create_datastore: docstring: Create a new datastore namespace. in: datastore_id: 0 out: success: True ''' service.definition = service_definition2 self.sms.update_service_definition(service) # Read back and validate the update service2 = self.sms.read_service_definition(service_id) self.assertEqual(service2.definition, service_definition2) # Cleanup self.sms.delete_service_definition(service_id)
def base_register_instrument(self, instrument_id):
    # Register an instrument as a data producer in coordination with DM PubSub: create stream, register and create producer object
    dataproduct_obj = IonObject(RT.DataProduct, name='DataProduct1', description='sample data product')
    dataproduct_id, rev = self.rrclient.create(dataproduct_obj)

    # test registering a new data producer
    try:
        ds_id = self.client.register_instrument(instrument_id)
    except BadRequest as ex:
        self.fail("failed to create new data producer: %s" % ex)
    print 'new data producer id = ', ds_id

    # test assigning a data product to an instrument, creating the stream for the product
    try:
        self.client.assign_data_product(instrument_id, dataproduct_id)
        self.client.assign_data_product_source(dataproduct_id, instrument_id)
    except BadRequest as ex:
        self.fail("failed to assign data product to data producer: %s" % ex)
    except NotFound as ex:
        self.fail("failed to assign data product to data producer: %s" % ex)

    assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
    if not assocs or len(assocs) == 0:
        self.fail("failed to assign data product to data producer")

    # test UNassigning a data product from instrument, deleting the stream for the product
    try:
        self.client.unassign_data_product(instrument_id, dataproduct_id)
        self.client.unassign_data_product_source(dataproduct_id, instrument_id)
    except BadRequest as ex:
        self.fail("failed to unassign data product from data producer: %s" % ex)
    except NotFound as ex:
        self.fail("failed to unassign data product from data producer: %s" % ex)

    assocs = self.rrclient.find_associations(dataproduct_id, PRED.hasSource, instrument_id)
    if assocs:
        self.fail("failed to unassign data product from data producer")

    # test UNregistering a new data producer
    try:
        ds_id = self.client.unregister_instrument(instrument_id)
    except NotFound as ex:
        self.fail("failed to unregister instrument producer: %s" % ex)
def test_read_bad_wrongtype_fun(self): """ self is an instance of the tester class """ log.debug("test_read_bad_wrongtype_fun") # get objects svc = self._utg_getservice() testfun = self._utg_getcrudmethod(resource_label, "read") myret = IonObject(RT.Resource, name="Generic Resource") #configure Mock if all_in_one: svc.clients.resource_registry.read.reset_mock() svc.clients.resource_registry.read.return_value = myret self.assertEqual(0, svc.clients.resource_registry.read.call_count) self.assertRaisesRegexp(BadRequest, "type", testfun, "111") svc.clients.resource_registry.read.assert_called_once_with( "111", "")
def _build_ui_resource(self, row, objtype, mapping, auto_add=True): refid = None obj_fields = {} for obj_attr, row_attr in mapping.iteritems(): row_val = row[row_attr] obj_fields[obj_attr] = row_val if obj_attr == "uirefid": refid = row_val obj = IonObject(objtype, **obj_fields) if 'name' in obj_attr and not obj.name: log.warn("Ignoring object with no name: %s" % obj) else: if auto_add: self._add_ui_object(refid, obj) return refid, obj
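# Illustrative only: the kind of column mapping and spreadsheet row consumed by
# _build_ui_resource() above. The object attribute mapped from "uirefid" doubles as the
# reference id under which the object is registered; the object type and column names here
# are examples.
example_mapping = {'uirefid': 'ID', 'name': 'Name', 'description': 'Description'}
example_row = {'ID': 'UI_00123', 'Name': 'Instrument facepage', 'Description': 'Main device view'}
# refid, obj = self._build_ui_resource(example_row, 'UISpec', example_mapping)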
def _create_output_data_product(self, name_of_transform = '', tdom = None, sdom = None): dpod_obj = IonObject(RT.DataProduct, name='dprod_%s' % name_of_transform, description='for_%s' % name_of_transform, temporal_domain = tdom, spatial_domain = sdom) if name_of_transform == 'L0': stream_def_id = self.in_stream_def_id_for_L0 else: stream_def_id = self.stream_def_id dpod_id = self.dataproduct_management.create_data_product(data_product=dpod_obj, stream_definition_id=stream_def_id ) self.addCleanup(self.dataproduct_management.delete_data_product, dpod_id) return dpod_id
def test_signon(self): certificate = """-----BEGIN CERTIFICATE----- MIIEMzCCAxugAwIBAgICBQAwDQYJKoZIhvcNAQEFBQAwajETMBEGCgmSJomT8ixkARkWA29yZzEX MBUGCgmSJomT8ixkARkWB2NpbG9nb24xCzAJBgNVBAYTAlVTMRAwDgYDVQQKEwdDSUxvZ29uMRsw GQYDVQQDExJDSUxvZ29uIEJhc2ljIENBIDEwHhcNMTAxMTE4MjIyNTA2WhcNMTAxMTE5MTAzMDA2 WjBvMRMwEQYKCZImiZPyLGQBGRMDb3JnMRcwFQYKCZImiZPyLGQBGRMHY2lsb2dvbjELMAkGA1UE BhMCVVMxFzAVBgNVBAoTDlByb3RlY3ROZXR3b3JrMRkwFwYDVQQDExBSb2dlciBVbndpbiBBMjU0 MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6QhsWxhUXbIxg+1ZyEc7d+hIGvchVmtb g0kKLmivgoVsA4U7swNDRH6svW242THta0oTf6crkRx7kOKg6jma2lcAC1sjOSddqX7/92ChoUPq 7LWt2T6GVVA10ex5WAeB/o7br/Z4U8/75uCBis+ru7xEDl09PToK20mrkcz9M4HqIv1eSoPkrs3b 2lUtQc6cjuHRDU4NknXaVMXTBHKPM40UxEDHJueFyCiZJFg3lvQuSsAl4JL5Z8pC02T8/bODBuf4 dszsqn2SC8YDw1xrujvW2Bd7Q7BwMQ/gO+dZKM1mLJFpfEsR9WrjMeg6vkD2TMWLMr0/WIkGC8u+ 6M6SMQIDAQABo4HdMIHaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgSwMBMGA1UdJQQMMAoG CCsGAQUFBwMCMBgGA1UdIAQRMA8wDQYLKwYBBAGCkTYBAgEwagYDVR0fBGMwYTAuoCygKoYoaHR0 cDovL2NybC5jaWxvZ29uLm9yZy9jaWxvZ29uLWJhc2ljLmNybDAvoC2gK4YpaHR0cDovL2NybC5k b2Vncmlkcy5vcmcvY2lsb2dvbi1iYXNpYy5jcmwwHwYDVR0RBBgwFoEUaXRzYWdyZWVuMUB5YWhv by5jb20wDQYJKoZIhvcNAQEFBQADggEBAEYHQPMY9Grs19MHxUzMwXp1GzCKhGpgyVKJKW86PJlr HGruoWvx+DLNX75Oj5FC4t8bOUQVQusZGeGSEGegzzfIeOI/jWP1UtIjzvTFDq3tQMNvsgROSCx5 CkpK4nS0kbwLux+zI7BWON97UpMIzEeE05pd7SmNAETuWRsHMP+x6i7hoUp/uad4DwbzNUGIotdK f8b270icOVgkOKRdLP/Q4r/x8skKSCRz1ZsRdR+7+B/EgksAJj7Ut3yiWoUekEMxCaTdAHPTMD/g Mh9xL90hfMJyoGemjJswG5g3fAdTP/Lv0I6/nWeH/cLjwwpQgIEjEAVXl7KHuzX5vPD/wqQ= -----END CERTIFICATE-----""" id, valid_until, registered = self.identity_management_service.signon( certificate, True) self.assertFalse(registered) id2, valid_until2, registered2 = self.identity_management_service.signon( certificate, True) self.assertFalse(registered2) self.assertTrue(id == id2) self.assertTrue(valid_until == valid_until2) user_info_obj = IonObject("UserInfo", {"name": "Foo"}) self.identity_management_service.create_user_info(id, user_info_obj) id3, valid_until3, registered3 = self.identity_management_service.signon( certificate, True) self.assertTrue(registered3) self.assertTrue(id == id3) self.assertTrue(valid_until == valid_until3)
def test_create_bad_wrongtype_fun(self): """ self is an instance of the tester class """ log.debug("test_create_bad_wrongtype_fun") # get objects svc = self._utg_getservice() testfun = self._utg_getcrudmethod(resource_label, "create") bad_sample_resource = IonObject(RT.Resource, name="Generic Resource") #configure Mock if all_in_one: svc.clients.resource_registry.create.reset_mock() svc.clients.resource_registry.create.return_value = ('111', 'bla') self.assertRaisesRegexp(BadRequest, "type", testfun, bad_sample_resource) self.assertEqual(0, svc.clients.resource_registry.create.call_count)
def _create_instrument_agent_instance(self, instAgent_id, instDevice_id): # port_agent_config = { # 'device_addr': CFG.device.sbe37.host, # 'device_port': CFG.device.sbe37.port, # 'process_type': PortAgentProcessType.UNIX, # 'binary_path': "port_agent", # 'port_agent_addr': 'localhost', # 'command_port': CFG.device.sbe37.port_agent_cmd_port, # 'data_port': CFG.device.sbe37.port_agent_data_port, # 'log_level': 5, # 'type': PortAgentType.ETHERNET # } port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': 4008, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } temp_alert, late_data_alert = self._create_instrument_stream_alarms( instDevice_id) instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config=port_agent_config, alerts=[temp_alert, late_data_alert]) instAgentInstance_id = self.imsclient.create_instrument_agent_instance( instAgentInstance_obj, instAgent_id, instDevice_id) self.addCleanup(self.imsclient.delete_instrument_agent_instance, instAgentInstance_id) return instAgentInstance_id
def _create_instrument_agent(self, instModel_id): raw_config = StreamConfiguration( stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict') parsed_config = StreamConfiguration( stream_name='parsed', parameter_dictionary_name='ctd_parsed_param_dict') instAgent_obj = IonObject( RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations=[raw_config, parsed_config]) instAgent_id = self.imsclient.create_instrument_agent(instAgent_obj) self.imsclient.assign_instrument_model_to_instrument_agent( instModel_id, instAgent_id) return instAgent_id
def test_actor_identity(self): actor_identity_obj = IonObject("ActorIdentity", {"name": self.subject}) user_id = self.identity_management_service.create_actor_identity( actor_identity_obj) actor_identity = self.identity_management_service.read_actor_identity( user_id) actor_identity.name = 'Updated subject' self.identity_management_service.update_actor_identity(actor_identity) self.identity_management_service.delete_actor_identity(user_id) with self.assertRaises(NotFound) as cm: self.identity_management_service.read_actor_identity(user_id) self.assertTrue("does not exist" in cm.exception.message) with self.assertRaises(NotFound) as cm: self.identity_management_service.delete_actor_identity(user_id) self.assertTrue("does not exist" in cm.exception.message)
def _do_test_attach(self): binary = "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x03\x00\x00\x00(-\x0fS\x00\x00\x00\x03sBIT\x08\x08\x08\xdb\xe1O\xe0\x00\x00\x00~PLTEf3\x00\xfc\xf7\xe0\xee\xcc\x00\xd3\xa0\x00\xcc\x99\x00\xec\xcdc\x9fl\x00\xdd\xb2\x00\xff\xff\xff|I\x00\xf9\xdb\x00\xdd\xb5\x19\xd9\xad\x10\xb6\x83\x00\xf8\xd6\x00\xf2\xc5\x00\xd8\xab\x00n;\x00\xff\xcc\x00\xd6\xa4\t\xeb\xb8\x00\x83Q\x00\xadz\x00\xff\xde\x00\xff\xd6\x00\xd6\xa3\x00\xdf\xaf\x00\xde\xad\x10\xbc\x8e\x00\xec\xbe\x00\xec\xd4d\xff\xe3\x00tA\x00\xf6\xc4\x00\xf6\xce\x00\xa5u\x00\xde\xa5\x00\xf7\xbd\x00\xd6\xad\x08\xdd\xaf\x19\x8cR\x00\xea\xb7\x00\xee\xe9\xdf\xc5\x00\x00\x00\tpHYs\x00\x00\n\xf0\x00\x00\n\xf0\x01B\xac4\x98\x00\x00\x00\x1ctEXtSoftware\x00Adobe Fireworks CS4\x06\xb2\xd3\xa0\x00\x00\x00\x15tEXtCreation Time\x0029/4/09Oq\xfdE\x00\x00\x00\xadIDAT\x18\x95M\x8f\x8d\x0e\x820\x0c\x84;ZdC~f\x07\xb2\x11D\x86\x89\xe8\xfb\xbf\xa0+h\xe2\x97\\\xd2^\x93\xb6\x07:1\x9f)q\x9e\xa5\x06\xad\xd5\x13\x8b\xac,\xb3\x02\x9d\x12C\xa1-\xef;M\x08*\x19\xce\x0e?\x1a\xeb4\xcc\xd4\x0c\x831\x87V\xca\xa1\x1a\xd3\x08@\xe4\xbd\xb7\x15P;\xc8\xd4{\x91\xbf\x11\x90\xffg\xdd\x8di\xfa\xb6\x0bs2Z\xff\xe8yg2\xdc\x11T\x96\xc7\x05\xa5\xef\x96+\xa7\xa59E\xae\xe1\x84cm^1\xa6\xb3\xda\x85\xc8\xd8/\x17se\x0eN^'\x8c\xc7\x8e\x88\xa8\xf6p\x8e\xc2;\xc6.\xd0\x11.\x91o\x12\x7f\xcb\xa5\xfe\x00\x89]\x10:\xf5\x00\x0e\xbf\x00\x00\x00\x00IEND\xaeB`\x82" # Owner creation tests instrument = IonObject("InstrumentDevice", name='instrument') iid,_ = self.resource_registry_service.create(instrument) att = Attachment(content=binary, attachment_type=AttachmentType.BLOB) aid1 = self.resource_registry_service.create_attachment(iid, att) att1 = self.resource_registry_service.read_attachment(aid1, include_content=True) self.assertEquals(binary, att1.content) import base64 att = Attachment(content=base64.encodestring(binary), attachment_type=AttachmentType.ASCII) aid2 = self.resource_registry_service.create_attachment(iid, att) att1 = self.resource_registry_service.read_attachment(aid2, include_content=True) self.assertEquals(binary, base64.decodestring(att1.content)) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True) self.assertEquals(att_ids, [aid1, aid2]) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True) self.assertEquals(att_ids, [aid2, aid1]) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True, descending=True, limit=1) self.assertEquals(att_ids, [aid2]) atts = self.resource_registry_service.find_attachments(iid, id_only=False, include_content=True, limit=1) self.assertEquals(atts[0].content, binary) self.resource_registry_service.delete_attachment(aid1) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True) self.assertEquals(att_ids, [aid2]) self.resource_registry_service.delete_attachment(aid2) att_ids = self.resource_registry_service.find_attachments(iid, id_only=True) self.assertEquals(att_ids, [])
def test_requests(container, process=FakeProcess()): org_client = OrgManagementServiceProcessClient(node=container.node, process=process) ion_org = org_client.find_org() id_client = IdentityManagementServiceProcessClient(node=container.node, process=process) rr_client = ResourceRegistryServiceProcessClient(node=container.node, process=process) system_actor = id_client.find_actor_identity_by_name( name=CFG.system.system_actor) log.info('system actor:' + system_actor._id) sa_header_roles = get_role_message_headers( org_client.find_all_roles_by_user(system_actor._id)) try: user = id_client.find_actor_identity_by_name( '/DC=org/DC=cilogon/C=US/O=ProtectNetwork/CN=Roger Unwin A254') except: raise Inconsistent( "The test user is not found; did you seed the data?") log.debug('user_id: ' + user._id) user_header_roles = get_role_message_headers( org_client.find_all_roles_by_user(user._id)) try: org2 = org_client.find_org('Org2') org2_id = org2._id except NotFound, e: org2 = IonObject(RT.Org, name='Org2', description='A second Org') org2_id = org_client.create_org(org2, headers={ 'ion-actor-id': system_actor._id, 'ion-actor-roles': sa_header_roles })
def setUp(self): # Start container by calling parent's setUp super(TestAssembly, self).setUp() # Now create client to DataProductManagementService self.client = DotDict() self.client.DAMS = DataAcquisitionManagementServiceClient( node=self.container.node) self.client.DPMS = DataProductManagementServiceClient( node=self.container.node) self.client.IMS = InstrumentManagementServiceClient( node=self.container.node) self.client.OMS = ObservatoryManagementServiceClient( node=self.container.node) self.client.PSMS = PubsubManagementServiceClient( node=self.container.node) self.client.DPRS = DataProcessManagementServiceClient( node=self.container.node) self.client.RR = ResourceRegistryServiceClient( node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.client.RR) self.dataset_management = DatasetManagementServiceClient() dpd_obj = IonObject( RT.DataProcessDefinition, name=LOGICAL_TRANSFORM_DEFINITION_NAME, description="normally in preload", module='ion.processes.data.transforms.logical_transform', class_name='logical_transform') self.client.DPRS.create_data_process_definition(dpd_obj) # deactivate all data processes when tests are complete def killAllDataProcesses(): for proc_id in self.client.RR.find_resources( RT.DataProcess, None, None, True)[0]: self.client.DPRS.deactivate_data_process(proc_id) self.client.DPRS.delete_data_process(proc_id) self.addCleanup(killAllDataProcesses)
def setUp(self): # Start container self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.omsclient = ObservatoryManagementServiceClient( node=self.container.node) self.imsclient = InstrumentManagementServiceClient( node=self.container.node) self.dmpsclient = DataProductManagementServiceClient( node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient( node=self.container.node) self.psmsclient = PubsubManagementServiceClient( node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.c = DotDict() self.c.resource_registry = self.rrclient self.RR2 = EnhancedResourceRegistryClient(self.rrclient) # create missing data process definition self.dsmsclient = DataProcessManagementServiceClient( node=self.container.node) dpd_obj = IonObject( RT.DataProcessDefinition, name=LOGICAL_TRANSFORM_DEFINITION_NAME, description="normally in preload", module='ion.processes.data.transforms.logical_transform', class_name='logical_transform') self.dsmsclient.create_data_process_definition(dpd_obj) # deactivate all data processes when tests are complete def killAllDataProcesses(): for proc_id in self.rrclient.find_resources( RT.DataProcess, None, None, True)[0]: self.dsmsclient.deactivate_data_process(proc_id) self.dsmsclient.delete_data_process(proc_id) self.addCleanup(killAllDataProcesses)
def test_data_product_stream_def(self): pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) ctd_stream_def_id = self.pubsubcli.create_stream_definition(name='Simulated CTD data', parameter_dictionary_id=pdict_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() dp_obj = IonObject(RT.DataProduct, name='DP1', description='some new dp', temporal_domain = tdom, spatial_domain = sdom) dp_id = self.dpsc_cli.create_data_product(data_product= dp_obj, stream_definition_id=ctd_stream_def_id) stream_def_id = self.dpsc_cli.get_data_product_stream_definition(dp_id) self.assertEquals(ctd_stream_def_id, stream_def_id)
def get_mission_attachment(self, filename): """ Treat the mission file as if it were a platform attachment """ # read the file with open(filename, 'r') as rfile: content = rfile.read() # make an attachment attachment = IonObject(RT.Attachment, name="Example mission", description="Mission File", content=content, content_type="text/yml", keywords=["mission"], attachment_type=AttachmentType.ASCII) # Create a platform in the test environment p_root = self._create_single_platform() self.RR2.create_attachment(p_root['platform_device_id'], attachment) attachments, _ = self.RR.find_objects(p_root['platform_device_id'], PRED.hasAttachment, RT.Attachment, True) self.assertEqual(len(attachments), 1) a = self.RR.read_attachment(attachments[0], include_content=True) # Write contents of attached mission file to temp yaml file temp_file = 'temp_mission.yml' with open(temp_file, 'w') as wfile: wfile.write(a.content) self.load_mission(yaml_filename=temp_file) if os.path.isfile(temp_file): os.remove(temp_file) return p_root
def _do_test_find_resources(self): with self.assertRaises(BadRequest) as cm: self.resource_registry_service.find_resources(RT.UserInfo, LCS.DRAFT, "name", False) self.assertTrue(cm.exception.message == "find by name does not support lcstate") ret = self.resource_registry_service.find_resources(RT.UserInfo, None, "name", False) self.assertEquals(len(ret[0]), 0) # Instantiate an object obj = IonObject("InstrumentAgentInstance", name="name") # Persist object and read it back obj_id, obj_rev = self.resource_registry_service.create(obj) read_obj = self.resource_registry_service.read(obj_id) ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, None, "name", False) self.assertEquals(len(ret[0]), 1) self.assertEquals(ret[0][0]._id, read_obj._id) ret = self.resource_registry_service.find_resources(RT.InstrumentAgentInstance, LCS.DEPLOYED, None, False) self.assertEquals(len(ret[0]), 1) self.assertEquals(ret[0][0]._id, read_obj._id)
def _do_test_lifecycle(self): # Lifecycle tests att = IonObject("InstrumentDevice", name='mine', description='desc') rid, rev = self.resource_registry_service.create(att) att1 = self.resource_registry_service.read(rid) self.assertEquals(att1.name, att.name) self.assertEquals(att1.lcstate, LCS.DRAFT) self.assertEquals(att1.availability, AS.PRIVATE) new_state = self.resource_registry_service.execute_lifecycle_transition( rid, LCE.PLAN) self.assertEquals(new_state, lcstate(LCS.PLANNED, AS.PRIVATE)) att2 = self.resource_registry_service.read(rid) self.assertEquals(att2.lcstate, LCS.PLANNED) self.assertEquals(att2.availability, AS.PRIVATE) with self.assertRaises(BadRequest) as cm: self.resource_registry_service.execute_lifecycle_transition( rid, LCE.UNANNOUNCE) self.assertTrue( "type=InstrumentDevice, lcstate=PLANNED_PRIVATE has no transition for event unannounce" in cm.exception.message) new_state = self.resource_registry_service.execute_lifecycle_transition( rid, LCE.DEVELOP) self.assertEquals(new_state, lcstate(LCS.DEVELOPED, AS.PRIVATE)) with self.assertRaises(BadRequest): self.resource_registry_service.execute_lifecycle_transition( resource_id=rid, transition_event='NONE##') self.resource_registry_service.set_lifecycle_state( rid, lcstate(LCS.INTEGRATED, AS.PRIVATE)) att1 = self.resource_registry_service.read(rid) self.assertEquals(att1.lcstate, LCS.INTEGRATED) self.assertEquals(att1.availability, AS.PRIVATE)
def test_createDataProduct_and_DataProducer_with_id_NotFound(self): # setup self.clients.resource_registry.find_resources.return_value = ( [], 'do not care') self.clients.resource_registry.create.return_value = ('SOME_RR_ID1', 'Version_1') self.clients.pubsub_management.create_stream.return_value = 'stream1' # Data Product dpt_obj = IonObject(RT.DataProduct, name='DPT_X', description='some new data product') # test call with self.assertRaises(NotFound) as cm: dp_id = self.data_product_management_service.create_data_product( dpt_obj, 'stream_def_id') # check results self.clients.resource_registry.find_resources.assert_called_once_with( RT.DataProduct, None, dpt_obj.name, True) self.clients.resource_registry.create.assert_called_once_with(dpt_obj)
def _create_object_from_row(self, objtype, row, prefix=''): log.info("Create object type=%s, prefix=%s" % (objtype, prefix)) schema = self._get_object_class(objtype)._schema obj_fields = {} exclude_prefix = set() for col, value in row.iteritems(): if col.startswith(prefix): fieldname = col[len(prefix):] if '/' in fieldname: slidx = fieldname.find('/') nested_obj_field = fieldname[:slidx] if not nested_obj_field in exclude_prefix: nested_obj_type = schema[nested_obj_field]['type'] nested_prefix = prefix + fieldname[:slidx + 1] log.info( "Get nested object field=%s type=%s, prefix=%s" % (nested_obj_field, nested_obj_type, nested_prefix)) nested_obj = self._create_object_from_row( nested_obj_type, row, nested_prefix) obj_fields[nested_obj_field] = nested_obj exclude_prefix.add(nested_obj_field) elif fieldname in schema: try: if value: fieldvalue = self._get_typed_value( value, schema[fieldname]) obj_fields[fieldname] = fieldvalue except Exception: log.warn( "Object type=%s, prefix=%s, field=%s cannot be converted to type=%s. Value=%s" % (objtype, prefix, fieldname, schema[fieldname]['type'], value)) #fieldvalue = str(fieldvalue) else: log.warn("Unknown fieldname: %s" % fieldname) log.info("Create object type %s from field names %s" % (objtype, obj_fields.keys())) obj = IonObject(objtype, **obj_fields) return obj
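# Illustrative only: a preload row as consumed by _create_object_from_row() above. With
# prefix='cd/', only the 'cd/...' columns are used; 'cd/contact/email' is built into a nested
# 'contact' sub-object via the recursive call. Column and field names are examples and may not
# match the real resource schema.
example_row = {
    'ID': 'ID_42',                      # ignored: does not start with the prefix
    'cd/name': 'CTD Simulator',
    'cd/description': 'Preloaded device',
    'cd/contact/email': 'ops@example.org',
}
# obj = self._create_object_from_row('InstrumentDevice', example_row, prefix='cd/')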
def get_event_computed_attributes(event, include_event=False, include_special=False, include_formatted=False): """ @param event any Event to compute attributes for @retval an EventComputedAttributes object for given event """ evt_computed = IonObject(OT.EventComputedAttributes) evt_computed.event_id = event._id evt_computed.ts_computed = get_ion_ts() evt_computed.event = event if include_event else None try: summary = get_event_summary(event) evt_computed.event_summary = summary if include_special: spc_attrs = [ "%s:%s" % (k, str(getattr(event, k))[:50]) for k in sorted(event.__dict__.keys()) if k not in [ '_id', '_rev', 'type_', 'origin', 'origin_type', 'ts_created', 'base_types' ] ] evt_computed.special_attributes = ", ".join(spc_attrs) if include_formatted: evt_computed.event_attributes_formatted = pprint.pformat( event.__dict__) except Exception as ex: log.exception( "Error computing EventComputedAttributes for event %s: %s", event, ex) return evt_computed
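# Illustrative only: a small helper showing typical use of get_event_computed_attributes()
# above when rendering an event list; events are assumed to be persisted Event objects read
# from the events datastore.
def summarize_events(events):
    lines = []
    for event in events:
        computed = get_event_computed_attributes(event, include_special=True)
        lines.append("%s: %s" % (event.type_, computed.event_summary))
    return "\n".join(lines)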
def _initialize_queue_resource(self): """ Retrieve the resource and restore the remote queue. If it does not exist, create a new one. """ listen_name = self.CFG.process.listen_name objs, ids = self.clients.resource_registry.find_resources( name=listen_name) # If no persisted queue exists, create one. if len(objs) == 0: createtime = time.time() obj = IonObject('RemoteCommandQueue', name=listen_name, updated=createtime, created=createtime) # Persist object and read it back. obj_id, obj_rev = self.clients.resource_registry.create(obj) obj = self.clients.resource_registry.read(obj_id) log.debug('Created persistent queue for name=%s', listen_name) # If one exists, restore it here. elif len(objs) == 1: obj = objs[0] obj_id = ids[0] for command in obj.queue: self._tx_dict[command.command_id] = command self._client.enqueue(command) log.debug('Restored remote queue for name=%s: len=%i updated=%f.', listen_name, len(obj.queue), obj.updated) # Error: multiple queues with same name. else: log.error('%i > 1 remote command queues found for name=%s', len(objs), listen_name)
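# Illustrative only: a sketch of creating the persisted queue resource that the method above
# restores from, assuming a resource registry client 'rr' and that RemoteCommandQueue carries
# the name/created/updated fields used above.
import time

def ensure_remote_queue(rr, listen_name):
    objs, ids = rr.find_resources(name=listen_name)
    if objs:
        return ids[0]
    now = time.time()
    obj = IonObject('RemoteCommandQueue', name=listen_name, created=now, updated=now)
    obj_id, _ = rr.create(obj)
    return obj_id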
def create_output_data_product(self): dp1_outgoing_stream_id = self.pubsub_client.create_stream_definition( name='dp1_stream', parameter_dictionary_id=self.parameter_dict_id) dp1_output_dp_obj = IonObject(RT.DataProduct, name='data_process1_data_product', description='output of add array func') dp1_func_output_dp_id = self.dataproductclient.create_data_product( dp1_output_dp_obj, dp1_outgoing_stream_id) self.addCleanup(self.dataproductclient.delete_data_product, dp1_func_output_dp_id) # retrieve the id of the OUTPUT stream from the out Data Product and add to granule logger stream_ids, _ = self.rrclient.find_objects(dp1_func_output_dp_id, PRED.hasStream, None, True) self._output_stream_ids.append(stream_ids[0]) subscription_id = self.pubsub_client.create_subscription( 'validator', data_product_ids=[dp1_func_output_dp_id]) self.addCleanup(self.pubsub_client.delete_subscription, subscription_id) def on_granule(msg, route, stream_id): log.debug('recv_packet stream_id: %s route: %s msg: %s', stream_id, route, msg) self.validate_output_granule(msg, route, stream_id) self.granule_verified.set() validator = StandaloneStreamSubscriber('validator', callback=on_granule) validator.start() self.addCleanup(validator.stop) self.pubsub_client.activate_subscription(subscription_id) self.addCleanup(self.pubsub_client.deactivate_subscription, subscription_id) return dp1_func_output_dp_id