def setUp(self):
    """Start the container, deploy the r2 services, and create the service clients used by the tests."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Service clients, all talking to the same container node.
    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.org_management_service = OrgManagementServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    self.event_publisher = EventPublisher()
def __init__(self, clients):
    """Capture the service clients and initialize per-launch bookkeeping."""
    self.clients = clients
    registry = clients.resource_registry
    self.RR2 = EnhancedResourceRegistryClient(registry)

    # Nothing has been launched yet.
    self.agent_instance_obj = None
    self.associated_objects = None
    self.last_id = None
    self.will_launch = False
def __init__(self, clients=None, enhanced_rr=None):
    """Hold the service clients, wrapping the plain resource registry when no enhanced client is supplied."""
    self.clients = clients
    # Build our own enhanced wrapper unless a (truthy) one was provided.
    self.enhanced_rr = enhanced_rr if enhanced_rr else EnhancedResourceRegistryClient(self.clients.resource_registry)
    self.outil = ObservatoryUtil(self, enhanced_rr=self.enhanced_rr)
def setUp(self):
    """Start the container, deploy the r2 services, and create the clients used by data-product tests."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    print 'started services'

    # Now create client to DataProductManagementService
    self.client = DataProductManagementServiceClient(node=self.container.node)
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    # Convenience wrapper around the plain resource registry client.
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.ingestclient = IngestionManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dataproductclient = DataProductManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
def setUp(self):
    """Start the container via the parent TestCase and build a DotDict of service clients."""
    # Start container by calling parent's setUp
    super(TestAssembly, self).setUp()

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
    self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node)
    self.client.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
    self.dataset_management = DatasetManagementServiceClient()

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.client.RR.find_resources(RT.DataProcess, None, None, True)[0]:
            self.client.DPRS.deactivate_data_process(proc_id)
            self.client.DPRS.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    """Start the container, deploy the r2 services, create clients, and arrange data-process cleanup."""
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    # Minimal clients bag for helpers that expect a .resource_registry attribute.
    self.c = DotDict()
    self.c.resource_registry = self.rrclient
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
            self.dsmsclient.deactivate_data_process(proc_id)
            self.dsmsclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def __init__(self, process=None):
    """Bind this helper to a service instance; `process` must be the service's own self."""
    assert process
    self.process = process
    rr_client = process.clients.resource_registry
    self.RR2 = EnhancedResourceRegistryClient(rr_client)
def setUp(self):
    """Start the container and OMS client, register an event listener, deploy r2, and create service clients."""
    self._start_container()

    self._pp = pprint.PrettyPrinter()

    log.debug("oms_uri = %s", OMS_URI)
    self.oms = CIOMSClientFactory.create_instance(OMS_URI)
    self._get_platform_attributes()

    # Register an HTTP listener so OMS events can be delivered back to the test.
    url = OmsTestMixin.start_http_server()
    log.info("TestPlatformInstrument:setup http url %s", url)
    result = self.oms.event.register_event_listener(url)
    log.info("TestPlatformInstrument:setup register_event_listener result %s", result)

    # response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall')
    # log.info("TestPlatformInstrument:setup get_platform_ports %s", response)

    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    # Now create client to DataProductManagementService
    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.pubsubclient = PubsubManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.datasetclient = DatasetManagementServiceClient(node=self.container.node)
    self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    self.org_id = self.RR2.create(any_old(RT.Org))
    log.debug("Org created: %s", self.org_id)

    # see _set_receive_timeout
    self._receive_timeout = 177

    # Populated by the tests as the platform/instrument hierarchy is built.
    self.instrument_device = ''
    self.platform_device = ''
    self.platform_agent_instance_id = ''
    self._pa_client = ''

    def done():
        # Tear down the OMS client and HTTP event listener when the test finishes.
        CIOMSClientFactory.destroy_instance(self.oms)
        event_notifications = OmsTestMixin.stop_http_server()
        log.info("event_notifications = %s" % str(event_notifications))

    self.addCleanup(done)
def override_clients(self, new_clients):
    """
    Replaces the service clients with a new set of them... and makes sure they go to the right places.

    @param new_clients  the replacement client set (same shape as self.clients)
    """
    # Fix: actually adopt the new clients; previously the parameter was ignored
    # and the stale self.clients was re-wrapped.
    self.clients = new_clients
    self.RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)

    #shortcut names for the import sub-services
    if hasattr(self.clients, "resource_registry"):
        self.RR = self.clients.resource_registry
def __init__(self, process=None):
    """Attach to a service instance (`process` = the service's self) and configure driver typing."""
    assert process
    self.process = process
    self.dtm = DriverTypingMethod.ByRR
    # make an internal pointer to this function so we can Mock it for testing
    self._get_agent_client = ResourceAgentClient
    # Only the ByRR typing method needs a resource-registry wrapper.
    self.RR2 = EnhancedResourceRegistryClient(process.clients.resource_registry) if self.dtm == DriverTypingMethod.ByRR else None
def setUp(self):
    """Start the container, create clients, preload the logical-transform definition, and arrange cleanup."""
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    # Minimal clients bag for helpers that expect a .resource_registry attribute.
    self.c = DotDict()
    self.c.resource_registry = self.rrclient
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    # create missing data process definition
    self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
    dpd_obj = IonObject(RT.DataProcessDefinition,
                        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                        description="normally in preload",
                        module='ion.processes.data.transforms.logical_transform',
                        class_name='logical_transform')
    self.dsmsclient.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
            self.dsmsclient.deactivate_data_process(proc_id)
            self.dsmsclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def __init__(self, clients, deployment_obj, allow_children, include_children, RR2=None):
    """
    @param clients dict of clients from a service
    @param deployment_obj the deployment to activate
    @param allow_children whether to allow children of a device to be provided
    @param include_children whether to search for child devices from root device
    @param RR2 a reference to an enhanced RR client
    """
    self.clients = clients
    self.RR2 = RR2

    # sanity: searching for children implies that children are allowed at all
    if include_children:
        assert allow_children
    self.allow_children = allow_children
    self.include_children = include_children

    if None is self.RR2:
        self.RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)

    if not isinstance(self.RR2, EnhancedResourceRegistryClient):
        # Fix: report the expected class itself; type(EnhancedResourceRegistryClient)
        # would print the metaclass, not the class the caller must supply.
        raise AssertionError("Type of self.RR2 is %s not %s" %
                             (type(self.RR2), EnhancedResourceRegistryClient))

    self.deployment_obj = deployment_obj
    self.on_init()
def setUp(self):
    """Start the container via the parent TestCase, build clients, and preload the logical transform."""
    # Start container by calling parent's setUp
    super(TestAssembly, self).setUp()

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
    self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node)
    self.client.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
    self.dataset_management = DatasetManagementServiceClient()

    # Normally supplied by preload; create the logical-transform definition here.
    dpd_obj = IonObject(RT.DataProcessDefinition,
                        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                        description="normally in preload",
                        module='ion.processes.data.transforms.logical_transform',
                        class_name='logical_transform')
    self.client.DPRS.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.client.RR.find_resources(RT.DataProcess, None, None, True)[0]:
            self.client.DPRS.deactivate_data_process(proc_id)
            self.client.DPRS.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def __init__(self, process=None):
    """Bind this helper to a service instance; `process` is the service's own self."""
    assert process
    self.process = process
    clients = self.process.clients
    self.RR2 = EnhancedResourceRegistryClient(clients.resource_registry)
def setUp(self):
    """Start the container, create service clients, build the RSN OMS network definition, and set up subscribers."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.DP = DataProductManagementServiceClient(node=self.container.node)
    self.PSC = PubsubManagementServiceClient(node=self.container.node)
    self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
    self.DSC = DatasetManagementServiceClient()
    self.IDS = IdentityManagementServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)

    # Use the network definition provided by RSN OMS directly.
    rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
    self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
    # Fix: release the OMS client once the definition is built; the sibling
    # setUp already destroys its instance, and not doing so leaks it.
    CIOMSClientFactory.destroy_instance(rsn_oms)

    # get serialized version for the configuration:
    self._network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
    if log.isEnabledFor(logging.TRACE):
        log.trace("NetworkDefinition serialization:\n%s", self._network_definition_ser)

    # Data subscribers: async result, sample buffer, and guaranteed teardown.
    self._async_data_result = AsyncResult()
    self._data_subscribers = []
    self._samples_received = []
    self.addCleanup(self._stop_data_subscribers)

    # Event subscribers, started immediately.
    self._async_event_result = AsyncResult()
    self._event_subscribers = []
    self._events_received = []
    self.addCleanup(self._stop_event_subscribers)
    self._start_event_subscriber()
def setUp(self):
    """Start the container, deploy the r2 services, create clients, and prime status fixtures."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    #container = Container()
    #print 'starting container'
    #container.start()
    #print 'started container'
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)

    # Build the status fixtures used by the tests.
    self._setup_statuses()
def setUp(self):
    """Start the container, create clients, open the cache datastore, and register the ingestion worker definition."""
    # Start container
    #print 'instantiating container'
    self._start_container()

    log.debug("Start rel from url")
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.DPMS = DataProductManagementServiceClient()
    self.RR = ResourceRegistryServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.DAMS = DataAcquisitionManagementServiceClient()
    self.PSMS = PubsubManagementServiceClient()
    self.ingestclient = IngestionManagementServiceClient()
    self.PD = ProcessDispatcherServiceClient()
    self.DSMS = DatasetManagementServiceClient()
    self.unsc = UserNotificationServiceClient()
    self.data_retriever = DataRetrieverServiceClient()

    #------------------------------------------
    # Create the environment
    #------------------------------------------
    log.debug("get datastore")
    datastore_name = CACHE_DATASTORE_NAME
    self.db = self.container.datastore_manager.get_datastore(datastore_name)
    self.stream_def_id = self.PSMS.create_stream_definition(name='SBE37_CDM')

    # Process definition for the science-granule ingestion worker.
    self.process_definitions = {}
    ingestion_worker_definition = ProcessDefinition(name='ingestion worker')
    ingestion_worker_definition.executable = {
        'module': 'ion.processes.data.ingestion.science_granule_ingestion_worker',
        'class': 'ScienceGranuleIngestionWorker'
    }
    process_definition_id = self.PD.create_process_definition(process_definition=ingestion_worker_definition)
    self.process_definitions['ingestion_worker'] = process_definition_id

    # Bookkeeping torn down by cleaning_up.
    self.pids = []
    self.exchange_points = []
    self.exchange_names = []

    self.addCleanup(self.cleaning_up)
def setUp(self):
    """Build a minimal clients bag around a mocked resource registry and create the policy under test."""
    self.rr = Mock()
    self.RR2 = EnhancedResourceRegistryClient(self.rr)

    self.clients = DotDict()
    self.clients.resource_registry = self.rr

    self.policy = ResourceLCSPolicy(self.clients)
def setUp(self):
    """Start a container with the r2 deploy and create the registry clients used by the generic helpers."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    node = self.container.node
    self.generic_int_helper_rr = ResourceRegistryServiceClient(node=node)
    self.generic_int_helper_rr2 = EnhancedResourceRegistryClient(self.generic_int_helper_rr)
def setUp(self):
    """Start the container, deploy the r2 services, and create one client per service."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.resource_registry = self.container.resource_registry
    self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.instrument_management = InstrumentManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.data_process_management = DataProcessManagementServiceClient()
    # Fix: data_product_management and dataset_management were each
    # instantiated twice; a single client per service is sufficient.
    self.data_retriever = DataRetrieverServiceClient()
    self.user_notification = UserNotificationServiceClient()
    self.workflow_management = WorkflowManagementServiceClient()
    self.visualization = VisualizationServiceClient()
def setUp(self):
    """Start the container, create clients, and derive platform attributes/ports from the RSN OMS network definition."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.DP = DataProductManagementServiceClient(node=self.container.node)
    self.PSC = PubsubManagementServiceClient(node=self.container.node)
    self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
    self.DSC = DatasetManagementServiceClient()
    self.IDS = IdentityManagementServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)

    self.org_id = self.RR2.create(any_old(RT.Org))
    log.debug("Org created: %s", self.org_id)

    # Use the network definition provided by RSN OMS directly.
    rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri'])
    self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms)
    CIOMSClientFactory.destroy_instance(rsn_oms)

    # get serialized version for the configuration:
    self._network_definition_ser = NetworkUtil.serialize_network_definition(self._network_definition)
    log.trace("NetworkDefinition serialization:\n%s", self._network_definition_ser)

    # set attributes for the platforms:
    self._platform_attributes = {}
    for platform_id in self._network_definition.pnodes:
        pnode = self._network_definition.pnodes[platform_id]
        dic = dict((attr.attr_id, attr.defn) for attr in pnode.attrs.itervalues())
        self._platform_attributes[platform_id] = dic
    log.trace("_platform_attributes: %s", self._platform_attributes)

    # set ports for the platforms:
    self._platform_ports = {}
    for platform_id in self._network_definition.pnodes:
        pnode = self._network_definition.pnodes[platform_id]
        dic = {}
        for port_id, port in pnode.ports.iteritems():
            dic[port_id] = dict(port_id=port_id, network=port.network)
        self._platform_ports[platform_id] = dic
    # Fix: this trace previously logged _platform_attributes (copy/paste slip).
    log.trace("_platform_ports: %s", self._platform_ports)

    # Data subscribers: async result, sample buffer, and guaranteed teardown.
    self._async_data_result = AsyncResult()
    self._data_subscribers = []
    self._samples_received = []
    self.addCleanup(self._stop_data_subscribers)

    # Event subscribers, started immediately.
    self._async_event_result = AsyncResult()
    self._event_subscribers = []
    self._events_received = []
    self.addCleanup(self._stop_event_subscribers)
    self._start_event_subscriber()
def __init__(self, clients, RR2=None):
    """
    @param clients  dict of service clients
    @param RR2      optional pre-built EnhancedResourceRegistryClient; one is created if omitted
    """
    self.clients = clients
    self.RR2 = RR2

    if self.RR2 is None:
        log.warn("Creating new RR2")
        self.RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)

    if not isinstance(self.RR2, EnhancedResourceRegistryClient):
        # Fix: report the expected class itself; type(EnhancedResourceRegistryClient)
        # would print the metaclass, not the class the caller must supply.
        raise AssertionError("Type of self.RR2 is %s not %s" %
                             (type(self.RR2), EnhancedResourceRegistryClient))

    # Launch bookkeeping, populated later.
    self.agent_instance_obj = None
    self.associated_objects = None
    self.last_id = None
    self.will_launch = False
    self.generated_config = False
def override_clients(self, new_clients):
    """
    Replaces the service clients with a new set of them... and makes sure they go to the right places.

    @param new_clients  the replacement client set (same shape as self.clients)
    """
    # Fix: actually adopt the new clients; previously the parameter was ignored
    # and the stale self.clients was re-wrapped.
    self.clients = new_clients
    self.RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)

    #shortcut names for the import sub-services
    if hasattr(self.clients, "resource_registry"):
        self.RR = self.clients.resource_registry
def setUp(self):
    """Start the container, deploy the r2 services, and create the service clients plus preload bookkeeping."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient()
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.OMS = ObservatoryManagementServiceClient()
    self.org_management_service = OrgManagementServiceClient()
    self.IMS = InstrumentManagementServiceClient()
    self.dpclient = DataProductManagementServiceClient()
    self.pubsubcli = PubsubManagementServiceClient()
    self.damsclient = DataAcquisitionManagementServiceClient()
    self.dataset_management = DatasetManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.data_product_management = DataProductManagementServiceClient()

    # Load-stage bookkeeping used by the tests.
    self._load_stage = 0
    self._resources = {}
class DMTestCase(IonIntegrationTestCase):
    """Base integration TestCase for DM tests: starts a container, exposes one client per service,
    and provides helpers for creating stream definitions and data products with automatic cleanup."""

    def setUp(self):
        """Start the container, deploy the r2 services, and create the service clients."""
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.resource_registry = self.container.resource_registry
        self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)
        self.data_acquisition_management = DataAcquisitionManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.instrument_management = InstrumentManagementServiceClient()
        self.data_product_management = DataProductManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.data_process_management = DataProcessManagementServiceClient()
        # Fix: data_product_management and dataset_management were each
        # instantiated twice; a single client per service is sufficient.
        self.data_retriever = DataRetrieverServiceClient()
        self.user_notification = UserNotificationServiceClient()
        self.workflow_management = WorkflowManagementServiceClient()
        self.visualization = VisualizationServiceClient()

    def create_stream_definition(self, *args, **kwargs):
        """Create a stream definition and register its deletion as test cleanup."""
        stream_def_id = self.pubsub_management.create_stream_definition(*args, **kwargs)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        return stream_def_id

    def create_data_product(self, name, stream_def_id='', param_dict_name='', pdict_id=''):
        """Create a DataProduct (deleted on cleanup); requires one of stream_def_id,
        param_dict_name, or pdict_id to resolve the parameter dictionary."""
        if not (stream_def_id or param_dict_name or pdict_id):
            raise AssertionError('Attempted to create a Data Product without a parameter dictionary')

        tdom, sdom = time_series_domain()

        dp = DataProduct(name=name,
                         spatial_domain=sdom.dump(),
                         temporal_domain=tdom.dump(),
                         )

        stream_def_id = stream_def_id or self.create_stream_definition(
            '%s stream def' % name,
            parameter_dictionary_id=pdict_id or self.RR2.find_resource_by_name(
                RT.ParameterDictionary, param_dict_name, id_only=True))

        data_product_id = self.data_product_management.create_data_product(
            dp, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        return data_product_id

    def activate_data_product(self, data_product_id):
        """Activate persistence for the data product and register suspension as cleanup."""
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)

    def data_product_by_id(self, alt_id):
        """Look up a data product by its preload ('PRE') alternate id; returns None when not found."""
        data_products, _ = self.container.resource_registry.find_resources_ext(
            alt_id=alt_id, alt_id_ns='PRE', id_only=True)
        if data_products:
            return data_products[0]
        return None

    def dataset_of_data_product(self, data_product_id):
        """Return the first dataset id associated with the data product via hasDataset."""
        return self.resource_registry.find_objects(data_product_id, PRED.hasDataset, id_only=True)[0][0]
def setUp(self):
    """Start the container, deploy the r2 services, and create the service clients used by discovery tests."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.resource_registry = self.container.resource_registry
    self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)
    self.data_acquisition_management = DataAcquisitionManagementServiceClient()
    self.pubsub_management = PubsubManagementServiceClient()
    self.instrument_management = InstrumentManagementServiceClient()
    self.discovery = DiscoveryServiceClient()
    # Fix: dataset_management was instantiated twice; keep a single client.
    self.dataset_management = DatasetManagementServiceClient()
    self.process_dispatcher = ProcessDispatcherServiceClient()
    self.data_process_management = DataProcessManagementServiceClient()
    self.data_product_management = DataProductManagementServiceClient()
    self.data_retriever = DataRetrieverServiceClient()
    self.user_notification = UserNotificationServiceClient()
    self.observatory_management = ObservatoryManagementServiceClient()
    self.visualization = VisualizationServiceClient()

    self.ph = ParameterHelper(self.dataset_management, self.addCleanup)
    self.ctd_count = 0
def __init__(self, process=None):
    """Attach to a service instance (`process` = the service's self) and configure driver typing."""
    assert process
    self.process = process
    self.dtm = DriverTypingMethod.ByRR
    # Only the ByRR typing method needs a resource-registry wrapper.
    self.RR2 = EnhancedResourceRegistryClient(process.clients.resource_registry) if self.dtm == DriverTypingMethod.ByRR else None
def setUp(self):
    """Start the container, create clients, preload the logical-transform definition, and arrange cleanup."""
    # Start container
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
    self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
    self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()

    # Minimal clients bag for helpers that expect a .resource_registry attribute.
    self.c = DotDict()
    self.c.resource_registry = self.rrclient
    self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

    # create missing data process definition
    self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
    dpd_obj = IonObject(
        RT.DataProcessDefinition,
        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
        description="normally in preload",
        module='ion.processes.data.transforms.logical_transform',
        class_name='logical_transform')
    self.dsmsclient.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
            self.dsmsclient.deactivate_data_process(proc_id)
            self.dsmsclient.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    """Start the container via the parent TestCase, build clients, and preload the logical transform."""
    # Start container by calling parent's setUp
    super(TestAssembly, self).setUp()

    # Now create client to DataProductManagementService
    self.client = DotDict()
    self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
    self.client.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
    self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node)
    self.client.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
    self.dataset_management = DatasetManagementServiceClient()

    # Normally supplied by preload; create the logical-transform definition here.
    dpd_obj = IonObject(
        RT.DataProcessDefinition,
        name=LOGICAL_TRANSFORM_DEFINITION_NAME,
        description="normally in preload",
        module='ion.processes.data.transforms.logical_transform',
        class_name='logical_transform')
    self.client.DPRS.create_data_process_definition(dpd_obj)

    # deactivate all data processes when tests are complete
    def killAllDataProcesses():
        for proc_id in self.client.RR.find_resources(RT.DataProcess, None, None, True)[0]:
            self.client.DPRS.deactivate_data_process(proc_id)
            self.client.DPRS.delete_data_process(proc_id)
    self.addCleanup(killAllDataProcesses)
def setUp(self):
    """Start the container, deploy the r2 services, and create the service clients."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    #container = Container()
    #print 'starting container'
    #container.start()
    #print 'started container'

    unittest # suppress a pycharm inspector error if all unittest.skip references are commented out

    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.IDS = IdentityManagementServiceClient(node=self.container.node)
    self.PSC = PubsubManagementServiceClient(node=self.container.node)
    self.DP = DataProductManagementServiceClient(node=self.container.node)
    self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.DSC = DatasetManagementServiceClient(node=self.container.node)
    self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
def _start_raw_ingestion(self):
    """Create a raw DataProduct wired to the pre-built raw stream/stream-def/dataset, then activate its persistence."""
    dpsc_cli = DataProductManagementServiceClient()
    rrclient = ResourceRegistryServiceClient()
    RR2 = EnhancedResourceRegistryClient(rrclient)

    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp')
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
    dp_obj.ooi_product_name = "PRODNAME"

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------

    log.info("Create data product... raw stream id: %s", self._raw_stream_id)
    dp_id = dpsc_cli.create_data_product_(data_product=dp_obj)
    dataset_id = self.create_dataset(self._raw_stream_pdict_id)

    # Associate the pre-existing raw stream, stream definition, and dataset with the new product.
    RR2.assign_stream_definition_to_data_product_with_has_stream_definition(self._raw_stream_def_id, dp_id)
    RR2.assign_stream_to_data_product_with_has_stream(self._raw_stream_id, dp_id)
    RR2.assign_dataset_to_data_product_with_has_dataset(dataset_id, dp_id)
    self._raw_dataset_id = dataset_id

    log.info("Create data product...Complete")

    # Assert that the data product has an associated stream at this stage
    stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertNotEquals(len(stream_ids), 0)

    # Assert that the data product has an associated stream def at this stage
    stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
    self.assertNotEquals(len(stream_ids), 0)

    log.info("Activate data product persistence")
    dpsc_cli.activate_data_product_persistence(dp_id)

    log.info("Read data product")
    dp_obj = dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Created data product %s', dp_obj)
def setUp(self):
    """Start the container, deploy the r2 services, and create the service clients used by the tests."""
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)
    self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
    self.org_management_service = OrgManagementServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.dpclient = DataProductManagementServiceClient(node=self.container.node)
    self.pubsubcli = PubsubManagementServiceClient(node=self.container.node)
    self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.dataset_management = DatasetManagementServiceClient()
def setUp(self):
    """Start the container, deploy the r2 services, and create the service clients."""
    # Start container
    #print 'instantiating container'
    self._start_container()
    #container = Container()
    #print 'starting container'
    #container.start()
    #print 'started container'
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')

    self.RR = ResourceRegistryServiceClient(node=self.container.node)
    self.IMS = InstrumentManagementServiceClient(node=self.container.node)
    self.IDS = IdentityManagementServiceClient(node=self.container.node)
    self.PSC = PubsubManagementServiceClient(node=self.container.node)
    self.DP = DataProductManagementServiceClient(node=self.container.node)
    self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
    self.DSC = DatasetManagementServiceClient(node=self.container.node)
    self.PDC = ProcessDispatcherServiceClient(node=self.container.node)
    self.RR2 = EnhancedResourceRegistryClient(self.RR)

    print 'started services'
def setUp(self): self._start_container() self._pp = pprint.PrettyPrinter() log.debug("oms_uri = %s", OMS_URI) self.oms = CIOMSClientFactory.create_instance(OMS_URI) self._get_platform_attributes() url = OmsTestMixin.start_http_server() log.info("TestPlatformInstrument:setup http url %s", url) result = self.oms.event.register_event_listener(url) log.info("TestPlatformInstrument:setup register_event_listener result %s", result) # response = self.oms.port.get_platform_ports('LPJBox_CI_Ben_Hall') # log.info("TestPlatformInstrument:setup get_platform_ports %s", response) self.container.start_rel_from_url('res/deploy/r2deploy.yml') # Now create client to DataProductManagementService self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.pubsubclient = PubsubManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.datasetclient = DatasetManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() self.RR2 = EnhancedResourceRegistryClient(self.rrclient) self.org_id = self.RR2.create(any_old(RT.Org)) log.debug("Org created: %s", self.org_id) # see _set_receive_timeout self._receive_timeout = 177 self.instrument_device = '' self.platform_device = '' self.platform_agent_instance_id = '' self._pa_client = '' def done(): CIOMSClientFactory.destroy_instance(self.oms) event_notifications = OmsTestMixin.stop_http_server() log.info("event_notifications = %s" % str(event_notifications)) self.addCleanup(done)
def setUp(self): self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.DAMS = DataAcquisitionManagementServiceClient( node=self.container.node) self.DP = DataProductManagementServiceClient(node=self.container.node) self.PSC = PubsubManagementServiceClient(node=self.container.node) self.PDC = ProcessDispatcherServiceClient(node=self.container.node) self.DSC = DatasetManagementServiceClient() self.IDS = IdentityManagementServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) # Use the network definition provided by RSN OMS directly. rsn_oms = CIOMSClientFactory.create_instance(DVR_CONFIG['oms_uri']) self._network_definition = RsnOmsUtil.build_network_definition(rsn_oms) # get serialized version for the configuration: self._network_definition_ser = NetworkUtil.serialize_network_definition( self._network_definition) if log.isEnabledFor(logging.TRACE): log.trace("NetworkDefinition serialization:\n%s", self._network_definition_ser) self._async_data_result = AsyncResult() self._data_subscribers = [] self._samples_received = [] self.addCleanup(self._stop_data_subscribers) self._async_event_result = AsyncResult() self._event_subscribers = [] self._events_received = [] self.addCleanup(self._stop_event_subscribers) self._start_event_subscriber()
def _start_raw_ingestion(self):
    """Create a raw DataProduct with time-series domains, associate it with
    the raw stream, stream definition and a new dataset, then activate
    persistence. Stores the dataset id in self._raw_dataset_id.
    """
    dpsc_cli = DataProductManagementServiceClient()
    rrclient = ResourceRegistryServiceClient()
    RR2 = EnhancedResourceRegistryClient(rrclient)

    # Generic time-series data domain creation
    tdom, sdom = time_series_domain()

    dp_obj = IonObject(RT.DataProduct,
                       name='DP1',
                       description='some new dp',
                       temporal_domain=tdom.dump(),
                       spatial_domain=sdom.dump())

    # geospatial bounds for the product (point center is derived from these)
    dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
    dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
    dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
    dp_obj.ooi_product_name = "PRODNAME"

    #------------------------------------------------------------------------------------------------
    # Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
    #------------------------------------------------------------------------------------------------

    log.info("Create data product... raw stream id: %s", self._raw_stream_id)
    dp_id = dpsc_cli.create_data_product_(data_product=dp_obj)
    dataset_id = self.create_dataset(self._raw_stream_pdict_id)

    # wire stream definition, stream, and dataset to the data product
    RR2.assign_stream_definition_to_data_product_with_has_stream_definition(self._raw_stream_def_id, dp_id)
    RR2.assign_stream_to_data_product_with_has_stream(self._raw_stream_id, dp_id)
    RR2.assign_dataset_to_data_product_with_has_dataset(dataset_id, dp_id)
    self._raw_dataset_id = dataset_id

    log.info("Create data product...Complete")

    # Assert that the data product has an associated stream at this stage
    stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
    self.assertNotEquals(len(stream_ids), 0)

    # Assert that the data product has an associated stream def at this stage
    stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
    self.assertNotEquals(len(stream_ids), 0)

    log.info("Activate data product persistence")
    dpsc_cli.activate_data_product_persistence(dp_id)

    log.info("Read data product")
    dp_obj = dpsc_cli.read_data_product(dp_id)
    self.assertIsNotNone(dp_obj)
    self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
    log.debug('Created data product %s', dp_obj)
class DMTestCase(IonIntegrationTestCase):
    """Base integration test case for DM tests.

    setUp starts the container, deploys r2 services, and exposes one client
    per DM-related service; the helper methods create resources and register
    matching cleanups on the test case.
    """

    def setUp(self):
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.resource_registry = self.container.resource_registry
        self.RR2 = EnhancedResourceRegistryClient(self.resource_registry)
        self.data_acquisition_management = DataAcquisitionManagementServiceClient()
        self.pubsub_management = PubsubManagementServiceClient()
        self.instrument_management = InstrumentManagementServiceClient()
        # FIX: data_product_management and dataset_management were each
        # constructed twice in the original; each client is now built once.
        self.data_product_management = DataProductManagementServiceClient()
        self.dataset_management = DatasetManagementServiceClient()
        self.process_dispatcher = ProcessDispatcherServiceClient()
        self.data_process_management = DataProcessManagementServiceClient()
        self.data_retriever = DataRetrieverServiceClient()
        self.user_notification = UserNotificationServiceClient()
        self.workflow_management = WorkflowManagementServiceClient()
        self.visualization = VisualizationServiceClient()

    def create_stream_definition(self, *args, **kwargs):
        """Create a stream definition and register its deletion as cleanup.

        Returns the new stream definition id.
        """
        stream_def_id = self.pubsub_management.create_stream_definition(*args, **kwargs)
        self.addCleanup(self.pubsub_management.delete_stream_definition, stream_def_id)
        return stream_def_id

    def create_data_product(self, name, stream_def_id='', param_dict_name='', pdict_id=''):
        """Create a DataProduct with time-series domains.

        Exactly one source for the parameter dictionary must be supplied:
        an existing stream_def_id, a parameter dictionary name, or a
        parameter dictionary id. Raises AssertionError otherwise.
        Returns the new data product id (deletion registered as cleanup).
        """
        if not (stream_def_id or param_dict_name or pdict_id):
            raise AssertionError('Attempted to create a Data Product without a parameter dictionary')

        tdom, sdom = time_series_domain()

        dp = DataProduct(name=name,
                         spatial_domain=sdom.dump(),
                         temporal_domain=tdom.dump())

        # build a stream definition on demand if one was not provided
        stream_def_id = stream_def_id or self.create_stream_definition(
            '%s stream def' % name,
            parameter_dictionary_id=pdict_id or self.RR2.find_resource_by_name(
                RT.ParameterDictionary, param_dict_name, id_only=True))

        data_product_id = self.data_product_management.create_data_product(
            dp, stream_definition_id=stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product, data_product_id)
        return data_product_id

    def activate_data_product(self, data_product_id):
        """Activate persistence for a data product; suspension is cleanup."""
        self.data_product_management.activate_data_product_persistence(data_product_id)
        self.addCleanup(self.data_product_management.suspend_data_product_persistence, data_product_id)
def __init__(self, clients, RR2=None): self.clients = clients self.RR2 = RR2 if self.RR2 is None: log.warn("Creating new RR2") self.RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry) if not isinstance(self.RR2, EnhancedResourceRegistryClient): raise AssertionError("Type of self.RR2 is %s not %s" % (type(self.RR2), type(EnhancedResourceRegistryClient))) self.agent_instance_obj = None self.associated_objects = None self.last_id = None self.will_launch = False self.generated_config = False
def setUp(self): # Start container #print 'instantiating container' self._start_container() log.debug("Start rel from url") self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.DPMS = DataProductManagementServiceClient() self.RR = ResourceRegistryServiceClient() self.RR2 = EnhancedResourceRegistryClient(self.RR) self.DAMS = DataAcquisitionManagementServiceClient() self.PSMS = PubsubManagementServiceClient() self.ingestclient = IngestionManagementServiceClient() self.PD = ProcessDispatcherServiceClient() self.DSMS = DatasetManagementServiceClient() self.unsc = UserNotificationServiceClient() self.data_retriever = DataRetrieverServiceClient() #------------------------------------------ # Create the environment #------------------------------------------ log.debug("get datastore") datastore_name = CACHE_DATASTORE_NAME self.db = self.container.datastore_manager.get_datastore(datastore_name) self.stream_def_id = self.PSMS.create_stream_definition(name='SBE37_CDM') self.process_definitions = {} ingestion_worker_definition = ProcessDefinition(name='ingestion worker') ingestion_worker_definition.executable = { 'module':'ion.processes.data.ingestion.science_granule_ingestion_worker', 'class' :'ScienceGranuleIngestionWorker' } process_definition_id = self.PD.create_process_definition(process_definition=ingestion_worker_definition) self.process_definitions['ingestion_worker'] = process_definition_id self.pids = [] self.exchange_points = [] self.exchange_names = [] self.addCleanup(self.cleaning_up)
def setUp(self): # Start container #print 'instantiating container' self._start_container() self.container.start_rel_from_url('res/deploy/r2deploy.yml') print 'started services' # Now create client to DataProductManagementService self.client = DataProductManagementServiceClient(node=self.container.node) self.rrclient = ResourceRegistryServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.rrclient) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.ingestclient = IngestionManagementServiceClient(node=self.container.node) self.processdispatchclient = ProcessDispatcherServiceClient(node=self.container.node) self.dataproductclient = DataProductManagementServiceClient(node=self.container.node) self.imsclient = InstrumentManagementServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient()
def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.IDS = IdentityManagementServiceClient(node=self.container.node) self.PSC = PubsubManagementServiceClient(node=self.container.node) self.DP = DataProductManagementServiceClient(node=self.container.node) self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node) self.DSC = DatasetManagementServiceClient(node=self.container.node) self.PDC = ProcessDispatcherServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR)
def override_clients(self, new_clients): """ Replaces the service clients with a new set of them... and makes sure they go to the right places """ self.RR2 = EnhancedResourceRegistryClient(new_clients.resource_registry) #shortcut names for the import sub-services if hasattr(new_clients, "resource_registry"): self.RR = new_clients.resource_registry if hasattr(new_clients, "instrument_management"): self.IMS = new_clients.instrument_management if hasattr(new_clients, "data_process_management"): self.PRMS = new_clients.data_process_management #farm everything out to the impls self.dataproductclient = DataProductManagementServiceClient() self.dataprocessclient = DataProcessManagementServiceClient()
def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) self.OMS = ObservatoryManagementServiceClient(node=self.container.node) self.org_management_service = OrgManagementServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.dpclient = DataProductManagementServiceClient(node=self.container.node) self.pubsubcli = PubsubManagementServiceClient(node=self.container.node) self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node) self.dataset_management = DatasetManagementServiceClient() #print 'TestObservatoryManagementServiceIntegration: started services' self.event_publisher = EventPublisher()
def setUp(self): # Start container by calling parent's setUp super(TestAssembly, self).setUp() # Now create client to DataProductManagementService self.client = DotDict() self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node) self.client.DPMS = DataProductManagementServiceClient(node=self.container.node) self.client.IMS = InstrumentManagementServiceClient(node=self.container.node) self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node) self.client.PSMS = PubsubManagementServiceClient(node=self.container.node) self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node) self.client.RR = ResourceRegistryServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.client.RR) self.dataset_management = DatasetManagementServiceClient() # deactivate all data processes when tests are complete def killAllDataProcesses(): for proc_id in self.client.RR.find_resources(RT.DataProcess, None, None, True)[0]: self.client.DPRS.deactivate_data_process(proc_id) self.client.DPRS.delete_data_process(proc_id) self.addCleanup(killAllDataProcesses)
def setUp(self): self.rr = Mock() self.RR2 = EnhancedResourceRegistryClient(self.rr)
class TestEnhancedResourceRegistryClient(PyonTestCase):
    """Unit tests for EnhancedResourceRegistryClient.

    The underlying resource registry (self.rr) is a Mock, so each test
    configures return values and then asserts which registry calls the
    RR2 wrapper made.
    """

    def setUp(self):
        self.rr = Mock()
        self.RR2 = EnhancedResourceRegistryClient(self.rr)

    def sample_resource(self):
        # any_old builds a minimally populated resource of the given type
        return any_old(RT.InstrumentDevice)

    def test_init(self):
        pass

    def test_create(self):
        """
        test resource creation in normal case
        """
        # get objects
        good_sample_resource = self.sample_resource()
        saved_resource = self.sample_resource()
        #saved_resource.lcstate = LCS.REGISTERED

        #configure Mock
        self.rr.create.return_value = ('111', 'bla')
        self.rr.find_resources.return_value = ([], [])
        self.rr.read.return_value = saved_resource

        sample_resource_id = self.RR2.create(good_sample_resource)

        self.rr.create.assert_called_once_with(good_sample_resource)
        self.assertEqual(sample_resource_id, '111')

    def test_create_bad_noname(self):
        """
        test resource creation failure for no name
        """
        # get objects
        bad_sample_resource = self.sample_resource()
        delattr(bad_sample_resource, "name")

        #configure Mock
        self.rr.create.return_value = ('111', 'bla')
        self.rr.find_resources.return_value = ([], [])

        self.assertRaises(BadRequest, self.RR2.create, bad_sample_resource)

    def test_create_bad_dupname(self):
        """
        test resource creation failure for duplicate name
        """
        # get objects
        bad_sample_resource = self.sample_resource()
        #really, the resource doesn't matter; it's the retval from find that matters

        #configure Mock
        self.rr.create.return_value = ('111', 'bla')
        self.rr.find_resources.return_value = ([0], [0])

        self.assertRaises(BadRequest, self.RR2.create, bad_sample_resource)

    def test_read(self):
        """
        test resource read (passthru)
        """
        # get objects
        myret = self.sample_resource()

        #configure Mock
        self.rr.read.return_value = myret

        response = self.RR2.read("111")
        self.rr.read.assert_called_once_with("111")
        self.assertEqual(response, myret)
        #self.assertDictEqual(response.__dict__,
        #                     self.sample_resource().__dict__)

    def test_update(self):
        """
        test resource update in normal case
        """
        # get objects
        good_sample_resource = self.sample_resource()
        setattr(good_sample_resource, "_id", "111")

        #configure Mock
        self.rr.update.return_value = ('111', 'bla')
        self.rr.find_resources.return_value = ([], [])

        self.RR2.update(good_sample_resource)

        self.rr.update.assert_called_once_with(good_sample_resource)

    def test_update_bad_dupname(self):
        """
        test update failure due to duplicate name
        """
        # get objects
        bad_sample_resource = self.sample_resource()
        setattr(bad_sample_resource, "_id", "111")

        self.rr.find_resources.return_value = ([0], [0])
        self.assertRaises(BadRequest, self.RR2.update, bad_sample_resource)

    def test_update_bad_noid(self):
        """
        test update failure due to duplicate name
        """
        # get objects; note: no _id is set, which is what triggers the failure
        bad_sample_resource = self.sample_resource()

        self.rr.find_resources.return_value = ([0], [0])
        self.assertRaises(BadRequest, self.RR2.update, bad_sample_resource)

    def test_delete(self):
        """
        test deletion under normal circumstances
        """
        # get objects
        myret = self.sample_resource()

        #configure Mock
        self.rr.read.return_value = myret
        self.rr.delete.return_value = None
        self.rr.retire.return_value = None

        try:
            self.RR2.delete("111")
        except TypeError as te:
            # for logic tests that run into mock trouble
            if "'Mock' object is not iterable" != te.message:
                raise te
            else:
                raise SkipTest("Must test this with INT test")
        except Exception as e:
            raise e

        #self.rr.read.assert_called_with("111", "")
        self.rr.retire.assert_called_once_with("111")

    def test_delete_destroy(self):
        """
        self is an instance of the tester class
        """
        # get objects
        myret = self.sample_resource()

        #configure Mock
        self.rr.read.return_value = myret
        self.rr.delete.return_value = None
        self.rr.find_resources.return_value = None
        self.rr.find_objects.return_value = (["2"], ["2"])
        self.rr.find_subjects.return_value = (["3"], ["3"])

        self.RR2.force_delete("111")
        self.rr.delete.assert_called_once_with("111")

    def test_advance_lcs(self):
        """
        call RR when the transition ISN'T retire
        """
        self.RR2.advance_lcs("111", LCE.PLAN)
        self.rr.execute_lifecycle_transition.assert_called_once_with(resource_id="111", transition_event=LCE.PLAN)

        # RETIRE goes through rr.retire, not execute_lifecycle_transition
        self.RR2.advance_lcs("222", LCE.RETIRE)
        self.rr.retire.assert_called_once_with("222")

    def test_delete_association(self):
        self.rr.get_association.return_value = "111"
        self.RR2.delete_association("a", "b", "c")
        self.rr.delete_association.assert_called_once_with("111")

    def test_delete_all_object_associations(self):
        self.rr.find_associations.return_value = ["111"]
        self.RR2.delete_object_associations("x")
        self.rr.delete_association.assert_called_once_with("111")

    def test_delete_all_subject_associations(self):
        self.rr.find_associations.return_value = ["111"]
        self.RR2.delete_subject_associations("x")
        self.rr.delete_association.assert_called_once_with("111")

    def test_pluck(self):
        self.rr.find_subjects.return_value = (["111"], ["aaa"])
        self.rr.find_objects.return_value = (["222"], ["bbb"])
        self.RR2.pluck("x")
        #self.rr.delete_association.assert_called_with("bbb")
        self.rr.delete_association.assert_called_with("aaa")
        self.assertEqual(self.rr.delete_association.call_count, 2)

    def test_find_objects_using_id(self):
        self.tbase_find_objects("x_id")

    def test_find_objects_using_ionobj(self):
        obj = any_old(RT.InstrumentDevice)
        setattr(obj, "_id", "foo_id")
        self.tbase_find_objects(obj)

    def test_find_objects_using_junk(self):
        self.tbase_find_objects(1)

    def tbase_find_objects(self, sample_obj):
        """
        test all 8 flavors of find objects: return IonObjects/ids, return single/multiple, use predicate/no-predicate
        """
        # each rst* helper resets the mock and primes find_objects with
        # zero / one / two results respectively
        def rst():
            self.rr.find_objects.reset_mock()
            self.rr.find_objects.return_value = ([], [])
            self.assertEqual(0, self.rr.find_subjects.call_count)

        def rst1():
            self.rr.find_objects.reset_mock()
            self.rr.find_objects.return_value = (["x"], ["x"])
            self.assertEqual(0, self.rr.find_subjects.call_count)

        def rst2():
            self.rr.find_objects.reset_mock()
            self.rr.find_objects.return_value = (["x", "y"], ["z", "k"])
            self.assertEqual(0, self.rr.find_subjects.call_count)

        x = sample_obj
        xx = x
        # the wrapper accepts either an id or an IonObject; it should pass
        # the id through in both cases
        if hasattr(x, "_id"):
            xx = x._id

        # find none
        rst()
        self.RR2.find_instrument_models_of_instrument_device(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=False)

        rst()
        self.assertRaises(NotFound, self.RR2.find_instrument_model_of_instrument_device, x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=False)

        rst()
        self.RR2.find_instrument_model_ids_of_instrument_device(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=True)

        rst()
        self.assertRaises(NotFound, self.RR2.find_instrument_model_id_of_instrument_device, x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=True)

        # find one
        rst1()
        self.RR2.find_instrument_models_of_instrument_device(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=False)

        rst1()
        self.RR2.find_instrument_model_of_instrument_device(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=False)

        rst1()
        self.RR2.find_instrument_model_ids_of_instrument_device(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=True)

        rst1()
        self.RR2.find_instrument_model_id_of_instrument_device(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=True)

        # find multiples
        rst2()
        self.RR2.find_instrument_models_of_instrument_device(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=False)

        rst2()
        self.assertRaises(Inconsistent, self.RR2.find_instrument_model_of_instrument_device, x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=False)

        rst2()
        self.RR2.find_instrument_model_ids_of_instrument_device(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=True)

        rst2()
        self.assertRaises(Inconsistent, self.RR2.find_instrument_model_id_of_instrument_device, x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=True)

        # find using
        rst2()
        self.RR2.find_instrument_models_of_instrument_device_using_has_model(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=False)

        rst2()
        self.assertRaises(Inconsistent, self.RR2.find_instrument_model_of_instrument_device_using_has_model, x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=False)

        rst2()
        self.RR2.find_instrument_model_ids_of_instrument_device_using_has_model(x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=True)

        rst2()
        self.assertRaises(Inconsistent, self.RR2.find_instrument_model_id_of_instrument_device_using_has_model, x)
        self.rr.find_objects.assert_called_once_with(subject=xx, predicate=PRED.hasModel, object_type=RT.InstrumentModel, id_only=True)

    def test_find_subjects_using_id(self):
        self.tbase_find_subjects("x_id")

    def test_find_subjects_using_ionobj(self):
        obj = any_old(RT.InstrumentDevice)
        setattr(obj, "_id", "foo_id")
        self.tbase_find_subjects(obj)

    def test_find_subjects_using_junk(self):
        self.tbase_find_subjects(1)

    def tbase_find_subjects(self, sample_obj):
        """
        test all 8 flavors of find subjects: return IonObjects/ids, return single/multiple, use predicate/no-predicate
        """
        # mirror of tbase_find_objects, exercising find_subjects instead
        def rst():
            self.rr.find_subjects.reset_mock()
            self.rr.find_subjects.return_value = ([], [])
            self.assertEqual(0, self.rr.find_objects.call_count)

        def rst1():
            self.rr.find_subjects.reset_mock()
            self.rr.find_subjects.return_value = (["x"], ["x"])
            self.assertEqual(0, self.rr.find_objects.call_count)

        def rst2():
            self.rr.find_subjects.reset_mock()
            self.rr.find_subjects.return_value = (["x", "y"], ["z", "k"])
            self.assertEqual(0, self.rr.find_objects.call_count)

        x = sample_obj
        xx = x
        if hasattr(x, "_id"):
            xx = x._id

        # find none
        rst()
        self.RR2.find_instrument_devices_by_instrument_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=False)

        rst()
        self.assertRaises(NotFound, self.RR2.find_instrument_device_by_instrument_model, x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=False)

        rst()
        self.RR2.find_instrument_device_ids_by_instrument_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=True)

        rst()
        self.assertRaises(NotFound, self.RR2.find_instrument_device_id_by_instrument_model, x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=True)

        # find 1
        rst1()
        self.RR2.find_instrument_devices_by_instrument_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=False)

        rst1()
        self.RR2.find_instrument_device_by_instrument_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=False)

        rst1()
        self.RR2.find_instrument_device_ids_by_instrument_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=True)

        rst1()
        self.RR2.find_instrument_device_id_by_instrument_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=True)

        # find multiple
        rst2()
        self.RR2.find_instrument_devices_by_instrument_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=False)

        rst2()
        self.assertRaises(Inconsistent, self.RR2.find_instrument_device_by_instrument_model, x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=False)

        rst2()
        self.RR2.find_instrument_device_ids_by_instrument_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=True)

        rst2()
        self.assertRaises(Inconsistent, self.RR2.find_instrument_device_id_by_instrument_model, x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=True)

        # find using
        rst2()
        self.RR2.find_instrument_devices_by_instrument_model_using_has_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=False)

        rst2()
        self.assertRaises(Inconsistent, self.RR2.find_instrument_device_by_instrument_model_using_has_model, x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=False)

        rst2()
        self.RR2.find_instrument_device_ids_by_instrument_model_using_has_model(x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=True)

        rst2()
        self.assertRaises(Inconsistent, self.RR2.find_instrument_device_id_by_instrument_model_using_has_model, x)
        self.rr.find_subjects.assert_called_once_with(object=xx, predicate=PRED.hasModel, subject_type=RT.InstrumentDevice, id_only=True)

    def test_assign_unassign(self):
        """
        test all flavors of assign and unassign: with/without predicates
        """
        x = "x_id"
        y = "y_id"

        self.RR2.assign_instrument_model_to_instrument_device(y, x)
        self.rr.create_association.assert_called_once_with(x, PRED.hasModel, y)

        self.rr.get_association.return_value = "zzz"
        self.RR2.unassign_instrument_model_from_instrument_device(y, x)
        self.rr.delete_association.assert_called_once_with("zzz")

        # ambiguous predicate: accessing these dynamic names must fail
        self.assertRaises(BadRequest, getattr, self.RR2, "assign_data_product_to_data_process")
        self.assertRaises(BadRequest, getattr, self.RR2, "unassign_data_product_from_data_process")

        self.rr.create_association.reset_mock()
        self.RR2.assign_data_product_to_data_process_with_has_output_product(y, x)
        self.rr.create_association.assert_called_once_with(x, PRED.hasOutputProduct, y)

        self.rr.delete_association.reset_mock()
        self.rr.get_association.reset_mock()
        self.rr.get_association.return_value = "aaa"
        self.RR2.unassign_data_product_from_data_process_with_has_output_product(y, x)
        self.rr.delete_association.assert_called_once_with("aaa")

    def test_assign_single_object(self):
        x = "x_id"
        y = "y_id"

        def rst():
            self.rr.find_objects.reset_mock()
            self.rr.get_association.reset_mock()

        # no existing object: association is created
        rst()
        self.rr.find_objects.return_value = ([], [])
        self.RR2.assign_one_instrument_model_to_instrument_device(y, x)
        self.rr.create_association.assert_called_once_with(x, PRED.hasModel, y)

        # multiple existing objects: inconsistent state
        rst()
        self.rr.find_objects.return_value = (["a", "b"], ["c", "d"])
        self.assertRaises(Inconsistent, self.RR2.assign_one_instrument_model_to_instrument_device, y, x)

        # exactly one existing object and association found: no-op success
        rst()
        self.rr.find_objects.return_value = (["a"], ["b"])
        self.rr.get_association.return_value = "yay"
        self.RR2.assign_one_instrument_model_to_instrument_device(y, x)

        # one existing object but association missing: bad request
        rst()
        self.rr.find_objects.return_value = (["a"], ["b"])
        self.rr.get_association.side_effect = NotFound("")
        self.assertRaises(BadRequest, self.RR2.assign_one_instrument_model_to_instrument_device, y, x)

    def test_assign_single_subject(self):
        x = "x_id"
        y = "y_id"

        def rst():
            self.rr.find_subjects.reset_mock()
            self.rr.get_association.reset_mock()

        # no existing subject: association is created
        rst()
        self.rr.find_subjects.return_value = ([], [])
        self.RR2.assign_instrument_device_to_one_instrument_site(y, x)
        self.rr.create_association.assert_called_once_with(x, PRED.hasDevice, y)

        # multiple existing subjects: inconsistent state
        rst()
        self.rr.find_subjects.return_value = (["a", "b"], ["c", "d"])
        self.assertRaises(Inconsistent, self.RR2.assign_instrument_device_to_one_instrument_site, y, x)

        # exactly one existing subject and association found: no-op success
        rst()
        self.rr.find_subjects.return_value = (["a"], ["b"])
        self.rr.get_association.return_value = "yay"
        self.RR2.assign_instrument_device_to_one_instrument_site(y, x)

        # one existing subject but association missing: bad request
        rst()
        self.rr.find_subjects.return_value = (["a"], ["b"])
        self.rr.get_association.side_effect = NotFound("")
        self.assertRaises(BadRequest, self.RR2.assign_instrument_device_to_one_instrument_site, y, x)

    def test_bad_dynamics(self):
        x = "x_id"
        # unknown dynamic names fall through to the underlying registry
        self.RR2.assign_foo_to_bar(x)
        self.rr.assign_foo_to_bar.assert_called_once_with(x)

        # predicates that don't match the resource pair must raise
        self.assertRaises(BadRequest, getattr, self.RR2, "find_instrument_model_of_instrument_device_using_has_site")
        self.assertRaises(BadRequest, getattr, self.RR2, "find_instrument_model_of_instrument_device_using_has_banana")
        self.assertRaises(BadRequest, getattr, self.RR2, "find_data_product_of_data_process")

        self.RR2.find_sensor_model_by_data_product(x)
        self.rr.find_sensor_model_by_data_product.assert_called_once_with(x)
class TestAssembly(GenericIntHelperTestCase):
    """
    assembly integration tests at the service level
    """

    def setUp(self):
        """Start the container (via parent) and build all service clients."""
        # Start container by calling parent's setUp
        super(TestAssembly, self).setUp()

        # Now create clients to the services under test; DotDict gives
        # attribute-style access (c.IMS, c.OMS, ...) used throughout.
        self.client = DotDict()
        self.client.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.client.DPMS = DataProductManagementServiceClient(node=self.container.node)
        self.client.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.client.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.client.PSMS = PubsubManagementServiceClient(node=self.container.node)
        self.client.DPRS = DataProcessManagementServiceClient(node=self.container.node)

        self.client.RR = ResourceRegistryServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.client.RR)
        self.dataset_management = DatasetManagementServiceClient()

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.client.RR.find_resources(RT.DataProcess, None, None, True)[0]:
                self.client.DPRS.deactivate_data_process(proc_id)
                self.client.DPRS.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    #@unittest.skip('refactoring')
    def test_observatory_structure(self):
        """End-to-end walk through the R2 observatory resource assembly.

        Creates orgs, sites, models, agents, devices and deployments;
        drives the device/agent lifecycle (PLAN/DEVELOP/INTEGRATE/DEPLOY)
        with positive and negative checks; activates deployments, swaps
        the primary device, then deletes and force-deletes everything.
        """
        c = self.client

        #generate a function that finds direct associations, using the more complex one in the service
        def gen_find_oms_association(output_type):
            def freeze():
                # freeze() gives output_type its own scope so each generated
                # finder keeps the value it was created with
                def finder_fun(obj_id):
                    log.debug("Finding related %s frames", output_type)
                    ret = c.OMS.find_related_frames_of_reference(obj_id, [output_type])
                    return ret[output_type]
                return finder_fun
            return freeze()

        ###############################################
        #
        # Assumptions or Order of Events for R2 Preloaded resources
        #
        # - orgs
        # - sites
        # - models
        # - agents
        # - devices
        # - instances
        # - attachments
        #
        ###############################################

        ###############################################
        #
        # orgs
        #
        ###############################################
        org_id = self.client.OMS.create_marine_facility(any_old(RT.Org))

        def add_to_org_fn(generic_resource_id):
            # extra_fn hook for perform_fcruf_script: share resources with the org
            log.info("Associating with Org")
            self.client.OMS.assign_resource_to_observatory_org(generic_resource_id, org_id)

        ###############################################
        #
        # sites
        #
        ###############################################
        log.info("Create an observatory")
        observatory_id = self.perform_fcruf_script(RT.Observatory,
                                                   "observatory",
                                                   self.client.OMS,
                                                   actual_obj=None,
                                                   extra_fn=add_to_org_fn)

        log.info("Create a subsite")
        subsite_id = self.perform_fcruf_script(RT.Subsite,
                                               "subsite",
                                               self.client.OMS,
                                               actual_obj=None,
                                               extra_fn=add_to_org_fn)

        log.info("Create a platform site")
        platform_site_id = self.perform_fcruf_script(RT.PlatformSite,
                                                     "platform_site",
                                                     self.client.OMS,
                                                     actual_obj=None,
                                                     extra_fn=add_to_org_fn)

        log.info("Create instrument site")
        instSite_obj = IonObject(RT.InstrumentSite,
                                 name="instrument_site",
                                 reference_designator="GA01SUMO-FI003-01-CTDMO0999")
        instrument_site_id = self.perform_fcruf_script(RT.InstrumentSite,
                                                       "instrument_site",
                                                       self.client.OMS,
                                                       actual_obj=instSite_obj,
                                                       extra_fn=add_to_org_fn)

        ###############################################
        #
        # models
        #
        ###############################################
        log.info("Create a platform model")
        platform_model_id = self.perform_fcruf_script(RT.PlatformModel,
                                                      "platform_model",
                                                      self.client.IMS)

        log.info("Create instrument model")
        instModel_obj = IonObject(RT.InstrumentModel,
                                  name='SBE37IMModel',
                                  description="SBE37IMModel",
                                  custom_attributes={'streams': {'raw': 'ctd_raw_param_dict',
                                                                 'parsed': 'ctd_parsed_param_dict'}})
        instrument_model_id = self.perform_fcruf_script(RT.InstrumentModel,
                                                        "instrument_model",
                                                        self.client.IMS,
                                                        actual_obj=instModel_obj)

        log.info("Create sensor model")
        sensor_model_id = self.perform_fcruf_script(RT.SensorModel,
                                                    "sensor_model",
                                                    self.client.IMS)

        ###############################################
        #
        # agents
        #
        ###############################################
        log.info("Create platform agent")
        platform_agent_id = self.perform_fcruf_script(RT.PlatformAgent,
                                                      "platform_agent",
                                                      self.client.IMS)

        log.info("Create instrument agent")
        instAgent_obj = IonObject(RT.InstrumentAgent,
                                  name='agent007',
                                  description="SBE37IMAgent",
                                  driver_uri=DRV_URI_GOOD)
        instrument_agent_id = self.perform_fcruf_script(RT.InstrumentAgent,
                                                        "instrument_agent",
                                                        self.client.IMS,
                                                        actual_obj=instAgent_obj)

        ###############################################
        #
        # devices
        #
        ###############################################
        log.info("Create a platform device")
        platform_device_id = self.perform_fcruf_script(RT.PlatformDevice,
                                                       "platform_device",
                                                       self.client.IMS,
                                                       actual_obj=None,
                                                       extra_fn=add_to_org_fn)

        log.info("Create an instrument device")
        instrument_device_id = self.perform_fcruf_script(RT.InstrumentDevice,
                                                         "instrument_device",
                                                         self.client.IMS,
                                                         actual_obj=None,
                                                         extra_fn=add_to_org_fn)

        log.info("Create a sensor device")
        sensor_device_id = self.perform_fcruf_script(RT.SensorDevice,
                                                     "sensor_device",
                                                     self.client.IMS,
                                                     actual_obj=None,
                                                     extra_fn=add_to_org_fn)

        ###############################################
        #
        # instances
        #
        ###############################################
        # we create instrument agent instance below, to verify some lcs checks

        ###############################################
        #
        #
        # attachments and LCS stuff
        #
        #
        ###############################################

        #----------------------------------------------
        #
        # orgs
        #
        #----------------------------------------------

        #----------------------------------------------
        #
        # sites
        #
        #----------------------------------------------
        log.info("Associate subsite with observatory")
        self.perform_association_script(c.OMS.assign_site_to_site,
                                        gen_find_oms_association(RT.Observatory),
                                        gen_find_oms_association(RT.Subsite),
                                        observatory_id,
                                        subsite_id)

        log.info("Associate platform site with subsite")
        self.perform_association_script(c.OMS.assign_site_to_site,
                                        gen_find_oms_association(RT.Subsite),
                                        gen_find_oms_association(RT.PlatformSite),
                                        subsite_id,
                                        platform_site_id)

        log.info("Associate instrument site with platform site")
        self.perform_association_script(c.OMS.assign_site_to_site,
                                        gen_find_oms_association(RT.PlatformSite),
                                        gen_find_oms_association(RT.InstrumentSite),
                                        platform_site_id,
                                        instrument_site_id)

        #----------------------------------------------
        #
        # models
        #
        #----------------------------------------------
        log.info("Associate platform model with platform site")
        self.perform_association_script(c.OMS.assign_platform_model_to_platform_site,
                                        self.RR2.find_platform_sites_by_platform_model_using_has_model,
                                        self.RR2.find_platform_models_of_platform_site_using_has_model,
                                        platform_site_id,
                                        platform_model_id)

        log.info("Associate instrument model with instrument site")
        self.perform_association_script(c.OMS.assign_instrument_model_to_instrument_site,
                                        self.RR2.find_instrument_sites_by_instrument_model_using_has_model,
                                        self.RR2.find_instrument_models_of_instrument_site_using_has_model,
                                        instrument_site_id,
                                        instrument_model_id)

        #----------------------------------------------
        #
        # agents
        #
        # - model required for DEVELOP
        # - egg required for INTEGRATE
        # - certification required for DEPLOY
        #----------------------------------------------
        self.assert_lcs_pass(self.client.IMS, "platform_agent", platform_agent_id, LCE.PLAN, LCS.PLANNED)
        self.assert_lcs_fail(self.client.IMS, "platform_agent", platform_agent_id, LCE.DEVELOP)

        log.info("Associate platform model with platform agent")
        self.perform_association_script(c.IMS.assign_platform_model_to_platform_agent,
                                        self.RR2.find_platform_agents_by_platform_model_using_has_model,
                                        self.RR2.find_platform_models_of_platform_agent_using_has_model,
                                        platform_agent_id,
                                        platform_model_id)
        self.assert_lcs_pass(self.client.IMS, "platform_agent", platform_agent_id, LCE.DEVELOP, LCS.DEVELOPED)

        self.assert_lcs_fail(self.client.IMS, "platform_agent", platform_agent_id, LCE.INTEGRATE)
        add_keyworded_attachment(self.client.RR, platform_agent_id, [KeywordFlag.EGG_URL])
        self.assert_lcs_pass(self.client.IMS, "platform_agent", platform_agent_id, LCE.INTEGRATE, LCS.INTEGRATED)

        self.assert_lcs_fail(self.client.IMS, "platform_agent", platform_agent_id, LCE.DEPLOY)
        add_keyworded_attachment(self.client.RR, platform_agent_id, [KeywordFlag.CERTIFICATION, "platform attachment"])
        self.assert_lcs_pass(self.client.IMS, "platform_agent", platform_agent_id, LCE.DEPLOY, LCS.DEPLOYED)

        self.assert_lcs_pass(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.PLAN, LCS.PLANNED)
        self.assert_lcs_fail(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.DEVELOP)

        log.info("Associate instrument model with instrument agent")
        self.perform_association_script(c.IMS.assign_instrument_model_to_instrument_agent,
                                        c.IMS.find_instrument_agent_by_instrument_model,
                                        c.IMS.find_instrument_model_by_instrument_agent,
                                        instrument_agent_id,
                                        instrument_model_id)
        self.assert_lcs_pass(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.DEVELOP, LCS.DEVELOPED)

        self.assert_lcs_fail(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.INTEGRATE)
        add_keyworded_attachment(self.client.RR, instrument_agent_id, [KeywordFlag.EGG_URL])
        self.assert_lcs_pass(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.INTEGRATE, LCS.INTEGRATED)

        self.assert_lcs_fail(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.DEPLOY)
        add_keyworded_attachment(self.client.RR, instrument_agent_id, [KeywordFlag.CERTIFICATION])
        self.assert_lcs_pass(self.client.IMS, "instrument_agent", instrument_agent_id, LCE.DEPLOY, LCS.DEPLOYED)

        #----------------------------------------------
        #
        # devices
        #
        #----------------------------------------------
        log.info("LCS plan")
        self.assert_lcs_pass(self.client.IMS, "platform_device", platform_device_id, LCE.PLAN, LCS.PLANNED)

        log.info("LCS develop")
        self.assert_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.DEVELOP)
        # a serial number alone is not enough to DEVELOP
        x = self.client.IMS.read_platform_device(platform_device_id)
        x.serial_number = "12345"
        self.client.IMS.update_platform_device(x)
        self.assert_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.DEVELOP)

        log.info("Associate platform model with platform device")
        self.assert_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.DEVELOP)
        self.perform_association_script(c.IMS.assign_platform_model_to_platform_device,
                                        c.IMS.find_platform_device_by_platform_model,
                                        c.IMS.find_platform_model_by_platform_device,
                                        platform_device_id,
                                        platform_model_id)
        self.assert_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.DEVELOP)
        add_keyworded_attachment(self.client.RR, platform_device_id, [KeywordFlag.VENDOR_TEST_RESULTS])
        self.assert_lcs_pass(self.client.IMS, "platform_device", platform_device_id, LCE.DEVELOP, LCS.DEVELOPED)

        log.info("LCS integrate")
        self.assert_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.INTEGRATE)
        add_keyworded_attachment(self.client.RR, platform_device_id, [KeywordFlag.VENDOR_TEST_RESULTS])
        self.assert_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.INTEGRATE)
        # an agent instance is what finally allows INTEGRATE
        platform_agent_instance_id = self.create_plat_agent_instance(platform_agent_id, platform_device_id)
        self.assert_lcs_pass(self.client.IMS, "platform_device", platform_device_id, LCE.INTEGRATE, LCS.INTEGRATED)

        log.info("LCS deploy")
        self.assert_lcs_fail(self.client.IMS, "platform_device", platform_device_id, LCE.DEPLOY)

        log.info("LCS plan")
        self.assert_lcs_pass(self.client.IMS, "instrument_device", instrument_device_id, LCE.PLAN, LCS.PLANNED)

        log.info("LCS develop")
        self.assert_lcs_fail(self.client.IMS, "instrument_device", instrument_device_id, LCE.DEVELOP)
        x = self.client.IMS.read_instrument_device(instrument_device_id)
        x.serial_number = "12345"
        self.client.IMS.update_instrument_device(x)
        self.assert_lcs_fail(self.client.IMS, "instrument_device", instrument_device_id, LCE.DEVELOP)

        log.info("Associate instrument model with instrument device")
        self.perform_association_script(c.IMS.assign_instrument_model_to_instrument_device,
                                        c.IMS.find_instrument_device_by_instrument_model,
                                        c.IMS.find_instrument_model_by_instrument_device,
                                        instrument_device_id,
                                        instrument_model_id)
        self.assert_lcs_fail(self.client.IMS, "instrument_device", instrument_device_id, LCE.DEVELOP)
        add_keyworded_attachment(self.client.RR, instrument_device_id, [KeywordFlag.VENDOR_TEST_RESULTS])
        self.assert_lcs_pass(self.client.IMS, "instrument_device", instrument_device_id, LCE.DEVELOP, LCS.DEVELOPED)

        log.info("LCS integrate")
        self.assert_lcs_fail(self.client.IMS, "instrument_device", instrument_device_id, LCE.INTEGRATE)

        log.info("Associate instrument device with platform device")
        self.perform_association_script(c.IMS.assign_instrument_device_to_platform_device,
                                        c.IMS.find_platform_device_by_instrument_device,
                                        c.IMS.find_instrument_device_by_platform_device,
                                        platform_device_id,
                                        instrument_device_id)
        self.assert_lcs_fail(self.client.IMS, "instrument_device", instrument_device_id, LCE.INTEGRATE)

        log.info("Create instrument agent instance")
        instrument_agent_instance_id = self.create_inst_agent_instance(instrument_agent_id, instrument_device_id)
        self.assert_lcs_pass(self.client.IMS, "instrument_device", instrument_device_id, LCE.INTEGRATE, LCS.INTEGRATED)

        log.info("LCS deploy")
        self.assert_lcs_fail(self.client.IMS, "instrument_device", instrument_device_id, LCE.DEPLOY)

        log.info("Associate sensor model with sensor device")
        self.perform_association_script(c.IMS.assign_sensor_model_to_sensor_device,
                                        self.RR2.find_sensor_devices_by_sensor_model_using_has_model,
                                        self.RR2.find_sensor_models_of_sensor_device_using_has_model,
                                        sensor_device_id,
                                        sensor_model_id)

        log.info("Associate sensor device with instrument device")
        self.perform_association_script(c.IMS.assign_sensor_device_to_instrument_device,
                                        self.RR2.find_instrument_devices_by_sensor_device_using_has_device,
                                        self.RR2.find_sensor_devices_of_instrument_device_using_has_device,
                                        instrument_device_id,
                                        sensor_device_id)

        #----------------------------------------------
        #
        # instances
        #
        #----------------------------------------------

        #----------------------------------------------
        #
        # data production chain and swapping
        #
        #----------------------------------------------

        #------------------------------------------------------------------------------------------------
        # create a stream definition for the data from the ctd simulator
        #------------------------------------------------------------------------------------------------
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.client.PSMS.create_stream_definition(name='Simulated CTD data',
                                                                      parameter_dictionary_id=pdict_id)
        log.debug("Created stream def id %s", ctd_stream_def_id)

        #create data products for instrument data
        dp_obj = self.create_data_product_obj()
        #log.debug("Created an IonObject for a data product: %s", dp_obj)

        dp_obj.name = 'Data Product'
        inst_data_product_id = c.DPMS.create_data_product(dp_obj, ctd_stream_def_id)

        #assign data products appropriately
        c.DAMS.assign_data_product(input_resource_id=instrument_device_id,
                                   data_product_id=inst_data_product_id)

        # map the instrument device to the physical port it occupies on the platform
        port_assignments = {}
        pp_obj = IonObject(OT.PlatformPort,
                           reference_designator='GA01SUMO-FI003-01-CTDMO0999',
                           port_type=PortTypeEnum.PAYLOAD,
                           ip_address='1')
        port_assignments[instrument_device_id] = pp_obj

        deployment_obj = IonObject(RT.Deployment,
                                   name='deployment',
                                   port_assignments=port_assignments,
                                   context=IonObject(OT.CabledNodeDeploymentContext))
        deployment_id = self.perform_fcruf_script(RT.Deployment,
                                                  "deployment",
                                                  c.OMS,
                                                  actual_obj=deployment_obj,
                                                  extra_fn=add_to_org_fn)

        c.OMS.assign_site_to_deployment(platform_site_id, deployment_id)
        self.RR2.find_deployment_id_of_platform_site_using_has_deployment(platform_site_id)

        c.OMS.assign_device_to_deployment(platform_device_id, deployment_id)
        self.RR2.find_deployment_of_platform_device_using_has_deployment(platform_device_id)

        # activation wires sites to devices (hasDevice), checked below
        c.OMS.activate_deployment(deployment_id, True)
        self.assertLess(0, len(self.RR2.find_instrument_sites_by_instrument_device_using_has_device(instrument_device_id)))
        self.assertLess(0, len(self.RR2.find_instrument_devices_of_instrument_site_using_has_device(instrument_site_id)))
        self.assertLess(0, len(self.RR2.find_platform_sites_by_platform_device_using_has_device(platform_device_id)))
        self.assertLess(0, len(self.RR2.find_platform_devices_of_platform_site_using_has_device(platform_site_id)))

        self.assert_lcs_pass(self.client.IMS, "platform_device", platform_device_id, LCE.DEPLOY, LCS.DEPLOYED)
        self.assert_lcs_pass(self.client.IMS, "instrument_device", instrument_device_id, LCE.DEPLOY, LCS.DEPLOYED)

        idev_lcs = self.client.RR.read(instrument_device_id).lcstate
        log.info("L4-CI-SA-RQ-334 DEPLOY: Proposed change - Instrument activation shall support transition to " +
                 "the active state for instruments - state is %s" % idev_lcs)

        #now along comes a new device
        log.info("Create instrument device 2")
        instrument_device_id2 = self.perform_fcruf_script(RT.InstrumentDevice,
                                                          "instrument_device",
                                                          self.client.IMS,
                                                          actual_obj=None,
                                                          extra_fn=add_to_org_fn)

        log.info("Associate instrument model with instrument device 2")
        self.perform_association_script(c.IMS.assign_instrument_model_to_instrument_device,
                                        c.IMS.find_instrument_device_by_instrument_model,
                                        c.IMS.find_instrument_model_by_instrument_device,
                                        instrument_device_id2,
                                        instrument_model_id)

        log.info("Associate instrument device with platform device 2")
        self.perform_association_script(c.IMS.assign_instrument_device_to_platform_device,
                                        c.IMS.find_platform_device_by_instrument_device,
                                        c.IMS.find_instrument_device_by_platform_device,
                                        platform_device_id,
                                        instrument_device_id2)

        dp_obj.name = 'Instrument Data Product 2'
        inst_data_product_id2 = c.DPMS.create_data_product(dp_obj, ctd_stream_def_id)
        c.DAMS.assign_data_product(input_resource_id=instrument_device_id2,
                                   data_product_id=inst_data_product_id2)

        # create a new deployment for the new device
        deployment_obj = any_old(RT.Deployment, {"context": IonObject(OT.CabledNodeDeploymentContext)})
        deployment_id2 = self.perform_fcruf_script(RT.Deployment,
                                                   "deployment",
                                                   c.OMS,
                                                   actual_obj=deployment_obj,
                                                   extra_fn=add_to_org_fn)

        log.debug("Associating instrument site with new deployment")
        c.OMS.assign_site_to_deployment(instrument_site_id, deployment_id2)

        log.debug("Associating instrument device with new deployment")
        c.OMS.assign_device_to_deployment(instrument_device_id2, deployment_id2)

        # activate the new deployment -- changing the primary device -- but don't switch subscription
        log.debug("Activating new deployment")
        c.OMS.activate_deployment(deployment_id2, False)

        # assert site hasDevice instrument_device_id2.
        # BUGFIX: find_associations returns a list, so assertIsNotNone could
        # never fail; assert the association actually exists instead.
        assocs = self.client.RR.find_associations(instrument_site_id,
                                                  PRED.hasDevice,
                                                  instrument_device_id2,
                                                  id_only=True)
        self.assertNotEqual([], assocs)

        #----------------------------------------------
        #
        # generic find ops
        #
        #----------------------------------------------
        log.info("Find an instrument site by observatory")
        entities = c.OMS.find_related_frames_of_reference(observatory_id, [RT.InstrumentSite])
        self.assertIn(RT.InstrumentSite, entities)
        inst_sites = entities[RT.InstrumentSite]
        self.assertEqual(1, len(inst_sites))
        self.assertEqual(instrument_site_id, inst_sites[0]._id)

        # delete marks the agent DELETED (retired) rather than removing it
        c.IMS.delete_instrument_agent(instrument_agent_id)
        instr_agent_obj_read = self.client.RR.read(instrument_agent_id)
        self.assertEquals(instr_agent_obj_read.lcstate, LCS.DELETED)
        log.info("L4-CI-SA-RQ-382: Instrument activation shall manage the life cycle of Instrument Agents")

        c.IMS.delete_instrument_device(instrument_device_id)
        # Check whether the instrument device has been retired
        instrument_obj_read = self.client.RR.read(instrument_device_id)
        log.debug("The instruments lcs state has been set to %s after the delete operation" % instrument_obj_read.lcstate)
        self.assertEquals(instrument_obj_read.lcstate, LCS.DELETED)
        log.debug("L4-CI-SA-RQ-334 DELETED")
        log.debug("L4-CI-SA-RQ-335: Instrument activation shall support transition to the retired state of instruments")

        #----------------------------------------------
        #
        # force_deletes
        #
        #----------------------------------------------

        # need to "pluck" some resources out of associations
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(platform_model_id)
        self.RR2.pluck(instrument_agent_id)
        self.RR2.pluck(platform_agent_id)
        self.RR2.pluck(deployment_id)
        self.RR2.pluck(deployment_id2)

        self.perform_fd_script(observatory_id, "observatory", c.OMS)
        self.perform_fd_script(subsite_id, "subsite", c.OMS)
        self.perform_fd_script(platform_site_id, "platform_site", c.OMS)
        self.perform_fd_script(instrument_site_id, "instrument_site", c.OMS)
        self.perform_fd_script(platform_model_id, "platform_model", c.IMS)
        self.perform_fd_script(instrument_model_id, "instrument_model", c.IMS)
        self.perform_fd_script(platform_agent_id, "platform_agent", c.IMS)
        self.perform_fd_script(instrument_agent_id, "instrument_agent", c.IMS)
        self.perform_fd_script(platform_device_id, "platform_device", c.IMS)
        self.perform_fd_script(instrument_device_id, "instrument_device", c.IMS)
        self.perform_fd_script(sensor_device_id, "sensor_device", c.IMS)
        self.perform_fd_script(sensor_model_id, "sensor_model", c.IMS)
        self.perform_fd_script(platform_agent_instance_id, "platform_agent_instance", c.IMS)
        self.perform_fd_script(instrument_agent_instance_id, "instrument_agent_instance", c.IMS)
        self.perform_fd_script(deployment_id, "deployment", c.OMS)
        self.perform_fd_script(deployment_id2, "deployment", c.OMS)

    def create_data_product_obj(self):
        """Build an any_old DataProduct with temporal/spatial domains filled in."""
        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()
        sdom = sdom.dump()
        tdom = tdom.dump()

        # creates an IonObject of RT.DataProduct and adds custom fields specified by dict
        return any_old(RT.DataProduct, dict(temporal_domain=tdom, spatial_domain=sdom))

    def create_inst_agent_instance(self, agent_id, device_id):
        """Create an SBE37 instrument agent instance via IMS; returns its id."""
        port_agent_config = {
            'device_addr': CFG.device.sbe37.host,
            'device_port': CFG.device.sbe37.port,
            'process_type': PortAgentProcessType.UNIX,
            'binary_path': "port_agent",
            'port_agent_addr': 'localhost',
            'command_port': CFG.device.sbe37.port_agent_cmd_port,
            'data_port': CFG.device.sbe37.port_agent_data_port,
            'log_level': 5,
            'type': PortAgentType.ETHERNET
        }

        instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance,
                                          name='SBE37IMAgentInstance',
                                          description="SBE37IMAgentInstance",
                                          port_agent_config=port_agent_config)

        instAgentInstance_id = self.client.IMS.create_instrument_agent_instance(instAgentInstance_obj,
                                                                                agent_id,
                                                                                device_id)
        return instAgentInstance_id

    def create_plat_agent_instance(self, agent_id, device_id):
        """Create a bare PlatformAgentInstance directly in the registry.

        Bypasses IMS (see todo) -- only the hasAgentInstance association is
        needed by the lifecycle checks in this test.
        """
        #todo : do this for real
        platAgentInstance_id, _ = self.client.RR.create(any_old(RT.PlatformAgentInstance))
        self.client.RR.create_association(device_id, PRED.hasAgentInstance, platAgentInstance_id)
        return platAgentInstance_id

    def template_tst_deployment_context(self, context=None):
        """
        Creates a minimal deployment: 1 instrument, 1 site.  deployment context must be provided
        """
        c = self.client

        log.info("Create a instrument model")
        instrument_model_id = self.perform_fcruf_script(RT.InstrumentModel,
                                                        "instrument_model",
                                                        self.client.IMS)

        log.info("Create an instrument device")
        instrument_device_id = self.perform_fcruf_script(RT.InstrumentDevice,
                                                         "instrument_device",
                                                         self.client.IMS)

        log.info("Create instrument site")
        instrument_site_id = self.perform_fcruf_script(RT.InstrumentSite,
                                                       "instrument_site",
                                                       self.client.OMS)

        log.info("Associate instrument model with instrument site")
        self.perform_association_script(c.OMS.assign_instrument_model_to_instrument_site,
                                        self.RR2.find_instrument_sites_by_instrument_model_using_has_model,
                                        self.RR2.find_instrument_models_of_instrument_site_using_has_model,
                                        instrument_site_id,
                                        instrument_model_id)

        log.info("Associate instrument model with instrument device")
        self.perform_association_script(c.IMS.assign_instrument_model_to_instrument_device,
                                        c.IMS.find_instrument_device_by_instrument_model,
                                        c.IMS.find_instrument_model_by_instrument_device,
                                        instrument_device_id,
                                        instrument_model_id)

        log.info("Create a stream definition for the data from the ctd simulator")
        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.client.PSMS.create_stream_definition(name='Simulated CTD data',
                                                                      parameter_dictionary_id=pdict_id)

        log.info("Create an IonObject for a data products")
        dp_obj = self.create_data_product_obj()
        dp_obj.name = create_unique_identifier('Inst Data Product')
        inst_data_product_id = c.DPMS.create_data_product(dp_obj, ctd_stream_def_id)

        #assign data products appropriately
        c.DAMS.assign_data_product(input_resource_id=instrument_device_id,
                                   data_product_id=inst_data_product_id)

        deployment_obj = any_old(RT.Deployment, dict(context=context))
        deployment_id = c.OMS.create_deployment(deployment_obj)
        c.OMS.assign_site_to_deployment(instrument_site_id, deployment_id)
        c.OMS.assign_device_to_deployment(instrument_device_id, deployment_id)
        c.OMS.activate_deployment(deployment_id, True)

        # cleanup
        self.RR2.pluck(instrument_model_id)
        self.RR2.pluck(deployment_id)
        self.RR2.pluck(instrument_device_id)
        c.IMS.force_delete_instrument_model(instrument_model_id)
        c.IMS.force_delete_instrument_device(instrument_device_id)
        c.OMS.force_delete_instrument_site(instrument_site_id)
        c.OMS.force_delete_deployment(deployment_id)

    # test all 4 deployment contexts. can fill in these context when their fields get defined
    def test_deployment_remoteplatform(self):
        context = IonObject(OT.RemotePlatformDeploymentContext)
        self.template_tst_deployment_context(context)

    def test_deployment_cablednode(self):
        context = IonObject(OT.CabledNodeDeploymentContext)
        self.template_tst_deployment_context(context)

    def test_deployment_cabledinstrument(self):
        context = IonObject(OT.CabledInstrumentDeploymentContext)
        self.template_tst_deployment_context(context)

    @unittest.skip("mobile deployments are unimplemented")
    def test_deployment_mobile(self):
        context = IonObject(OT.MobileAssetDeploymentContext)
        self.template_tst_deployment_context(context)

    @unittest.skip("cruise deployments are unimplemented")
    def test_deployment_cruise(self):
        context = IonObject(OT.CruiseDeploymentContext)
        self.template_tst_deployment_context(context)
class TestAgentLaunchOps(IonIntegrationTestCase): def setUp(self): # Start container #print 'instantiating container' self._start_container() #container = Container() #print 'starting container' #container.start() #print 'started container' unittest # suppress an pycharm inspector error if all unittest.skip references are commented out self.container.start_rel_from_url('res/deploy/r2deploy.yml') self.RR = ResourceRegistryServiceClient(node=self.container.node) self.IMS = InstrumentManagementServiceClient(node=self.container.node) self.IDS = IdentityManagementServiceClient(node=self.container.node) self.PSC = PubsubManagementServiceClient(node=self.container.node) self.DP = DataProductManagementServiceClient(node=self.container.node) self.DAMS = DataAcquisitionManagementServiceClient(node=self.container.node) self.DSC = DatasetManagementServiceClient(node=self.container.node) self.PDC = ProcessDispatcherServiceClient(node=self.container.node) self.OMS = ObservatoryManagementServiceClient(node=self.container.node) self.RR2 = EnhancedResourceRegistryClient(self.RR) # @unittest.skip('this test just for debugging setup') # def test_just_the_setup(self): # return def test_get_agent_client_noprocess(self): inst_device_id = self.RR2.create(any_old(RT.InstrumentDevice)) iap = ResourceAgentClient._get_agent_process_id(inst_device_id) # should be no running agent self.assertIsNone(iap) # should raise NotFound self.assertRaises(NotFound, ResourceAgentClient, inst_device_id) def test_resource_state_save_restore(self): # Create InstrumentModel instModel_obj = IonObject(RT.InstrumentModel, name='SBE37IMModel', description="SBE37IMModel") instModel_id = self.IMS.create_instrument_model(instModel_obj) log.debug( 'new InstrumentModel id = %s ', instModel_id) # Create InstrumentAgent raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' ) parsed_config = StreamConfiguration(stream_name='parsed', 
parameter_dictionary_name='ctd_parsed_param_dict') instAgent_obj = IonObject(RT.InstrumentAgent, name='agent007', description="SBE37IMAgent", driver_uri=DRV_URI_GOOD, stream_configurations = [raw_config, parsed_config] ) instAgent_id = self.IMS.create_instrument_agent(instAgent_obj) log.debug( 'new InstrumentAgent id = %s', instAgent_id) self.IMS.assign_instrument_model_to_instrument_agent(instModel_id, instAgent_id) # Create InstrumentDevice log.debug('test_activateInstrumentSample: Create instrument resource to represent the SBE37 ' + '(SA Req: L4-CI-SA-RQ-241) ') instDevice_obj = IonObject(RT.InstrumentDevice, name='SBE37IMDevice', description="SBE37IMDevice", serial_number="12345" ) instDevice_id = self.IMS.create_instrument_device(instrument_device=instDevice_obj) self.IMS.assign_instrument_model_to_instrument_device(instModel_id, instDevice_id) log.debug("test_activateInstrumentSample: new InstrumentDevice id = %s (SA Req: L4-CI-SA-RQ-241) ", instDevice_id) port_agent_config = { 'device_addr': CFG.device.sbe37.host, 'device_port': CFG.device.sbe37.port, 'process_type': PortAgentProcessType.UNIX, 'binary_path': "port_agent", 'port_agent_addr': 'localhost', 'command_port': CFG.device.sbe37.port_agent_cmd_port, 'data_port': CFG.device.sbe37.port_agent_data_port, 'log_level': 5, 'type': PortAgentType.ETHERNET } instAgentInstance_obj = IonObject(RT.InstrumentAgentInstance, name='SBE37IMAgentInstance', description="SBE37IMAgentInstance", port_agent_config = port_agent_config) instAgentInstance_id = self.IMS.create_instrument_agent_instance(instAgentInstance_obj, instAgent_id, instDevice_id) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() spdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True) parsed_stream_def_id = self.PSC.create_stream_definition(name='parsed', parameter_dictionary_id=spdict_id) rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True) raw_stream_def_id 
= self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id) #------------------------------- # Create Raw and Parsed Data Products for the device #------------------------------- dp_obj = IonObject(RT.DataProduct, name='the parsed data', description='ctd stream test', temporal_domain = tdom, spatial_domain = sdom) data_product_id1 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=parsed_stream_def_id) log.debug( 'new dp_id = %s', data_product_id1) self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id1) self.DP.activate_data_product_persistence(data_product_id=data_product_id1) self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id1) # Retrieve the id of the OUTPUT stream from the out Data Product stream_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasStream, None, True) log.debug( 'Data product streams1 = %s', stream_ids) # Retrieve the id of the OUTPUT stream from the out Data Product dataset_ids, _ = self.RR.find_objects(data_product_id1, PRED.hasDataset, RT.Dataset, True) log.debug( 'Data set for data_product_id1 = %s', dataset_ids[0]) self.parsed_dataset = dataset_ids[0] #create the datastore at the beginning of each int test that persists data dp_obj = IonObject(RT.DataProduct, name='the raw data', description='raw stream test', temporal_domain = tdom, spatial_domain = sdom) data_product_id2 = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) log.debug( 'new dp_id = %s', str(data_product_id2)) self.DAMS.assign_data_product(input_resource_id=instDevice_id, data_product_id=data_product_id2) self.DP.activate_data_product_persistence(data_product_id=data_product_id2) self.addCleanup(self.DP.suspend_data_product_persistence, data_product_id2) # spin up agent self.IMS.start_instrument_agent_instance(instrument_agent_instance_id=instAgentInstance_id) self.addCleanup(self.IMS.stop_instrument_agent_instance, 
instrument_agent_instance_id=instAgentInstance_id) #wait for start instance_obj = self.IMS.read_instrument_agent_instance(instAgentInstance_id) gate = AgentProcessStateGate(self.PDC.read_process, instDevice_id, ProcessStateEnum.RUNNING) self.assertTrue(gate.await(30), "The instrument agent instance (%s) did not spawn in 30 seconds" % gate.process_id) # take snapshot of config snap_id = self.IMS.save_resource_state(instDevice_id, "xyzzy snapshot") snap_obj = self.RR.read_attachment(snap_id, include_content=True) #modify config instance_obj.driver_config["comms_config"] = "BAD_DATA" self.RR.update(instance_obj) #restore config self.IMS.restore_resource_state(instDevice_id, snap_id) instance_obj = self.RR.read(instAgentInstance_id) if "BAD_DATA" == instance_obj.driver_config["comms_config"]: print "Saved config:" print snap_obj.content self.fail("Saved config was not properly restored") self.assertNotEqual("BAD_DATA", instance_obj.driver_config["comms_config"]) self.DP.delete_data_product(data_product_id1) self.DP.delete_data_product(data_product_id2) def test_agent_instance_config_hasDevice(self): def assign_fn(child_device_id, parent_device_id): self.RR2.create_association(parent_device_id, PRED.hasDevice, child_device_id) def find_fn(parent_device_id): ret, _ = self.RR.find_objects(subject=parent_device_id, predicate=PRED.hasDevice, id_only=True) return ret self.base_agent_instance_config(assign_fn, find_fn) log.info("END test_agent_instance_config_hasDevice") def test_agent_instance_config_hasNetworkParent(self): def assign_fn(child_device_id, parent_device_id): self.RR2.create_association(child_device_id, PRED.hasNetworkParent, parent_device_id) def find_fn(parent_device_id): ret, _ = self.RR.find_subjects(object=parent_device_id, predicate=PRED.hasNetworkParent, id_only=True) return ret self.base_agent_instance_config(assign_fn, find_fn) log.info("END test_agent_instance_config_hasNetworkParent") def base_agent_instance_config(self, 
assign_child_platform_to_parent_platform_fn, find_child_platform_ids_of_parent_platform_fn): """ Verify that agent configurations are being built properly """ clients = DotDict() clients.resource_registry = self.RR clients.pubsub_management = self.PSC clients.dataset_management = self.DSC config_builder = DotDict config_builder.i = None config_builder.p = None def refresh_pconfig_builder_hack(config_builder): """ ugly hack to get around "idempotent" RR2 caching remove after https://github.com/ooici/coi-services/pull/1190 """ config_builder.p = PlatformAgentConfigurationBuilder(clients) def refresh_iconfig_builder_hack(config_builder): """ ugly hack to get around "idempotent" RR2 caching remove after https://github.com/ooici/coi-services/pull/1190 """ config_builder.i = InstrumentAgentConfigurationBuilder(clients) tdom, sdom = time_series_domain() sdom = sdom.dump() tdom = tdom.dump() org_obj = any_old(RT.Org) org_id = self.RR2.create(org_obj) inst_startup_config = {'startup': 'config'} generic_alerts_config = [ {'lvl2': 'lvl3val'} ] required_config_keys = [ 'org_governance_name', 'device_type', 'agent', 'driver_config', 'stream_config', 'startup_config', 'aparam_alerts_config', 'children'] def verify_instrument_config(config, device_id): for key in required_config_keys: self.assertIn(key, config) self.assertEqual(org_obj.org_governance_name, config['org_governance_name']) self.assertEqual(RT.InstrumentDevice, config['device_type']) self.assertIn('driver_config', config) driver_config = config['driver_config'] expected_driver_fields = {'process_type': ('ZMQPyClassDriverLauncher',), } for k, v in expected_driver_fields.iteritems(): self.assertIn(k, driver_config) self.assertEqual(v, driver_config[k]) self.assertEqual self.assertEqual({'resource_id': device_id}, config['agent']) self.assertEqual(inst_startup_config, config['startup_config']) self.assertIn('aparam_alerts_config', config) self.assertEqual(generic_alerts_config, config['aparam_alerts_config']) 
self.assertIn('stream_config', config) for key in ['children']: self.assertEqual({}, config[key]) def verify_child_config(config, device_id, inst_device_id=None): for key in required_config_keys: self.assertIn(key, config) self.assertEqual(org_obj.org_governance_name, config['org_governance_name']) self.assertEqual(RT.PlatformDevice, config['device_type']) self.assertEqual({'resource_id': device_id}, config['agent']) self.assertIn('aparam_alerts_config', config) self.assertEqual(generic_alerts_config, config['aparam_alerts_config']) self.assertIn('stream_config', config) self.assertIn('driver_config', config) self.assertIn('foo', config['driver_config']) self.assertIn('ports', config['driver_config']) self.assertEqual('bar', config['driver_config']['foo']) self.assertIn('process_type', config['driver_config']) self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type']) if None is inst_device_id: for key in ['children', 'startup_config']: self.assertEqual({}, config[key]) else: for key in ['startup_config']: self.assertEqual({}, config[key]) self.assertIn(inst_device_id, config['children']) verify_instrument_config(config['children'][inst_device_id], inst_device_id) if config['driver_config']['ports']: self.assertTrue( isinstance(config['driver_config']['ports'], dict) ) def verify_parent_config(config, parent_device_id, child_device_id, inst_device_id=None): for key in required_config_keys: self.assertIn(key, config) self.assertEqual(org_obj.org_governance_name, config['org_governance_name']) self.assertEqual(RT.PlatformDevice, config['device_type']) self.assertIn('process_type', config['driver_config']) self.assertIn('ports', config['driver_config']) self.assertEqual(('ZMQPyClassDriverLauncher',), config['driver_config']['process_type']) self.assertEqual({'resource_id': parent_device_id}, config['agent']) self.assertIn('aparam_alerts_config', config) self.assertEqual(generic_alerts_config, config['aparam_alerts_config']) 
self.assertIn('stream_config', config) for key in ['startup_config']: self.assertEqual({}, config[key]) if config['driver_config']['ports']: self.assertTrue( isinstance(config['driver_config']['ports'], dict) ) self.assertIn(child_device_id, config['children']) verify_child_config(config['children'][child_device_id], child_device_id, inst_device_id) rpdict_id = self.DSC.read_parameter_dictionary_by_name('ctd_raw_param_dict', id_only=True) raw_stream_def_id = self.PSC.create_stream_definition(name='raw', parameter_dictionary_id=rpdict_id) #todo: create org and figure out which agent resource needs to get assigned to it def _make_platform_agent_structure(name='', agent_config=None): if None is agent_config: agent_config = {} # instance creation platform_agent_instance_obj = any_old(RT.PlatformAgentInstance, {'driver_config': {'foo': 'bar'}, 'alerts': generic_alerts_config}) platform_agent_instance_obj.agent_config = agent_config platform_agent_instance_id = self.IMS.create_platform_agent_instance(platform_agent_instance_obj) # agent creation raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' ) platform_agent_obj = any_old(RT.PlatformAgent, {"stream_configurations":[raw_config]}) platform_agent_id = self.IMS.create_platform_agent(platform_agent_obj) # device creation platform_device_id = self.IMS.create_platform_device(any_old(RT.PlatformDevice)) # data product creation dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom}) dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) self.DAMS.assign_data_product(input_resource_id=platform_device_id, data_product_id=dp_id) self.DP.activate_data_product_persistence(data_product_id=dp_id) self.addCleanup(self.DP.suspend_data_product_persistence, dp_id) #deployment creation site_obj = IonObject(RT.PlatformSite, name='sitePlatform') site_id = self.OMS.create_platform_site(platform_site=site_obj) # find current 
deployment using time constraints current_time = int( calendar.timegm(time.gmtime()) ) # two years on either side of current time start = current_time - 63115200 end = current_time + 63115200 temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end)) platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-09-CTDMO0999', port_type=PortTypeEnum.UPLINK, ip_address=0) deployment_obj = IonObject(RT.Deployment, name='TestPlatformDeployment_' + name, description='some new deployment', context=IonObject(OT.CabledNodeDeploymentContext), constraint_list=[temporal_bounds], port_assignments={platform_device_id:platform_port_obj}) deploy_id = self.OMS.create_deployment(deployment=deployment_obj, site_id=site_id, device_id=platform_device_id) # assignments self.RR2.assign_platform_agent_instance_to_platform_device_with_has_agent_instance(platform_agent_instance_id, platform_device_id) self.RR2.assign_platform_agent_to_platform_agent_instance_with_has_agent_definition(platform_agent_id, platform_agent_instance_id) self.RR2.assign_platform_device_to_org_with_has_resource(platform_agent_instance_id, org_id) return platform_agent_instance_id, platform_agent_id, platform_device_id def _make_instrument_agent_structure(agent_config=None): if None is agent_config: agent_config = {} # instance creation instrument_agent_instance_obj = any_old(RT.InstrumentAgentInstance, {"startup_config": inst_startup_config, 'alerts': generic_alerts_config}) instrument_agent_instance_obj.agent_config = agent_config instrument_agent_instance_id = self.IMS.create_instrument_agent_instance(instrument_agent_instance_obj) # agent creation raw_config = StreamConfiguration(stream_name='raw', parameter_dictionary_name='ctd_raw_param_dict' ) instrument_agent_obj = any_old(RT.InstrumentAgent, {"stream_configurations":[raw_config]}) instrument_agent_id = self.IMS.create_instrument_agent(instrument_agent_obj) # device creation 
instrument_device_id = self.IMS.create_instrument_device(any_old(RT.InstrumentDevice)) # data product creation dp_obj = any_old(RT.DataProduct, {"temporal_domain":tdom, "spatial_domain": sdom}) dp_id = self.DP.create_data_product(data_product=dp_obj, stream_definition_id=raw_stream_def_id) self.DAMS.assign_data_product(input_resource_id=instrument_device_id, data_product_id=dp_id) self.DP.activate_data_product_persistence(data_product_id=dp_id) self.addCleanup(self.DP.suspend_data_product_persistence, dp_id) #deployment creation site_obj = IonObject(RT.InstrumentSite, name='siteInstrument') site_id = self.OMS.create_instrument_site(instrument_site =site_obj) # find current deployment using time constraints current_time = int( calendar.timegm(time.gmtime()) ) # two years on either side of current time start = current_time - 63115200 end = current_time + 63115200 temporal_bounds = IonObject(OT.TemporalBounds, name='planned', start_datetime=str(start), end_datetime=str(end)) platform_port_obj= IonObject(OT.PlatformPort, reference_designator = 'GA01SUMO-FI003-08-CTDMO0888', port_type=PortTypeEnum.PAYLOAD, ip_address=0) deployment_obj = IonObject(RT.Deployment, name='TestDeployment for Cabled Instrument', description='some new deployment', context=IonObject(OT.CabledInstrumentDeploymentContext), constraint_list=[temporal_bounds], port_assignments={instrument_device_id:platform_port_obj}) deploy_id = self.OMS.create_deployment(deployment=deployment_obj, site_id=site_id, device_id=instrument_device_id) # assignments self.RR2.assign_instrument_agent_instance_to_instrument_device_with_has_agent_instance(instrument_agent_instance_id, instrument_device_id) self.RR2.assign_instrument_agent_to_instrument_agent_instance_with_has_agent_definition(instrument_agent_id, instrument_agent_instance_id) self.RR2.assign_instrument_device_to_org_with_has_resource(instrument_agent_instance_id, org_id) return instrument_agent_instance_id, instrument_agent_id, instrument_device_id # can't do 
anything without an agent instance obj log.debug("Testing that preparing a launcher without agent instance raises an error") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated self.assertRaises(AssertionError, config_builder.p.prepare, will_launch=False) log.debug("Making the structure for a platform agent, which will be the child") platform_agent_instance_child_id, _, platform_device_child_id = _make_platform_agent_structure(name='child') platform_agent_instance_child_obj = self.RR2.read(platform_agent_instance_child_id) log.debug("Preparing a valid agent instance launch, for config only") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated config_builder.p.set_agent_instance_object(platform_agent_instance_child_obj) child_config = config_builder.p.prepare(will_launch=False) verify_child_config(child_config, platform_device_child_id) log.debug("Making the structure for a platform agent, which will be the parent") platform_agent_instance_parent_id, _, platform_device_parent_id = _make_platform_agent_structure(name='parent') platform_agent_instance_parent_obj = self.RR2.read(platform_agent_instance_parent_id) log.debug("Testing child-less parent as a child config") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj) parent_config = config_builder.p.prepare(will_launch=False) verify_child_config(parent_config, platform_device_parent_id) log.debug("assigning child platform to parent") assign_child_platform_to_parent_platform_fn(platform_device_child_id, platform_device_parent_id) child_device_ids = find_child_platform_ids_of_parent_platform_fn(platform_device_parent_id) self.assertNotEqual(0, len(child_device_ids)) log.debug("Testing parent + child as parent config") refresh_pconfig_builder_hack(config_builder) # associations have 
changed since builder was instantiated config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj) parent_config = config_builder.p.prepare(will_launch=False) verify_parent_config(parent_config, platform_device_parent_id, platform_device_child_id) log.debug("making the structure for an instrument agent") instrument_agent_instance_id, _, instrument_device_id = _make_instrument_agent_structure() instrument_agent_instance_obj = self.RR2.read(instrument_agent_instance_id) log.debug("Testing instrument config") refresh_iconfig_builder_hack(config_builder) # associations have changed since builder was instantiated config_builder.i.set_agent_instance_object(instrument_agent_instance_obj) instrument_config = config_builder.i.prepare(will_launch=False) verify_instrument_config(instrument_config, instrument_device_id) log.debug("assigning instrument to platform") self.RR2.assign_instrument_device_to_platform_device_with_has_device(instrument_device_id, platform_device_child_id) child_device_ids = self.RR2.find_instrument_device_ids_of_platform_device_using_has_device(platform_device_child_id) self.assertNotEqual(0, len(child_device_ids)) log.debug("Testing entire config") refresh_pconfig_builder_hack(config_builder) # associations have changed since builder was instantiated config_builder.p.set_agent_instance_object(platform_agent_instance_parent_obj) full_config = config_builder.p.prepare(will_launch=False) verify_parent_config(full_config, platform_device_parent_id, platform_device_child_id, instrument_device_id) #self.fail(parent_config) #plauncher.prepare(will_launch=False) log.info("END base_agent_instance_config")
class TestDeployment(IonIntegrationTestCase):
    """
    Integration tests for Deployment resources: creation, site/device
    association, and activation under various model/site/device setups.
    """

    def setUp(self):
        """Start the container, deploy r2 services, and build the service clients used below."""
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
        self.dmpsclient = DataProductManagementServiceClient(node=self.container.node)
        self.damsclient = DataAcquisitionManagementServiceClient(node=self.container.node)
        self.psmsclient = PubsubManagementServiceClient(node=self.container.node)
        self.dataset_management = DatasetManagementServiceClient()

        # DotDict of clients, mirroring the shape service code expects.
        self.c = DotDict()
        self.c.resource_registry = self.rrclient
        self.RR2 = EnhancedResourceRegistryClient(self.rrclient)

        # create missing data process definition
        self.dsmsclient = DataProcessManagementServiceClient(node=self.container.node)
        dpd_obj = IonObject(RT.DataProcessDefinition,
                            name=LOGICAL_TRANSFORM_DEFINITION_NAME,
                            description="normally in preload",
                            module='ion.processes.data.transforms.logical_transform',
                            class_name='logical_transform')
        self.dsmsclient.create_data_process_definition(dpd_obj)

        # deactivate all data processes when tests are complete
        def killAllDataProcesses():
            for proc_id in self.rrclient.find_resources(RT.DataProcess, None, None, True)[0]:
                self.dsmsclient.deactivate_data_process(proc_id)
                self.dsmsclient.delete_data_process(proc_id)
        self.addCleanup(killAllDataProcesses)

    #@unittest.skip("targeting")
    def test_create_deployment(self):
        """Create a deployment, attach a site and device, verify associations, then delete."""
        #create a deployment with metadata and an initial site and device
        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device__obj = IonObject(RT.PlatformDevice,
                                         name='PlatformDevice1',
                                         description='test platform device')
        device_id = self.imsclient.create_platform_device(platform_device__obj)

        # One-year planned temporal constraint for the deployment.
        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)
        self.omsclient.deploy_platform_site(site_id, deployment_id)
        self.imsclient.deploy_platform_device(device_id, deployment_id)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        #retrieve the deployment objects and check that the assoc site and device are attached
        read_deployment_obj = self.omsclient.read_deployment(deployment_id)
        log.debug("test_create_deployment: created deployment obj: %s ", str(read_deployment_obj))

        site_ids, _ = self.rrclient.find_subjects(RT.PlatformSite, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(site_ids), 1)

        device_ids, _ = self.rrclient.find_subjects(RT.PlatformDevice, PRED.hasDeployment, deployment_id, True)
        self.assertEqual(len(device_ids), 1)

        #delete the deployment
        self.RR2.pluck(deployment_id)
        self.omsclient.force_delete_deployment(deployment_id)
        # now try to get the deleted dp object
        try:
            self.omsclient.read_deployment(deployment_id)
        except NotFound:
            pass
        else:
            self.fail("deleted deployment was found during read")

    #@unittest.skip("targeting")
    def base_activate_deployment(self):
        """
        Build the full fixture used by the activation tests: platform and
        instrument sites/devices/models plus an (unactivated) deployment.
        Returns a DotDict of all created resource ids.
        """
        #-------------------------------------------------------------------------------------
        # Create platform site, platform device, platform model
        #-------------------------------------------------------------------------------------

        platform_site__obj = IonObject(RT.PlatformSite,
                                       name='PlatformSite1',
                                       description='test platform site')
        platform_site_id = self.omsclient.create_platform_site(platform_site__obj)

        platform_device_obj = IonObject(RT.PlatformDevice,
                                        name='PlatformDevice1',
                                        description='test platform device')
        platform_device_id = self.imsclient.create_platform_device(platform_device_obj)

        platform_model__obj = IonObject(RT.PlatformModel,
                                        name='PlatformModel1',
                                        description='test platform model')
        platform_model_id = self.imsclient.create_platform_model(platform_model__obj)

        #-------------------------------------------------------------------------------------
        # Create instrument site
        #-------------------------------------------------------------------------------------

        instrument_site_obj = IonObject(RT.InstrumentSite,
                                        name='InstrumentSite1',
                                        description='test instrument site')
        instrument_site_id = self.omsclient.create_instrument_site(instrument_site_obj, platform_site_id)

        pdict_id = self.dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
        ctd_stream_def_id = self.psmsclient.create_stream_definition(name='SBE37_CDM', parameter_dictionary_id=pdict_id)

        # Construct temporal and spatial Coordinate Reference System objects
        tdom, sdom = time_series_domain()

        sdom = sdom.dump()
        tdom = tdom.dump()

        dp_obj = IonObject(RT.DataProduct,
                           name='Log Data Product',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        out_log_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id)

        #----------------------------------------------------------------------------------------------------
        # Start the transform (a logical transform) that acts as an instrument site
        #----------------------------------------------------------------------------------------------------
        self.omsclient.create_site_data_product(site_id=instrument_site_id,
                                                data_product_id=out_log_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument device
        #----------------------------------------------------------------------------------------------------
        instrument_device_obj = IonObject(RT.InstrumentDevice,
                                          name='InstrumentDevice1',
                                          description='test instrument device')
        instrument_device_id = self.imsclient.create_instrument_device(instrument_device_obj)
        # Instrument hangs off the platform device.
        self.rrclient.create_association(platform_device_id, PRED.hasDevice, instrument_device_id)

        dp_obj = IonObject(RT.DataProduct,
                           name='Instrument Data Product',
                           description='some new dp',
                           temporal_domain=tdom,
                           spatial_domain=sdom)
        inst_data_product_id = self.dmpsclient.create_data_product(dp_obj, ctd_stream_def_id)

        #assign data products appropriately
        self.damsclient.assign_data_product(input_resource_id=instrument_device_id,
                                            data_product_id=inst_data_product_id)

        #----------------------------------------------------------------------------------------------------
        # Create an instrument model
        #----------------------------------------------------------------------------------------------------
        instrument_model_obj = IonObject(RT.InstrumentModel,
                                         name='InstrumentModel1',
                                         description='test instrument model')
        instrument_model_id = self.imsclient.create_instrument_model(instrument_model_obj)

        #----------------------------------------------------------------------------------------------------
        # Create a deployment object
        #----------------------------------------------------------------------------------------------------

        start = IonTime(datetime.datetime(2013, 1, 1))
        end = IonTime(datetime.datetime(2014, 1, 1))
        temporal_bounds = IonObject(OT.TemporalBounds,
                                    name='planned',
                                    start_datetime=start.to_string(),
                                    end_datetime=end.to_string())
        deployment_obj = IonObject(RT.Deployment,
                                   name='TestDeployment',
                                   description='some new deployment',
                                   constraint_list=[temporal_bounds])
        deployment_id = self.omsclient.create_deployment(deployment_obj)

        log.debug("test_create_deployment: created deployment id: %s ", str(deployment_id))

        ret = DotDict(instrument_site_id=instrument_site_id,
                      instrument_device_id=instrument_device_id,
                      instrument_model_id=instrument_model_id,
                      platform_site_id=platform_site_id,
                      platform_device_id=platform_device_id,
                      platform_model_id=platform_model_id,
                      deployment_id=deployment_id)

        return ret

    #@unittest.skip("targeting")
    def test_activate_deployment_normal(self):
        """Fully wired models/sites/devices: activation should succeed."""
        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_platform_model_to_platform_device(res.platform_model_id, res.platform_device_id)
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_platform_model_to_platform_site(res.platform_model_id, res.platform_site_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("adding instrument site and device to deployment")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("adding platform site and device to deployment")
        self.omsclient.deploy_platform_site(res.platform_site_id, res.deployment_id)
        self.imsclient.deploy_platform_device(res.platform_device_id, res.deployment_id)

        log.debug("activating deployment, expecting success")
        self.omsclient.activate_deployment(res.deployment_id)

    #@unittest.skip("targeting")
    def test_activate_deployment_nomodels(self):
        """Activation must fail when site and/or device models are missing."""
        res = self.base_activate_deployment()

        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without site+device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, "Expected at least 1 model for InstrumentSite")

        log.debug("assigning instrument site model")
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, "Expected 1 model for InstrumentDevice")

    #@unittest.skip("targeting")
    def test_activate_deployment_nosite(self):
        """Activation must fail when only a device (no site) is in the deployment."""
        res = self.base_activate_deployment()

        log.debug("assigning instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument device only")
        self.imsclient.deploy_instrument_device(res.instrument_device_id, res.deployment_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, "No sites were found in the deployment")

    #@unittest.skip("targeting")
    def test_activate_deployment_nodevice(self):
        """Activation must fail when only a site (no device) is in the deployment."""
        res = self.base_activate_deployment()

        log.debug("assigning platform and instrument models")
        self.imsclient.assign_instrument_model_to_instrument_device(res.instrument_model_id, res.instrument_device_id)
        self.omsclient.assign_instrument_model_to_instrument_site(res.instrument_model_id, res.instrument_site_id)

        log.debug("deploying instrument site only")
        self.omsclient.deploy_instrument_site(res.instrument_site_id, res.deployment_id)

        log.debug("activating deployment without device models, expecting fail")
        self.assert_deploy_fail(res.deployment_id, "The set of devices could not be mapped to the set of sites")

    def assert_deploy_fail(self, deployment_id, fail_message="did not specify fail_message"):
        """Assert that activating the deployment raises BadRequest containing fail_message."""
        with self.assertRaises(BadRequest) as cm:
            self.omsclient.activate_deployment(deployment_id)
        self.assertIn(fail_message, cm.exception.message)
class DataProcessManagementService(BaseDataProcessManagementService):
    """Service that manages DataProcessDefinitions, TransformFunctions and
    DataProcesses: resources, their stream-definition associations, and the
    launch/teardown of the underlying transform processes via the process
    dispatcher and pubsub subscriptions.
    """

    def on_init(self):
        IonObject("Resource")  # suppress pyflakes error
        self.override_clients(self.clients)
        self.init_module_uploader()

        # id generator for process-code filenames and data process names
        self.get_unique_id = (lambda : uuid4().hex)

        self.data_product_management = DataProductManagementServiceClient()

    def init_module_uploader(self):
        """Configure the uploader used by register_data_process_definition
        from service config; raises BadRequest on missing required items.
        """
        if self.CFG:
            #looking for forms like host=amoeba.ucsd.edu, remotepath=/var/www/release, user=steve
            cfg_host = self.CFG.get_safe("service.data_process_management.process_release_host", None)
            cfg_remotepath = self.CFG.get_safe("service.data_process_management.process_release_directory", None)
            cfg_user = self.CFG.get_safe("service.data_process_management.process_release_user",
                                         pwd.getpwuid(os.getuid())[0])
            cfg_wwwprefix = self.CFG.get_safe("service.data_process_management.process_release_wwwprefix", None)

            if cfg_host is None or cfg_remotepath is None or cfg_wwwprefix is None:
                raise BadRequest("Missing configuration items; host='%s', directory='%s', wwwprefix='%s'" %
                                 (cfg_host, cfg_remotepath, cfg_wwwprefix))

            self.module_uploader = RegisterModulePreparerPy(dest_user=cfg_user,
                                                            dest_host=cfg_host,
                                                            dest_path=cfg_remotepath,
                                                            dest_wwwprefix=cfg_wwwprefix)

    def override_clients(self, new_clients):
        """
        Replaces the service clients with a new set of them... and makes sure they go to the right places
        """
        self.RR2 = EnhancedResourceRegistryClient(self.clients.resource_registry)

        #shortcut names for the import sub-services
        if hasattr(self.clients, "resource_registry"):
            self.RR = self.clients.resource_registry

    #todo: need to know what object will be worked with here
    def register_data_process_definition(self, process_code=''):
        """
        register a process module by putting it in a web-accessible location

        @param process_code a base64-encoded python file
        @retval the web-accessible URL of the uploaded module
        @throws BadRequest if validation or upload fails
        """
        #
        # # retrieve the resource
        # data_process_definition_obj = self.clients.resource_registry.read(data_process_definition_id)

        dest_filename = "process_code_%s.py" % self.get_unique_id()  #data_process_definition_obj._id

        #process the input file (base64-encoded .py)
        uploader_obj, err = self.module_uploader.prepare(process_code, dest_filename)
        if None is uploader_obj:
            raise BadRequest("Process code failed validation: %s" % err)

        # actually upload
        up_success, err = uploader_obj.upload()
        if not up_success:
            raise BadRequest("Upload failed: %s" % err)

        #
        # #todo: save module / class?
        # data_process_definition_obj.uri = uploader_obj.get_destination_url()
        # self.clients.resource_registry.update(data_process_definition_obj)

        return uploader_obj.get_destination_url()

    @classmethod
    def _cmp_transform_function(cls, tf1, tf2):
        # Two transform functions are equal when all identifying fields match.
        return tf1.module == tf2.module and \
               tf1.cls == tf2.cls and \
               tf1.uri == tf2.uri and \
               tf1.function_type == tf2.function_type

    def create_transform_function(self, transform_function=''):
        '''
        Creates a new transform function
        '''
        return self.RR2.create(transform_function, RT.TransformFunction)

    def read_transform_function(self, transform_function_id=''):
        tf = self.RR2.read(transform_function_id, RT.TransformFunction)
        return tf

    def update_transform_function(self, transform_function=None):
        self.RR2.update(transform_function, RT.TransformFunction)

    def delete_transform_function(self, transform_function_id=''):
        self.RR2.retire(transform_function_id, RT.TransformFunction)

    def force_delete_transform_function(self, transform_function_id=''):
        self.RR2.pluck_delete(transform_function_id, RT.TransformFunction)

    def create_data_process_definition(self, data_process_definition=None):
        """Create the DataProcessDefinition resource plus its underlying
        ProcessDefinition, and associate the two.
        """
        data_process_definition_id = self.RR2.create(data_process_definition, RT.DataProcessDefinition)

        #-------------------------------
        # Process Definition
        #-------------------------------
        # Create the underlying process definition
        process_definition = ProcessDefinition()
        process_definition.name = data_process_definition.name
        process_definition.description = data_process_definition.description

        process_definition.executable = {'module': data_process_definition.module,
                                         'class': data_process_definition.class_name}
        process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition=process_definition)

        self.RR2.assign_process_definition_to_data_process_definition_with_has_process_definition(process_definition_id,
                                                                                                 data_process_definition_id)

        return data_process_definition_id

    def update_data_process_definition(self, data_process_definition=None):
        # TODO: If executable has changed, update underlying ProcessDefinition

        # Overwrite DataProcessDefinition object
        self.RR2.update(data_process_definition, RT.DataProcessDefinition)

    def read_data_process_definition(self, data_process_definition_id=''):
        data_proc_def_obj = self.RR2.read(data_process_definition_id, RT.DataProcessDefinition)
        return data_proc_def_obj

    def delete_data_process_definition(self, data_process_definition_id=''):
        # Delete the resource
        self.RR2.retire(data_process_definition_id, RT.DataProcessDefinition)

    def force_delete_data_process_definition(self, data_process_definition_id=''):
        """Hard-delete the DataProcessDefinition and its associated
        ProcessDefinitions.
        """
        processdef_ids, _ = self.clients.resource_registry.find_objects(subject=data_process_definition_id,
                                                                        predicate=PRED.hasProcessDefinition,
                                                                        object_type=RT.ProcessDefinition,
                                                                        id_only=True)

        self.RR2.pluck_delete(data_process_definition_id, RT.DataProcessDefinition)

        for processdef_id in processdef_ids:
            self.clients.process_dispatcher.delete_process_definition(processdef_id)

    def find_data_process_definitions(self, filters=None):
        """
        @param filters: dict of parameters to filter down the list of possible data proc.
        @retval list of DataProcessDefinition ids
        """
        #todo: add filtering
        data_process_def_list, _ = self.clients.resource_registry.find_resources(RT.DataProcessDefinition, None, None, True)
        return data_process_def_list

    def assign_input_stream_definition_to_data_process_definition(self, stream_definition_id='', data_process_definition_id=''):
        """Connect the input stream with a data process definition
        """
        # Verify that both ids are valid, RR will throw if not found
        stream_definition_obj = self.clients.resource_registry.read(stream_definition_id)
        data_process_definition_obj = self.clients.resource_registry.read(data_process_definition_id)

        validate_is_not_none(stream_definition_obj,
                             "No stream definition object found for stream definition id: %s" % stream_definition_id)
        validate_is_not_none(data_process_definition_obj,
                             "No data process definition object found for data process"
                             " definition id: %s" % data_process_definition_id)

        self.clients.resource_registry.create_association(data_process_definition_id,
                                                          PRED.hasInputStreamDefinition,
                                                          stream_definition_id)

    def unassign_input_stream_definition_from_data_process_definition(self, stream_definition_id='', data_process_definition_id=''):
        """
        Disconnect the Data Product from the Data Producer

        @param stream_definition_id str
        @param data_process_definition_id str
        @throws NotFound object with specified id does not exist
        """
        # Remove the link between the Stream Definition resource and the Data Process Definition resource
        associations = self.clients.resource_registry.find_associations(data_process_definition_id,
                                                                        PRED.hasInputStreamDefinition,
                                                                        stream_definition_id,
                                                                        id_only=True)
        validate_is_not_none(associations,
                             "No Input Stream Definitions associated with data process definition ID " + str(data_process_definition_id))

        for association in associations:
            self.clients.resource_registry.delete_association(association)

    def assign_stream_definition_to_data_process_definition(self, stream_definition_id='', data_process_definition_id='', binding=''):
        """Connect the output stream with a data process definition
        """
        # Verify that both ids are valid, RR will throw if not found
        stream_definition_obj = self.clients.resource_registry.read(stream_definition_id)
        data_process_definition_obj = self.clients.resource_registry.read(data_process_definition_id)

        validate_is_not_none(stream_definition_obj,
                             "No stream definition object found for stream definition id: %s" % stream_definition_id)
        validate_is_not_none(data_process_definition_obj,
                             "No data process definition object found for data process"
                             " definition id: %s" % data_process_definition_id)

        self.clients.resource_registry.create_association(data_process_definition_id,
                                                          PRED.hasStreamDefinition,
                                                          stream_definition_id)
        # remember which named binding this output stream definition satisfies
        if binding:
            data_process_definition_obj.output_bindings[binding] = stream_definition_id
        self.clients.resource_registry.update(data_process_definition_obj)

    def unassign_stream_definition_from_data_process_definition(self, stream_definition_id='', data_process_definition_id=''):
        """
        Disconnect the Data Product from the Data Producer

        @param stream_definition_id str
        @param data_process_definition_id str
        @throws NotFound object with specified id does not exist
        """
        # Remove the link between the Stream Definition resource and the Data Process Definition resource
        associations = self.clients.resource_registry.find_associations(data_process_definition_id,
                                                                        PRED.hasStreamDefinition,
                                                                        stream_definition_id,
                                                                        id_only=True)

        validate_is_not_none(associations,
                             "No Stream Definitions associated with data process definition ID " + str(data_process_definition_id))
        for association in associations:
            self.clients.resource_registry.delete_association(association)

    def create_data_process(self, data_process_definition_id='', in_data_product_ids=None, out_data_product_ids=None, configuration=None):
        '''
        Creates a DataProcess resource and launches the process.
        A DataProcess is a process that receives one (or more) data products and produces one (or more) data products.

        @param data_process_definition_id : The Data Process Definition to use, if none is specified the standard TransformDataProcess is used
        @param in_data_product_ids : A list of input data product identifiers
        @param out_data_product_ids : A list of output data product identifiers
        @param configuration : The configuration dictionary for the process, and the routing table:

        The routing table is defined as such:
            { in_data_product_id: {out_data_product_id : actor }}

        Routes are specified in the configuration dictionary under the item "routes"
        actor is either None (for ParameterFunctions) or a valid TransformFunction identifier
        '''
        configuration = DotDict(configuration or {})
        in_data_product_ids = in_data_product_ids or []
        out_data_product_ids = out_data_product_ids or []
        routes = configuration.get_safe('process.routes', {})

        # with exactly one input and one output, the route is implicit
        if not routes and (1 == len(in_data_product_ids) == len(out_data_product_ids)):
            routes = {in_data_product_ids[0]: {out_data_product_ids[0]: None}}
        # Routes are not supported for processes with discrete data process definitions
        elif not routes and not data_process_definition_id:
            raise BadRequest('No valid route defined for this data process.')

        self.validate_compatibility(data_process_definition_id, in_data_product_ids, out_data_product_ids, routes)
        # translate data-product-based routes into stream-based routes
        routes = self._manage_routes(routes)
        configuration.process.input_products = in_data_product_ids
        configuration.process.output_products = out_data_product_ids
        configuration.process.routes = routes
        if 'lookup_docs' in configuration.process:
            configuration.process.lookup_docs.extend(self._get_lookup_docs(in_data_product_ids, out_data_product_ids))
        else:
            configuration.process.lookup_docs = self._get_lookup_docs(in_data_product_ids, out_data_product_ids)

        dproc = DataProcess()
        dproc.name = 'data_process_%s' % self.get_unique_id()
        dproc.configuration = configuration
        dproc_id, rev = self.clients.resource_registry.create(dproc)
        dproc._id = dproc_id
        dproc._rev = rev

        for data_product_id in in_data_product_ids:
            self.clients.resource_registry.create_association(subject=dproc_id,
                                                              predicate=PRED.hasInputProduct,
                                                              object=data_product_id)

        if data_process_definition_id:
            self.clients.resource_registry.create_association(data_process_definition_id, PRED.hasDataProcess, dproc_id)

        self._manage_producers(dproc_id, out_data_product_ids)

        self._manage_attachments()

        queue_name = self._create_subscription(dproc, in_data_product_ids)

        pid = self._launch_data_process(
            queue_name=queue_name,
            data_process_definition_id=data_process_definition_id,
            out_data_product_ids=out_data_product_ids,
            configuration=configuration)

        self.clients.resource_registry.create_association(subject=dproc_id, predicate=PRED.hasProcess, object=pid)

        return dproc_id

    def _get_input_stream_ids(self, in_data_product_ids=None):
        """Return the single stream id for each input data product.

        @throws BadRequest if a data product has no stream or more than one
        """
        input_stream_ids = []

        # get the streams associated with this IN data products
        for in_data_product_id in in_data_product_ids:
            # Get the stream associated with this input data product
            stream_ids, _ = self.clients.resource_registry.find_objects(in_data_product_id, PRED.hasStream, RT.Stream, True)

            validate_is_not_none(stream_ids, "No Stream created for this input Data Product " + str(in_data_product_id))
            # BUG FIX: the original passed the boolean (len(stream_ids) != 1) to
            # validate_is_not_none, which never fails; enforce the invariant for real.
            validate_true(len(stream_ids) == 1, "Input Data Product should only have ONE stream" + str(in_data_product_id))

            # We take for now one stream_id associated with each input data product
            input_stream_ids.append(stream_ids[0])

        return input_stream_ids

    def _launch_process(self, queue_name='', out_streams=None, process_definition_id='', configuration=None):
        """
        Launches the process via the process dispatcher; returns the pid.

        NOTE: mutates `configuration` in place (adds process.queue_name and
        process.publish_streams).
        """
        # ------------------------------------------------------------------------------------
        # Spawn Configuration and Parameters
        # ------------------------------------------------------------------------------------
        if 'process' not in configuration:
            configuration['process'] = {}
        configuration['process']['queue_name'] = queue_name
        configuration['process']['publish_streams'] = out_streams

        # Setting the restart mode
        schedule = ProcessSchedule()
        schedule.restart_mode = ProcessRestartMode.ABNORMAL

        # ------------------------------------------------------------------------------------
        # Process Spawning
        # ------------------------------------------------------------------------------------
        # Spawn the process
        pid = self.clients.process_dispatcher.schedule_process(
            process_definition_id=process_definition_id,
            schedule=schedule,
            configuration=configuration
        )
        validate_is_not_none(pid, "Process could not be spawned")

        return pid

    def _find_lookup_tables(self, resource_id="", configuration=None):
        """Copy any 'DataProcessInput' attachments of the resource into the
        configuration, keyed by attachment name.
        """
        #check if resource has lookup tables attached
        configuration = configuration or DotDict()

        attachment_objs, _ = self.clients.resource_registry.find_objects(resource_id, PRED.hasAttachment, RT.Attachment, False)

        for attachment_obj in attachment_objs:
            words = set(attachment_obj.keywords)

            if 'DataProcessInput' in words:
                configuration[attachment_obj.name] = attachment_obj.content
                log.debug("Lookup table, %s, found in attachment %s" % (attachment_obj.content, attachment_obj.name))
            else:
                log.debug("NO lookup table in attachment %s" % attachment_obj.name)

        return configuration

    def update_data_process_inputs(self, data_process_id="", in_stream_ids=None):
        """Replace the data process's input subscription with a new one on the
        given streams, preserving the previous activation state.
        """
        #@TODO: INPUT STREAM VALIDATION
        log.debug("Updating inputs to data process '%s'", data_process_id)
        data_process_obj = self.clients.resource_registry.read(data_process_id)
        subscription_id = data_process_obj.input_subscription_id
        was_active = False
        if subscription_id:
            # get rid of all the current streams
            try:
                log.debug("Deactivating subscription '%s'", subscription_id)
                self.clients.pubsub_management.deactivate_subscription(subscription_id)
                was_active = True
            except BadRequest:
                log.info('Subscription was not active')

            self.clients.pubsub_management.delete_subscription(subscription_id)

        new_subscription_id = self.clients.pubsub_management.create_subscription(data_process_obj.name,
                                                                                 stream_ids=in_stream_ids)
        data_process_obj.input_subscription_id = new_subscription_id

        self.clients.resource_registry.update(data_process_obj)

        if was_active:
            log.debug("Activating subscription '%s'", new_subscription_id)
            self.clients.pubsub_management.activate_subscription(new_subscription_id)

    def update_data_process(self):
        raise BadRequest('Cannot update an existing data process.')

    def read_data_process(self, data_process_id=''):
        data_proc_obj = self.clients.resource_registry.read(data_process_id)
        return data_proc_obj

    def delete_data_process(self, data_process_id=""):
        """Stop the process and tear down subscriptions, producer links and
        registrations, then retire the DataProcess resource.
        """
        #Stops processes and deletes the data process associations
        #TODO: Delete the processes also?
        self.deactivate_data_process(data_process_id)
        processes, assocs = self.clients.resource_registry.find_objects(subject=data_process_id,
                                                                        predicate=PRED.hasProcess,
                                                                        id_only=False)
        for process, assoc in zip(processes, assocs):
            self._stop_process(data_process=process)
            self.clients.resource_registry.delete_association(assoc)

        #Delete all subscriptions associations
        subscription_ids, assocs = self.clients.resource_registry.find_objects(subject=data_process_id,
                                                                               predicate=PRED.hasSubscription,
                                                                               id_only=True)
        for subscription_id, assoc in zip(subscription_ids, assocs):
            self.clients.resource_registry.delete_association(assoc)
            self.clients.pubsub_management.delete_subscription(subscription_id)

        #Unassign data products
        data_product_ids, assocs = self.clients.resource_registry.find_objects(subject=data_process_id,
                                                                               predicate=PRED.hasOutputProduct,
                                                                               id_only=True)
        for data_product_id, assoc in zip(data_product_ids, assocs):
            self.clients.data_acquisition_management.unassign_data_product(input_resource_id=data_process_id,
                                                                           data_product_id=data_product_id)

        #Unregister the data process with acquisition
        self.clients.data_acquisition_management.unregister_process(data_process_id=data_process_id)

        #Delete the data process from the resource registry
        self.RR2.retire(data_process_id, RT.DataProcess)

    def force_delete_data_process(self, data_process_id=""):
        # if not yet deleted, the first execute delete logic
        dp_obj = self.read_data_process(data_process_id)
        if dp_obj.lcstate != LCS.RETIRED:
            self.delete_data_process(data_process_id)

        self.RR2.pluck_delete(data_process_id, RT.DataProcess)

    def _stop_process(self, data_process):
        log.debug("stopping data process '%s'" % data_process.process_id)
        pid = data_process.process_id
        self.clients.process_dispatcher.cancel_process(pid)

    def find_data_process(self, filters=None):
        """
        @param filters: dict of parameters to filter down the list of possible data proc.
        @retval list of DataProcess ids
        """
        #todo: add filter processing
        data_process_list, _ = self.clients.resource_registry.find_resources(RT.DataProcess, None, None, True)
        return data_process_list

    def activate_data_process(self, data_process_id=''):
        """Activate every inactive subscription of the data process."""
        #@Todo: Data Process Producer context stuff
        subscription_ids, assocs = self.clients.resource_registry.find_objects(subject=data_process_id,
                                                                               predicate=PRED.hasSubscription,
                                                                               id_only=True)
        for subscription_id in subscription_ids:
            if not self.clients.pubsub_management.subscription_is_active(subscription_id):
                self.clients.pubsub_management.activate_subscription(subscription_id)
        return True

    def deactivate_data_process(self, data_process_id=''):
        """Deactivate every active subscription of the data process."""
        #@todo: data process producer context stuff
        subscription_ids, assocs = self.clients.resource_registry.find_objects(subject=data_process_id,
                                                                               predicate=PRED.hasSubscription,
                                                                               id_only=True)
        for subscription_id in subscription_ids:
            if self.clients.pubsub_management.subscription_is_active(subscription_id):
                self.clients.pubsub_management.deactivate_subscription(subscription_id)

        return True

    def attach_process(self, process=''):
        """
        @param process: Should this be the data_process_id?
        @retval
        """
        # TODO: Determine the proper input param
        pass

    def _get_stream_from_dp(self, dp_id):
        """Return the first stream id of the data product; BadRequest if none."""
        stream_ids, _ = self.clients.resource_registry.find_objects(subject=dp_id, predicate=PRED.hasStream, id_only=True)
        if not stream_ids:
            raise BadRequest('No streams associated with this data product')
        return stream_ids[0]

    def _has_lookup_values(self, data_product_id):
        """True if the data product's stream definition declares lookup values."""
        stream_ids, _ = self.clients.resource_registry.find_objects(subject=data_product_id,
                                                                    predicate=PRED.hasStream,
                                                                    id_only=True)
        if not stream_ids:
            raise BadRequest('No streams found for this data product')
        stream_def_ids, _ = self.clients.resource_registry.find_objects(subject=stream_ids[0],
                                                                        predicate=PRED.hasStreamDefinition,
                                                                        id_only=True)
        if not stream_def_ids:
            raise BadRequest('No stream definitions found for this stream')
        stream_def_id = stream_def_ids[0]
        retval = self.clients.pubsub_management.has_lookup_values(stream_def_id)

        return retval

    # BUG FIX: mutable list defaults ([]) replaced with None sentinels
    def _get_lookup_docs(self, input_data_product_ids=None, output_data_product_ids=None):
        """Collect QC reference docs for all products when any output product
        needs lookup values; otherwise return an empty list.
        """
        input_data_product_ids = input_data_product_ids or []
        output_data_product_ids = output_data_product_ids or []
        retval = []
        need_lookup_docs = False
        for data_product_id in output_data_product_ids:
            if self._has_lookup_values(data_product_id):
                need_lookup_docs = True
                break
        if need_lookup_docs:
            for data_product_id in input_data_product_ids:
                retval.extend(self.clients.data_acquisition_management.list_qc_references(data_product_id))
            for data_product_id in output_data_product_ids:
                retval.extend(self.clients.data_acquisition_management.list_qc_references(data_product_id))

        return retval

    def _manage_routes(self, routes):
        """Translate {in_dp: {out_dp: actor}} into {in_stream: {out_stream: actor}},
        resolving TransformFunction actor ids into {'module', 'class'} dicts.
        """
        retval = {}
        for in_data_product_id, route in routes.iteritems():
            for out_data_product_id, actor in route.iteritems():
                in_stream_id = self._get_stream_from_dp(in_data_product_id)
                out_stream_id = self._get_stream_from_dp(out_data_product_id)
                if actor:
                    actor = self.clients.resource_registry.read(actor)
                    if isinstance(actor, TransformFunction):
                        actor = {'module': actor.module, 'class': actor.cls}
                    else:
                        raise BadRequest('This actor type is not currently supported')

                if in_stream_id not in retval:
                    retval[in_stream_id] = {}
                retval[in_stream_id][out_stream_id] = actor
        return retval

    def _manage_producers(self, data_process_id, data_product_ids):
        """Register the process as a producer of each output data product,
        enforcing the one-producer-per-product rule.
        """
        self.clients.data_acquisition_management.register_process(data_process_id)
        for data_product_id in data_product_ids:
            producer_ids, _ = self.clients.resource_registry.find_objects(subject=data_product_id,
                                                                          predicate=PRED.hasDataProducer,
                                                                          object_type=RT.DataProducer,
                                                                          id_only=True)
            if len(producer_ids):
                raise BadRequest('Only one DataProducer allowed per DataProduct')

            # Validate the data product
            self.clients.data_product_management.read_data_product(data_product_id)

            self.clients.data_acquisition_management.assign_data_product(input_resource_id=data_process_id,
                                                                         data_product_id=data_product_id)

            if not self._get_stream_from_dp(data_product_id):
                raise BadRequest('No Stream was found for this DataProduct')

    def _manage_attachments(self):
        pass

    def _create_subscription(self, dproc, in_data_product_ids):
        """Create a subscription on the streams of all input products and
        associate it to the data process; returns the queue name.
        """
        stream_ids = [self._get_stream_from_dp(i) for i in in_data_product_ids]
        #@TODO Maybe associate a data process with an exchange point but in the mean time:
        queue_name = 'sub_%s' % dproc.name
        subscription_id = self.clients.pubsub_management.create_subscription(name=queue_name, stream_ids=stream_ids)
        self.clients.resource_registry.create_association(subject=dproc._id,
                                                          predicate=PRED.hasSubscription,
                                                          object=subscription_id)
        return queue_name

    def _get_process_definition(self, data_process_definition_id=''):
        """Resolve (or lazily create) the ProcessDefinition to launch: the one
        linked to the data process definition, otherwise the shared
        'transform_data_process' TransformPrime definition.
        """
        process_definition_id = ''
        if data_process_definition_id:
            process_definitions, _ = self.clients.resource_registry.find_objects(subject=data_process_definition_id,
                                                                                 predicate=PRED.hasProcessDefinition,
                                                                                 id_only=True)
            if process_definitions:
                process_definition_id = process_definitions[0]
            else:
                process_definition = ProcessDefinition()
                process_definition.name = 'transform_data_process'
                process_definition.executable['module'] = 'ion.processes.data.transforms.transform_prime'
                process_definition.executable['class'] = 'TransformPrime'
                process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition)
        else:
            process_definitions, _ = self.clients.resource_registry.find_resources(name='transform_data_process',
                                                                                   restype=RT.ProcessDefinition,
                                                                                   id_only=True)
            if process_definitions:
                process_definition_id = process_definitions[0]
            else:
                process_definition = ProcessDefinition()
                process_definition.name = 'transform_data_process'
                process_definition.executable['module'] = 'ion.processes.data.transforms.transform_prime'
                process_definition.executable['class'] = 'TransformPrime'
                process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition)

        return process_definition_id

    # BUG FIX: mutable defaults ([] and {}) replaced with None sentinels;
    # the shared {} default was mutated by _launch_process on every call.
    def _launch_data_process(self, queue_name='', data_process_definition_id='', out_data_product_ids=None, configuration=None):
        """Build the publish-stream map (honoring output bindings when a data
        process definition is given) and launch the process.
        """
        if out_data_product_ids is None:
            out_data_product_ids = []
        if configuration is None:
            configuration = {}

        process_definition_id = self._get_process_definition(data_process_definition_id)

        out_streams = {}
        if data_process_definition_id:
            dpd = self.read_data_process_definition(data_process_definition_id)
        for dp_id in out_data_product_ids:
            stream_id = self._get_stream_from_dp(dp_id)
            out_streams[stream_id] = stream_id
            if data_process_definition_id:
                stream_definition = self.clients.pubsub_management.read_stream_definition(stream_id=stream_id)
                stream_definition_id = stream_definition._id

                # Check the binding to see if it applies here
                for binding, stream_def_id in dpd.output_bindings.iteritems():
                    if stream_def_id == stream_definition_id:
                        out_streams[binding] = stream_id
                        break

        return self._launch_process(queue_name, out_streams, process_definition_id, configuration)

    def _validator(self, in_data_product_id, out_data_product_id):
        """True if the stream definitions of the two data products are compatible."""
        in_stream_id = self._get_stream_from_dp(dp_id=in_data_product_id)
        in_stream_defs, _ = self.clients.resource_registry.find_objects(subject=in_stream_id,
                                                                        predicate=PRED.hasStreamDefinition,
                                                                        id_only=True)
        if not len(in_stream_defs):
            raise BadRequest('No valid stream definition defined for data product stream')

        out_stream_id = self._get_stream_from_dp(dp_id=out_data_product_id)
        out_stream_defs, _ = self.clients.resource_registry.find_objects(subject=out_stream_id,
                                                                         predicate=PRED.hasStreamDefinition,
                                                                         id_only=True)
        if not len(out_stream_defs):
            raise BadRequest('No valid stream definition defined for data product stream')

        return self.clients.pubsub_management.compatible_stream_definitions(in_stream_definition_id=in_stream_defs[0],
                                                                            out_stream_definition_id=out_stream_defs[0])

    def validate_compatibility(self, data_process_definition_id='', in_data_product_ids=None, out_data_product_ids=None, routes=None):
        '''
        Validates compatibility between input and output data products

        routes are in this form:
        { (in_data_product_id, out_data_product_id) : actor }
            if actor is None then the data process is assumed to use parameter functions.
            if actor is a TransformFunction, the validation is done at runtime
        '''
        if data_process_definition_id:
            input_stream_def_ids, _ = self.clients.resource_registry.find_objects(subject=data_process_definition_id,
                                                                                  predicate=PRED.hasInputStreamDefinition,
                                                                                  id_only=True)
            output_stream_def_ids, _ = self.clients.resource_registry.find_objects(subject=data_process_definition_id,
                                                                                   predicate=PRED.hasStreamDefinition,
                                                                                   id_only=True)
            # mismatches against the definition are warnings, not errors
            for in_data_product_id in in_data_product_ids:
                input_stream_def = self.stream_def_from_data_product(in_data_product_id)
                if input_stream_def not in input_stream_def_ids:
                    log.warning('Creating a data process with an unmatched stream definition input')
            for out_data_product_id in out_data_product_ids:
                output_stream_def = self.stream_def_from_data_product(out_data_product_id)
                if output_stream_def not in output_stream_def_ids:
                    log.warning('Creating a data process with an unmatched stream definition output')

        if not out_data_product_ids and data_process_definition_id:
            return True
        if len(out_data_product_ids) > 1 and not routes and not data_process_definition_id:
            raise BadRequest('Multiple output data products but no routes defined')
        if len(out_data_product_ids) == 1:
            return all([self._validator(i, out_data_product_ids[0]) for i in in_data_product_ids])
        elif len(out_data_product_ids) > 1:
            for in_dp_id, out in routes.iteritems():
                for out_dp_id, actor in out.iteritems():
                    if not self._validator(in_dp_id, out_dp_id):
                        return False
            return True
        else:
            raise BadRequest('No input data products specified')

    def stream_def_from_data_product(self, data_product_id=''):
        """Return the stream definition id of the data product's stream."""
        stream_ids, _ = self.clients.resource_registry.find_objects(subject=data_product_id,
                                                                    predicate=PRED.hasStream,
                                                                    object_type=RT.Stream,
                                                                    id_only=True)
        validate_true(stream_ids, 'No stream found for this data product: %s' % data_product_id)
        stream_id = stream_ids.pop()
        stream_def_ids, _ = self.clients.resource_registry.find_objects(subject=stream_id,
                                                                        predicate=PRED.hasStreamDefinition,
                                                                        id_only=True)
        validate_true(stream_def_ids, 'No stream definition found for this stream: %s' % stream_def_ids)
        stream_def_id = stream_def_ids.pop()
        return stream_def_id

    def _get_process_producer(self, data_process_id=""):
        producer_objs, _ = self.clients.resource_registry.find_objects(subject=data_process_id,
                                                                       predicate=PRED.hasDataProducer,
                                                                       object_type=RT.DataProducer,
                                                                       id_only=False)
        if not producer_objs:
            raise NotFound("No Producers created for this Data Process " + str(data_process_id))
        return producer_objs[0]

    ############################
    #
    #  EXTENDED RESOURCES
    #
    ############################

    def get_data_process_definition_extension(self, data_process_definition_id='', ext_associations=None, ext_exclude=None, user_id=''):
        #Returns a DataProcessDefinition Extension object containing additional related information
        if not data_process_definition_id:
            raise BadRequest("The data_process_definition_id parameter is empty")

        extended_resource_handler = ExtendedResourceContainer(self)

        extended_data_process_definition = extended_resource_handler.create_extended_resource_container(
            extended_resource_type=OT.DataProcessDefinitionExtension,
            resource_id=data_process_definition_id,
            computed_resource_type=OT.DataProcessDefinitionComputedAttributes,
            ext_associations=ext_associations,
            ext_exclude=ext_exclude,
            user_id=user_id)

        #Loop through any attachments and remove the actual content since we don't need
        # to send it to the front end this way
        #TODO - see if there is a better way to do this in the extended resource frame work.
        if hasattr(extended_data_process_definition, 'attachments'):
            for att in extended_data_process_definition.attachments:
                if hasattr(att, 'content'):
                    delattr(att, 'content')

        return extended_data_process_definition

    def get_data_process_extension(self, data_process_id='', ext_associations=None, ext_exclude=None, user_id=''):
        #Returns a DataProcess Extension object containing additional related information
        if not data_process_id:
            # BUG FIX: message previously referred to data_process_definition_id
            raise BadRequest("The data_process_id parameter is empty")

        extended_resource_handler = ExtendedResourceContainer(self)

        extended_data_process = extended_resource_handler.create_extended_resource_container(
            extended_resource_type=OT.DataProcessExtension,
            resource_id=data_process_id,
            computed_resource_type=OT.DataProcessComputedAttributes,
            ext_associations=ext_associations,
            ext_exclude=ext_exclude,
            user_id=user_id)

        #Loop through any attachments and remove the actual content since we don't need
        # to send it to the front end this way
        #TODO - see if there is a better way to do this in the extended resource frame work.
        if hasattr(extended_data_process, 'attachments'):
            for att in extended_data_process.attachments:
                if hasattr(att, 'content'):
                    delattr(att, 'content')

        return extended_data_process

    def get_data_process_subscriptions_count(self, data_process_id=""):
        """Count all subscriptions associated with the data process."""
        if not data_process_id:
            # BUG FIX: message previously referred to data_process_definition_id
            raise BadRequest("The data_process_id parameter is empty")

        subscription_ids, _ = self.clients.resource_registry.find_objects(subject=data_process_id,
                                                                          predicate=PRED.hasSubscription,
                                                                          id_only=True)
        log.debug("get_data_process_subscriptions_count(id=%s): %s subscriptions",
                  data_process_id, len(subscription_ids))
        return len(subscription_ids)

    def get_data_process_active_subscriptions_count(self, data_process_id=""):
        """Count only the currently-active subscriptions of the data process."""
        if not data_process_id:
            # BUG FIX: message previously referred to data_process_definition_id
            raise BadRequest("The data_process_id parameter is empty")

        subscription_ids, _ = self.clients.resource_registry.find_objects(subject=data_process_id,
                                                                          predicate=PRED.hasSubscription,
                                                                          id_only=True)
        active_count = 0
        for subscription_id in subscription_ids:
            if self.clients.pubsub_management.subscription_is_active(subscription_id):
                active_count += 1
        log.debug("get_data_process_active_subscriptions_count(id=%s): %s subscriptions",
                  data_process_id, active_count)
        return active_count
class DeploymentPlanner(object):
    """
    A deployment activator validates that a set of devices will map to a set of sites in one unique way.

    Its primary purpose is prepare_activation(), after which you'll be able to access what
    associations must be made (and unmade).
    """

    def __init__(self, clients=None, enhanced_rr=None):
        # clients: service-client container (must provide .resource_registry)
        self.clients = clients
        self.enhanced_rr = enhanced_rr
        if not enhanced_rr:
            self.enhanced_rr = EnhancedResourceRegistryClient(self.clients.resource_registry)
        self.outil = ObservatoryUtil(self, enhanced_rr=self.enhanced_rr)

    def _find_top_site_device(self, deployment_id):
        """Return the (site, device) resource objects directly linked to the deployment.

        Either element is '' when no such link exists; raises BadRequest unless
        both a site and a device are associated.
        """
        top_site = ''
        top_device = ''
        # retrieve every resource pointing at this deployment via hasDeployment
        deploy_items_objs, _ = self.clients.resource_registry.find_subjects(
            predicate=PRED.hasDeployment, object=deployment_id, id_only=False)
        log.debug("site_ids associated to this deployment: %s", deploy_items_objs)
        for obj in deploy_items_objs:
            rsrc_type = obj.type_
            log.debug("resource type associated to this deployment:: %s", rsrc_type)
            if RT.PlatformDevice == rsrc_type or RT.InstrumentDevice == rsrc_type:
                top_device = obj
            elif RT.PlatformSite == rsrc_type or RT.InstrumentSite == rsrc_type:
                top_site = obj
            else:
                # fixed: messages now report the deployment id that was passed in;
                # previously they read self.deployment_obj, which is not set when
                # this is called from get_deployment_sites_devices()
                log.error('Deployment may only link to devices and sites. Deployment: %s', deployment_id)
        if not top_device or not top_site:
            log.error('Deployment must associate to both site and device. Deployment: %s', deployment_id)
            # fixed: the message is now actually interpolated; it was previously
            # passed as a second constructor argument and never formatted
            raise BadRequest('Deployment must associate to both site and device. Deployment: %s' % deployment_id)
        return top_site, top_device

    def _find_pairs_to_remove(self):
        """Figure out which devices in the new mapping are already mapped and need removal.

        Populates self.remove_list and filters already-satisfied pairs out of self.match_list.
        """
        pairs_to_remove = []
        pairs_to_ignore = []
        for (s, d) in self.match_list:
            rm_pair, ignore_pair = self._find_existing_relationship(s, d)
            if rm_pair:
                pairs_to_remove.append(rm_pair)
            if ignore_pair:
                pairs_to_ignore.append(ignore_pair)

        log.info("Pairs to ignore (will be removed from add list): %s", pairs_to_ignore)

        # make sure that anything being removed is not also being added
        self.match_list = filter(lambda x: x not in pairs_to_ignore, self.match_list)

        log.info("Pairs to remove: %s", pairs_to_remove)
        self.remove_list = pairs_to_remove

    def _find_existing_relationship(self, site_id, device_id, site_type=None, device_type=None):
        """Look for an existing hasDevice relationship between site_id and another device.

        @retval (pair_to_remove, pair_to_ignore) -- either may be None.
        If the site already hasDevice this exact device, the pair is ignored;
        if it hasDevice a different device, that link is slated for removal.
        """
        assert(type("") == type(site_id) == type(device_id))

        log.debug("checking %s/%s pair for deployment", site_type, device_type)

        if site_type is None and site_id in self.site_resources:
            site_type = self.site_resources[site_id].type_

        if device_type is None and device_id in self.device_resources:
            device_type = self.device_resources[device_id].type_

        log.debug("checking existing %s hasDevice %s links", site_type, device_type)

        ret_remove = None
        ret_ignore = None
        try:
            found_device_id = self.enhanced_rr.find_object(site_id, PRED.hasDevice, device_type, True)
            if found_device_id == device_id:
                # this site/device pair already exists, so we leave it alone
                ret_ignore = (site_id, device_id)
            else:
                ret_remove = (site_id, found_device_id)
                log.warning("%s '%s' already hasDevice %s", site_type, site_id, device_type)
        except NotFound:
            # no existing hasDevice link of this type -- nothing to remove/ignore
            pass

        return ret_remove, ret_ignore

    def _get_site_ref_designator_map(self):
        """Map each site's reference designator code to its site id to facilitate matching."""
        site_ref_designator_map = {}
        for id, site_obj in self.site_resources.iteritems():
            site_ref_designator_map[site_obj.reference_designator] = id
        log.debug("prepare_activation site_ref_designator_map: %s", site_ref_designator_map)
        return site_ref_designator_map

    def _get_device_resources(self, device_tree):
        """Cache the full resource object for every device id in the tree (self.device_resources)."""
        device_objs = self.clients.resource_registry.read_mult(device_tree.keys())
        log.debug("prepare_activation device_objectss: %s", device_objs)
        for device_obj in device_objs:
            self.device_resources[device_obj._id] = device_obj

    def _get_models(self):
        """Retrieve all hasModel associations and build self.models_map (resource id -> model list).

        Only device/site subject types are included; a site may support more
        than one model, so values are lists.
        """
        # fixed: removed the parallel models_tuples accumulator that was built
        # here but never read
        assoc_list = self.outil._get_predicate_assocs(PRED.hasModel)
        for assoc in assoc_list:
            # only include these subject types in the map
            if assoc.st in [RT.InstrumentDevice, RT.InstrumentSite, RT.PlatformDevice, RT.PlatformSite]:
                self.models_map.setdefault(assoc.s, []).append(assoc.o)

        log.debug("models_map: %s", self.models_map)

    def _validate_models(self, site_id, device_id):
        """Validate that the device and the site models are compatible.

        Logs errors for incompatibilities; raises NotFound when the device has
        no model association at all.
        """
        if device_id in self.models_map:
            device_model_list = self.models_map[device_id]

            # devices should only be associated to one model
            if len(device_model_list) != 1:
                log.error("Device not associated to one distinct model. Device id: %s", device_id)

            # fixed: site_id was missing from the log arguments, and a site with
            # no model association would have raised KeyError
            elif device_model_list and device_model_list[0] not in self.models_map.get(site_id, []):
                log.error("Device and Site do not share a compatible model. Device id: %s Site id: %s", device_id, site_id)
        else:
            log.error("Device not associated with a device model. Device id: %s", device_id)
            # fixed: message is now interpolated instead of being passed as an
            # extra constructor argument
            raise NotFound("Device not associated with a device model. Device id: %s" % device_id)

    def _validate_port_assignments(self, device_id, platform_port):
        """Validate the PlatformPort info for a device against the deployment context.

        All problems are logged (warning/error); nothing is raised here.
        """
        deployment_context_type = type(self.deployment_obj.context).__name__

        self._validate_ooi_reference_designator(device_id, platform_port)

        # a one-to-one deployment of a device onto an RSN platform
        if OT.CabledInstrumentDeploymentContext == deployment_context_type or \
           OT.CabledNodeDeploymentContext == deployment_context_type:

            # validate IP address for a cabled node deployment
            from socket import inet_aton
            try:
                inet_aton(platform_port.ip_address)
            except Exception:
                # fixed: was a bare except, which also swallowed SystemExit/KeyboardInterrupt
                log.error('IP address validation failed for device. Device id: %s', device_id)

        # validate port_type based on deployment context
        # a platform device deployment should have UPLINK port type
        if OT.RemotePlatformDeploymentContext == deployment_context_type or \
           OT.CabledNodeDeploymentContext == deployment_context_type:
            # fixed: identity comparison ('is') replaced with equality ('==') --
            # string identity is an implementation accident
            if device_id in self.device_resources and self.device_resources[device_id].type_ == RT.PlatformDevice:
                if platform_port.port_type != PortTypeEnum.UPLINK:
                    log.warning('Type of port for platform port assignment should be UPLINK. Device id: %s', device_id)

        # validate that parent_id is provided
        if not platform_port.parent_id:
            log.warning('Id of parent device should be provided in port assignment information. Device id: %s', device_id)

    def _validate_ooi_reference_designator(self, device_id, device_port):
        """Warn when the port's OOI reference designator is malformed or lacks a port field."""
        ooi_rd = OOIReferenceDesignator(device_port.reference_designator)
        if ooi_rd.error:
            log.warning("Invalid OOIReferenceDesignator ( %s ) specified for device %s",
                        device_port.reference_designator, device_id)
        if not ooi_rd.port:
            log.warning("Invalid OOIReferenceDesignator ( %s ) specified for device %s, could not retrieve port",
                        device_port.reference_designator, device_id)

    def get_deployment_sites_devices(self, deployment_obj):
        """Retrieve all site ids and device ids related to this deployment.

        @retval (site_ids, device_ids) lists
        """
        self.outil = ObservatoryUtil(self, enhanced_rr=self.enhanced_rr)

        top_site, top_device = self._find_top_site_device(deployment_obj._id)

        site_resources, site_children = self.outil.get_child_sites(parent_site_id=top_site._id, id_only=False)
        site_ids = site_resources.keys()

        # get_site_devices returns a tuple that includes all devices linked to deployment sites
        site_devices = self.outil.get_site_devices(site_ids)
        device_ids = []
        for site, tuple_list in site_devices.iteritems():
            for (site_type, device_id, device_type) in tuple_list:
                device_ids.append(device_id)

        return site_ids, device_ids

    def prepare_activation(self, deployment_obj):
        """
        Prepare (validate) a deployment for activation.

        @retval (remove_list, match_list): hasDevice pairs that need to be
        removed, and site/device pairs that need to be associated.
        """
        self.match_list = []
        self.remove_list = []
        self.unmatched_device_list = []
        self.models_map = {}

        self.top_device = ''
        self.top_site = ''
        self.deployment_obj = deployment_obj
        self.site_resources = {}
        self.device_resources = {}

        self.outil = ObservatoryUtil(self, enhanced_rr=self.enhanced_rr)

        # retrieve the site tree information using the OUTIL functions; site info as well has site children
        self.top_site, self.top_device = self._find_top_site_device(deployment_obj._id)
        # must have a site and a device to continue
        if not self.top_site or not self.top_device:
            return [], []

        log.debug("port_assignments: %s", self.deployment_obj.port_assignments)

        # retrieve all models to use in match validation
        self._get_models()

        self.site_resources, site_children = self.outil.get_child_sites(parent_site_id=self.top_site._id, id_only=False)
        log.debug("site_resources: %s", self.site_resources)
        log.debug("site_children: %s", site_children)

        site_ref_designator_map = self._get_site_ref_designator_map()

        # retrieve the device tree from outil then cache the device resources
        device_tree = self.outil.get_child_devices(device_id=self.top_device._id)
        self._get_device_resources(device_tree)

        self._match_devices(self.top_device._id, device_tree, site_ref_designator_map)

        # check for hasDevice relations to remove and existing hasDevice relations
        self._find_pairs_to_remove()

        if self.unmatched_device_list:
            log.warning("Devices not matched to sites: %s ", self.unmatched_device_list)

        return self.remove_list, self.match_list

    def _match_devices(self, device_id, device_tree, site_ref_designator_map):
        """Recursively match each device in the tree to a site via its port's reference designator.

        Appends matches to self.match_list and unmatchable devices to
        self.unmatched_device_list.
        """
        # there will not be a port assignment for the top device
        if device_id == self.top_device._id:
            self._validate_models(self.top_site._id, self.top_device._id)
            self.match_list.append((self.top_site._id, self.top_device._id))

        tuple_list = device_tree[device_id]

        for (pt, child_id, ct) in tuple_list:
            log.debug(" tuple - pt: %s child_id: %s ct: %s", pt, child_id, ct)

            # match this child device then if it has children, call _match_devices with this id

            # check that this device is represented in device tree and in port assignments
            if child_id in self.device_resources and child_id in self.deployment_obj.port_assignments:
                platform_port = self.deployment_obj.port_assignments[child_id]
                log.debug("device platform_port: %s", platform_port)

                # validate PlatformPort info for this device
                self._validate_port_assignments(child_id, platform_port)

                if platform_port.reference_designator in site_ref_designator_map:
                    matched_site = site_ref_designator_map[platform_port.reference_designator]
                    self._validate_models(matched_site, child_id)
                    log.info("match_list append site: %s device: %s", matched_site, child_id)
                    self.match_list.append((matched_site, child_id))

                    # recurse on the children of this device
                    self._match_devices(child_id, device_tree, site_ref_designator_map)

            # otherwise cant be matched to a site
            else:
                self.unmatched_device_list.append(child_id)
class TestRollups(IonIntegrationTestCase):
    """Integration tests for device/site aggregate-status rollups over a fixed fixture tree."""

    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')

        self.RR  = ResourceRegistryServiceClient(node=self.container.node)
        self.IMS = InstrumentManagementServiceClient(node=self.container.node)
        self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
        self.RR2 = EnhancedResourceRegistryClient(self.RR)

        self._setup_statuses()

    def _make_status(self, bad_items_dict=None):
        """Return a full aggregate-status dict: STATUS_OK everywhere, overridden by bad_items_dict."""
        if bad_items_dict is None:
            bad_items_dict = {}
        ret = {}
        for k in reverse_mapping.values():
            if k in bad_items_dict:
                ret[k] = bad_items_dict[k]
            else:
                ret[k] = DeviceStatusType.STATUS_OK

        return ret

    def _setup_statuses(self):
        # set up according to https://docs.google.com/drawings/d/1kZ_L4xr4Be0OdqMDX6tiI50hROgvLHU4HcnD7e_NIKE/pub?w=1200z
        # https://confluence.oceanobservatories.org/display/syseng/CIAD+SA+OV+Observatory+Status+and+Events
        device_agents = {}

        ms = self._make_status

        # override the default "get agent" function and resource registry
        IMS_SVC = self._get_svc(InstrumentManagementService)
        OMS_SVC = self._get_svc(ObservatoryManagementService)
        self.IMS_ASB = self._get_specific_attr(IMS_SVC, AgentStatusBuilder)
        self.OMS_ASB = self._get_specific_attr(OMS_SVC, AgentStatusBuilder)
        assert self.IMS_ASB
        assert self.OMS_ASB
        self.IMS_ASB.RR2 = IMS_SVC.RR2
        self.OMS_ASB.RR2 = OMS_SVC.RR2

        # create org
        org_id = self.OMS.create_marine_facility(any_old(RT.Org))
        obs_id = self.OMS.create_observatory(any_old(RT.Observatory), org_id)

        # create instrument and platform devices and sites
        pst = dict([(i + 1, self.RR2.create(any_old(RT.PlatformSite))) for i in range(8)])
        pdv = dict([(i + 1, self.RR2.create(any_old(RT.PlatformDevice))) for i in range(11)])
        ist = dict([(i + 1, self.RR2.create(any_old(RT.InstrumentSite))) for i in range(6)])
        idv = dict([(i + 1, self.RR2.create(any_old(RT.InstrumentDevice))) for i in range(6)])

        # create associations
        has_site = [
            (obs_id, pst[2]),
            (pst[2], pst[1]),
            (pst[1], ist[1]),
            (pst[2], pst[3]),
            (pst[3], ist[2]),
            (pst[3], ist[3]),
            (obs_id, pst[4]),
            (pst[4], pst[5]),
            (pst[4], pst[6]),
            (pst[6], pst[7]),
            (pst[7], ist[4]),
            (pst[6], pst[8]),
            (pst[8], ist[5]),
            (pst[8], ist[6]),
        ]

        has_device = [
            (pst[2], pdv[2]),
            (pst[1], pdv[1]),
            (ist[1], idv[1]),
            (pst[3], pdv[3]),
            (pdv[3], idv[2]),
            (pdv[3], idv[3]),
            (ist[2], idv[2]),
            (ist[3], idv[3]),
            (pst[4], pdv[4]),
            (pdv[4], pdv[5]),
            (pdv[5], pdv[6]),
            (pdv[5], pdv[7]),
            (pdv[7], idv[4]),
            (pst[6], pdv[5]),
            (pst[7], pdv[6]),
            (pst[8], pdv[7]),
            (ist[5], idv[4]),
            (pdv[8], pdv[9]),
            (pdv[9], pdv[10]),
            (pdv[10], idv[5]),
            (pdv[9], pdv[11]),
            (pdv[11], idv[6]),
        ]

        for (s, o) in has_site:
            self.RR2.create_association(s, PRED.hasSite, o)
            self.assertIn(o, self.RR2.find_objects(s, PRED.hasSite, None, id_only=True))

        for (s, o) in has_device:
            self.RR2.create_association(s, PRED.hasDevice, o)
            self.assertIn(o, self.RR2.find_objects(s, PRED.hasDevice, None, id_only=True))

        self.assertEqual(pdv[1], self.RR2.find_platform_device_id_of_platform_site_using_has_device(pst[1]))

        # preparing to create fake agents, shortcut to status names
        o = DeviceStatusType.STATUS_OK
        w = DeviceStatusType.STATUS_WARNING
        c = DeviceStatusType.STATUS_CRITICAL

        # expected status for instruments and platforms
        idv_stat = ["ignore", c, o, w, o, w, c]

        # make the fake instrument agents, with their statuses
        for i, id in idv.iteritems():
            idv_agent = FakeAgent()
            idv_agent.set_agent("aggstatus", ms({AggregateStatusType.AGGREGATE_DATA: idv_stat[i]}))
            device_agents[id] = idv_agent

        # create fake agents for platforms
        pdv1_agent = FakeAgent()
        pdv1_agent.set_agent("aggstatus", ms())
        pdv1_agent.set_agent("child_agg_status", {})
        device_agents[pdv[1]] = pdv1_agent

        pdv2_agent = FakeAgent()
        pdv2_agent.set_agent("aggstatus", ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_WARNING}))
        pdv2_agent.set_agent("child_agg_status", {})
        device_agents[pdv[2]] = pdv2_agent

        pdv3_agent = FakeAgent()
        pdv3_agent.set_agent("aggstatus", ms())
        pdv3_agent.set_agent("child_agg_status", {
            idv[2]: ms(),
            idv[3]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_WARNING}),
        })
        device_agents[pdv[3]] = pdv3_agent

        pdv4_agent = FakeAgent()
        pdv4_agent.set_agent("aggstatus", ms())
        pdv4_agent.set_agent("child_agg_status", {
            pdv[5]: ms(),
            pdv[6]: ms(),
            pdv[7]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_WARNING}),
            idv[4]: ms(),
        })
        device_agents[pdv[4]] = pdv4_agent

        pdv5_agent = FakeAgent()
        pdv5_agent.set_agent("aggstatus", ms())
        pdv5_agent.set_agent("child_agg_status", {
            pdv[6]: ms(),
            pdv[7]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_WARNING}),
            idv[4]: ms(),
        })
        device_agents[pdv[5]] = pdv5_agent

        pdv6_agent = FakeAgent()
        pdv6_agent.set_agent("aggstatus", ms())
        pdv6_agent.set_agent("child_agg_status", {})
        device_agents[pdv[6]] = pdv6_agent

        pdv7_agent = FakeAgent()
        pdv7_agent.set_agent("aggstatus", ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_WARNING}))
        pdv7_agent.set_agent("child_agg_status", {
            idv[4]: ms(),
        })
        device_agents[pdv[7]] = pdv7_agent

        pdv8_agent = FakeAgent()
        pdv8_agent.set_agent("aggstatus", ms())
        pdv8_agent.set_agent("child_agg_status", {
            pdv[9]: ms(),
            pdv[10]: ms(),
            idv[5]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_WARNING}),
            pdv[11]: ms(),
            idv[6]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_CRITICAL}),
        })
        device_agents[pdv[8]] = pdv8_agent

        pdv9_agent = FakeAgent()
        pdv9_agent.set_agent("aggstatus", ms())
        pdv9_agent.set_agent("child_agg_status", {
            pdv[10]: ms(),
            idv[5]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_WARNING}),
            pdv[11]: ms(),
            idv[6]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_CRITICAL}),
        })
        device_agents[pdv[9]] = pdv9_agent

        pdv10_agent = FakeAgent()
        pdv10_agent.set_agent("aggstatus", ms())
        pdv10_agent.set_agent("child_agg_status", {
            idv[5]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_WARNING}),
        })
        device_agents[pdv[10]] = pdv10_agent

        pdv11_agent = FakeAgent()
        pdv11_agent.set_agent("aggstatus", ms())
        pdv11_agent.set_agent("child_agg_status", {
            idv[6]: ms({AggregateStatusType.AGGREGATE_DATA: DeviceStatusType.STATUS_CRITICAL}),
        })
        # fixed: this agent was registered under pdv[8], which clobbered
        # pdv8_agent and left pdv[11] without any agent
        device_agents[pdv[11]] = pdv11_agent

        self.device_agents = device_agents

        self.IMS_ASB._get_agent_client = self.my_get_agent_client
        self.OMS_ASB._get_agent_client = self.my_get_agent_client

        # save created ids
        self.org_id = org_id
        self.obs_id = obs_id
        self.pst = pst
        self.pdv = pdv
        self.ist = ist
        self.idv = idv

        log.info("org ID: %s", org_id)
        log.info("observatory ID: %s", obs_id)
        for k, v in self.pst.iteritems():
            log.info("platform site ID %s: %s", k, v)
        for k, v in self.ist.iteritems():
            log.info("instrument site ID %s: %s", k, v)
        for k, v in self.pdv.iteritems():
            log.info("platform device ID %s: %s", k, v)
        for k, v in self.idv.iteritems():
            log.info("instrument device ID %s: %s", k, v)

    # define a function to get the agent client, using our fake agents
    def my_get_agent_client(self, device_id, **kwargs):
        try:
            return self.device_agents[device_id]
        except KeyError:
            raise BadRequest("Tried to retrieve status for undefined device '%s'" % device_id)

    # some quick checks to help us debug the structure and statuses, to isolate problems
    def check_structure_assumptions(self):
        # check that all objects exist in the RR
        for adict in [self.pst, self.pdv, self.ist, self.idv]:
            for id in adict:
                assert id

        # pst1 should have status critical, pdev1 should be ok, and idv1/ist1 should be critical
        self.assertEqual(self.pdv[1],
                         self.RR2.find_platform_device_id_of_platform_site_using_has_device(self.pst[1]))
        self.assertEqual(self.ist[1],
                         self.RR2.find_instrument_site_id_of_platform_site_using_has_site(self.pst[1]))
        self.assertEqual(self.idv[1],
                         self.RR2.find_instrument_device_id_of_instrument_site_using_has_device(self.ist[1]))
        self.assertEqual(DeviceStatusType.STATUS_CRITICAL,
                         self.my_get_agent_client(self.idv[1]).get_agent(["aggstatus"])["aggstatus"][AggregateStatusType.AGGREGATE_DATA])

        # pdv4 should have status warning, coming from pdv7
        self.assertEqual(self.pdv[5],
                         self.RR2.find_platform_device_id_of_platform_device_using_has_device(self.pdv[4]))
        self.assertIn(self.pdv[6],
                      self.RR2.find_platform_device_ids_of_platform_device_using_has_device(self.pdv[5]))
        self.assertIn(self.pdv[7],
                      self.RR2.find_platform_device_ids_of_platform_device_using_has_device(self.pdv[5]))
        self.assertEqual(self.idv[4],
                         self.RR2.find_instrument_device_id_of_platform_device_using_has_device(self.pdv[7]))
        self.assertEqual(DeviceStatusType.STATUS_OK,
                         self.my_get_agent_client(self.idv[4]).get_agent(["aggstatus"])["aggstatus"][AggregateStatusType.AGGREGATE_DATA])
        self.assertEqual(DeviceStatusType.STATUS_WARNING,
                         self.my_get_agent_client(self.pdv[7]).get_agent(["aggstatus"])["aggstatus"][AggregateStatusType.AGGREGATE_DATA])

    @unittest.skip("errors in outil prevent this from passing")
    def test_complex_rollup_structure(self):
        self.check_structure_assumptions()

        o = DeviceStatusType.STATUS_OK
        u = DeviceStatusType.STATUS_UNKNOWN
        w = DeviceStatusType.STATUS_WARNING
        c = DeviceStatusType.STATUS_CRITICAL

        # expected rollups, indexed 1..n ("ignore" pads index 0)
        pst_stat = ["ignore", c, c, w, w, u, w, o, w]
        pdv_stat = ["ignore", o, w, w, w, w, o, w, c, c, w, c]
        ist_stat = ["ignore", c, o, w, u, o, u]
        idv_stat = ["ignore", c, o, w, o, w, c]

        for i, id in self.idv.iteritems():
            label = "InstrumentDevice %s" % i
            log.info("Checking rollup of %s", label)
            self.assertProperRollup(label, self.IMS.get_instrument_device_extension(id), idv_stat[i])

        for i, id in self.ist.iteritems():
            label = "InstrumentSite %s" % i
            log.info("Checking rollup of %s", label)
            self.assertProperRollup(label, self.OMS.get_site_extension(id), ist_stat[i])

        for i, id in self.pdv.iteritems():
            label = "PlatformDevice %s" % i
            log.info("Checking rollup of %s", label)
            self.assertProperRollup(label, self.IMS.get_platform_device_extension(id), pdv_stat[i])

        for i, id in self.pst.iteritems():
            label = "PlatformSite %s" % i
            log.info("Checking rollup of %s", label)
            self.assertProperRollup(label, self.OMS.get_site_extension(id), pst_stat[i])

        #TODO: check observatory and org rollups!

    #TODO: REMOVE THIS TEST when test_complex_rollup_structure is fixed
    #@unittest.skip("phasing out")
    def test_complex_rollup_structure_partially(self):
        o = DeviceStatusType.STATUS_OK
        u = DeviceStatusType.STATUS_UNKNOWN
        w = DeviceStatusType.STATUS_WARNING
        c = DeviceStatusType.STATUS_CRITICAL
        idv_stat = ["ignore", c, o, w, o, w, c]
        ist_stat = ["ignore", c, o, w, u, o, u]

        for i, id in self.idv.iteritems():
            label = "InstrumentDevice %s" % i
            log.info("Checking rollup of %s", label)
            self.assertProperRollup(label, self.IMS.get_instrument_device_extension(id), idv_stat[i])

        for i, id in self.ist.iteritems():
            label = "InstrumentSite %s" % i
            log.info("Checking rollup of %s", label)
            self.assertProperRollup(label, self.OMS.get_site_extension(id), ist_stat[i])

    def assertProperRollup(self, label, extended_resource, status):
        """Assert that the extended resource's data_status_roll_up is PROVIDED and equals status."""
        m = DeviceStatusType._str_map
        s = extended_resource.computed.data_status_roll_up.status
        v = extended_resource.computed.data_status_roll_up.value
        self.assertEqual(ComputedValueAvailability.PROVIDED, s)
        message = "Expected rollup status of %s to be %s but got %s" % (label, m[status], m.get(v, "?? %s" % v))
        self.assertEqual(status, v, message)

    # get an object of a specific type from within another python object
    def _get_specific_attr(self, parent_obj, attrtype):
        for d in dir(parent_obj):
            a = getattr(parent_obj, d)
            if isinstance(a, attrtype):
                return a
        return None

    # get a service of a given type from the capability container
    def _get_svc(self, service_cls):
        # get service from container proc manager
        relevant_services = [
            item[1] for item in self.container.proc_manager.procs.items()
            if isinstance(item[1], service_cls)
        ]

        assert (0 < len(relevant_services)),\
            "no services of type '%s' found running in container!" % service_cls

        service_itself = relevant_services[0]
        assert service_itself
        return service_itself