    def setUp(self):
        # Start container
        logging.disable(logging.ERROR)
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # simulate preloading
        preload_ion_params(self.container)
        logging.disable(logging.NOTSET)

        # Instantiate a process to represent the test
        process = VisualizationServiceTestProcess()

        # Create process clients for the services exercised by these tests
        self.rrclient = ResourceRegistryServiceProcessClient(node=self.container.node, process=process)
        self.damsclient = DataAcquisitionManagementServiceProcessClient(node=self.container.node, process=process)
        self.pubsubclient = PubsubManagementServiceProcessClient(node=self.container.node, process=process)
        self.ingestclient = IngestionManagementServiceProcessClient(node=self.container.node, process=process)
        self.imsclient = InstrumentManagementServiceProcessClient(node=self.container.node, process=process)
        self.dataproductclient = DataProductManagementServiceProcessClient(node=self.container.node, process=process)
        self.dataprocessclient = DataProcessManagementServiceProcessClient(node=self.container.node, process=process)
        self.datasetclient = DatasetManagementServiceProcessClient(node=self.container.node, process=process)
        self.workflowclient = WorkflowManagementServiceProcessClient(node=self.container.node, process=process)
        self.process_dispatcher = ProcessDispatcherServiceProcessClient(node=self.container.node, process=process)
        self.data_retriever = DataRetrieverServiceProcessClient(node=self.container.node, process=process)
        self.vis_client = VisualizationServiceProcessClient(node=self.container.node, process=process)

        # Canned SBE37 CTD stream definition, stored for use by the tests
        self.ctd_stream_def = SBE37_CDM_stream_definition()

    def on_start(self):

        ImmediateProcess.on_start(self)

        # Required argument, passed in via the configuration kwarg to schedule_process;
        # it lives under the 'process' namespace to avoid collisions
        fuc_id = self.CFG.get_safe('process.fuc_id', None)  # FileUploadContext ID

        # Clients
        self.object_store = self.container.object_store
        self.resource_registry = self.container.resource_registry
        self.event_publisher = EventPublisher(OT.ResetQCEvent)
        self.data_product_management = DataProductManagementServiceProcessClient(
            process=self)
        self.create_map()

        # Run the upload processing only if a FileUploadContext id was provided
        if fuc_id:
            self.process(fuc_id)

        # cleanup
        self.event_publisher.close()
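
    # Launch sketch (not part of the original snippet; the names below are assumptions):
    # if this on_start() belongs to an ImmediateProcess subclass, e.g. a hypothetical
    # UploadQcProcessing in 'ion.processes.data.upload.upload_qc_processing', the
    # FileUploadContext id could be handed to it through the process configuration:
    #
    #     config = {'process': {'fuc_id': fuc_id}}
    #     container.spawn_process(name='upload_qc_worker',
    #                             module='ion.processes.data.upload.upload_qc_processing',
    #                             cls='UploadQcProcessing',
    #                             config=config)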

# Example 3
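    # This example revokes the INSTRUMENT_OPERATOR role from a user and then creates
    # two sample data products as the system actor; the 'ion-actor-id' and
    # 'ion-actor-roles' message headers identify the calling actor for policy checks.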
    header_roles = get_role_message_headers(
        org_client.find_all_roles_by_user(system_actor._id))

    org_client.revoke_role(ion_org._id,
                           user._id,
                           'INSTRUMENT_OPERATOR',
                           headers={
                               'ion-actor-id': system_actor._id,
                               'ion-actor-roles': header_roles
                           })
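
    # Sketch (not in the original example): the counterpart call to grant the role
    # back would presumably be org_client.grant_role(), e.g.:
    #
    #     org_client.grant_role(ion_org._id,
    #                           user._id,
    #                           'INSTRUMENT_OPERATOR',
    #                           headers={'ion-actor-id': system_actor._id,
    #                                    'ion-actor-roles': header_roles})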

    roles = org_client.find_roles_by_user(ion_org._id, user._id)
    for r in roles:
        log.info('UserRole for user: %s', r)

    dp_client = DataProductManagementServiceProcessClient(node=container.node,
                                                          process=process)

    dp_obj = IonObject(RT.DataProduct,
                       name='DataProd1',
                       description='some new dp')

    dp_client.create_data_product(dp_obj,
                                  headers={'ion-actor-id': system_actor._id})

    dp_obj = IonObject(RT.DataProduct,
                       name='DataProd2',
                       description='and of course another new dp')

    dp_client.create_data_product(dp_obj,
                                  headers={'ion-actor-id': system_actor._id})
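
    # Verification sketch (not in the original example): create_data_product() returns
    # the id of the new resource, which could be captured and read back, e.g.:
    #
    #     dp_id = dp_client.create_data_product(dp_obj,
    #                                           headers={'ion-actor-id': system_actor._id})
    #     dp = dp_client.read_data_product(dp_id)
    #     log.info('Created data product: %s', dp.name)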