def on_initial_bootstrap(self, process, config, **kwargs):
    """Launch the qc_post_processor process and an interval timer driving it.

    Skipped entirely under PYCC_MODE (ion integration tests) or when a
    qc_post_processor process already exists, so at most one instance runs.
    """
    if os.environ.get('PYCC_MODE'):
        # This environment is an ion integration test; nothing to launch.
        log.info('PYCC_MODE: skipping qc_post_processor launch')
        return
    if self.process_exists(process, 'qc_post_processor'):
        # Short circuit the bootstrap to make sure not more than one is ever started.
        return

    self.scheduler_service = SchedulerServiceProcessClient(process=process)
    self.process_dispatcher = ProcessDispatcherServiceProcessClient(process=process)
    # Run interval in hours (default 24).
    self.run_interval = CFG.get_safe('service.qc_processing.run_interval', 24)

    # Unique identifier tying the timer events to this process.
    interval_key = uuid4().hex
    proc_config = DotDict()
    proc_config.process.interval_key = interval_key

    definition = ProcessDefinition(
        name='qc_post_processor',
        executable={'module': 'ion.processes.data.transforms.qc_post_processing',
                    'class': 'QCPostProcessing'})
    definition_id = self.process_dispatcher.create_process_definition(definition)

    pid = self.process_dispatcher.create_process(definition_id)
    self.process_dispatcher.schedule_process(definition_id,
                                             process_id=pid,
                                             configuration=proc_config)

    timer_id = self.scheduler_service.create_interval_timer(
        start_time=str(time.time()),
        end_time='-1',  # Run FOREVER
        interval=3600 * self.run_interval,
        event_origin=interval_key)
def setUp(self):
    """Start the capability container, deploy services, and build the service clients."""
    # Silence startup noise while the container comes up, then restore logging.
    logging.disable(logging.ERROR)
    self._start_container()
    self.container.start_rel_from_url('res/deploy/r2deploy.yml')
    # simulate preloading
    preload_ion_params(self.container)
    logging.disable(logging.NOTSET)

    # Instantiate a process to represent the test.
    test_process = VisualizationServiceTestProcess()
    node = self.container.node

    # Service clients used by the visualization tests, all bound to the
    # same container node and test process.
    self.rrclient = ResourceRegistryServiceProcessClient(node=node, process=test_process)
    self.damsclient = DataAcquisitionManagementServiceProcessClient(node=node, process=test_process)
    self.pubsubclient = PubsubManagementServiceProcessClient(node=node, process=test_process)
    self.ingestclient = IngestionManagementServiceProcessClient(node=node, process=test_process)
    self.imsclient = InstrumentManagementServiceProcessClient(node=node, process=test_process)
    self.dataproductclient = DataProductManagementServiceProcessClient(node=node, process=test_process)
    self.dataprocessclient = DataProcessManagementServiceProcessClient(node=node, process=test_process)
    self.datasetclient = DatasetManagementServiceProcessClient(node=node, process=test_process)
    self.workflowclient = WorkflowManagementServiceProcessClient(node=node, process=test_process)
    self.process_dispatcher = ProcessDispatcherServiceProcessClient(node=node, process=test_process)
    self.data_retriever = DataRetrieverServiceProcessClient(node=node, process=test_process)
    self.vis_client = VisualizationServiceProcessClient(node=node, process=test_process)

    self.ctd_stream_def = SBE37_CDM_stream_definition()
def on_restart(self, process, config, **kwargs):
    """Reschedule every registered Transform when the restart flag is configured."""
    pds_client = ProcessDispatcherServiceProcessClient(process=process)
    registry = process.container.resource_registry

    def _relaunch(transform_id):
        # Read the transform, find its process definition, and schedule a
        # fresh process, recording the new pid on the transform resource.
        transform = registry.read(transform_id)
        proc_def_ids, _ = registry.find_objects(
            subject=transform_id,
            predicate=PRED.hasProcessDefinition,
            id_only=True)
        if not proc_def_ids:
            log.warning(
                'Transform did not have a correct process definition.')
            return
        transform.process_id = pds_client.schedule_process(
            process_definition_id=proc_def_ids[0],
            configuration=transform.configuration)
        registry.update(transform)

    if config.get_safe('service.transform_management.restart', False):
        transform_ids, _ = registry.find_resources(
            restype=RT.Transform, id_only=True)
        for transform_id in transform_ids:
            _relaunch(transform_id)
def on_initial_bootstrap(self, process, config, **kwargs):
    """Create service clients, then launch each bootstrap worker in sequence."""
    self.pds_client = ProcessDispatcherServiceProcessClient(process=process)
    self.resource_registry = ResourceRegistryServiceProcessClient(process=process)

    # Launch the workers in the same order as before; each receives the
    # bootstrap process and configuration.
    launchers = (
        self.ingestion_worker,
        self.replay_defs,
        self.notification_worker,
        self.registration_worker,
        self.pydap_server,
    )
    for launch in launchers:
        launch(process, config)
def await_agent_ready(self, replay_timeout=5):
    '''
    Determines if the replay process has been started, blocking until it
    reaches the RUNNING state or the timeout elapses.

    @param replay_timeout Time (seconds) to wait before raising a timeout
    @retval True if the process has been started
    '''
    # Use a process-bound client when we have a process context, otherwise
    # fall back to a plain service client.
    if self.process:
        pd_cli = ProcessDispatcherServiceProcessClient(process=self.process)
    else:
        pd_cli = ProcessDispatcherServiceClient()
    process_gate = ProcessStateGate(pd_cli.read_process,
                                    self.replay_process_id,
                                    ProcessStateEnum.RUNNING)
    # The gate's wait method is named 'await', which became a reserved
    # keyword in Python 3.7+; `process_gate.await(...)` is a syntax error
    # there, so look the method up dynamically (works on Python 2 and 3).
    return getattr(process_gate, 'await')(replay_timeout)