def execute_processing_block(pb_id: str, log_level='DEBUG'):
    """Execute a processing block.

    Celery task that runs the workflow defined in a Configuration
    database Processing Block data object until it either completes
    or is aborted.

    Args:
        pb_id (str): The PB id for the PBC
        log_level (str): Python logging level.
    """
    init_logger('sip', show_log_origin=True, propagate=False,
                log_level=log_level)

    LOG.info('+' * 40)
    LOG.info('+ Executing Processing block: %s!', pb_id)
    LOG.info('+' * 40)
    LOG.info('Processing Block Controller version: %s', __version__)
    LOG.info('Docker Swarm API version: %s', sip_swarm_api_version)
    LOG.info('Configuration database API version: %s', config_db_version)

    pb = ProcessingBlock(pb_id)
    LOG.info('Starting workflow %s %s', pb.workflow_id, pb.workflow_version)
    pb.set_status('running')
    docker = DockerSwarmClient()

    # Build a mutable copy of each stage's configuration, keyed by stage id.
    # The added 'services' map tracks the containers launched for the stage.
    stage_configs = {}
    for stage in pb.workflow_stages:
        config = deepcopy(stage.config)
        config['services'] = dict()
        stage_configs[stage.id] = config

    # Poll the workflow stages until the workflow finishes or is aborted.
    aborted = False
    while not aborted:
        time.sleep(0.1)
        for stage in pb.workflow_stages:
            _start_workflow_stages(pb, pb_id, stage_configs, stage, docker)
            _update_workflow_stages(stage_configs[stage.id], stage, docker)
        aborted = _abort_workflow(pb, stage_configs, docker)
        if not aborted and _workflow_complete(stage_configs):
            break

    ProcessingBlockList().set_complete(pb_id)
    pb.set_status('completed')

    LOG.info('-' * 40)
    LOG.info('- Destroying PBC for %s', pb_id)
    LOG.info('-' * 40)
    return pb.status
def _monitor_events(self):
    """Watch for Processing Block events."""
    LOG.info("Starting to monitor PB events")
    polls_since_log = 0
    while True:
        # Emit a heartbeat debug line roughly every 50 polling cycles.
        if polls_since_log == 50:
            polls_since_log = 0
            LOG.debug('Checking for PB events...')
        for event in self._pb_events.get_published_events():
            if event.type != 'status_changed':
                continue
            status = event.data['status']
            LOG.info('PB status changed event: %s', status)
            if status == 'created':
                LOG.info('Acknowledged PB created event (%s) for %s, '
                         '[timestamp: %s]', event.id, event.object_id,
                         event.timestamp)
                pb = ProcessingBlock(event.object_id)
                self._queue.put(event.object_id, pb.priority, pb.type)
            if status == 'completed':
                LOG.info('Acknowledged PB completed event (%s) for %s,'
                         ' [timestamp: %s]', event.id, event.object_id,
                         event.timestamp)
                # Clamp at zero so bookkeeping never goes negative.
                self._num_pbcs = max(self._num_pbcs - 1, 0)
        time.sleep(0.1)
        polls_since_log += 1
def _init_queue():
    """Initialise the Processing Block queue from the database.

    Populates a new queue from the current set of active Processing
    Blocks recorded in the Configuration Database, so that state is
    recovered after a restart.

    Returns:
        ProcessingBlockQueue: queue seeded with the active PBs.
    """
    LOG.info('Initialising Processing Block queue.')
    pb_queue = ProcessingBlockQueue()
    active_ids = ProcessingBlockList().active
    LOG.info('Initialising PC PB queue: %s', active_ids)
    for active_id in active_ids:
        block = ProcessingBlock(active_id)
        pb_queue.put(block.id, block.priority, block.type)
    return pb_queue
def pb_last_updated(self):
    """Return the PB last-updated timestamp as an ISO 8601 string."""
    pb = ProcessingBlock(self._pb_id)
    return pb.updated.isoformat()
def pb_created(self):
    """Return the PB creation timestamp as an ISO 8601 string."""
    pb = ProcessingBlock(self._pb_id)
    return pb.created.isoformat()
def pb_version(self):
    """Return the version of the Processing Block."""
    return ProcessingBlock(self._pb_id).version
def sbi_id(self):
    """Return the SBI ID associated with the Processing Block."""
    return ProcessingBlock(self._pb_id).sbi_id
def pb_type(self):
    """Return the type of the Processing Block."""
    return ProcessingBlock(self._pb_id).type
def pb_status(self):
    """Return the status of the Processing Block."""
    return ProcessingBlock(self._pb_id).status
def pb_config(self):
    """Return the Processing Block configuration as a JSON string."""
    block = ProcessingBlock(self._pb_id)
    return json.dumps(block.config)