class QLFProcess(object):
    """Manage a Quick Look pipeline process.

    Registers the process in the database (via ``QLFModels``), prepares
    the per-exposure output directory, and records timing/status when
    the pipeline finishes.
    """

    def __init__(self, data):
        # `data` is expected to carry at least: night, expid, zfill and
        # desi_spectro_redux -- TODO(review): confirm against callers.
        self.pipeline_name = 'Quick Look'
        self.data = data
        self.models = QLFModels()

    def start_process(self):
        """Start pipeline.

        Inserts a process row in the database, stores its id/status in
        ``self.data`` and creates the output directory for this exposure.
        """
        # Lazy %-style args: the message is only formatted if the log
        # level is enabled (was eager '%' interpolation before).
        logger.info('Started %s ...', self.pipeline_name)
        logger.info('Night: %s', self.data.get('night'))
        logger.info('Exposure: %s', str(self.data.get('expid')))

        self.data['start'] = datetime.datetime.now().replace(microsecond=0)

        # create process in database and obtain the process id
        process = self.models.insert_process(self.data, self.pipeline_name)

        self.data['process_id'] = process.id
        self.data['status'] = process.status

        # TODO: ingest configuration file used, this should be done by process
        # self.models.insert_config(process.id)

        logger.info('Process ID: %i', process.id)
        logger.info('Starting...')

        output_dir = os.path.join(
            'exposures', self.data.get('night'), self.data.get('zfill'))
        output_full_dir = os.path.join(
            self.data.get('desi_spectro_redux'), output_dir)

        # exist_ok=True avoids the isdir()+makedirs() check-then-create
        # race when two processes start concurrently.
        os.makedirs(output_full_dir, exist_ok=True)

        logger.info('Output dir: %s', output_dir)

        self.data['output_dir'] = output_dir

    def finish_process(self):
        """Finish pipeline.

        Records end time and duration, then persists the final process
        state (end, output dir, status) to the database.
        """
        self.data['end'] = datetime.datetime.now().replace(microsecond=0)
        self.data['duration'] = str(
            self.data.get('end') - self.data.get('start'))

        logger.info("Process with expID {} completed in {}.".format(
            self.data.get('expid'), self.data.get('duration')))

        self.models.update_process(
            process_id=self.data.get('process_id'),
            end=self.data.get('end'),
            process_dir=self.data.get('output_dir'),
            status=self.data.get('status'))
class QLFProcess(object):
    """ Class responsible for managing Quick Look pipeline process. """

    def __init__(self, data):
        # NOTE(review): `desi_spectro_redux`, `shutil`, `Thread` and
        # `pipe_logger` are module-level names not visible in this chunk --
        # confirm they are imported/defined at file scope.
        self.pipeline_name = 'Quick Look'
        self.data = data
        self.models = QLFModels()

        # Output directory layout: exposures/<night>/<zero-padded expid>.
        output_dir = os.path.join('exposures', self.data.get('night'),
                                  self.data.get('zfill'))
        output_full_dir = os.path.join(desi_spectro_redux, output_dir)

        # Remove old dir
        # (destructive: any previous products for this exposure are deleted
        # every time a QLFProcess is constructed)
        if os.path.isdir(output_full_dir):
            shutil.rmtree(output_full_dir)

        # Make output dir
        os.makedirs(output_full_dir)

        # Relative path is stored; callers join it with the redux root.
        self.data['output_dir'] = output_dir

    def start_process(self):
        """ Start pipeline.

        Inserts the process row in the database, caches its id/status in
        ``self.data`` and returns the new process id.
        """
        self.data['start'] = datetime.datetime.now().replace(microsecond=0)

        # create process in database and obtain the process id
        process = self.models.insert_process(self.data, self.pipeline_name)

        self.data['process_id'] = process.id
        self.data['status'] = process.status

        # TODO: ingest configuration file used, this should be done by process
        # self.models.insert_config(process.id)

        # Blank lines in the pipeline log visually separate exposures.
        pipe_logger.info('...{}'.format('\n' * 20))
        pipe_logger.info('Process ID {}'.format(process.id))
        pipe_logger.info('ExpID {} started.'.format(self.data.get('expid')))

        return process.id

    def finish_process(self):
        """ Finish pipeline.

        Records end time/duration and kicks off QA ingestion in a
        background thread.
        """
        self.data['end'] = datetime.datetime.now().replace(microsecond=0)
        # Duration kept as a timedelta here (the other revision stores str).
        self.data['duration'] = self.data.get('end') - self.data.get('start')

        pipe_logger.info("ExpID {} ended (runtime: {}).".format(
            self.data.get('expid'), str(self.data.get('duration'))))

        # NOTE(review): `ingest_parallel_qas` is not defined in this chunk --
        # presumably implemented elsewhere on this class; verify. The thread
        # is fire-and-forget (never joined).
        proc = Thread(target=self.ingest_parallel_qas)
        proc.start()