from __future__ import absolute_import, division, print_function, unicode_literals import hashlib import os from kombu import Connection, Queue, Exchange from banzai import logs __author__ = 'cmccully' logger = logs.get_logger(__name__) def post_to_archive_queue(image_path): def errback(exc, interval): logger.error('Error: %r', exc, exc_info=1) logger.info('Retry in %s seconds.', interval) fits_exchange = Exchange('fits_files', type='fanout') producer_queue = Queue('', fits_exchange, exclusive=True) with Connection('amqp://*****:*****@cerberus.lco.gtn') as conn: queue = conn.SimpleQueue(producer_queue) put = conn.ensure(queue, queue.put, max_retries=30, errback=errback) put({'path': image_path}) def make_output_directory(pipeline_context, image_config): # Create output directory if necessary output_directory = os.path.join(pipeline_context.processed_path, image_config.site, image_config.instrument, image_config.epoch)
def __init__(self, pipeline_context):
    """Initialize the stage with the run-wide pipeline context.

    A logger named after the concrete stage (``self.stage_name``, which is
    expected to be supplied by the subclass — TODO confirm) is created for
    per-stage log messages.

    Parameters
    ----------
    pipeline_context : object
        Shared settings container for this pipeline run.
    """
    stage_specific_logger = logs.get_logger(self.stage_name)
    self.logger = stage_specific_logger
    self.pipeline_context = pipeline_context
import argparse
import multiprocessing
import os

from kombu import Connection, Queue, Exchange
from kombu.mixins import ConsumerMixin

import banzai.images
from banzai import bias, dark, flats, trim, photometry, astrometry, headers, qc
from banzai import dbs
from banzai import logs
from banzai import munge, crosstalk, gain, mosaic
from banzai.qc import pointing
from banzai.utils import image_utils, date_utils

logger = logs.get_logger(__name__)


class PipelineContext(object):
    """Bag of per-run settings copied off the parsed command-line arguments."""

    def __init__(self, args):
        """Copy the pipeline options from *args* onto this context object.

        Parameters
        ----------
        args : argparse.Namespace
            Parsed command-line arguments; must expose every attribute
            named in the loop below.
        """
        # Copy each option one-for-one so the context exposes the same
        # attribute names the argument parser produced.
        for option in ('processed_path', 'raw_path', 'post_to_archive',
                       'fpack', 'rlevel', 'db_address', 'log_level',
                       'preview_mode', 'filename', 'max_preview_tries'):
            setattr(self, option, getattr(args, option))
def __init__(self, pipeline_context):
    """Store the shared pipeline context and create this stage's logger.

    Parameters
    ----------
    pipeline_context : object
        Run-wide settings container shared by all stages.
    """
    # self.stage_name is presumably defined by the concrete subclass — TODO confirm.
    self.logger = logs.get_logger(self.stage_name)
    self.pipeline_context = pipeline_context