예제 #1
0
def main():
    """Entry point: configure logging and run order disposition.

    Exits 0 on success, 1 when determine_order_disposition() raises.
    """

    cron_cfg = retrieve_cfg(CRON_CFG_FILENAME)
    proc_cfg = retrieve_cfg(PROC_CFG_FILENAME)

    # The log destination comes from the cron configuration
    log_file = cron_cfg.get('logging', 'disposition_log_filename')

    # Configure the root logger for this task
    log_format = ('%(asctime)s.%(msecs)03d %(process)d'
                  ' %(levelname)-8s'
                  ' %(filename)s:%(lineno)d:'
                  '%(funcName)s -- %(message)s')
    logging.basicConfig(format=log_format,
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        filename=log_file)

    logger = logging.getLogger(LOGGER_NAME)

    try:
        determine_order_disposition(proc_cfg)
    except Exception:
        logger.exception('Processing failed')
        sys.exit(1)  # EXIT_FAILURE

    sys.exit(0)  # EXIT_SUCCESS
예제 #2
0
def main():
    """Parse command line options, configure logging, then run process()."""

    # Command line interface: a single developer-mode flag
    parser = ArgumentParser(description='Main mapper for a request')
    parser.add_argument('--developer',
                        action='store_true', dest='developer', default=False,
                        help='use a developer mode for sleeping')
    args = parser.parse_args()

    proc_cfg = retrieve_cfg(PROC_CFG_FILENAME)

    # Logging goes through the shared ESPA logging facility; start with
    # the base logger
    EspaLogging.configure_base_logger(filename=MAPPER_LOG_FILENAME)
    logger = EspaLogging.get_logger('base')

    try:
        # Joe-Developer doesn't want to wait, so the flag skips sleeping
        process(proc_cfg, args.developer)
    except Exception:
        logger.exception('Processing failed stacktrace follows')
def main():
    """Parse arguments, configure logging, and advance scene dispositions.

    Exits 0 on success, 1 when determine_order_disposition() raises.
    """

    parser = ArgumentParser(description='Scene state disposition/advancement')
    # Optionally restrict processing to a single user's orders
    parser.add_argument('--user',
                        action='store', dest='username', default='ALL',
                        help='specific username to advance [ALL]')
    args = parser.parse_args()

    # 'ALL' is the sentinel meaning "no user filter"
    username = None if args.username == 'ALL' else args.username

    cron_cfg = retrieve_cfg(CRON_CFG_FILENAME)
    proc_cfg = retrieve_cfg(PROC_CFG_FILENAME)

    # Route log output to the file named in the cron configuration
    logging.basicConfig(format=('%(asctime)s.%(msecs)03d %(process)d'
                                ' %(levelname)-8s'
                                ' %(filename)s:%(lineno)d:'
                                '%(funcName)s -- %(message)s'),
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        filename=cron_cfg.get('logging',
                                              'disposition_log_filename'))

    logger = logging.getLogger(LOGGER_NAME)

    try:
        determine_order_disposition(proc_cfg, username)
    except Exception:
        logger.exception('Processing failed')
        sys.exit(1)  # EXIT_FAILURE

    sys.exit(0)  # EXIT_SUCCESS
def main():
    """Determine a year range from the command line and pull LAADS data.

    Returns SUCCESS when every requested year processed cleanly; returns
    ERROR on an invalid argument combination, a missing L8_AUX_DIR
    environment variable, a missing application token, or a failure while
    processing any year.
    """
    logger = logging.getLogger(__name__)  # Get logger for the module.

    # get the command line arguments
    parser = OptionParser()
    parser.add_option('-s', '--start_year', type='int', dest='syear',
                      default=0,
                      help='year for which to start pulling LAADS data')
    parser.add_option('-e', '--end_year', type='int', dest='eyear',
                      default=0,
                      help='last year for which to pull LAADS data')
    parser.add_option('--today', dest='today', default=False,
                      action='store_true',
                      help='process LAADS data up through the most recent '
                           'year and DOY')
    msg = ('process or reprocess all LAADS data from today back to {}'
           .format(START_YEAR))
    parser.add_option('--quarterly', dest='quarterly', default=False,
                      action='store_true', help=msg)

    (options, args) = parser.parse_args()
    syear = options.syear           # starting year
    eyear = options.eyear           # ending year
    today = options.today           # process most recent year of data
    quarterly = options.quarterly   # process today back to START_YEAR

    # check the arguments: an explicit year range is required unless one of
    # the --today / --quarterly shortcuts was given
    if not today and not quarterly and (syear == 0 or eyear == 0):
        msg = ('Invalid command line argument combination.  Type --help '
               'for more information.')
        logger.error(msg)
        return ERROR

    # determine the auxiliary directory to store the data
    auxdir = os.environ.get('L8_AUX_DIR')
    if auxdir is None:
        logger.error('L8_AUX_DIR environment variable not set... exiting')
        return ERROR

    # Get the application token for the LAADS https interface.  For ESPA
    # systems, pull the token from the config file.
    # BUGFIX: initialize token so a failed api_connect() cannot leave it
    # unbound (the original raised NameError at the "token is None" check).
    token = None
    if TOKEN is None:
        # ESPA Processing Environment
        # Read ~/.usgs/espa/processing.conf to get the URL for the ESPA API.
        # Connect to the ESPA API and get the application token for
        # downloading the LAADS data from the internal database.
        PROC_CFG_FILENAME = 'processing.conf'
        proc_cfg = retrieve_cfg(PROC_CFG_FILENAME)
        rpcurl = proc_cfg.get('processing', 'espa_api')
        server = api_connect(rpcurl)
        if server:
            token = server.get_configuration('aux.downloads.laads.token')
    else:
        # Non-ESPA processing.  TOKEN needs to be defined at the top of this
        # script.
        token = TOKEN

    if token is None:
        logger.error('Application token is None. This needs to be a valid '
                     'token provided for accessing the LAADS data. '
                     'https://ladsweb.modaps.eosdis.nasa.gov/tools-and-services/data-download-scripts/')
        return ERROR

    # BUGFIX: compute "now" unconditionally; the original only assigned it
    # inside the --today branch, so --quarterly alone raised NameError.
    now = datetime.datetime.now()

    # if processing today then process the current year.  if the current
    # DOY is within the first month, then process the previous year as well
    # to make sure we have all the recently available data processed.
    if today:
        logger.info('Processing LAADS data up to the most recent year '
                    'and DOY.')
        day_of_year = now.timetuple().tm_yday
        eyear = now.year
        syear = eyear - 1 if day_of_year <= 31 else eyear

    elif quarterly:
        logger.info('Processing LAADS data back to {}'.format(START_YEAR))
        eyear = now.year
        syear = START_YEAR

    logger.info('Processing LAADS data for {} - {}'.format(syear, eyear))
    # Work from the most recent year backwards
    for yr in range(eyear, syear - 1, -1):
        logger.info('Processing year: {}'.format(yr))
        status = getLadsData(auxdir, yr, today, token)
        if status == ERROR:
            logger.error('Problems occurred while processing LAADS data '
                         'for year {}'.format(yr))
            return ERROR

    logger.info('LAADS processing complete.')
    return SUCCESS
예제 #5
0
def main():
    """Build an order from the command line arguments and run the
       processing code against it
    """

    args = parse_command_line()

    # Load the processing configuration, then apply any CLI overrides
    proc_cfg = override_config(args, config.retrieve_cfg(PROC_CFG_FILENAME))

    # One logger for the CLI itself ('base'), one for the request being
    # processed
    EspaLogging.configure_base_logger(filename=cli_log_filename(args))
    EspaLogging.configure(settings.PROCESSING_LOGGER,
                          order=args.order_id,
                          product=args.product_id,
                          debug=args.debug)
    logger = EspaLogging.get_logger('base')

    logger.info('*** Begin ESPA Processing on host [{}] ***'.format(
        socket.gethostname()))

    # Assume failure until the processor finishes cleanly
    proc_status = False

    try:
        # Extra command line validation
        if args.pixel_size is not None and args.pixel_size_units is None:
            raise CliError('Must specify --pixel-size-units if specifying'
                           ' --pixel-size')

        export_environment_variables(proc_cfg)

        template = load_template(filename=TEMPLATE_FILENAME)
        order = update_template(args=args, template=template)

        # Run the processor from the configured work directory and restore
        # the previous working directory no matter what happens
        previous_directory = os.getcwd()
        os.chdir(proc_cfg.get('processing', 'espa_work_dir'))
        try:
            # All processors are implemented in the processor module
            pp = processor.get_instance(proc_cfg, order)
            (destination_product_file, destination_cksum_file) = pp.process()
            proc_status = True
        finally:
            os.chdir(previous_directory)

    except Exception:
        logger.exception('*** Errors during processing ***')
        sys.exit(1)

    finally:
        logger.info('*** ESPA Processing Terminated ***')

        if not args.bridge_mode:
            archive_log_files(args, proc_cfg, proc_status)
예제 #6
0
def main():
    """Build and kick off hadoop jobs for pending product requests.

    Parses the command line, validates the requested product types,
    configures file-based logging, maps the CLI priority onto queue and
    request priorities, and hands everything to process_requests().
    Exits 0 on success and 1 on any failure.
    """

    cron_cfg = retrieve_cfg(CRON_CFG_FILENAME)
    proc_cfg = retrieve_cfg(PROC_CFG_FILENAME)

    # Create a command line argument parser
    description = ('Builds and kicks-off hadoop jobs for the espa processing'
                   ' system (to process product requests)')
    parser = ArgumentParser(description=description)

    # Add parameters
    valid_priorities = queue_keys(cron_cfg)
    valid_product_types = ['landsat', 'modis', 'plot']

    parser.add_argument('--priority',
                        action='store',
                        dest='priority',
                        required=True,
                        choices=valid_priorities,
                        help='only process requests with this priority:'
                        ' one of [{0}]'.format(', '.join(valid_priorities)))

    # FIX: enforce the valid product types at parse time --
    # valid_product_types was previously defined but never used
    parser.add_argument('--product-types',
                        action='store',
                        dest='product_types',
                        required=True,
                        nargs='+',
                        metavar='PRODUCT_TYPE',
                        choices=valid_product_types,
                        help=('only process requests for the specified'
                              ' product type(s)'))

    parser.add_argument('--limit',
                        action='store',
                        dest='limit',
                        required=False,
                        default='500',
                        help='specify the max number of requests to process')

    parser.add_argument('--user',
                        action='store',
                        dest='user',
                        required=False,
                        default=None,
                        help='only process requests for the specified user')

    # Parse the command line arguments
    args = parser.parse_args()

    # 'plot' is exclusive: it cannot be combined with any other product type
    if 'plot' in args.product_types and len(set(args.product_types)) > 1:
        print('Invalid --product-types: [plot] cannot be combined with any'
              ' other product types')
        sys.exit(1)  # EXIT_FAILURE

    # Configure and get the logger for this task; plotting uses its own log
    logger_filename = cron_cfg.get('logging', 'log_filename')
    if 'plot' in args.product_types:
        logger_filename = cron_cfg.get('logging', 'plot_log_filename')

    # Embed the (lowercased) priority into every log record
    logger_format = ('%(asctime)s.%(msecs)03d %(process)d'
                     ' %(levelname)-8s {0:>6}'
                     ' %(filename)s:%(lineno)d:%(funcName)s'
                     ' -- %(message)s'.format(args.priority.lower()))

    # Setup the default logger format and level.  Log to the file above.
    logging.basicConfig(format=logger_format,
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        filename=logger_filename)

    logger = logging.getLogger(LOGGER_NAME)

    # Determine the appropriate priority value to use for the queue and request
    queue_priority = args.priority.lower()
    request_priority = queue_priority
    if 'plot' in args.product_types:
        if queue_priority == 'all':
            # If all was specified default to high queue
            queue_priority = 'high'
        # The request priority is always None for plotting to get all of them
        request_priority = None
    else:
        if request_priority == 'all':
            # We need to use a value of None to get all of them
            request_priority = None

    # Setup and submit products to hadoop for processing
    try:
        process_requests(cron_cfg, proc_cfg, args, queue_priority,
                         request_priority)
    except Exception:
        logger.exception('Processing failed')
        sys.exit(1)  # EXIT_FAILURE

    sys.exit(0)  # EXIT_SUCCESS
예제 #7
0
def main():
    """Build and kick off hadoop jobs for pending product requests.

    Parses the command line, validates the requested product types,
    configures file-based logging, maps the CLI priority onto queue and
    request priorities, and hands everything to process_requests().
    Exits 0 on success and 1 on any failure.
    """

    cron_cfg = retrieve_cfg(CRON_CFG_FILENAME)
    proc_cfg = retrieve_cfg(PROC_CFG_FILENAME)

    # Create a command line argument parser
    description = ('Builds and kicks-off hadoop jobs for the espa processing'
                   ' system (to process product requests)')
    parser = ArgumentParser(description=description)

    # Add parameters
    valid_priorities = queue_keys(cron_cfg)
    valid_product_types = ['landsat', 'modis', 'plot']

    parser.add_argument('--priority',
                        action='store', dest='priority', required=True,
                        choices=valid_priorities,
                        help='only process requests with this priority:'
                             ' one of [{0}]'
                             .format(', '.join(valid_priorities)))

    # FIX: enforce the valid product types at parse time --
    # valid_product_types was previously defined but never used
    parser.add_argument('--product-types',
                        action='store', dest='product_types', required=True,
                        nargs='+', metavar='PRODUCT_TYPE',
                        choices=valid_product_types,
                        help=('only process requests for the specified'
                              ' product type(s)'))

    parser.add_argument('--limit',
                        action='store', dest='limit', required=False,
                        default='500',
                        help='specify the max number of requests to process')

    parser.add_argument('--user',
                        action='store', dest='user', required=False,
                        default=None,
                        help='only process requests for the specified user')

    # Parse the command line arguments
    args = parser.parse_args()

    # 'plot' is exclusive: it cannot be combined with any other product type
    if 'plot' in args.product_types and len(set(args.product_types)) > 1:
        print('Invalid --product-types: [plot] cannot be combined with any'
              ' other product types')
        sys.exit(1)  # EXIT_FAILURE

    # Configure and get the logger for this task; plotting uses its own log
    logger_filename = cron_cfg.get('logging', 'log_filename')
    if 'plot' in args.product_types:
        logger_filename = cron_cfg.get('logging', 'plot_log_filename')

    # Embed the (lowercased) priority into every log record
    logger_format = ('%(asctime)s.%(msecs)03d %(process)d'
                     ' %(levelname)-8s {0:>6}'
                     ' %(filename)s:%(lineno)d:%(funcName)s'
                     ' -- %(message)s'.format(args.priority.lower()))

    # Setup the default logger format and level.  Log to the file above.
    logging.basicConfig(format=logger_format,
                        datefmt='%Y-%m-%d %H:%M:%S',
                        level=logging.INFO,
                        filename=logger_filename)

    logger = logging.getLogger(LOGGER_NAME)

    # Determine the appropriate priority value to use for the queue and request
    queue_priority = args.priority.lower()
    request_priority = queue_priority
    if request_priority == 'all':
        # We need to use a value of None to get all of them
        request_priority = None

    # Setup and submit products to hadoop for processing
    try:
        process_requests(cron_cfg, proc_cfg, args,
                         queue_priority, request_priority)
    except Exception:
        logger.exception('Processing failed')
        sys.exit(1)  # EXIT_FAILURE

    sys.exit(0)  # EXIT_SUCCESS
예제 #8
0
def main():
    """Configure an order from the command line input and run the
       processing code against it
    """

    args = parse_command_line()

    # Processing configuration, with any command-line overrides applied
    cfg = config.retrieve_cfg(PROC_CFG_FILENAME)
    cfg = override_config(args, cfg)

    # Configure the base (CLI) logger and the per-request processing logger
    EspaLogging.configure_base_logger(filename=cli_log_filename(args))
    EspaLogging.configure(settings.PROCESSING_LOGGER,
                          order=args.order_id,
                          product=args.product_id,
                          debug=args.debug)

    # The CLI logs through the base logger
    logger = EspaLogging.get_logger('base')

    logger.info('*** Begin ESPA Processing on host [{}] ***'
                .format(socket.gethostname()))

    # Assume failure until the processor finishes cleanly
    proc_status = False

    try:
        # Extra command line validation
        if args.pixel_size is not None and args.pixel_size_units is None:
            raise CliError('Must specify --pixel-size-units if specifying'
                           ' --pixel-size')

        export_environment_variables(cfg)

        order = update_template(args=args,
                                template=load_template(
                                    filename=TEMPLATE_FILENAME))

        # Run the processor from the configured work directory, restoring
        # the original working directory afterwards
        saved_cwd = os.getcwd()
        os.chdir(cfg.get('processing', 'espa_work_dir'))
        try:
            # All processors are implemented in the processor module
            pp = processor.get_instance(cfg, order)
            (destination_product_file, destination_cksum_file) = pp.process()
            proc_status = True
        finally:
            os.chdir(saved_cwd)

    except Exception:
        logger.exception('*** Errors during processing ***')
        sys.exit(1)

    finally:
        logger.info('*** ESPA Processing Terminated ***')

        if not args.bridge_mode:
            archive_log_files(args, cfg, proc_status)