def test_modis_sensor_info(self):
    result_1 = sensor.modis_sensor_info(self.terra_product_id)
    result_2 = sensor.info(self.terra_product_id)
    self.assertEqual(result_1, result_2)

    result_1 = sensor.modis_sensor_info(self.aqua_product_id)
    result_2 = sensor.info(self.aqua_product_id)
    self.assertEqual(result_1, result_2)
def _deserialize(self, value, attr, data):
    try:
        # Verify the value maps to a supported sensor before deserializing
        _ = sensor.info(value)
        return super(SupportedSensorsField, self)._deserialize(value, attr, data)
    except sensor.ProductNotImplemented:
        raise ValidationError('Product not implemented')
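# --- Illustrative usage sketch (not from the source) -------------------------
# SupportedSensorsField appears to be a custom marshmallow field (the
# _deserialize signature and ValidationError usage suggest marshmallow 2.x).
# The snippet below only shows how such a field could be exercised on its own;
# the product ID is a made-up sample and the field's constructor arguments are
# assumed to be optional.
from marshmallow import ValidationError

field = SupportedSensorsField()
try:
    field.deserialize('LC08_L1TP_025027_20160521_20170223_01_T1')
except ValidationError as err:
    print('rejected: {}'.format(err.messages))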
def test_landsat_collection_sensor_info(self):
    result_1 = sensor.landsat_collection_sensor_info(self.lt04_product_id)
    result_2 = sensor.info(self.lt04_product_id)
    self.assertEqual(result_1, result_2)

    result_1 = sensor.landsat_collection_sensor_info(self.lt05_product_id)
    result_2 = sensor.info(self.lt05_product_id)
    self.assertEqual(result_1, result_2)

    result_1 = sensor.landsat_collection_sensor_info(self.le07_product_id)
    result_2 = sensor.info(self.le07_product_id)
    self.assertEqual(result_1, result_2)

    result_1 = sensor.landsat_collection_sensor_info(self.lc08_product_id)
    result_2 = sensor.info(self.lc08_product_id)
    self.assertEqual(result_1, result_2)

    result_1 = sensor.landsat_collection_sensor_info(self.lo08_product_id)
    result_2 = sensor.info(self.lo08_product_id)
    self.assertEqual(result_1, result_2)

    # LT08 is not supported today, so check for the known failure points
    with self.assertRaises(KeyError) as context:
        sensor.landsat_collection_sensor_info(self.lt08_product_id)
    self.assertTrue('LT08' in str(context.exception))

    with self.assertRaises(sensor.ProductNotImplemented) as context:
        sensor.info(self.lt08_product_id)
    self.assertTrue('is not a supported Product ID format'
                    in str(context.exception))
def get_product_name(input_name, fmt_str):
    """Build the product name from the product information and current time
    """

    return fmt_str.format(prefix=str(sensor.info(input_name).product_prefix),
                          timestamp=datetime.datetime.utcnow())
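# --- Hypothetical example (format string and product ID are made up) ---------
# Only the 'prefix' and 'timestamp' placeholder names come from
# get_product_name() above; the rest is illustrative.
fmt = '{prefix}-SC{timestamp:%Y%m%d%H%M%S}'
name = get_product_name('LC08_L1TP_025027_20160521_20170223_01_T1', fmt)
# -> something like '<product_prefix>-SC20200101123000'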
def process(proc_cfg, developer_sleep_mode=False):
    """Read all lines from STDIN and process them

    Each line is converted to a JSON dictionary of the parameters for
    processing.  Validation is performed on the JSON dictionary to test
    whether it is valid for this mapper.  After validation the generation
    of the products is performed.
    """

    # Initially set to the base logger
    logger = EspaLogging.get_logger('base')

    processing_location = socket.gethostname()

    # Process each line from stdin
    for line in sys.stdin:
        if not line or len(line) < 1 or not line.strip().find('{') > -1:
            # this is how the nlineinputformat is supplying values:
            # 341104 {"orderid":
            # logger.info('BAD LINE:{}##'.format(line))
            continue
        else:
            # take the entry starting at the first opening brace to the end
            line = line[line.find('{'):]
            line = line.strip()

        # Reset these for each line
        (server, order_id, product_id) = (None, None, None)

        start_time = datetime.datetime.now()

        # Initialize so that we don't sleep
        dont_sleep = True

        try:
            line = line.replace('#', '')
            parms = json.loads(line)

            if not parameters.test_for_parameter(parms, 'options'):
                raise ValueError('Error missing JSON [options] record')

            # TODO scene will be replaced with product_id someday
            (order_id, product_id, product_type, options) = \
                (parms['orderid'], parms['scene'], parms['product_type'],
                 parms['options'])

            if product_id != 'plot':
                # Developer mode is always false unless you are a developer,
                # so sleeping will always occur for non-plotting requests
                # Override with the developer mode
                dont_sleep = developer_sleep_mode

            # Fix the orderid in case it contains any single quotes
            # The processors can not handle single quotes in the email
            # portion due to usage in command lines.
            parms['orderid'] = order_id.replace("'", '')

            # If it is missing due to above TODO, then add it
            if not parameters.test_for_parameter(parms, 'product_id'):
                parms['product_id'] = product_id

            # Figure out if debug level logging was requested
            debug = False
            if parameters.test_for_parameter(options, 'debug'):
                debug = options['debug']

            # Configure and get the logger for this order request
            EspaLogging.configure(settings.PROCESSING_LOGGER,
                                  order=order_id,
                                  product=product_id,
                                  debug=debug)
            logger = EspaLogging.get_logger(settings.PROCESSING_LOGGER)

            logger.info('Processing {}:{}'.format(order_id, product_id))

            # Update the status in the database
            if parameters.test_for_parameter(parms, 'espa_api'):
                if parms['espa_api'] != 'skip_api':
                    server = api_interface.api_connect(parms['espa_api'])
                    if server is not None:
                        status = server.update_status(product_id, order_id,
                                                      processing_location,
                                                      'processing')
                        if not status:
                            logger.warning('Failed processing API call'
                                           ' to update_status to processing')

            if product_id != 'plot':
                # Make sure we can process the sensor
                tmp_info = sensor.info(product_id)
                del tmp_info

                # Make sure we have a valid output format
                if not parameters.test_for_parameter(options,
                                                     'output_format'):
                    logger.warning('[output_format] parameter missing'
                                   ' defaulting to envi')
                    options['output_format'] = 'envi'

                if (options['output_format']
                        not in parameters.VALID_OUTPUT_FORMATS):
                    raise ValueError('Invalid Output format {}'
                                     .format(options['output_format']))

            # ----------------------------------------------------------------
            # NOTE: The first thing the product processor does during
            #       initialization is validate the input parameters.
            # ----------------------------------------------------------------

            destination_product_file = 'ERROR'
            destination_cksum_file = 'ERROR'
            pp = None
            try:
                # All processors are implemented in the processor module
                pp = processor.get_instance(proc_cfg, parms)
                (destination_product_file, destination_cksum_file) = \
                    pp.process()

            finally:
                # Free disk space to be nice to the whole system.
                if pp is not None:
                    pp.remove_product_directory()

            # Sleep the number of seconds for minimum request duration
            sleep(get_sleep_duration(proc_cfg, start_time, dont_sleep))

            archive_log_files(order_id, product_id)

            # Everything was successful so mark the scene complete
            if server is not None:
                status = server.mark_scene_complete(product_id, order_id,
                                                    processing_location,
                                                    destination_product_file,
                                                    destination_cksum_file,
                                                    '')
                if not status:
                    logger.warning('Failed processing API call to'
                                   ' mark_scene_complete')

        except Exception:
            # First log the exception
            logger.exception('Exception encountered stacktrace follows')

            # Sleep the number of seconds for minimum request duration
            sleep(get_sleep_duration(proc_cfg, start_time, dont_sleep))

            archive_log_files(order_id, product_id)

            if server is not None:
                try:
                    status = set_product_error(server,
                                               order_id,
                                               product_id,
                                               processing_location)
                except Exception:
                    logger.exception('Exception encountered stacktrace'
                                     ' follows')
        finally:
            # Reset back to the base logger
            logger = EspaLogging.get_logger('base')
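# --- Assumption, not the project's code ---------------------------------------
# A plausible shape for the get_sleep_duration() helper used above: pad short
# requests out to a configured minimum wall-clock duration unless sleeping was
# disabled (developer mode).  The config key name is hypothetical and proc_cfg
# is assumed to behave like a dict.
import datetime


def get_sleep_duration_sketch(proc_cfg, start_time, dont_sleep,
                              key='espa_min_request_duration_in_seconds'):
    if dont_sleep:
        return 0
    minimum = int(proc_cfg.get(key, 0))
    elapsed = (datetime.datetime.now() - start_time).total_seconds()
    return max(0, minimum - int(elapsed))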
def distribute_product(immutability, product_name, source_path,
                       packaging_path, parms):
    '''
    Description:
        Determines if the distribution method is set to local or remote and
        calls the correct distribution method.

    Returns:
        product_file - The full path to the product either on the local
                       system or the remote destination.
        cksum_value - The checksum value of the product.

    Args:
        immutability - Whether or not to set the immutability flag on the
                       product files
        product_name - The name of the product.
        source_path - The full path to the directory containing the data to
                      package and distribute.
        packaging_path - The full path on the local system under which the
                         packaged product should be placed.
        parms - All the user and system defined parameters.
    '''

    env = Environment()

    distribution_method = env.get_distribution_method()

    # The file paths to the distributed product and checksum files
    product_file = 'ERROR'
    cksum_file = 'ERROR'

    if distribution_method == DISTRIBUTION_METHOD_LOCAL:
        # Use the local cache path
        cache_path = os.path.join(settings.ESPA_LOCAL_CACHE_DIRECTORY,
                                  parms['orderid'])

        # Override if we are doing bridge processing
        if parms['bridge_mode']:
            sensor_info = sensor.info(parms['product_id'])
            cache_path = os.path.join(settings.ESPA_LOCAL_CACHE_DIRECTORY,
                                      str(sensor_info.date_acquired.year),
                                      str(sensor_info.path).lstrip('0'),
                                      str(sensor_info.row).lstrip('0'))

        # Adjust the packaging_path to use the cache
        package_path = os.path.join(packaging_path, cache_path)

        (product_file, cksum_file) = \
            distribute_product_local(immutability,
                                     product_name,
                                     source_path,
                                     package_path)

    else:  # remote
        # Use the remote cache path
        cache_path = os.path.join(settings.ESPA_REMOTE_CACHE_DIRECTORY,
                                  parms['orderid'])

        (product_file, cksum_file) = \
            distribute_product_remote(immutability,
                                      product_name,
                                      source_path,
                                      packaging_path,
                                      cache_path,
                                      parms)

    return (product_file, cksum_file)
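# --- Illustrative only (values made up) ---------------------------------------
# Shows how the bridge-mode cache path above is assembled from a product's
# acquisition year and its WRS path/row, with leading zeros stripped.  The
# cache directory and SensorInfoStub stand in for the real setting and
# sensor.info() result.
import datetime
import os
from collections import namedtuple

SensorInfoStub = namedtuple('SensorInfoStub', 'date_acquired path row')

info = SensorInfoStub(datetime.date(2017, 6, 4), '025', '027')
cache_path = os.path.join('/espa-cache',
                          str(info.date_acquired.year),
                          str(info.path).lstrip('0'),
                          str(info.row).lstrip('0'))
print(cache_path)  # /espa-cache/2017/25/27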
def validate_reprojection_parameters(parms, product_id):
    '''
    Description:
        Perform a check on the possible reprojection parameters

    Note: We blindly convert values to float or int without checking them.
          It is assumed that the web tier has validated them.
    '''

    logger = EspaLogging.get_logger(settings.PROCESSING_LOGGER)

    # Create this and set to None if not present
    if not test_for_parameter(parms, 'projection'):
        logger.warning('projection: missing defaulting to None')
        parms['projection'] = None

    # Create this and set to 'near' if not present
    if not test_for_parameter(parms, 'resample_method'):
        logger.warning('resample_method: missing defaulting to near')
        parms['resample_method'] = 'near'

    # Make sure these have at least a False value
    required_parameters = ['reproject', 'image_extents', 'resize']
    for parameter in required_parameters:
        if not test_for_parameter(parms, parameter):
            logger.warning('{0}: missing defaulting to False'
                           .format(parameter))
            parms[parameter] = False

    if parms['reproject']:
        if not test_for_parameter(parms, 'target_projection'):
            raise RuntimeError('Missing target_projection parameter')
        else:
            # Convert to lower case
            target_projection = parms['target_projection'].lower()
            parms['target_projection'] = target_projection

            # Verify a valid projection
            if target_projection not in VALID_PROJECTIONS:
                msg = ('Invalid target_projection [{0}]:'
                       ' Argument must be one of ({1})'
                       .format(target_projection,
                               ', '.join(VALID_PROJECTIONS)))
                raise ValueError(msg)

        if target_projection == 'sinu':
            if not test_for_parameter(parms, 'central_meridian'):
                raise RuntimeError('Missing central_meridian parameter')
            else:
                parms['central_meridian'] = \
                    float(parms['central_meridian'])
            if not test_for_parameter(parms, 'false_easting'):
                raise RuntimeError('Missing false_easting parameter')
            else:
                parms['false_easting'] = float(parms['false_easting'])
            if not test_for_parameter(parms, 'false_northing'):
                raise RuntimeError('Missing false_northing parameter')
            else:
                parms['false_northing'] = float(parms['false_northing'])

            if not test_for_parameter(parms, 'datum'):
                parms['datum'] = None

        if target_projection == 'aea':
            if not test_for_parameter(parms, 'std_parallel_1'):
                raise RuntimeError('Missing std_parallel_1 parameter')
            else:
                parms['std_parallel_1'] = float(parms['std_parallel_1'])
            if not test_for_parameter(parms, 'std_parallel_2'):
                raise RuntimeError('Missing std_parallel_2 parameter')
            else:
                parms['std_parallel_2'] = float(parms['std_parallel_2'])
            if not test_for_parameter(parms, 'origin_lat'):
                raise RuntimeError('Missing origin_lat parameter')
            else:
                parms['origin_lat'] = float(parms['origin_lat'])
            if not test_for_parameter(parms, 'central_meridian'):
                raise RuntimeError('Missing central_meridian parameter')
            else:
                parms['central_meridian'] = \
                    float(parms['central_meridian'])
            if not test_for_parameter(parms, 'false_easting'):
                raise RuntimeError('Missing false_easting parameter')
            else:
                parms['false_easting'] = float(parms['false_easting'])
            if not test_for_parameter(parms, 'false_northing'):
                raise RuntimeError('Missing false_northing parameter')
            else:
                parms['false_northing'] = float(parms['false_northing'])

            # The datum must be in uppercase for the processing code to
            # work so if it is present here, we force it
            if not test_for_parameter(parms, 'datum'):
                raise RuntimeError('Missing datum parameter')
            else:
                parms['datum'] = parms['datum'].upper()
            if parms['datum'] not in settings.VALID_DATUMS:
                valid_items = ', '.join(settings.VALID_DATUMS)
                raise ValueError('Invalid datum [{0}]:'
                                 ' Argument must be one of [{1}]'
                                 .format(parms['datum'], valid_items))

        if target_projection == 'utm':
            if not test_for_parameter(parms, 'utm_zone'):
                raise RuntimeError('Missing utm_zone parameter')
            else:
                zone = int(parms['utm_zone'])
                if zone < 0 or zone > 60:
                    raise ValueError('Invalid utm_zone [{0}]:'
                                     ' Value must be 0-60'.format(zone))
                parms['utm_zone'] = zone
            if not test_for_parameter(parms, 'utm_north_south'):
                raise RuntimeError('Missing utm_north_south parameter')
            elif parms['utm_north_south'] not in VALID_NS:
                raise ValueError('Invalid utm_north_south [{0}]:'
                                 ' Argument must be one of [{1}]'
                                 .format(parms['utm_north_south'],
                                         ', '.join(VALID_NS)))

            if not test_for_parameter(parms, 'datum'):
                parms['datum'] = None

        if target_projection == 'ps':
            if not test_for_parameter(parms, 'latitude_true_scale'):
                # Must be tested before origin_lat
                raise RuntimeError('Missing latitude_true_scale parameter')
            else:
                value = float(parms['latitude_true_scale'])
                if ((value < 60.0 and value > -60.0) or
                        value > 90.0 or value < -90.0):
                    raise ValueError('Invalid latitude_true_scale [{0}]:'
                                     ' Value must be between'
                                     ' (-60.0 and -90.0) or'
                                     ' (60.0 and 90.0)'.format(value))
                parms['latitude_true_scale'] = value

            if not test_for_parameter(parms, 'longitude_pole'):
                raise RuntimeError('Missing longitude_pole parameter')
            else:
                parms['longitude_pole'] = float(parms['longitude_pole'])

            if not test_for_parameter(parms, 'origin_lat'):
                # If the user did not specify the origin_lat value, then
                # set it based on the latitude true scale
                lat_ts = float(parms['latitude_true_scale'])
                if lat_ts < 0:
                    parms['origin_lat'] = -90.0
                else:
                    parms['origin_lat'] = 90.0
            else:
                value = float(parms['origin_lat'])
                if value != -90.0 and value != 90.0:
                    raise ValueError('Invalid origin_lat [{0}]:'
                                     ' Value must be -90.0 or 90.0'
                                     .format(value))
                parms['origin_lat'] = value

            if not test_for_parameter(parms, 'false_easting'):
                raise RuntimeError('Missing false_easting parameter')
            else:
                parms['false_easting'] = float(parms['false_easting'])
            if not test_for_parameter(parms, 'false_northing'):
                raise RuntimeError('Missing false_northing parameter')
            else:
                parms['false_northing'] = float(parms['false_northing'])
            if not test_for_parameter(parms, 'datum'):
                parms['datum'] = None

        if target_projection == 'lonlat':
            if not test_for_parameter(parms, 'datum'):
                parms['datum'] = None

    if parms['resample_method'] not in VALID_RESAMPLE_METHODS:
        raise ValueError('Invalid resample_method [{0}]:'
                         ' Argument must be one of [{1}]'
                         .format(parms['resample_method'],
                                 ', '.join(VALID_RESAMPLE_METHODS)))

    if parms['image_extents']:
        if not test_for_parameter(parms, 'image_extents_units'):
            raise RuntimeError('Missing image_extents_units parameter')
        else:
            if parms['image_extents_units'] not in VALID_IMAGE_EXTENTS_UNITS:
                raise ValueError('Invalid image_extents_units [{0}]:'
                                 ' Argument must be one of [{1}]'
                                 .format(parms['image_extents_units'],
                                         ', '.join(
                                             VALID_IMAGE_EXTENTS_UNITS)))
        if not test_for_parameter(parms, 'minx'):
            raise RuntimeError('Missing minx parameter')
        else:
            parms['minx'] = float(parms['minx'])
        if not test_for_parameter(parms, 'miny'):
            raise RuntimeError('Missing miny parameter')
        else:
            parms['miny'] = float(parms['miny'])
        if not test_for_parameter(parms, 'maxx'):
            raise RuntimeError('Missing maxx parameter')
        else:
            parms['maxx'] = float(parms['maxx'])
        if not test_for_parameter(parms, 'maxy'):
            raise RuntimeError('Missing maxy parameter')
        else:
            parms['maxy'] = float(parms['maxy'])
    else:
        # Default these
        parms['minx'] = None
        parms['miny'] = None
        parms['maxx'] = None
        parms['maxy'] = None
        parms['image_extents_units'] = None

    if parms['resize']:
        if not test_for_parameter(parms, 'pixel_size'):
            raise RuntimeError('Missing pixel_size parameter')
        else:
            parms['pixel_size'] = float(parms['pixel_size'])
        if not test_for_parameter(parms, 'pixel_size_units'):
            raise RuntimeError('Missing pixel_size_units parameter')
        else:
            if parms['pixel_size_units'] not in VALID_PIXEL_SIZE_UNITS:
                valid_items = ', '.join(VALID_PIXEL_SIZE_UNITS)
                raise ValueError('Invalid pixel_size_units [{0}]:'
                                 ' Argument must be one of [{1}]'
                                 .format(parms['pixel_size_units'],
                                         valid_items))
    else:
        # Default this
        parms['pixel_size'] = None
        parms['pixel_size_units'] = None

    if ((parms['reproject'] or parms['image_extents']) and
            not parms['resize']):
        # Somebody asked for reproject or extents, but didn't specify a
        # pixel size
        units = 'meters'
        if parms['reproject'] and parms['target_projection'] == 'lonlat':
            units = 'dd'

        # Default to the sensor specific meters or dd equivalent
        parms['pixel_size'] = \
            sensor.info(product_id).default_pixel_size[units]
        parms['pixel_size_units'] = units

        logger.warning('resize: parameter not provided'
                       ' but required for reprojection or image extents'
                       ' (Defaulting pixel_size({0}) and'
                       ' pixel_size_units({1})'
                       .format(parms['pixel_size'],
                               parms['pixel_size_units']))
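# --- Assumption, not the project's code ---------------------------------------
# The validation above leans heavily on a test_for_parameter() helper; a
# minimal equivalent would treat a parameter as present only when the key
# exists and its value is neither None nor an empty string.
def test_for_parameter_sketch(parms, key):
    return key in parms and parms[key] is not None and parms[key] != ''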
def process_test_order(args, request_file, products_file, env_vars):
    """Process the test order file
    """

    logger = logging.getLogger(__name__)

    template_file = 'template.json'
    template_dict = None

    order_id = args.request

    if args.pre:
        order_id = ''.join([order_id, '-PRE'])
    if args.post:
        order_id = ''.join([order_id, '-POST'])

    have_error = False
    status = True
    error_msg = ''

    products = list()
    if not args.plot:
        with open(products_file, 'r') as scenes_fd:
            while (1):
                product = scenes_fd.readline().strip()
                if not product:
                    break
                products.append(product)
    else:
        products = ['plot']

    logger.info('Processing Products [{0}]'.format(', '.join(products)))

    with open(template_file, 'r') as template_fd:
        template_contents = template_fd.read()
        if not template_contents:
            raise Exception('Template file [{0}] is empty'
                            .format(template_file))

        template_dict = json.loads(template_contents)
        if template_dict is None:
            logger.error('Loading template.json')

    for product_id in products:
        logger.info('Processing Product [{0}]'.format(product_id))

        tmp_order = 'test-{0}-{1}'.format(order_id, product_id)

        with open(request_file, 'r') as request_fd:
            request_contents = request_fd.read()
            if not request_contents:
                raise Exception('Order file [{0}] is empty'
                                .format(request_file))

            logger.info('Processing Request File [{0}]'.format(request_file))

            request_dict = json.loads(request_contents)
            if request_dict is None:
                logger.error('Loading [{0}]'.format(request_file))

            # Merge the requested options with the template options, to
            # create a new dict with the requested options overriding the
            # template.
            new_dict = template_dict.copy()
            new_dict.update(request_dict)
            new_dict['options'] = template_dict['options'].copy()
            new_dict['options'].update(request_dict['options'])

            # Turn it into a string for follow-on processing
            order_contents = json.dumps(new_dict, indent=4, sort_keys=True)

            sensor_code = get_satellite_sensor_code(product_id)

            with open(tmp_order, 'w') as tmp_fd:

                logger.info('Creating [{0}]'.format(tmp_order))

                tmp_line = order_contents

                # Update the order for the developer
                download_url = 'null'  # for plots
                if not sensor.is_modis(product_id) and not args.plot:
                    product_path = ('{0}/{1}/{2}{3}'
                                    .format(env_vars['dev_data_dir']['value'],
                                            sensor_code, product_id,
                                            '.tar.gz'))

                    logger.info('Using Product Path [{0}]'
                                .format(product_path))
                    if not os.path.isfile(product_path):
                        error_msg = ('Missing product data [{0}]'
                                     .format(product_path))
                        have_error = True
                        break

                    download_url = 'file://{0}'.format(product_path)

                elif not args.plot:
                    if sensor.is_terra(product_id):
                        base_source_path = '/MOLT'
                    else:
                        base_source_path = '/MOLA'

                    parts = product_id.split('.')
                    short_name = parts[0]
                    version = parts[3]
                    date_YYYYDDD = parts[1][1:]
                    date_acquired = datetime.datetime.strptime(date_YYYYDDD,
                                                               '%Y%j').date()

                    xxx = ('{0}.{1}.{2}'
                           .format(str(date_acquired.year).zfill(4),
                                   str(date_acquired.month).zfill(2),
                                   str(date_acquired.day).zfill(2)))

                    product_path = ('{0}/{1}.{2}/{3}'
                                    .format(base_source_path, short_name,
                                            version, xxx))

                    if sensor.is_modis(product_id):
                        download_url = ('http://{0}/{1}/{2}.hdf'
                                        .format(DAAC_HOSTNAME,
                                                product_path,
                                                product_id))

                sensor_name = 'plot'
                if not args.plot:
                    sensor_name = sensor.info(product_id).sensor_name
                    logger.info('Processing Sensor [{0}]'.format(sensor_name))
                else:
                    logger.info('Processing Plot Request')

                tmp_line = tmp_line.replace('\n', '')
                tmp_line = tmp_line.replace('ORDER_ID', order_id)
                tmp_line = tmp_line.replace('SCENE_ID', product_id)

                if sensor_name in ['tm', 'etm', 'olitirs']:
                    tmp_line = tmp_line.replace('PRODUCT_TYPE', 'landsat')
                elif sensor_name in ['terra', 'aqua']:
                    tmp_line = tmp_line.replace('PRODUCT_TYPE', 'modis')
                else:
                    tmp_line = tmp_line.replace('PRODUCT_TYPE', 'plot')

                tmp_line = tmp_line.replace('DOWNLOAD_URL', download_url)

                tmp_fd.write(tmp_line)

                # Validate again, since we modified it
                parms = json.loads(tmp_line)
                print(json.dumps(parms, indent=4, sort_keys=True))

        if have_error:
            logger.error(error_msg)
            return False

        cmd = ('cd ..; cat test-orders/{0} | ./ondemand_mapper.py --developer'
               .format(tmp_order))

        output = ''
        try:
            logger.info('Processing [{0}]'.format(cmd))
            output = utilities.execute_cmd(cmd)
            if len(output) > 0:
                print(output)
        except Exception:
            logger.exception('Processing failed')
            status = False

        os.unlink(tmp_order)
def test_non_product_sensor_info(self):
    with self.assertRaises(sensor.ProductNotImplemented) as context:
        sensor.info(self.non_product_id)
    self.assertTrue('is not a supported product' in str(context.exception))
def work(cfg, params, developer_sleep_mode=False):
    """Take the environment configuration and order parameters, and initiate
    order processing.

    Note: Much of this code was taken from the ondemand_mapper.py script in
    espa-processing.

    Args:
        cfg (dict): Configuration params given by config.config() and by the
                    worker environment
        params (dict): JSON response from the API for a single granule or
                       scene

    Returns:
        bool: True if the products were generated, packaged, and distributed
        successfully; False if an error was handled and reported to the API
    """
    # This will be the Mesos node hostname
    processing_location = socket.gethostname()

    # Use the base_logger initially; if an exception occurs before the
    # processing logger is configured, the base_logger will log it
    logger = base_logger

    if not parameters.test_for_parameter(params, 'options'):
        raise ValueError('Error missing JSON [options] record')

    start_time = datetime.datetime.now()

    # Initialize so that we don't sleep
    dont_sleep = True

    # Note that the API response "scene" value is what we use for product_id
    try:
        (order_id, product_id, product_type, options) = \
            (params['orderid'], params['scene'], params['product_type'],
             params['options'])

        if product_id != 'plot':
            # Developer mode is always false unless you are a developer,
            # so sleeping will always occur for non-plotting requests
            # Override with the developer mode
            dont_sleep = developer_sleep_mode

        # Fix the orderid in case it contains any single quotes
        # The processors can not handle single quotes in the email
        # portion due to usage in command lines.
        params['orderid'] = order_id.replace("'", '')

        # product_id is not part of the API response - we add it here
        if not parameters.test_for_parameter(params, 'product_id'):
            params['product_id'] = product_id

        # Figure out if debug level logging was requested
        debug = False
        if parameters.test_for_parameter(options, 'debug'):
            debug = options['debug']

        # Configure and get the logger for this order request
        EspaLogging.configure(settings.PROCESSING_LOGGER,
                              order=order_id,
                              product=product_id,
                              debug=debug)

        # Replace the base_logger with the processing_logger
        logger = EspaLogging.get_logger(settings.PROCESSING_LOGGER)

        # Add our stdout/stderr log streams
        logger.addHandler(get_stdout_handler())
        logger.addHandler(get_stderr_handler())

        logger.info('Processing {}:{}'.format(order_id, product_id))
        logger.info('Attempting connection to {0}'.format(cfg['espa_api']))

        # Will throw an exception on init if unable to get a 200 response
        server = APIServer(cfg['espa_api'])

        # Will throw an exception if it does not receive a 200 response
        status = server.update_status(product_id, order_id,
                                      processing_location, 'processing')

        if product_id != 'plot':
            # Make sure we can process the sensor
            tmp_info = sensor.info(product_id)
            del tmp_info

            # Make sure we have a valid output format
            if not parameters.test_for_parameter(options, 'output_format'):
                logger.warning('[output_format] parameter missing'
                               ' defaulting to envi')
                options['output_format'] = 'envi'

            if (options['output_format']
                    not in parameters.VALID_OUTPUT_FORMATS):
                raise ValueError('Invalid Output format {}'
                                 .format(options['output_format']))

        # ----------------------------------------------------------------
        # NOTE: The first thing the product processor does during
        #       initialization is validate the input parameters.
        # ----------------------------------------------------------------

        destination_product_file = 'ERROR'
        destination_cksum_file = 'ERROR'
        pp = None
        try:
            # All processors are implemented in the processor module
            pp = processor.get_instance(cfg, params)
            (destination_product_file, destination_cksum_file) = pp.process()

        finally:
            # Free disk space to be nice to the whole system.
            if pp is not None:
                pp.remove_product_directory()

        # Sleep the number of seconds for minimum request duration
        sleep(utilities.get_sleep_duration(cfg, start_time, dont_sleep))

        log_items = archive_log_files(order_id, product_id)
        for item in log_items:
            utilities.change_ownership(item, cfg.get('espa_user'),
                                       cfg.get('espa_group'))

        # Everything was successful so mark the scene complete
        server.mark_scene_complete(product_id, order_id,
                                   processing_location,
                                   destination_product_file,
                                   destination_cksum_file,
                                   '')  # sets log_file_contents to ''
        return True

    except Exception as e:
        # First log the exception
        logger.exception('Exception encountered in processing.main.work:'
                         '\nexception: {}'.format(e))

        try:
            # Sleep the number of seconds for minimum request duration
            logger.debug('Attempting to archive log files for order_id: {}'
                         '\nproduct_id: {}'.format(order_id, product_id))
            sleep(utilities.get_sleep_duration(cfg, start_time, dont_sleep))
            log_items = archive_log_files(order_id, product_id)
            for item in log_items:
                utilities.change_ownership(item, cfg.get('espa_user'),
                                           cfg.get('espa_group'))
        except Exception as e2:
            logger.exception('Problem archiving log files. error: {}'
                             .format(e2))

        try:
            logger.debug('Attempting to set product error, order_id: {}'
                         '\nproduct_id: {}'.format(order_id, product_id))
            logged_contents = \
                EspaLogging.read_logger_file(settings.PROCESSING_LOGGER)
            error_log = ('Processing Log: {}\n\nException: {}'
                         .format(logged_contents, e))
            server.set_scene_error(product_id, order_id,
                                   processing_location, error_log)
        except Exception as e3:
            logger.exception('Unable to reach ESPA API and set product error'
                             ' for order_id: {}\nproduct_id: {}\nerror: {}'
                             .format(order_id, product_id, e3))
            raise e3

        return False
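# --- Assumption, not the project's code ---------------------------------------
# get_stdout_handler()/get_stderr_handler() used in work() are assumed to be
# thin wrappers around logging.StreamHandler so the Mesos task's stdout/stderr
# capture the processing log; the level choices below are illustrative only.
import logging
import sys


def get_stdout_handler_sketch():
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.INFO)
    return handler


def get_stderr_handler_sketch():
    handler = logging.StreamHandler(sys.stderr)
    handler.setLevel(logging.ERROR)
    return handler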