def build_task(self, context, task):
    ''' Build up a set of inputs for a single context '''
    LOG.debug("Running build_task()")
    LOG.debug("context: {}".format(context))

    # The daily CSRB computation supplies the per-day means aggregated here.
    daily_comp = hirs_csrb_daily.HIRS_CSRB_DAILY()

    # Interval spanning the entire month containing the target granule.
    month_start = context['granule']
    days_in_month = monthrange(month_start.year, month_start.month)[1]
    interval = TimeInterval(month_start,
                            month_start + timedelta(days_in_month),
                            False, True)

    daily_contexts = daily_comp.find_contexts(
        interval,
        context['satellite'],
        context['hirs2nc_delivery_id'],
        context['hirs_avhrr_delivery_id'],
        context['hirs_csrb_daily_delivery_id'])

    if not daily_contexts:
        raise WorkflowNotReady('No HIRS_CSRB_DAILY inputs available for {}'.format(context['granule']))

    catalog = StoredProductCatalog()

    # Register every daily-means product that actually exists in the catalog.
    for idx, daily_context in enumerate(daily_contexts):
        daily_prod = daily_comp.dataset('means').product(daily_context)
        if catalog.exists(daily_prod):
            task.input('CSRB_DAILY-{}'.format(idx), daily_prod, True)
def symlink(c, output, contexts):
    '''
    Create symlinks under the results directory for every stored product of
    dataset `output` (on computation `c`) across `contexts`.

    Missing destination directories are created on demand; existing links or
    files are left untouched.
    '''
    SPC = StoredProductCatalog()
    # Hoist loop-invariant lookups: the dataset handle and the config dict
    # do not change between contexts.
    dataset = c.dataset(output)
    cfg = config.get()

    for context in contexts:
        # Compute the product once per context (the original recomputed it
        # for the existence check and again for the file lookup).
        product = dataset.product(context)
        if not SPC.exists(product):
            continue

        s_path = os.path.join(cfg['product_dir'], SPC.file(product).path)
        d_path = os.path.join(cfg['results_dir'],
                              c.context_path(context, output))
        link_path = os.path.join(d_path, os.path.basename(s_path))

        if not os.path.exists(d_path):
            os.makedirs(d_path)
        if not os.path.isfile(link_path):
            os.symlink(s_path, link_path)
def build_task(self, context, task):
    ''' Build up a set of inputs for a single context '''
    global delta_catalog
    LOG.debug("Running build_task()")
    LOG.debug("context: {}".format(context))

    # Initialize the hirs2nc and hirs_avhrr modules with the data locations
    hirs2nc.delta_catalog = delta_catalog
    hirs_avhrr.delta_catalog = delta_catalog

    # Instantiate the hirs and hirs_avhrr computations
    hirs2nc_comp = hirs2nc.HIRS2NC()
    hirs_avhrr_comp = hirs_avhrr.HIRS_AVHRR()

    SPC = StoredProductCatalog()

    # The target day, closed at the right edge by backing off one second.
    day = TimeInterval(
        context['granule'],
        (context['granule'] + timedelta(days=1) - timedelta(seconds=1)))

    hirs2nc_contexts = hirs2nc_comp.find_contexts(
        day, context['satellite'], context['hirs2nc_delivery_id'])
    if len(hirs2nc_contexts) == 0:
        raise WorkflowNotReady('NO HIRS Data For {}'.format(
            context['granule']))

    # Input Counter.
    ic = 0

    for hirs2nc_context in hirs2nc_contexts:
        # Making Input contexts
        hirs_avhrr_context = hirs2nc_context.copy()
        hirs_avhrr_context['hirs_avhrr_delivery_id'] = context[
            'hirs_avhrr_delivery_id']

        LOG.debug("HIRS context: {}".format(hirs2nc_context))
        LOG.debug("HIRS_AVHRR context: {}".format(hirs_avhrr_context))

        # Confirming we have HIRS1B and COLLO products...
        hirs2nc_prod = hirs2nc_comp.dataset('out').product(hirs2nc_context)
        hirs_avhrr_prod = hirs_avhrr_comp.dataset('out').product(
            hirs_avhrr_context)

        # If HIRS1B and COLLO products exist, add them and the Patmos-X
        # file for this context to the list of input files to be downloaded to
        # the workspace...
        if SPC.exists(hirs2nc_prod) and SPC.exists(hirs_avhrr_prod):
            # Its safe to require all three inputs
            task.input('HIR1B-{}'.format(ic), hirs2nc_prod)
            task.input('COLLO-{}'.format(ic), hirs_avhrr_prod)
            task.input(
                'PTMSX-{}'.format(ic),
                delta_catalog.file('avhrr', hirs2nc_context['satellite'],
                                   'PTMSX', hirs2nc_context['granule']))
            ic += 1

    LOG.debug(
        "There are {} valid HIR1B/COLLO/PTMSX contexts in ({} -> {})".
        format(ic, day.left, day.right))

    if ic == 0:
        # Was LOG.warn(); .warning() is the non-deprecated spelling.
        LOG.warning(
            "There are no valid HIR1B/COLLO/PTMSX contexts in ({} -> {}), aborting..."
            .format(day.left, day.right))
        return

    interval = TimeInterval(context['granule'],
                            context['granule'] + timedelta(days=1))

    num_cfsr_files = 0

    # Search for the old style pgbhnl.gdas.*.grb2 files from the PEATE
    if num_cfsr_files == 0:
        LOG.debug(
            "Trying to retrieve CFSR_PGRBHANL product (pgbhnl.gdas.*.grb2) CFSR files from DAWG..."
        )
        try:
            cfsr_files = dawg_catalog.files('', 'CFSR_PGRBHANL', interval)
            num_cfsr_files = len(cfsr_files)
            # NOTE(review): this logs the file list only when NONE were
            # found (always an empty list) — the condition looks inverted;
            # behavior preserved pending confirmation.
            if num_cfsr_files == 0:
                LOG.debug("\tpgbhnl.gdas.*.grb2 CFSR files from DAWG : {}".
                          format(cfsr_files))
        except Exception as err:
            # `except Exception, err` is Python-2-only syntax; the `as` form
            # is valid on both Python 2.6+ and 3.
            LOG.error("{}.".format(err))
            LOG.warning(
                "Retrieval of CFSR_PGRBHANL product (pgbhnl.gdas.*.grb2) CFSR files from DAWG failed"
            )
def build_task(self, context, task):
    ''' Build up a set of inputs for a single context '''
    global delta_catalog

    LOG.debug("Running build_task()")

    # Initialize the hirs_tpw_orbital module with the data locations
    hirs_tpw_orbital.delta_catalog = delta_catalog

    # Instantiate the hirs_tpw_orbital computation
    hirs_tpw_orbital_comp = hirs_tpw_orbital.HIRS_TPW_ORBITAL()

    SPC = StoredProductCatalog()

    # TPW Orbital Input
    granule = context['granule']
    wedge = timedelta(seconds=1)
    hour = timedelta(hours=1)
    day = timedelta(days=1)

    # Add an hour to each end of the day to make sure the day is completely
    # covered
    interval = TimeInterval(context['granule'] - 1 * hour,
                            (context['granule'] + day + 1 * hour))

    hirs_tpw_orbital_contexts = hirs_tpw_orbital_comp.find_contexts(
        interval, context['satellite'], context['hirs2nc_delivery_id'],
        context['hirs_avhrr_delivery_id'],
        context['hirs_csrb_daily_delivery_id'],
        context['hirs_csrb_monthly_delivery_id'],
        context['hirs_ctp_orbital_delivery_id'],
        context['hirs_ctp_daily_delivery_id'],
        context['hirs_ctp_monthly_delivery_id'],
        context['hirs_tpw_orbital_delivery_id'])
    if len(hirs_tpw_orbital_contexts) == 0:
        raise WorkflowNotReady(
            'No HIRS_TPW_ORBITAL inputs available for {}'.format(
                context['granule']))

    LOG.debug("There are {} TPW Orbital contexts for {}.".format(
        len(hirs_tpw_orbital_contexts), interval))
    # FIX: the original iterated with `for context in ...`, clobbering the
    # `context` parameter; a distinct loop name avoids the shadowing.
    for orbital_context in hirs_tpw_orbital_contexts:
        LOG.debug(orbital_context)

    # Knock off all but the last of the "previous" day's contexts
    this_day = granule.day
    previous_day = (granule - day + wedge).day
    next_day = (granule + day + wedge).day
    LOG.debug("previous_day: {}".format(previous_day))
    LOG.debug("this_day: {}".format(this_day))
    LOG.debug("next_day: {}".format(next_day))

    start_idx = 0
    end_idx = -1
    num_contexts = len(hirs_tpw_orbital_contexts)
    indices = np.arange(num_contexts)
    # Negative indices [-1, -2, ..., -num_contexts] for the reverse scan.
    reverse_indices = np.flip(np.arange(num_contexts) - num_contexts, axis=0)

    # have this set to zero unless we need to set it otherwise (say for
    # Metop-B)
    interval_pad = 0

    # Pruning all but the last of the previous day's contexts
    for idx in indices:
        if hirs_tpw_orbital_contexts[
                idx + interval_pad]['granule'].day == this_day:
            start_idx = idx
            LOG.debug("Breaking: start_idx = {}, granule = {}".format(
                start_idx, hirs_tpw_orbital_contexts[start_idx]['granule']))
            break

    # Pruning all but the first of the next day's contexts
    for idx in reverse_indices:
        if hirs_tpw_orbital_contexts[
                idx - interval_pad]['granule'].day == this_day:
            end_idx = idx
            LOG.debug("Breaking: end_idx = {}, granule = {}".format(
                end_idx, hirs_tpw_orbital_contexts[end_idx]['granule']))
            break

    # NOTE(review): if the very last context falls on this_day, end_idx is -1
    # and end_idx + 1 == 0 yields an EMPTY slice — confirm the trailing pad
    # hour always supplies a next-day context. Behavior preserved as-is.
    hirs_tpw_orbital_contexts = hirs_tpw_orbital_contexts[
        start_idx:end_idx + 1]
    #hirs_tpw_orbital_contexts = hirs_tpw_orbital_contexts[start_idx:end_idx]
    for orbital_context in hirs_tpw_orbital_contexts:
        LOG.debug("{}".format(orbital_context))

    # Register the stored 'shift' products as inputs.
    for idx, orbital_context in enumerate(hirs_tpw_orbital_contexts):
        hirs_tpw_orbital_prod = hirs_tpw_orbital_comp.dataset(
            'shift').product(orbital_context)
        if SPC.exists(hirs_tpw_orbital_prod):
            task.input('TPWO_shift-{}'.format(str(idx).zfill(2)),
                       hirs_tpw_orbital_prod)

    # Register the stored 'noshift' products as inputs.
    for idx, orbital_context in enumerate(hirs_tpw_orbital_contexts):
        hirs_tpw_orbital_prod = hirs_tpw_orbital_comp.dataset(
            'noshift').product(orbital_context)
        if SPC.exists(hirs_tpw_orbital_prod):
            task.input('TPWO_noshift-{}'.format(str(idx).zfill(2)),
                       hirs_tpw_orbital_prod)
# every module should have a LOG object LOG = logging.getLogger(__name__) # Set up the logging console_logFormat = '%(asctime)s : (%(levelname)s):%(filename)s:%(funcName)s:%(lineno)d: %(message)s' dateFormat = '%Y-%m-%d %H:%M:%S' levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] logging.basicConfig(stream=sys.stdout, level=levels[1], format=console_logFormat, datefmt=dateFormat) # General information comp = HIRS_TPW_MONTHLY() SPC = StoredProductCatalog() # Latest Computation versions. hirs_version = 'v20151014' collo_version = 'v20151014' csrb_version = 'v20150915' ctp_version = 'v20150915' tpw_version = 'v20160222' platform_choices = [ 'noaa-06', 'noaa-07', 'noaa-08', 'noaa-09', 'noaa-10', 'noaa-11', 'noaa-12', 'noaa-14', 'noaa-15', 'noaa-16', 'noaa-17', 'noaa-18', 'noaa-19', 'metop-a', 'metop-b' ] platform = 'metop-b'
def build_task(self, context, task):
    ''' Build up a set of inputs for a single context '''
    global delta_catalog

    LOG.debug("Running build_task()")

    # Initialize the hirs2nc and hirs_avhrr modules with the data locations
    hirs2nc.delta_catalog = delta_catalog
    hirs_avhrr.delta_catalog = delta_catalog

    # Instantiate the hirs, hirs_avhrr and hirs_csrb_monthly computations
    hirs2nc_comp = hirs2nc.HIRS2NC()
    hirs_avhrr_comp = hirs_avhrr.HIRS_AVHRR()
    hirs_csrb_monthly_comp = hirs_csrb_monthly.HIRS_CSRB_MONTHLY()

    SPC = StoredProductCatalog()

    # HIRS L1B Input: strip the delivery ids the hirs2nc context doesn't take.
    # (Plain loop — the original abused a list comprehension for side effects.)
    hirs2nc_context = context.copy()
    for k in [
            'hirs_avhrr_delivery_id', 'hirs_csrb_daily_delivery_id',
            'hirs_csrb_monthly_delivery_id', 'hirs_ctp_orbital_delivery_id'
    ]:
        hirs2nc_context.pop(k)
    hirs2nc_prod = hirs2nc_comp.dataset('out').product(hirs2nc_context)
    if SPC.exists(hirs2nc_prod):
        task.input('HIR1B', hirs2nc_prod)
    else:
        raise WorkflowNotReady('No HIRS inputs available for {}'.format(
            hirs2nc_context['granule']))

    # PTMSX Input
    LOG.debug('Getting PTMSX input...')
    sensor = 'avhrr'
    satellite = context['satellite']
    file_type = 'PTMSX'
    granule = context['granule']
    try:
        ptmsx_file = delta_catalog.file(sensor, satellite, file_type,
                                        granule)
        task.input('PTMSX', ptmsx_file)
    except WorkflowNotReady:
        raise WorkflowNotReady(
            'No PTMSX inputs available for {}'.format(granule))

    # Collo Input
    # FIX: the original aliased hirs2nc_context here, so the added delivery id
    # also mutated hirs2nc_context; copy() keeps the two contexts independent.
    hirs_avhrr_context = hirs2nc_context.copy()
    hirs_avhrr_context['hirs_avhrr_delivery_id'] = context[
        'hirs_avhrr_delivery_id']
    hirs_avhrr_prod = hirs_avhrr_comp.dataset('out').product(
        hirs_avhrr_context)
    if SPC.exists(hirs_avhrr_prod):
        task.input('COLLO', hirs_avhrr_prod)
    else:
        raise WorkflowNotReady(
            'No HIRS_AVHRR inputs available for {}'.format(
                hirs_avhrr_context['granule']))

    # CSRB Monthly Input: keyed to the first day of the granule's month.
    hirs_csrb_monthly_context = context.copy()
    hirs_csrb_monthly_context.pop('hirs_ctp_orbital_delivery_id')
    hirs_csrb_monthly_context['granule'] = datetime(
        context['granule'].year, context['granule'].month, 1)
    hirs_csrb_monthly_prod = hirs_csrb_monthly_comp.dataset(
        'zonal_means').product(hirs_csrb_monthly_context)
    if SPC.exists(hirs_csrb_monthly_prod):
        task.input('CSRB', hirs_csrb_monthly_prod)
    else:
        raise WorkflowNotReady(
            'No HIRS_CSRB_MONTHLY inputs available for {}'.format(
                hirs_csrb_monthly_context['granule']))

    # CFSR Input
    LOG.debug('Getting CFSR input...')
    cfsr_file = self.get_cfsr(context['granule'])
    if cfsr_file is not None:
        task.input('CFSR', cfsr_file)
    else:
        raise WorkflowNotReady(
            'No CFSR inputs available for {}'.format(granule))

    LOG.debug("Final task.inputs...")
    for task_key in task.inputs.keys():
        LOG.debug("\t{}: {}".format(task_key, task.inputs[task_key]))
import sipsprod from glutil import ( check_call, dawg_catalog, delivered_software, #support_software, runscript, #prepare_env, #nc_gen, nc_compress, reraise_as, #set_official_product_metadata, FileNotFound) import flo.sw.hirs_ctp_daily as hirs_ctp_daily SPC = StoredProductCatalog() # every module should have a LOG object LOG = logging.getLogger(__name__) class HIRS_CTP_MONTHLY(Computation): parameters = [ 'granule', 'satellite', 'hirs2nc_delivery_id', 'hirs_avhrr_delivery_id', 'hirs_csrb_daily_delivery_id', 'hirs_csrb_monthly_delivery_id', 'hirs_ctp_orbital_delivery_id', 'hirs_ctp_daily_delivery_id', 'hirs_ctp_monthly_delivery_id' ] outputs = ['out']
def build_task(self, context, task):
    ''' Build up a set of inputs for a single context '''
    global delta_catalog

    LOG.debug("Running build_task()")

    # Initialize the hirs2nc module with the data locations
    hirs2nc.delta_catalog = delta_catalog

    # Instantiate the hirs2nc and hirs_ctp_orbital computations
    hirs2nc_comp = hirs2nc.HIRS2NC()
    hirs_ctp_orbital_comp = hirs_ctp_orbital.HIRS_CTP_ORBITAL()

    SPC = StoredProductCatalog()

    #
    # HIRS L1B Input
    #
    hirs2nc_context = {
        'satellite': context['satellite'],
        'granule': context['granule'],
        'hirs2nc_delivery_id': context['hirs2nc_delivery_id']
    }
    hirs2nc_prod = hirs2nc_comp.dataset('out').product(hirs2nc_context)
    if SPC.exists(hirs2nc_prod):
        task.input('HIR1B', hirs2nc_prod)
    else:
        raise WorkflowNotReady('No HIRS inputs available for {}'.format(
            hirs2nc_context['granule']))

    #
    # CTP Orbital Input
    #
    # Strip the delivery ids the CTP orbital context doesn't take.
    # (Plain loop — the original abused a list comprehension for side effects.)
    hirs_ctp_orbital_context = context.copy()
    for k in [
            'hirs_ctp_daily_delivery_id', 'hirs_ctp_monthly_delivery_id',
            'hirs_tpw_orbital_delivery_id'
    ]:
        hirs_ctp_orbital_context.pop(k)
    hirs_ctp_orbital_prod = hirs_ctp_orbital_comp.dataset('out').product(
        hirs_ctp_orbital_context)
    if SPC.exists(hirs_ctp_orbital_prod):
        task.input('CTPO', hirs_ctp_orbital_prod)
    else:
        raise WorkflowNotReady(
            'No HIRS CTP Orbital inputs available for {}'.format(
                hirs_ctp_orbital_context['granule']))

    #
    # CFSR Input
    #
    # CFSR files are produced on a 6-hourly cadence; snap to the nearest one.
    cfsr_granule = round_datetime(context['granule'], timedelta(hours=6))
    cfsr_file = self.get_cfsr(cfsr_granule)
    if cfsr_file is not None:
        task.input('CFSR', cfsr_file)
    else:
        raise WorkflowNotReady(
            'No CFSR inputs available for {}'.format(cfsr_granule))

    LOG.debug("Final task.inputs...")  # GPC
    for task_key in task.inputs.keys():  # GPC
        LOG.debug("\t{}: {}".format(task_key, task.inputs[task_key]))  # GPC

    LOG.debug("Exiting build_task()...")  # GPC