Example #1
    def run_task(self, inputs, context):
        '''
        Run the TPW Daily binary on a single context
        '''

        LOG.debug("Running run_task()...")

        for key in context.keys():
            LOG.debug("run_task() context['{}'] = {}".format(
                key, context[key]))

        rc = 0

        # Link the inputs into the working directory
        inputs = symlink_inputs_to_working_dir(inputs)

        # Create the TPW daily files for the current day.
        rc, tpw_daily_noshift_file = self.create_tpw_daily(inputs,
                                                           context,
                                                           shifted=False)
        if rc != 0:
            LOG.warning(
                'create_tpw_daily(shifted=False) failed, rc={}'.format(rc))
            return {}

        rc, tpw_daily_shift_file = self.create_tpw_daily(inputs,
                                                         context,
                                                         shifted=True)
        if rc != 0:
            LOG.warning(
                'create_tpw_daily(shifted=True) failed, rc={}'.format(rc))
            return {}

        return {
            'noshift': nc_compress(tpw_daily_noshift_file),
            'shift': nc_compress(tpw_daily_shift_file)
        }
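
The helper functions used in these examples (symlink_inputs_to_working_dir, nc_compress, and the create_* methods) come from the surrounding package and are not shown. Purely as an illustration of the pattern, here is a minimal sketch of what the symlinking helper could look like, assuming inputs maps product keys to absolute file paths; it is not the actual implementation.

    import os

    def symlink_inputs_to_working_dir(inputs):
        # Hypothetical sketch, not the real helper: link each input file into
        # the current working directory and return a dict with the same keys
        # mapped to the local basenames, so the binary sees local files.
        linked = {}
        for key, path in inputs.items():
            local_name = os.path.basename(path)
            if not os.path.lexists(local_name):
                os.symlink(path, local_name)
            linked[key] = local_name
        return linked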
Example #2
    def run_task(self, inputs, context):
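        '''
        Create the CFSR daily statistics and means for a single context
        '''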

        LOG.debug("Running run_task()...")

        for key in context.keys():
            LOG.debug("run_task() context['{}'] = {}".format(
                key, context[key]))

        rc = 0

        # Extract binary arrays from the CFSR reanalysis GRIB2 files on a
        # global equal-angle grid at 0.5 degree resolution.
        rc, cfsr_files = self.extract_bin_from_cfsr(inputs, context)
        if rc != 0:
            LOG.warning('extract_bin_from_cfsr() failed, rc={}'.format(rc))
            return {}

        # Create the CFSR statistics for the current day.
        rc, output_stats_file = self.create_cfsr_statistics(
            inputs, context, cfsr_files)
        if rc != 0:
            LOG.warning('create_cfsr_statistics() failed, rc={}'.format(rc))
            return {}
        LOG.debug('create_cfsr_statistics() generated {}...'.format(
            output_stats_file))

        # Create the CFSR means for the current day
        rc, output_means_file = self.create_cfsr_means(inputs, context,
                                                       output_stats_file)
        if rc != 0:
            return {}
        LOG.debug(
            'create_cfsr_means() generated {}...'.format(output_means_file))

        LOG.debug('python return value = {}'.format(rc))

        granule = context['granule']
        extra_attrs = {
            'begin_time': granule,
            'end_time': granule + timedelta(days=1) - timedelta(seconds=1)
        }

        LOG.debug('extra_attrs = {}'.format(extra_attrs))

        return {
            'stats': {
                'file': nc_compress(output_stats_file),
                'extra_attrs': extra_attrs
            },
            'means': {
                'file': nc_compress(output_means_file),
                'extra_attrs': extra_attrs
            }
        }
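
The begin_time/end_time attributes computed above cover exactly one day starting at the granule time. The arithmetic in isolation, as a small sketch (daily_interval is an illustrative name, not part of the original code):

    from datetime import datetime, timedelta

    def daily_interval(granule):
        # Mirrors the extra_attrs computation above: the interval runs from
        # the granule time to one second before the start of the next day.
        begin_time = granule
        end_time = granule + timedelta(days=1) - timedelta(seconds=1)
        return begin_time, end_time

    # For example, daily_interval(datetime(2015, 6, 1)) returns
    # (datetime(2015, 6, 1, 0, 0), datetime(2015, 6, 1, 23, 59, 59)).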
Example #3
    def run_task(self, inputs, context):
        '''
        Run the CTP Orbital binary on a single context
        '''

        LOG.debug("Running run_task()...")

        for key in context.keys():
            LOG.debug("run_task() context['{}'] = {}".format(
                key, context[key]))

        rc = 0

        # Extract a binary array from a CFSR reanalysis GRIB2 file on a
        # global equal-angle grid at 0.5 degree resolution.
        rc, cfsr_file = self.extract_bin_from_cfsr(inputs, context)
        if rc != 0:
            LOG.warning('extract_bin_from_cfsr() failed, rc={}'.format(rc))
            return {}

        # Link the inputs into the working directory
        inputs.pop('CFSR')
        inputs = symlink_inputs_to_working_dir(inputs)
        inputs['CFSR'] = cfsr_file

        # Create the CTP Orbital file for the current granule.
        rc, ctp_orbital_file = self.create_ctp_orbital(inputs, context)
        if rc != 0:
            LOG.warning('create_ctp_orbital() failed, rc={}'.format(rc))
            return {}

        return {'out': nc_compress(ctp_orbital_file)}
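
nc_compress is also supplied by the surrounding package and is only called here. One plausible sketch, assuming the standard nccopy utility is available, would rewrite the file with deflate compression; the real helper may use a different tool or options.

    import os
    from subprocess import check_call

    def nc_compress(nc_file, level=4):
        # Hypothetical sketch: deflate the NetCDF4 file with nccopy and return
        # its path, so it can be used inline as in the return statements above.
        tmp_file = nc_file + '.tmp'
        check_call(['nccopy', '-d', str(level), nc_file, tmp_file])
        os.rename(tmp_file, nc_file)
        return nc_file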
Example #4
    def run_task(self, inputs, context):
        '''
        Run the TPW Orbital binary on a single context
        '''

        LOG.debug("Running run_task()...")

        for key in context.keys():
            LOG.debug("run_task() context['{}'] = {}".format(
                key, context[key]))

        rc = 0

        # Extract a binary array from a CFSR reanalysis GRIB2 file on a
        # global equal-angle grid at 0.5 degree resolution.
        rc, cfsr_file = self.extract_bin_from_cfsr(inputs, context)
        if rc != 0:
            LOG.warning('extract_bin_from_cfsr() failed, rc={}'.format(rc))
            return {}

        # Link the inputs into the working directory
        inputs.pop('CFSR')
        inputs = symlink_inputs_to_working_dir(inputs)
        inputs['CFSR'] = cfsr_file

        # Link the shifted and nonshifted coefficient files into the current directory
        self.link_coeffs(context)

        # Create the TPW Orbital files for the current granule.
        rc, tpw_orbital_noshift_file = self.create_tpw_orbital(inputs,
                                                               context,
                                                               shifted=False)
        if rc != 0:
            LOG.warning(
                'create_tpw_orbital(shifted=False) failed, rc={}'.format(rc))
            return {}

        rc, tpw_orbital_shift_file = self.create_tpw_orbital(inputs,
                                                             context,
                                                             shifted=True)
        if rc != 0:
            LOG.warning(
                'create_tpw_orbital(shifted=True) failed, rc={}'.format(rc))
            return {}

        interval = self.hirs_to_time_interval(inputs['HIR1B'])
        extra_attrs = {'begin_time': interval.left, 'end_time': interval.right}

        return {
            'shift': {
                'file': nc_compress(tpw_orbital_shift_file),
                'extra_attrs': extra_attrs
            },
            'noshift': {
                'file': nc_compress(tpw_orbital_noshift_file),
                'extra_attrs': extra_attrs
            }
        }
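
hirs_to_time_interval() returns an object whose left and right attributes become the begin and end times. A hypothetical stand-in for that interval type, useful only for seeing how the attributes are consumed (the real class comes from the workflow framework):

    from collections import namedtuple

    # Hypothetical stand-in; the framework supplies the real interval type.
    TimeInterval = namedtuple('TimeInterval', ['left', 'right'])

    # interval = TimeInterval(left=granule_start, right=granule_end)
    # extra_attrs = {'begin_time': interval.left, 'end_time': interval.right}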
Example #5
    def run_task(self, inputs, context):
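        '''
        Create the monthly statistics and zonal means for a single context
        '''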

        for key in context.keys():
            LOG.debug("run_task() context['{}'] = {}".format(key, context[key]))

        rc = 0

        # Create the CFSR statistics for the current month.
        rc, output_stats_file = self.create_monthly_statistics(inputs, context)
        if rc != 0:
            return {}
        LOG.info('create_monthly_statistics() generated {}...'.format(
            output_stats_file))

        # Create the CFSR zonal means for the current month.
        rc, output_zonal_means_file = self.create_monthly_zonal_means(
            output_stats_file, context)
        if rc != 0:
            return {}
        LOG.info('create_monthly_zonal_means() generated {}...'.format(
            output_zonal_means_file))

        return {
            'stats': nc_compress(output_stats_file),
            'zonal_means': nc_compress(output_zonal_means_file)
        }
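
The examples check nonzero return codes in slightly different ways. A small helper, not part of the original code, that would keep the warning and the check in one place (LOG is the module logger used throughout these examples):

    def step_failed(rc, step_name):
        # Hypothetical convenience: warn about a failed processing step so
        # run_task() can uniformly bail out with an empty dict.
        if rc != 0:
            LOG.warning('{} failed with rc={}'.format(step_name, rc))
            return True
        return False

    # Usage inside run_task():
    #     rc, output_stats_file = self.create_monthly_statistics(inputs, context)
    #     if step_failed(rc, 'create_monthly_statistics'):
    #         return {}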
Example #6
    def run_task(self, inputs, context):
        '''
        Run the CTP Daily binary on a single context
        '''

        LOG.debug("Running run_task()...")

        for key in context.keys():
            LOG.debug("run_task() context['{}'] = {}".format(key, context[key]))

        rc = 0

        # Link the inputs into the working directory
        inputs = symlink_inputs_to_working_dir(inputs)

        # Create the CTP daily file for the current day.
        rc, ctp_daily_file = self.create_ctp_daily(inputs, context)
        if rc != 0:
            LOG.warning('create_ctp_daily() failed, rc={}'.format(rc))
            return {}

        return {'out': nc_compress(ctp_daily_file)}
Example #7
    def run_task(self, inputs, context):
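        '''
        Run the hirs2nc binary on a single context, converting a flat HIRS
        HIR1B file to NetCDF4
        '''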

        LOG.debug("Running run_task()...")

        for key in context.keys():
            LOG.debug("run_task() context['{}'] = {}".format(
                key, context[key]))

        granule = context['granule']
        satellite = context['satellite']
        hirs2nc_delivery_id = context['hirs2nc_delivery_id']
        hirs_version = self.satellite_version(satellite, granule)

        # Get the location of the binary package
        delivery = delivered_software.lookup('hirs2nc',
                                             delivery_id=hirs2nc_delivery_id)
        dist_root = pjoin(delivery.path, 'dist')
        envroot = pjoin(dist_root, 'env')

        # Get the required environment variables
        env = prepare_env([delivery])
        LOG.debug(env)

        # What is the path of the python interpreter?
        py_interp = "{}/bin/python".format(envroot)
        LOG.debug("py_interp = '{}'".format(py_interp))

        # Path of the hirs2nc binary
        hirs2nc_bin = pjoin(envroot, 'bin', 'hirs2nc')

        # Where are we running the package?
        work_dir = abspath(curdir)
        LOG.debug("working dir = {}".format(work_dir))

        input_file = inputs['HIR1B']
        output_file = pjoin(work_dir,
                            basename('{}.nc'.format(inputs['HIR1B'])))
        LOG.debug("Input file = {}".format(input_file))
        LOG.debug("Output file = {}".format(output_file))

        # What are our inputs?
        for key in inputs.keys():
            inputs_dir = dirname(inputs[key])
            LOG.debug("inputs['{}'] = {}".format(key, inputs[key]))
        LOG.debug("Inputs dir = {}".format(inputs_dir))

        # Convert the flat HIRS file to NetCDF4
        cmd = '{} {} {} {} {}'.format(py_interp, hirs2nc_bin, hirs_version,
                                      input_file, output_file)
        try:
            LOG.debug("cmd = \\\n\t{}".format(cmd.replace(' ', ' \\\n\t')))
            rc_hirs2nc = 0
            runscript(cmd, requirements=[], env=env)
        except CalledProcessError as err:
            rc_hirs2nc = err.returncode
            LOG.error("hirs2nc binary {} returned a value of {}".format(
                hirs2nc_bin, rc_hirs2nc))
            return {}

        # The staging routine assumes that the output file is located in the work directory
        # "tmp******", and that the output path is to be prepended, so return the basename.
        output = basename('{}.nc'.format(inputs['HIR1B']))

        data_interval = context['data_interval']
        extra_attrs = {
            'begin_time': data_interval.left,
            'end_time': data_interval.right
        }

        return {
            'out': {
                'file': nc_compress(output),
                'extra_attrs': extra_attrs
            }
        }
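
runscript, prepare_env, and delivered_software come from the delivery/workflow framework and are not defined in these examples. As a rough approximation of the command execution step only, using nothing but the standard library (the name run_command is illustrative):

    import subprocess

    def run_command(cmd, env=None):
        # Hypothetical approximation of the runscript() call in Example #7:
        # run the command in a shell with the delivered environment and let a
        # nonzero exit status raise CalledProcessError, which the caller traps.
        subprocess.check_call(cmd, shell=True, env=env)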