Ejemplo n.º 1
0
def open_atcf_db(dbname=ATCF_DECKS_DB):
    '''Open the ATCF Decks Database, creating it if it doesn't exist.

    Args:
        dbname (str) : Path to the sqlite3 database file.
                       Defaults to the module-level ATCF_DECKS_DB constant.

    Returns:
        (sqlite3.Cursor, sqlite3.Connection) : Cursor and open connection
                                               to the decks database.
    '''
    # Make sure the directory exists.  sqlite3.connect will create the
    # db file itself if it doesn't exist, but fails if the containing
    # directory is missing.
    from os.path import dirname as pathdirname
    from geoips2.filenames.base_paths import make_dirs
    make_dirs(pathdirname(dbname))

    import sqlite3
    conn = sqlite3.connect(dbname)
    conn_cursor = conn.cursor()
    # IF NOT EXISTS makes table creation idempotent, so no need to catch
    # sqlite3.OperationalError when the table is already present.
    conn_cursor.execute('''CREATE TABLE IF NOT EXISTS atcf_deck_stormfiles
        (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
            filename text,
            last_updated timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL,
            storm_num integer,
            storm_basin text,
            start_datetime timestamp,
            start_lat real,
            start_lon real,
            start_vmax real,
            start_name real,
            vmax real,
            end_datetime timestamp)''')
    # Add in at some point?
    # storm_start_datetime timestamp,
    return conn_cursor, conn
Ejemplo n.º 2
0
def write(data_dict, outfile=None, dimensions=None, metadata=None):
    '''
    Write registered data to netCDF file.
    +------------------+-----------+---------------------------------------------------+
    | Parameters:      | Type:     | Description:                                      |
    +==================+===========+===================================================+
    | data_dict:       | *dict*    | Dictionary holding data to write.                 |
    |                  |           | Each key in data_dict is the name of the variable |
    |                  |           | to store, and holds another dictionary that       |
    |                  |           | should contain the following keys:                |
    |                  |           |        data, name, units, dimensions, and dtype   |
    +------------------+-----------+---------------------------------------------------+
    | outfile:         | *str*     | Name of netcdf file                               |
    +------------------+-----------+---------------------------------------------------+
    | dimensions:      | *dict*    | Dictionary holding dimensions for netcdf variables|
    +------------------+-----------+---------------------------------------------------+
    | metadata:        | *dict*    | Dictionary holding keys with information to write |
    |                  |           | as global attributes to the netcdf file (optional)|
    +------------------+-----------+---------------------------------------------------+

    Raises:
        ValueError: if outfile is not provided, or if dimensions is None.

    For example, say we want to store the 11um field, data_dict will be the following:
    data_dict = {'11um': {'data': data_11um, 'name': '11um brightness temperature',
                          'units': 'Kelvin', 'dimensions': ('lines', 'samples'),
                          'dtype': np.float64}}
    The dimensions dictionary in this case will be the following:
    dimensions = {'lines': <number_of_lines>, 'samples': <number_of_samples>}
    '''
    # Check to see if we have all the information we need
    if not outfile:
        raise ValueError('Must provide name for output file!')
    if dimensions is None:
        raise ValueError('Must provide dictionary holding netcdf dimensions')

    # Check to see if writing path exists, and create it if non-existent.
    # Guard against a bare filename with no directory component, for which
    # os.path.dirname returns '' and no directory needs to be made.
    out_dir = os.path.dirname(outfile)
    if out_dir and not os.path.exists(out_dir):
        LOG.info('Creating Directory: %s', out_dir)
        from geoips2.filenames.base_paths import make_dirs
        make_dirs(out_dir)

    LOG.info('Storing registered data to netCDF file')
    with netCDF4.Dataset(outfile, 'w', format='NETCDF4') as nc_file:
        # Define dimensions for variables
        create_dimensions(nc_file, dimensions)
        # Iterate through keys in data_dict and store to netcdf file.
        # Note dict.items() - the original iteritems() is Python 2 only
        # and raises AttributeError under Python 3.
        for key, ncinfo in data_dict.items():
            # Create variable sds
            create_variable_sds(nc_file,
                                var_short_name=key,
                                var_type=ncinfo['dtype'],
                                var_dims=ncinfo['dimensions'],
                                var_standard_name=ncinfo['name'],
                                var_units=ncinfo['units'])
            # Populate variable sds with data
            fill_variable_sds(nc_file, key, ncinfo['data'])
        # Finally add global attributes
        add_global_attributes(nc_file, metadata)
    LOG.info('Created: %s', outfile)
Ejemplo n.º 3
0
def write_xarray_netcdf(xarray_obj, ncdf_fname):
    ''' Write out xarray_obj to netcdf file named ncdf_fname

    Global and per-variable attributes are temporarily sanitized so they can
    be serialized to netCDF (datetime, None, and bool attribute values are
    converted to strings in place), then the original attributes are restored
    on xarray_obj after writing.

    Args:
        xarray_obj (xarray.Dataset) : Object to write out.  Must provide
            source_name and platform_name attrs (used in the log message).
        ncdf_fname (str) : Full path to the output netCDF file.

    Returns:
        (list) : List containing the single output filename ncdf_fname
    '''
    def check_attr(xobj, attr):
        # Convert attribute values netCDF cannot serialize (datetime, None,
        # bool) to string representations, mutating xobj.attrs in place.
        if isinstance(xobj.attrs[attr], datetime):
            xobj.attrs[attr] = xobj.attrs[attr].strftime('%c')
        if xobj.attrs[attr] is None:
            xobj.attrs[attr] = str(xobj.attrs[attr])
        if isinstance(xobj.attrs[attr], bool):
            xobj.attrs[attr] = str(xobj.attrs[attr])

    from geoips2.filenames.base_paths import make_dirs
    from os.path import dirname
    make_dirs(dirname(ncdf_fname))

    # Snapshot the original global and per-variable attrs so the in-place
    # string conversions above can be undone after the file is written.
    orig_attrs = xarray_obj.attrs.copy()
    orig_var_attrs = {}

    # NOTE: check_attr closes over this name - the import executes before
    # any call to check_attr, so the reference resolves correctly.
    from datetime import datetime
    for attr in xarray_obj.attrs.keys():
        check_attr(xarray_obj, attr)
    for varname in xarray_obj.variables.keys():
        orig_var_attrs[varname] = xarray_obj[varname].attrs.copy()
        for attr in xarray_obj[varname].attrs.keys():
            check_attr(xarray_obj[varname], attr)

    # The *_str values below are gathered purely for the log message.
    roi_str = 'none'
    if 'interpolation_radius_of_influence' in xarray_obj.attrs.keys():
        roi_str = xarray_obj.interpolation_radius_of_influence

    sdt_str = 'none'
    if 'start_datetime' in xarray_obj.attrs.keys():
        sdt_str = xarray_obj.attrs['start_datetime']

    edt_str = 'none'
    if 'end_datetime' in xarray_obj.attrs.keys():
        edt_str = xarray_obj.attrs['end_datetime']

    dp_str = 'none'
    if 'data_provider' in xarray_obj.attrs.keys():
        dp_str = xarray_obj.attrs['data_provider']

    area_def_str = 'none'
    if 'area_def' in xarray_obj.attrs.keys():
        # area_def objects are not netCDF-serializable - pop and store the
        # repr instead.  The original attr is restored below via orig_attrs.
        area_def = xarray_obj.attrs.pop('area_def')
        area_def_str = repr(area_def)
        xarray_obj.attrs['area_def_str'] = area_def_str

    LOG.info(
        'Writing xarray obj to file %s, source %s, platform %s, start_dt %s, end_dt %s, %s %s, %s %s, %s %s',
        ncdf_fname, xarray_obj.source_name, xarray_obj.platform_name, sdt_str,
        edt_str, 'provider', dp_str, 'roi', roi_str, 'area_def', area_def_str)

    xarray_obj.to_netcdf(ncdf_fname)
    # Restore the caller's original attributes (also drops the temporary
    # 'area_def_str' and re-adds 'area_def' if it was popped above).
    xarray_obj.attrs = orig_attrs
    for varname in xarray_obj.variables.keys():
        xarray_obj[varname].attrs = orig_var_attrs[varname]

    return [ncdf_fname]
def write_yamldict(yamldict, out_fname, force=False):
    ''' Write yamldict to out_fname

    Args:
        yamldict (dict) : Dictionary to write out to YAML file
        out_fname (str) : Output filename to write YAML dict to
        force (bool) : Default False.  If True, overwrite an existing file.

    Returns:
        (list) : List containing the path to the output file if it was
                 produced, or an empty list if the file already existed
                 and force was not set.
    '''
    from geoips2.filenames.base_paths import make_dirs
    from os.path import dirname, exists
    import yaml
    make_dirs(dirname(out_fname))
    # Respect an existing file unless the caller explicitly forces a rewrite.
    if exists(out_fname) and not force:
        LOG.info(
            'SKIPPING %s already exists, delete it if you need to recreate',
            out_fname)
        return []
    with open(out_fname, 'w') as fobj:
        yaml.safe_dump(yamldict, fobj, default_flow_style=False)
        return [out_fname]
def atcf_text_windspeeds(fname,
                         speed_array,
                         time_array,
                         lon_array,
                         lat_array,
                         platform_name,
                         dir_array=None,
                         append=False):
    ''' Write out ATCF formatted text file of wind speeds
        +------------------+-----------+-------------------------------------------------------+
        | Parameters:      | Type:     | Description:                                          |
        +==================+===========+=======================================================+
        | fname:           | *str*     | String full path to output filename                   |
        +------------------+-----------+-------------------------------------------------------+
        | speed_array:     | *ndarray* | array of windspeeds                                   |
        +------------------+-----------+-------------------------------------------------------+
        | time_array:      | *ndarray* | array of POSIX time stamps same length as speed_array |
        +------------------+-----------+-------------------------------------------------------+
        | lon_array:       | *ndarray* | array of longitudes of same length as speed_array     |
        +------------------+-----------+-------------------------------------------------------+
        | lat_array:       | *ndarray* | array of latitudes of same length as speed_array      |
        +------------------+-----------+-------------------------------------------------------+
        | platform_name:   | *str*     | String platform name                                  |
        +------------------+-----------+-------------------------------------------------------+
        | dir_array:       | *ndarray* | Default None. Optional array of wind directions of    |
        |                  |           | same length as speed_array                            |
        +------------------+-----------+-------------------------------------------------------+
        | append:          | *bool*    | Default False. If True, append to fname rather than   |
        |                  |           | overwriting it                                        |
        +------------------+-----------+-------------------------------------------------------+

        Returns:
            (list) : List containing the single output filename fname

        Raises:
            TypeError: if any array argument is not a numpy.ndarray, or if
                       fname / platform_name is not a str
    '''
    output_products = []
    if not isinstance(platform_name, str):
        raise TypeError('Parameter platform_name must be a str')
    if not isinstance(fname, str):
        raise TypeError('Parameter fname must be a str')
    if not isinstance(speed_array, numpy.ndarray):
        raise TypeError(
            'Parameter speed_array must be a numpy.ndarray of wind speeds')
    if not isinstance(lat_array, numpy.ndarray):
        raise TypeError(
            'Parameter lat_array must be a numpy.ndarray of latitudes')
    if not isinstance(lon_array, numpy.ndarray):
        raise TypeError(
            'Parameter lon_array must be a numpy.ndarray of longitudes')
    if not isinstance(time_array, numpy.ndarray):
        raise TypeError(
            'Parameter time_array must be a numpy.ndarray of POSIX timestamps')

    # Map the platform to the source name string ATCF expects in each record.
    source_name = ATCF_SOURCE_NAMES[platform_name].upper()

    from geoips2.filenames.base_paths import make_dirs
    make_dirs(os.path.dirname(fname))
    # For masked arrays, drop any sample that is masked in ANY of the input
    # arrays so the output rows stay aligned across all fields.
    if hasattr(speed_array, 'mask'):
        if dir_array is not None:
            newmask = speed_array.mask | time_array.mask | lat_array.mask | lon_array.mask | dir_array.mask
        else:
            newmask = speed_array.mask | time_array.mask | lat_array.mask | lon_array.mask
        inds = numpy.ma.where(~newmask)
        speed_array = speed_array[inds]
        time_array = time_array[inds]
        lon_array = lon_array[inds]
        lat_array = lat_array[inds]
        if dir_array is not None:
            dir_array = dir_array[inds]

    openstr = 'w'
    if append:
        openstr = 'a'
    startdt_str = datetime.utcfromtimestamp(
        time_array[0]).strftime('%Y%m%d%H%M')
    # Only emit the header line when creating a brand new file.
    header = ''
    if not os.path.exists(fname):
        header = 'METXSCT {0} ASC (FULL DAY)\n'.format(startdt_str)
    with open(fname, openstr) as fobj:
        if dir_array is not None:
            # NOTE(review): the header is only written in the dir_array
            # (scatterometer-style) branch, never in the else branch -
            # presumably intentional for the METXSCT format; confirm.
            fobj.write(header)
            for speed, time, lon, lat, direction in zip(
                    speed_array, time_array, lon_array, lat_array, dir_array):
                # dtstr = time.strftime('%Y%m%d%H%M')
                dtstr = datetime.utcfromtimestamp(time).strftime('%Y%m%d%H%M')
                # if lon > 180:
                #     lon = lon - 360
                format_string = ' {0:>3s} {1:>8.1f} {2:>6.1f} {3:>3d} {4:>3d} {5:s}\n'
                fobj.write(
                    format_string.format(source_name, lat, lon, int(direction),
                                         int(speed), dtstr))
        else:
            for speed, time, lon, lat in zip(speed_array, time_array,
                                             lon_array, lat_array):
                # dtstr = time.strftime('%Y%m%d%H%M')
                dtstr = datetime.utcfromtimestamp(time).strftime('%Y%m%d%H%M')
                # if lon > 180:
                #     lon = lon - 360
                # SMAP/SMOS records use a slightly different column layout.
                format_string = '{0:>6s}{1:>8.2f}{2:>8.2f}{3:>4d} {4:s}\n'
                if source_name == 'SMAP' or source_name == 'SMOS':
                    format_string = ' {0:<6s} {1:>5.1f} {2:>5.1f} {3:>3d} {4:s}\n'
                fobj.write(
                    format_string.format(source_name, lat, lon, int(speed),
                                         dtstr))
    import subprocess
    # Log the file with its full timestamp.  NOTE(review): 'ls --full-time'
    # is GNU/Linux specific - this will fail on non-GNU platforms.
    lsfulltime = subprocess.check_output(['ls', '--full-time', fname])
    LOG.info('WINDTEXTSUCCESS wrote out text windspeed file %s', lsfulltime)
    output_products = [fname]
    return output_products
Ejemplo n.º 6
0
def metoctiff(data_array,
              ullat_radians,
              urlat_radians,
              lllat_radians,
              lrlat_radians,
              uclat_radians,
              lclat_radians,
              ullon_radians,
              urlon_radians,
              lllon_radians,
              lrlon_radians,
              uclon_radians,
              lclon_radians,
              data_start_datetime,
              data_end_datetime,
              product_name,
              platform_name,
              data_units,
              output_filename,
              requested_data_min,
              requested_data_max,
              scale_data_min=1,
              scale_data_max=255,
              missing_value=0,
              product_description='None',
              mpl_cmap=None,
              existing_image=None,
              gzip_output=False):
    '''
        Generate a metoctiff with valid headers from existing image OR data found in data_array,
        with appropriate colormap, data ranges, tags
        NOTE: If you include the "existing_image" option, it will just read in an arbitrary existing image and plot it
        QUALITATIVELY with the appropriate colormap / lat / lons, but no quantitative information.
        +------------------+-----------+-------------------------------------------------------+
        | Parameters:      | Type:     | Description:                                          |
        +==================+===========+=======================================================+
        | data_array:      | *ndarray* | Array of data to scale properly for metoctiff         |
        +------------------+-----------+-------------------------------------------------------+
        | ullat_radians:   | *float*   | upper left lat of the data_array in radians           |
        |                  |           |            used for metoctiff "extratags" header      |
        +------------------+-----------+-------------------------------------------------------+
        | urlat_radians:   | *float*   | upper right lat of the data_array in radians          |
        |                  |           |            used for metoctiff "extratags" header      |
        +------------------+-----------+-------------------------------------------------------+
        | lllat_radians:   | *float*   | lower left lat of the data_array in radians           |
        |                  |           |            used for metoctiff "extratags" header      |
        +------------------+-----------+-------------------------------------------------------+
        | lrlat_radians:   | *float*   | lower right lat of the data_array in radians          |
        |                  |           |            used for metoctiff "extratags" header      |
        +------------------+-----------+-------------------------------------------------------+
        | ullon_radians:   | *float*   | upper left lon of the data_array in radians           |
        |                  |           |            used for metoctiff "extratags" header      |
        +------------------+-----------+-------------------------------------------------------+
        | urlon_radians:   | *float*   | upper right lon of the data_array in radians          |
        |                  |           |            used for metoctiff "extratags" header      |
        +------------------+-----------+-------------------------------------------------------+
        | lllon_radians:   | *float*   | lower left lon of the data_array in radians           |
        |                  |           |            used for metoctiff "extratags" header      |
        +------------------+-----------+-------------------------------------------------------+
        | lrlon_radians:   | *float*   | lower right lon of the data_array in radians          |
        |                  |           |            used for metoctiff "extratags" header      |
        +------------------+-----------+-------------------------------------------------------+
        |data_start_datetime:|*datetime*| datetime object indicating the data start datetime |
        |                  |           | used for metoctiff "description" header DATA_START_TIME |
        +------------------+-----------+-------------------------------------------------------+
        |data_end_datetime:| *datetime*| datetime object indicating the data end datetime   |
        |                  |           | used for metoctiff "description" header DATA_END_TIME |
        +------------------+-----------+-------------------------------------------------------+
        | product_name:    | *str*     | string of current product name of the data_array      |
        |                  |           | used for metoctiff "description" header DATA_NAME     |
        +------------------+-----------+-------------------------------------------------------+
        | platform_name:   | *str*     | string of current platform name of the data_array     |
        |                  |           | used for metoctiff "description" header PLATFORM_NAME |
        +------------------+-----------+-------------------------------------------------------+
        | data_units:      | *str*     | string of current units of the data_array             |
        |                  |           | used for metoctiff "description" header DATA_UNITS    |
        +------------------+-----------+-------------------------------------------------------+
        | output_filename: | *str*     | string of output filename to write the metoctiff      |
        +------------------+-----------+-------------------------------------------------------+
        |requested_data_min| *float*   | Minimum allowed value for the actual data,            |
        |                  |           |    for scaling from scale_data_min to scale_data_max  |
        |                  |           | metoctiff description tag:                            |
        |                  |           | DATA_RANGE=scale_data_min,                            |
        |                  |           |            scale_data_max,                            |
        |                  |           |            requested_data_min,                        |
        |                  |           |            requested_data_max/scale_data_max          |
        |                  |           |                                                       |
        +------------------+-----------+-------------------------------------------------------+
        |requested_data_max| *float*   | Maximum allowed value for the actual data,            |
        |                  |           |    for scaling from scale_data_min to scale_data_max  |
        |                  |           | metoctiff description tag:                            |
        |                  |           | DATA_RANGE=scale_data_min,                            |
        |                  |           |            scale_data_max,                            |
        |                  |           |            requested_data_min,                        |
        |                  |           |            requested_data_max/scale_data_max          |
        +------------------+-----------+-------------------------------------------------------+
        |scale_data_min    | *uint8*   | Minimum allowed value for the scaled data,            |
        |                  |           |    for scaling from scale_data_min to scale_data_max  |
        |                  | DEFAULT   | metoctiff description tag:                            |
        |                  | 1         | DATA_RANGE=scale_data_min,                            |
        |                  |           |            scale_data_max,                            |
        |                  |           |            requested_data_min,                        |
        |                  |           |            requested_data_max/scale_data_max          |
        |                  |           |                                                       |
        +------------------+-----------+-------------------------------------------------------+
        |scale_data_max    | *uint8*   | Maximum allowed value for the scaled data,            |
        |                  |           |    for scaling from scale_data_min to scale_data_max  |
        |                  | DEFAULT   | metoctiff description tag:                            |
        |                  | 255       | DATA_RANGE=scale_data_min,                            |
        |                  |           |            scale_data_max,                            |
        |                  |           |            requested_data_min,                        |
        |                  |           |            requested_data_max/scale_data_max          |
        +------------------+-----------+-------------------------------------------------------+
        |missing_value     | *uint8*   | Value that ATCF considers missing/bad data,           |
        |                  |           |    between 0 and 255                                  |
        |                  | DEFAULT   | metoctiff description tag:                            |
        |                  | 0         | DATA_RANGE=scale_data_min,                            |
        |                  |           |            scale_data_max,                            |
        |                  |           |            requested_data_min,                        |
        |                  |           |            requested_data_max/scale_data_max          |
        +------------------+-----------+-------------------------------------------------------+
        | mpl_cmap:        |*ColorMap* | matplotlib ColorMap object to create 255 color palette|
        |                  |           |     to map the scaled 1-255 data values in the jif    |
        |                  |           |     !!! ColorMap must match the range specified in    |
        |                  |           |     requested_data_min to requested_data_max          |
        |                  |           |     if you want specific colors to match specific     |
        |                  |           |     values !!!                                        |
        |                  |           |                                                       |
        +------------------+-----------+-------------------------------------------------------+
        |existing_image:   | *str*     | string of full path to an existing image              |
        |                  | *ndarray* | RGB or RGBA array of 0 to 1                           |
        |                  |           |   NOTE: Use of this option basically ignores most     |
        |                  |           |         everything else! Just reads it in and writes  |
        |                  |           |         it back out qualitatively, with the           |
        |                  |           |         appropriate colors and metoctiff headers      |
        +------------------+-----------+-------------------------------------------------------+
        | gzip_output:     |*bool*     | Flag to determine whether to gzip the output          |
        +------------------+-----------+-------------------------------------------------------+
    '''

    output_products = []
    LOG.info('Creating metoctiff image file, gzip_output=%s', gzip_output)

    #
    #  Get the image lat lon corners for the metoctiff extratags.
    #  Corner coordinates are converted from radians to degrees and scaled
    #  by 100000, truncated to int, for the integer-valued TIFF extratags.
    #  Added the image flip.
    #

    rsULLat = int(numpy.rad2deg(ullat_radians) * 100000)
    rsULLon = int(numpy.rad2deg(ullon_radians) * 100000)

    rsURLat = int(numpy.rad2deg(urlat_radians) * 100000)
    rsURLon = int(numpy.rad2deg(urlon_radians) * 100000)

    rsLLLat = int(numpy.rad2deg(lllat_radians) * 100000)
    rsLLLon = int(numpy.rad2deg(lllon_radians) * 100000)

    rsLRLat = int(numpy.rad2deg(lrlat_radians) * 100000)
    rsLRLon = int(numpy.rad2deg(lrlon_radians) * 100000)

    rsUCLat = int(numpy.rad2deg(uclat_radians) * 100000)
    rsUCLon = int(numpy.rad2deg(uclon_radians) * 100000)

    rsBCLat = int(numpy.rad2deg(lclat_radians) * 100000)
    rsBCLon = int(numpy.rad2deg(lclon_radians) * 100000)

    #
    #  NOTE: We are now passing Center Lat and Center Lon directly -
    #        these calculations fail when crossing the dateline.
    #  Get the center lat lon values of image for the metoctiff extratags
    #

    # rsUCLat = (rsULLat + rsURLat) / 2
    # rsUCLon = (rsULLon + rsURLon) / 2

    # rsBCLat = (rsLLLat + rsLRLat) / 2
    # rsBCLon = (rsLLLon + rsLRLon) / 2

    #
    #  Additional info for extratags required for metocTiff
    #

    nProjection = 4  # 1 = Polar Stereographic 2 = Lambert Conformal 4 = Mercator 8 = Normal.
    #   It's likely that mercator is analogous to 'cyl' in pyproj
    rsStandard1 = 0  # only used if lambert conformal projection is specified
    rsStandard2 = 0  # only used if lambert conformal projection is specified
    # Hemisphere is determined from the bottom-center latitude.
    if rsBCLat >= 0:
        Hemisphere = 1  # northern
    else:
        Hemisphere = 2  # southern

    # If we passed an existing_image, read it in and take the scaled data,
    # colormap and bounds from that temporary image; otherwise scale
    # data_array via the linear scaling equations.
    if existing_image is not None:
        scaledata, jif_cmap, requested_bounds, scale_bounds = get_data_from_image(
            existing_image)
        scale_data_min = scale_bounds[0]
        scale_data_max = scale_bounds[1]
        requested_data_min = requested_bounds[0]
        requested_data_max = requested_bounds[1]
    else:
        scaledata, jif_cmap = get_data_from_equations(
            data_array, mpl_cmap, requested_data_min, requested_data_max,
            scale_data_min, scale_data_max, missing_value)

    #
    #  Info for "description" tag included in metoctiff file.
    #  ATCF relies heavily on this description in order to display the tiff correctly
    #

    data_name = product_name
    platform = platform_name

    data_start_dtstr = data_start_datetime.strftime(
        '%a, %d %b %Y %H:%M:%S GMT')
    data_end_dtstr = data_end_datetime.strftime('%a, %d %b %Y %H:%M:%S GMT')

    #
    #  Write out the tiff file with all of the appropriate METOCTiff headers!
    #  Note it appears we need to use little endian order (big endian looks correct,
    #  but doesn't work for interrogating values in ATCF), and uint8
    #
    #  Note: Any data ranges are scaled to 0 to 249 in get_data_from_equations
    #

    # for byteorderstr in ['le', 'be']:
    #     for dtype in ['uint8', 'uint16', 'float64', 'float32']:
    # Single-iteration loops kept from earlier experimentation with byte
    # orders / dtypes - currently fixed to little endian uint8 (see above).
    for byteorderstr in ['le']:
        for dtype in ['uint8']:
            if byteorderstr == 'le':
                byteorder = '<'
            elif byteorderstr == 'be':
                byteorder = '>'

            # Just output filename - we've determined little endian, uint8, from data itself is the way to go.
            curr_output_filename = output_filename

            szDescription = 'DATA_PLATFORM="{0}";'.format(platform) + \
                            'DATA_NAME="{0}";'.format(data_name) + \
                            'DATA_START_TIME="{0}";'.format(data_start_dtstr) + \
                            'DATA_END_TIME="{0}";'.format(data_end_dtstr) + \
                            'DATA_UNITS="{0}";'.format(data_units)

            LOG.info('DATA_RANGE tag from %s to %s', requested_data_min,
                     requested_data_max)

            # From def get_data_from_equations
            # m = (float(scale_data_max) - scale_data_min) / (float(requested_data_max) - requested_data_min)
            # b = scale_data_min - m * float(requested_data_min)
            # scaledata = m * data_array + b
            # So realdata = (1/m) * (scaledata - b)
            # DATA range uses the 1/m scaling factor, as well as scale_data_min, scale_data_max, and requested_data_min
            # in order to recover the real data from the scaled values.

            # To determine MAX_DATA_VAL from scaling_term in DATA_RANGE:
            #   scaling_term*(SCALE_DATA_MAX-SCALE_DATA_MIN) + MIN_DATA_VAL
            scaling_term = (float(requested_data_max) - requested_data_min) / (
                float(scale_data_max) - scale_data_min)

            datarange = 'DATA_RANGE="{0},{1},{2},{3:0.6f},{4}";'.format(
                scale_data_min, scale_data_max, requested_data_min,
                scaling_term, product_description)

            # print(datarange)
            szDescription = '{0}{1}'.format(szDescription, datarange)

            scaledata = scaledata.astype(dtype)

            from geoips2.filenames.base_paths import make_dirs
            make_dirs(os.path.dirname(curr_output_filename))
            # NOTE(review): tags 33000-33015 appear to be private TIFF tags
            # ATCF reads for projection / corner / center coordinates, and
            # 284 is TIFF PlanarConfiguration - confirm against the
            # METOCTIFF header specification.
            with tf.TiffWriter(curr_output_filename,
                               byteorder=byteorder) as mtif:
                LOG.info('Writing METOCTIFF jif file: %s...',
                         curr_output_filename)
                mtif.save(scaledata,
                          colormap=jif_cmap,
                          description=szDescription,
                          metadata=None,
                          extratags=[(284, 'H', 1, 1, True),
                                     (33000, 'i', 1, nProjection, True),
                                     (33001, 'i', 1, rsStandard1, True),
                                     (33002, 'i', 1, rsStandard2, True),
                                     (33003, 'i', 1, Hemisphere, True),
                                     (33004, 'i', 1, rsULLat, True),
                                     (33005, 'i', 1, rsULLon, True),
                                     (33006, 'i', 1, rsLLLat, True),
                                     (33007, 'i', 1, rsLLLon, True),
                                     (33008, 'i', 1, rsURLat, True),
                                     (33009, 'i', 1, rsURLon, True),
                                     (33010, 'i', 1, rsLRLat, True),
                                     (33011, 'i', 1, rsLRLon, True),
                                     (33012, 'i', 1, rsBCLat, True),
                                     (33013, 'i', 1, rsBCLon, True),
                                     (33014, 'i', 1, rsUCLat, True),
                                     (33015, 'i', 1, rsUCLon, True)])
                LOG.info('MTIFSUCCESS %s', curr_output_filename)
                output_products = [curr_output_filename]

    # Sanity Check - log corner/center coordinates back in degrees.
    LOG.info('Min/Max Left Lat %s %s', rsLLLat / 10**5 * 1.0,
             rsULLat / 10**5 * 1.0)
    LOG.info('Min/Max Right Lat %s %s', rsLRLat / 10**5 * 1.0,
             rsURLat / 10**5 * 1.0)
    LOG.info('Bottom/Top Center Lat %s %s', rsBCLat / 10**5 * 1.0,
             rsUCLat / 10**5 * 1.0)
    LOG.info('Min/Center/Max Lower Lon %s %s %s', rsLLLon / 10**5 * 1.0,
             rsBCLon / 10**5, rsLRLon / 10**5 * 1.0)
    LOG.info('Min/Center/Max Upper Lon %s %s %s', rsULLon / 10**5 * 1.0,
             rsUCLon / 10**5 * 1.0, rsURLon / 10**5 * 1.0)

    if gzip_output is True:
        try:
            # gzip the file and remove original.  On any failure we log and
            # fall through, leaving output_products as the uncompressed tiff.
            LOG.info('Gzipping output to file %s.gz', output_filename)
            with open(output_filename, 'rb') as uncompressedFile, gzip.open(
                    output_filename + '.gz', 'wb') as compressedFile:
                shutil.copyfileobj(uncompressedFile, compressedFile)
            if os.path.isfile(output_filename):
                os.remove(output_filename)
            output_products = [output_filename + '.gz']
        except Exception as err:
            LOG.info('{0}: {1} >> {2}'.format(
                type(err).__name__, str(err.__doc__), str(err.args)))

    return output_products
Ejemplo n.º 7
0
def save_image(fig,
               out_fname,
               is_final=True,
               image_datetime=None,
               remove_duplicate_minrange=None):
    ''' Save the image specified by the matplotlib figure "fig" to the filename out_fname.

    Args:
        fig (Figure) : matplotlib.figure.Figure object that needs to be written to a file.
        out_fname (str) : string specifying the full path to the output filename
        is_final (bool) : Default True. Final imagery must set_axis_on for all axes. Non-final imagery must be
                                        transparent with set_axis_off for all axes, and no pad inches.
        image_datetime (datetime) : Default None. If set, log the latency from this time to now.
        remove_duplicate_minrange : Default None. If set, passed through to remove_duplicates
                                    after the image is written.

    Returns:
        (list) : List containing the single output filename written to disk
                 (IMAGESUCCESS is also written to the log file)
    '''
    import matplotlib
    # Select the non-interactive backend BEFORE importing pyplot - calling
    # use() after the pyplot import is unreliable on older matplotlib.
    matplotlib.use('agg')
    import matplotlib.pyplot as plt
    rc_params = matplotlib.rcParams
    from os.path import dirname, exists as pathexists
    from geoips2.filenames.base_paths import make_dirs

    # Output directory is needed for either branch - create it once here.
    if not pathexists(dirname(out_fname)):
        make_dirs(dirname(out_fname))

    if is_final:
        for ax in fig.axes:
            LOG.info('Adding ax to %s', ax)
            ax.set_axis_on()
        # final with titles, labels, etc.  Note bbox_inches='tight' removes white space, pad_inches=0.1 puts back in
        # a bit of white space.
        LOG.info('Writing %s', out_fname)
        fig.savefig(out_fname,
                    dpi=rc_params['figure.dpi'],
                    pad_inches=0.1,
                    bbox_inches='tight',
                    transparent=False)
    else:
        # no annotations - set_axis_off on every ax removes titles / lat/lons
        # (colorbars are their own ax, so they are switched off here too).
        for ax in fig.axes:
            LOG.info('Removing ax from %s', ax)
            ax.set_axis_off()
        LOG.info('Writing %s', out_fname)
        # NOTE: the savefig 'frameon' keyword was removed in matplotlib 3.3
        # (crashes on modern versions); transparent=True already makes the
        # figure background fully transparent, so it is omitted here.
        fig.savefig(out_fname,
                    dpi=rc_params['figure.dpi'],
                    pad_inches=0.0,
                    transparent=True)

    if remove_duplicate_minrange is not None:
        remove_duplicates(out_fname, remove_duplicate_minrange)

    LOG.info('IMAGESUCCESS wrote %s', out_fname)
    if image_datetime is not None:
        from datetime import datetime
        LOG.info('LATENCY %s %s', out_fname,
                 datetime.utcnow() - image_datetime)
    return [out_fname]