Example #1
def process_extents(div_name, dist_name, process_date,
                    src_dir, extents_list, options):
    ''' Main function for processing extents.  Calls lots of helper
    and utility functions.
    div_name: string - Name of division, used in output file format.
    dist_name: string - Name of district, used in output file format.
    process_date: datetime.datetime object - date for which data is desired.
    extents_list: list - list of namedtuples for each watershed.

    Returns the path to the file if new data was written to a DSS file,
    None otherwise.
    '''

    def verbose_print(to_print):
        if options.verbose:
            print to_print

    def clean_up_tmp_dir(tmp_dir):
        if not options.keep_tmp_dir:
            shutil.rmtree(tmp_dir)

    topdir = config.TOP_DIR

    verbose_print('Source directory: {0}'.format(src_dir))

    # Use 'us' prefix and adjust nodata value for dates before January 24, 2011.
    dataset_type = 'zz'
    nodata_val = '-9999'
    if process_date < datetime.datetime(2011, 1, 24, 0, 0):
        dataset_type = 'us'
        nodata_val = '55537'

    projdir = os.path.join(topdir, div_name, dist_name)

    projresdir = os.path.join(projdir, 'results_sn')
    projascdir = os.path.join(projresdir, 'asc_files')
    projdssdir = os.path.join(projresdir, 'dss_files')
    histdir = os.path.join(projresdir, 'history')

    # Build our results directories if needed.
    mkdir_p(projascdir)
    mkdir_p(projdssdir)
    mkdir_p(histdir)

    dstr = datetime.datetime.now().strftime('%y%m%d%H%M%S')
    ymdDate = process_date.strftime('%Y%m%d')

    # Break out if processing for the given date has already happened.
    histfile = os.path.join(histdir, 'proccomplete' + ymdDate + '.txt')
    if os.path.isfile(histfile):
        print '{0} {1} grids already processed for: {2}'.format(
            div_name, dist_name, process_date.strftime('%Y.%m.%d')
        )
        return None
    print 'Processing {0} {1} grids for: {2}'.format(
        div_name, dist_name, process_date.strftime('%Y.%m.%d')
    )

    tmpdir = os.path.join(projresdir, 'tmp' + dstr)
    os.mkdir(tmpdir)

    # Set up a dictionary mapping the various properties to their DSS names.
    PropDict = SetProps(process_date, div_name)
    enameDict = {}
    zerolist = ["0001", "0002", "0003"]
    extentGProps = {}
    maxExtent = getMaxExtent(extents_list)
    dssbasename = GetDSSBaseName(process_date)
    dssfile = os.path.join(projdssdir, dssbasename)

    # Define our files and make sure they all exist.
    snodaslist = [
        s.format(ds=dataset_type, ymd=ymdDate) for s in SNODAS_FILENAME_LIST
    ]

    # Loop through our source SNODAS files.
    for f in snodaslist:

        # Strip the 4-digit variable code from the SNODAS filename.
        varcode = f[8:12]
        varprops = PropDict[varcode]

        easiername = \
            div_name + "_" + varprops[0][2].replace(" ", "_").lower() + ymdDate
        enameDict[varcode] = os.path.join(projascdir, easiername + ".asc")
        shgtif = os.path.join(tmpdir, f + "alb.tif")
        shgtifmath = os.path.join(tmpdir, easiername + ".tif")

        src_file = os.path.join(src_dir, f + '.bil')
        ReprojUseWarpBil(src_file, shgtif, maxExtent, nodata_val)
        mathranokay = True
        if varprops[2]:
            # NOTE: enameDict is populated only for prior product numbers.
            mathranokay = RasterMath(shgtif, shgtifmath, varcode, enameDict)
        else:
            shgtifmath = shgtif
        if mathranokay:
            enameDict[varcode] = shgtifmath
            for extentarr in extents_list:
                ds = gdal.Open(shgtifmath)
                if ds is None:
                    print 'Could not open ' + shgtifmath
                    return None
                nodata = ds.GetRasterBand(1).GetNoDataValue()
                fullext = GetDatasetExtent(ds)
                cellsize = ds.GetGeoTransform()[1]

                subext = extentarr[1]
                fullof, subof = min_box_os(fullext, subext, cellsize)
                xsize = int(fullof[2])
                ysize = int(fullof[3])
                dsProj = ds.GetProjection()

                # Read only the overlapping window from the source band.
                cliparr = ds.GetRasterBand(1).ReadAsArray(
                    int(round(fullof[0])), int(round(fullof[1])),
                    xsize, ysize
                )
                driver = gdal.GetDriverByName("MEM")

                clipgeot = [subext[0], cellsize, 0, subext[3], 0, -cellsize]
                extentGProps[extentarr[0]] = [
                    dsProj, clipgeot, xsize, ysize, nodata]

                clipds = driver.Create("", xsize, ysize, 1, GDT_Float32)
                clipds.SetGeoTransform(clipgeot)
                clipds.SetProjection(ds.GetProjection())
                clipds.GetRasterBand(1).SetNoDataValue(nodata)
                clipds.GetRasterBand(1).WriteArray(cliparr, 0, 0)
                clipds.FlushCache()
                ascbasename = extentarr[0] + "_" + \
                    varprops[0][2].replace(" ", "_").lower() + ymdDate
                CreateASCII(clipds, ascbasename, tmpdir)
                clipds = None
                ds = None

                tmpasc = os.path.join(tmpdir, ascbasename + ".asc")
                projasc = os.path.join(projascdir, ascbasename + ".asc")
                shutil.copy(tmpasc, projasc)
                shutil.copy(
                    os.path.join(tmpdir, ascbasename + "tmp.prj"),
                    os.path.join(projascdir, ascbasename + ".prj")
                )

                p = varprops[0]
                dtype = varprops[1]

                path = "/SHG/" + extentarr[0].upper() + "/" + p[2] + \
                    "/" + p[3] + "/" + p[4] + "/" + p[5] + "/"
                WriteToDSS(projasc, dssfile, dtype, path)
                cliparr = None

    if len(extentGProps) == 0:
        print "An error occurred identifying extent properties."
        clean_up_tmp_dir(tmpdir)
        return None

    for varcode in zerolist:
        varprops = PropDict[varcode]
        for extentarr in extents_list:
            p = varprops[0]
            dtype = varprops[1]
            path = "/SHG/" + extentarr[0].upper() + "/" + p[2] + \
                "/" + p[3] + "/" + p[4] + "/" + p[5] + "/"
            ascbasename = extentarr[0] + "_" + \
                varprops[0][2].replace(" ", "_").lower() + ymdDate
            tmpasc = os.path.join(tmpdir, ascbasename + ".asc")
            projasc = os.path.join(projascdir, ascbasename + ".asc")

            WriteZeroDStoAsc(extentGProps[extentarr[0]], ascbasename, tmpdir)
            shutil.copy(tmpasc, projasc)
            shutil.copy(os.path.join(tmpdir, ascbasename + "tmp.prj"),
                        os.path.join(projascdir, ascbasename + ".prj"))
            dssdunits = varprops[3]
            WriteToDSS(projasc, dssfile, dtype, path, dssdunits)

    clean_up_tmp_dir(tmpdir)
    clean_up_tmp_dir(projascdir)

    # Write out file to track that we've run for this day.
    with open(histfile, "w") as fo:
        fo.write(process_date.strftime("%a %b %d %H:%M:%S %Y"))
    return dssfile
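A note on the mkdir_p helper used above: it is not shown in these examples. A minimal sketch, assuming it follows the usual `mkdir -p` idiom of creating the full tree and ignoring an already-exists error (the real helper may differ):

import errno
import os

def mkdir_p(path):
    # Create path and any missing parents; tolerate an existing directory.
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise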
Example #2
def prepare_source_data_for_date(process_date, src_dir, save_tiff=True):
    ''' Builds an unzip directory and extracts data from source files
    for a given day. 
    Returns the directory path to the unzipped files,
    or None if missing any source data. '''
    ymd_str = process_date.strftime('%Y%m%d')
    unzip_dir = os.path.join(config.TOP_DIR, 'unzipped_data', ymd_str)
    us_tif_dir = os.path.join(config.TOP_DIR, 'conus_tiffs')
    
    # Use 'us' prefix and adjust nodata value for dates before January 24, 2011.
    ds_type = 'zz'
    nodata_val = '-9999'
    if process_date < datetime.datetime(2011, 1, 24, 0, 0):
        ds_type = 'us'
        nodata_val = '55537'

    masterhdr = os.path.join(config.TOP_DIR, 'key', ds_type + '_master.hdr')

    # Create list of file names for this date.
    snodas_src_files = [
        f.format(ds=ds_type, ymd=ymd_str) for f in SNODAS_FILENAME_LIST
    ]

    # Make sure all files exist before trying any extractions.
    print_dashes()
    print 'Processing source data for:', process_date.strftime('%Y.%m.%d')
    msgs = []
    for filename in snodas_src_files:
        if not os.path.isfile(os.path.join(src_dir, filename + '.grz')):
            msgs += ['Missing source data file: {0}'.format(filename)]
    if msgs:
        for msg in msgs:
            print msg
        print_dashes()
        return None

    if save_tiff:
        mkdir_p(us_tif_dir)

    # Loop through our filenames and do the unzipping and other set up.
    mkdir_p(unzip_dir)
    for filename in snodas_src_files:
        src_file = os.path.join(src_dir, filename)
        unzip_file = os.path.join(unzip_dir, filename)
        ready_file = unzip_file + '.bil'
        if not os.path.isfile(ready_file):
            print 'Processing source to output file:', ready_file
            UnzipLinux(src_file, unzip_file)
            RawFileManip(unzip_file, masterhdr)
        else:
            print 'Using existing source file:', ready_file 
        
        # Save a full version of the day's data set.
        shgtif = os.path.join(us_tif_dir, filename + 'alb.tif')
        if save_tiff:
            if not os.path.isfile(shgtif):
                print 'Saving CONUS SHG tiff file:', shgtif
                ReprojUseWarpBil(ready_file, shgtif, nodata=nodata_val,
                                 tr_x='1000', tr_y='-1000')
            else:
                print 'CONUS SHG tiff already exists:', shgtif

    print_dashes()
    return unzip_dir
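The UnzipLinux helper is not shown either. A hypothetical sketch, assuming the '.grz' source is a plain gzip stream that gets decompressed next to the unzip path (the real helper may shell out to gunzip or handle a different container format):

import gzip
import shutil

def UnzipLinux(src_file, unzip_file):
    # Assumed behavior: append the .grz suffix checked for above and
    # decompress the stream to the target path.
    with gzip.open(src_file + '.grz', 'rb') as f_in, \
            open(unzip_file, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)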
Example #3
def process_extents(office_symbol, process_date, src_dir, extents_list,
                    options):
    ''' Main function for processing extents.  Calls lots of helper
    and utility functions.
    office_symbol: string - unique office symbol, used in output file format
    process_date: datetime.datetime object - date for which data is desired.
    extents_list: list - list of namedtuples for each watershed.

    Returns the path to the file if new data was written to a DSS file,
    None otherwise.
    '''
    def verbose_print(to_print):
        if options.verbose:
            print(to_print)

    def clean_up_tmp_dir(tmp_dir):
        if not options.keep_tmp_dir:
            shutil.rmtree(tmp_dir)

    verbose_print('Source directory: {0}'.format(src_dir))

    # Use 'us' prefix and adjust nodata value for dates before January 24, 2011.
    dataset_type = 'zz'
    nodata_val = '-9999'
    if process_date < datetime.datetime(2011, 1, 24, 0, 0):
        dataset_type = 'us'
        nodata_val = '55537'

    # Use the proper results dir structure based on the config file.
    projfltdir = os.path.join(config.FLT_BASE_DIR, office_symbol)
    projdssdir = os.path.join(config.DSS_BASE_DIR, office_symbol)
    histdir = os.path.join(config.HISTORY_BASE_DIR, office_symbol)
    tmpdir = os.path.join(
        projfltdir,
        'tmp{}'.format(datetime.datetime.now().strftime('%y%m%d%H%M%S')))

    # Build our results directories if needed.
    for d in (projfltdir, projdssdir, histdir, tmpdir):
        mkdir_p(d)

    # Break out if processing for the given date has already happened.
    histfile = os.path.join(
        histdir, 'proccomplete{}.txt'.format(process_date.strftime('%Y%m%d')))
    if os.path.isfile(histfile):
        print('{0} grids already processed for: {1}'.format(
            office_symbol, process_date.strftime('%Y.%m.%d')))
        return None

    print('Processing {0} grids for: {1}'.format(
        office_symbol, process_date.strftime('%Y.%m.%d')))

    # Set up a dictionary mapping the various properties to their DSS names.
    PropDict = SetProps(process_date, office_symbol)
    scratchfile_dict = {}
    extentGProps = {}
    maxExtent = getMaxExtent(extents_list)
    dssfile = os.path.join(projdssdir, GetDSSBaseName(process_date))

    # Instantiate new flt2dss.Operation
    flt2dss_operation = flt2dss.Operation()

    # Loop through our source SNODAS files.
    for f in [
            s.format(ds=dataset_type, ymd=process_date.strftime('%Y%m%d'))
            for s in SNODAS_FILENAME_LIST
    ]:

        # Strip variable key/ID from filename
        varcode = f[8:12]
        varprops = PropDict[varcode]

        # Filenames
        src_file = os.path.join(src_dir, '{}.bil'.format(f))
        shgtif = os.path.join(tmpdir, '{}alb.tif'.format(f))
        shgtifmath = os.path.join(tmpdir, '{}.tif'.format(f))

        # Reproject src_file to shgtif
        ReprojUseWarpBil(src_file, shgtif, maxExtent, nodata_val)

        # Set dictionary entry for variable to filepath to SHG projected TIF
        scratchfile_dict[varcode] = shgtif

        # if variable does not require computation to define grid values, set
        # "shgtifmath" (grid after computation) to shgtif (raw grid)
        # else run RasterMath() to write a grid with computed values
        mathranokay = True
        # If math is not required to derive final grid
        if not varprops[2]:
            shgtifmath = shgtif
        else:
            # NOTE: scratchfile_dict populated only for prior product numbers
            mathranokay = RasterMath(shgtif, shgtifmath, varcode,
                                     scratchfile_dict)

        if mathranokay:
            scratchfile_dict[varcode] = shgtifmath
            for extentarr in extents_list:
                ds = gdal.Open(shgtifmath)
                if ds is None:
                    print('Could not open {}'.format(shgtifmath))
                    return None
                nodata = ds.GetRasterBand(1).GetNoDataValue()
                fullext = GetDatasetExtent(ds)
                cellsize = ds.GetGeoTransform()[1]

                extent_name, subext = extentarr[0], extentarr[1]
                fullof, subof = min_box_os(fullext, subext, cellsize)
                xsize = int(fullof[2])
                ysize = int(fullof[3])
                dsProj = ds.GetProjection()

                cliparr = ds.GetRasterBand(1).ReadAsArray(
                    int(round(fullof[0])), int(round(fullof[1])), xsize, ysize)

                clipgeot = [subext[0], cellsize, 0, subext[3], 0, -cellsize]
                extentGProps[extent_name] = [
                    dsProj, clipgeot, xsize, ysize, nodata
                ]

                driver = gdal.GetDriverByName("MEM")
                clipds = driver.Create("", xsize, ysize, 1, gdal.GDT_Float32)
                clipds.SetGeoTransform(clipgeot)
                clipds.SetProjection(ds.GetProjection())
                clipds.GetRasterBand(1).SetNoDataValue(nodata)
                clipds.GetRasterBand(1).WriteArray(cliparr, 0, 0)
                clipds.FlushCache()

                file_basename1 = '{}_{}{}'.format(
                    extent_name.replace(" ", "_"),
                    varprops[0][2].replace(" ", "_").lower(),
                    process_date.strftime('%Y.%m.%d'))

                # Write grid to "file_basename1" in the tmpdir
                WriteGrid(clipds, file_basename1, tmpdir,
                          config.SCRATCH_FILE_DRIVER)
                cliparr = None
                clipds = None
                ds = None

                # Create a flt2dss Task
                flt2dss_task = flt2dss.Task(
                    infile=os.path.join(tmpdir,
                                        '{}.{}'.format(file_basename1, 'bil')),
                    dss_file=dssfile,
                    data_type=varprops[1],
                    pathname='/SHG/{}/{}/{}/{}/{}/'.format(
                        extentarr[0].upper(), varprops[0][2], varprops[0][3],
                        varprops[0][4], varprops[0][5]),
                    grid_type='SHG',
                    data_unit=varprops[3])

                # Add flt2dss Task to Operation
                flt2dss_operation.add_task(flt2dss_task)

    if len(extentGProps) == 0:
        print("An error occurred identifying extent properties.")
        clean_up_tmp_dir(tmpdir)
        return None

    # Write Zero Grids for LIQUID WATER, COLD CONTENT ATI, MELTRATE ATI
    for varcode in ["0001", "0002", "0003"]:
        varprops = PropDict[varcode]
        for extentarr in extents_list:

            file_basename2 = '{}_{}{}'.format(
                extentarr[0].replace(" ", "_"),
                varprops[0][2].replace(" ", "_").lower(),
                process_date.strftime('%Y%m%d'))

            # Write grid to "file_basename2" in the tmpdir
            WriteZeroGrid(extentGProps[extentarr[0]], file_basename2, tmpdir,
                          config.SCRATCH_FILE_DRIVER)

            # Create a flt2dss Task
            flt2dss_task = flt2dss.Task(
                infile=os.path.join(tmpdir,
                                    '{}.{}'.format(file_basename2, 'bil')),
                dss_file=dssfile,
                data_type=varprops[1],
                pathname='/SHG/{}/{}/{}/{}/{}/'.format(
                    extentarr[0].upper(), varprops[0][2], varprops[0][3],
                    varprops[0][4], varprops[0][5]),
                grid_type='SHG',
                data_unit=varprops[3])

            # Add flt2dss Task to Operation
            flt2dss_operation.add_task(flt2dss_task)

    # Write grids to DSS
    flt2dss_operation.execute()

    clean_up_tmp_dir(tmpdir)

    # Write out file to track that we've run for this day.
    with open(histfile, "w") as fo:
        fo.write(process_date.strftime("%a %b %d %H:%M:%S %Y"))
    return dssfile
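The flt2dss module is not shown. A minimal sketch of the batch pattern it appears to implement, with tasks queued by add_task() and written together by execute(); the class internals here are assumptions, not the real flt2dss API:

class Operation(object):
    # Hypothetical stand-in for flt2dss.Operation: collect Task objects,
    # then write all grids to DSS in one pass.
    def __init__(self):
        self.tasks = []

    def add_task(self, task):
        self.tasks.append(task)

    def execute(self):
        for task in self.tasks:
            task.run()  # assumed: each Task writes its own grid to DSS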
Example #4
def prepare_source_data_for_date(process_date, src_dir, save_tiff=True):
    ''' Builds an unzip directory and extracts data from source files
    for a given day.
    Returns the directory path to the unzipped files,
    or None if missing any source data. '''
    ymd_str = process_date.strftime('%Y%m%d')
    unzip_dir = os.path.join(config.PROCESSED_SRC_DIR, 'unzipped_data',
                             ymd_str)
    us_tif_dir = os.path.join(config.PROCESSED_SRC_DIR, 'conus_tiffs')

    # Use 'us' prefix and adjust nodata value for dates before January 24, 2011.
    ds_type = 'zz'
    nodata_val = '-9999'
    if process_date < datetime.datetime(2011, 1, 24, 0, 0):
        ds_type = 'us'
        nodata_val = '55537'

    masterhdr = os.path.join(config.HEADER_KEY_DIR, ds_type + '_master.hdr')

    # Create list of file names for this date.
    snodas_src_files = [
        f.format(ds=ds_type, ymd=ymd_str) for f in SNODAS_FILENAME_LIST
    ]

    # Make sure all files exist before trying any extractions.
    print_dashes()
    print('Processing source data for: {}'.format(
        process_date.strftime('%Y.%m.%d')))
    msgs = []
    for filename in snodas_src_files:
        _file = os.path.join(src_dir, filename + '.grz')
        if not os.path.isfile(_file):
            msgs += ['Missing source data file: {0}'.format(_file)]
    if msgs:
        for msg in msgs:
            print(msg)
        print_dashes()
        return None

    if save_tiff:
        mkdir_p(us_tif_dir)

    # Loop through our filenames and do the unzipping and other set up.
    mkdir_p(unzip_dir)
    for filename in snodas_src_files:
        src_file = os.path.join(src_dir, filename)
        unzip_file = os.path.join(unzip_dir, filename)
        ready_file = unzip_file + '.bil'
        if not os.path.isfile(ready_file):
            print('Processing source to output file: {}'.format(ready_file))
            try:
                UnzipLinux(src_file, unzip_file)
                RawFileManip(unzip_file, masterhdr)
            except Exception as e:
                print('ERROR: Failure in UnzipLinux or RawFileManip: '
                      '{}'.format(e))
                return None
        else:
            print('Using existing source file: {}'.format(ready_file))

        # Save a full version of the day's data set.
        shgtif = os.path.join(us_tif_dir, filename + 'alb.tif')
        if save_tiff:
            if not os.path.isfile(shgtif):
                print('Saving CONUS SHG tiff file: {}'.format(shgtif))
                ReprojUseWarpBil(ready_file,
                                 shgtif,
                                 nodata=nodata_val,
                                 tr_x='1000',
                                 tr_y='-1000')
            else:
                print('CONUS SHG tiff already exists: {}'.format(shgtif))

    print_dashes()
    return unzip_dir
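Taken together, the two functions above suggest a simple daily driver. A sketch, where run_range, watersheds, and opts are illustrative names only:

import datetime

def run_range(start, end, src_dir, office_symbol, watersheds, opts):
    # Stage each day's source data, then process extents only for the
    # days where all source files were present.
    day = start
    while day <= end:
        unzip_dir = prepare_source_data_for_date(day, src_dir)
        if unzip_dir is not None:
            process_extents(office_symbol, day, unzip_dir, watersheds, opts)
        day += datetime.timedelta(days=1)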
Example #5
def process_extents(div_name, dist_name, process_date, src_dir, extents_list,
                    options):
    ''' Main function for processing extents.  Calls lots of helper
    and utility functions.
    div_name: string - Name of division, used in output file format.
    dist_name: string - Name of district, used in output file format.
    process_date: datetime.datetime object - date for which data is desired.
    extents_list: list - list of namedtuples for each watershed.

    Returns the path to the file if new data was written to a DSS file,
    None otherwise.
    '''
    def verbose_print(to_print):
        if options.verbose:
            print to_print

    def clean_up_tmp_dir(tmp_dir):
        if not options.keep_tmp_dir:
            shutil.rmtree(tmp_dir)

    verbose_print('Source directory: {0}'.format(src_dir))

    # Use 'us' prefix and adjust nodata value for dates before January 24, 2011.
    dataset_type = 'zz'
    nodata_val = '-9999'
    if process_date < datetime.datetime(2011, 1, 24, 0, 0):
        dataset_type = 'us'
        nodata_val = '55537'

    projdir = os.path.join(config.TOP_DIR, div_name, dist_name)

    # Use the proper results dir structure based on the config file.
    if config.LEGACY_DIRECTORY_STRUCTURE:
        projresdir = os.path.join(projdir, 'results_sn')
        projascdir = os.path.join(projresdir, 'asc_files')
        projdssdir = os.path.join(projresdir, 'dss_files')
        histdir = os.path.join(projresdir, 'history')
    else:
        projascdir = os.path.join(config.ASC_BASE_DIR, div_name, dist_name)
        projdssdir = os.path.join(config.DSS_BASE_DIR, div_name, dist_name)
        histdir = os.path.join(config.HISTORY_BASE_DIR, div_name, dist_name)

    # Build our results directories if needed.
    mkdir_p(projascdir)
    mkdir_p(projdssdir)
    mkdir_p(histdir)

    dstr = datetime.datetime.now().strftime('%y%m%d%H%M%S')
    ymdDate = process_date.strftime('%Y%m%d')

    # Break out if processing for the given date has already happened.
    histfile = os.path.join(histdir, 'proccomplete' + ymdDate + '.txt')
    if os.path.isfile(histfile):
        print '{0} {1} grids already processed for: {2}'.format(
            div_name, dist_name, process_date.strftime('%Y.%m.%d'))
        return None
    print 'Processing {0} {1} grids for: {2}'.format(
        div_name, dist_name, process_date.strftime('%Y.%m.%d'))

    tmpdir = os.path.join(projascdir, 'tmp' + dstr)
    os.mkdir(tmpdir)

    # Set up a dictionary mapping the various properties to their DSS names.
    PropDict = SetProps(process_date, div_name)
    enameDict = {}
    zerolist = ["0001", "0002", "0003"]
    extentGProps = {}
    maxExtent = getMaxExtent(extents_list)
    dssbasename = GetDSSBaseName(process_date)
    dssfile = os.path.join(projdssdir, dssbasename)

    # Define our files and make sure they all exist.
    snodaslist = [
        s.format(ds=dataset_type, ymd=ymdDate) for s in SNODAS_FILENAME_LIST
    ]

    # Loop through our source SNODAS files.
    for f in snodaslist:

        # Strip the 4-digit variable code from the SNODAS filename.
        varcode = f[8:12]
        varprops = PropDict[varcode]

        easiername = \
            div_name + "_" + varprops[0][2].replace(" ", "_").lower() + ymdDate
        enameDict[varcode] = os.path.join(projascdir, easiername + ".asc")
        shgtif = os.path.join(tmpdir, f + "alb.tif")
        shgtifmath = os.path.join(tmpdir, easiername + ".tif")

        src_file = os.path.join(src_dir, f + '.bil')
        ReprojUseWarpBil(src_file, shgtif, maxExtent, nodata_val)
        mathranokay = True
        if varprops[2]:
            # NOTE: enameDict is populated only for prior product numbers.
            mathranokay = RasterMath(shgtif, shgtifmath, varcode, enameDict)
        else:
            shgtifmath = shgtif
        if mathranokay:
            enameDict[varcode] = shgtifmath
            for extentarr in extents_list:
                ds = gdal.Open(shgtifmath)
                if ds is None:
                    print 'Could not open ' + shgtifmath
                    return None
                nodata = ds.GetRasterBand(1).GetNoDataValue()
                fullext = GetDatasetExtent(ds)
                cellsize = ds.GetGeoTransform()[1]

                subext = extentarr[1]
                fullof, subof = min_box_os(fullext, subext, cellsize)
                xsize = int(fullof[2])
                ysize = int(fullof[3])
                dsProj = ds.GetProjection()

                # Read only the overlapping window from the source band.
                cliparr = ds.GetRasterBand(1).ReadAsArray(
                    int(round(fullof[0])), int(round(fullof[1])), xsize, ysize)
                driver = gdal.GetDriverByName("MEM")

                clipgeot = [subext[0], cellsize, 0, subext[3], 0, -cellsize]
                extentGProps[extentarr[0]] = [
                    dsProj, clipgeot, xsize, ysize, nodata
                ]

                clipds = driver.Create("", xsize, ysize, 1, GDT_Float32)
                clipds.SetGeoTransform(clipgeot)
                clipds.SetProjection(ds.GetProjection())
                clipds.GetRasterBand(1).SetNoDataValue(nodata)
                clipds.GetRasterBand(1).WriteArray(cliparr, 0, 0)
                clipds.FlushCache()
                ascbasename = extentarr[0] + "_" + \
                    varprops[0][2].replace(" ", "_").lower() + ymdDate
                CreateASCII(clipds, ascbasename, tmpdir)
                clipds = None
                ds = None

                tmpasc = os.path.join(tmpdir, ascbasename + ".asc")
                projasc = os.path.join(projascdir, ascbasename + ".asc")
                shutil.copy(tmpasc, projasc)
                shutil.copy(os.path.join(tmpdir, ascbasename + "tmp.prj"),
                            os.path.join(projascdir, ascbasename + ".prj"))

                p = varprops[0]
                dtype = varprops[1]

                path = "/SHG/" + extentarr[0].upper() + "/" + p[2] + \
                    "/" + p[3] + "/" + p[4] + "/" + p[5] + "/"
                WriteToDSS(projasc, dssfile, dtype, path)
                cliparr = None

    if len(extentGProps) == 0:
        print "An error occurred identifying extent properties."
        clean_up_tmp_dir(tmpdir)
        return None

    for varcode in zerolist:
        varprops = PropDict[varcode]
        for extentarr in extents_list:
            p = varprops[0]
            dtype = varprops[1]
            path = "/SHG/" + extentarr[0].upper() + "/" + p[2] + \
                "/" + p[3] + "/" + p[4] + "/" + p[5] + "/"
            ascbasename = extentarr[0] + "_" + \
                varprops[0][2].replace(" ", "_").lower() + ymdDate
            tmpasc = os.path.join(tmpdir, ascbasename + ".asc")
            projasc = os.path.join(projascdir, ascbasename + ".asc")

            WriteZeroDStoAsc(extentGProps[extentarr[0]], ascbasename, tmpdir)
            shutil.copy(tmpasc, projasc)
            shutil.copy(os.path.join(tmpdir, ascbasename + "tmp.prj"),
                        os.path.join(projascdir, ascbasename + ".prj"))
            dssdunits = varprops[3]
            WriteToDSS(projasc, dssfile, dtype, path, dssdunits)

    clean_up_tmp_dir(tmpdir)

    # Write out file to track that we've run for this day.
    with open(histfile, "w") as fo:
        fo.write(process_date.strftime("%a %b %d %H:%M:%S %Y"))
    return dssfile
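The six-part SHG pathname is assembled the same way in several places above; a small helper would keep that in one spot. A sketch (shg_pathname is an illustrative name, not part of the original code):

def shg_pathname(extent_name, p):
    # Build the /SHG/B/C/D/E/F/ DSS pathname from an extent name and the
    # parts list stored in PropDict.
    return "/SHG/{0}/{1}/{2}/{3}/{4}/".format(
        extent_name.upper(), p[2], p[3], p[4], p[5])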