Example #1
def copy_EPIC_input_folders():
    """
    :return:
    """
    # Change directory to where EPIC sims will take place
    time_stamp = time.strftime('%m_%d_%Y_%Hh_%Mm')
    epic_run_dir = constants.run_dir + '_' + time_stamp + os.sep
    constants.make_dir_if_missing(epic_run_dir)

    try:
        # Copy all files from constants.temp_dir to the run directory
        copytree(constants.temp_dir + os.sep, epic_run_dir)
        # Copy over .DAT files produced through NARR, SSURGO and seimf scripts
        copytree(constants.epic_dir + os.sep + 'Data' + os.sep + constants.MGT_TAG, epic_run_dir)
    except Exception:
        logging.info('Error in copying files to ' + epic_run_dir)
    os.chdir(epic_run_dir)

    # Create links to each folder in the management directory
    sub_dirs = os.listdir(constants.mgt_dir)

    for sub_dir in sub_dirs:
        link = epic_run_dir + os.sep + sub_dir + os.sep  # name of the junction being created
        trgt = constants.mgt_dir + os.sep + sub_dir + os.sep  # path the junction refers to

        # Windows
        if os.name == 'nt':
            subprocess.call('mklink /J "%s" "%s"' % (link, trgt), shell=True)

    return time_stamp, epic_run_dir
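Note that the loop above only creates links on Windows (`mklink /J` makes an NTFS junction rather than a true symbolic link, so no administrator rights are needed). A minimal cross-platform sketch, assuming the target directories exist; the helper name `link_dir` is ours, not part of the source:

import os
import subprocess

def link_dir(link, trgt):
    # On Windows, create an NTFS junction (works without admin rights)
    if os.name == 'nt':
        subprocess.call('mklink /J "%s" "%s"' % (link, trgt), shell=True)
    # Elsewhere, fall back to a regular symlink
    else:
        os.symlink(trgt, link)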
Example #2
def copy_EPIC_input_folders():
    """
    :return:
    """
    # Change directory to where EPIC sims will take place
    time_stamp = time.strftime('%m_%d_%Y_%Hh_%Mm')
    epic_run_dir = constants.run_dir + '_' + time_stamp + os.sep
    constants.make_dir_if_missing(epic_run_dir)

    try:
        # Copy all files from constants.temp_dir to the run directory
        copytree(constants.temp_dir + os.sep, epic_run_dir)
        # Copy over .DAT files produced through NARR, SSURGO and seimf scripts
        copytree(
            constants.epic_dir + os.sep + 'Data' + os.sep + constants.MGT_TAG,
            epic_run_dir)
    except Exception:
        logging.info('Error in copying files to ' + epic_run_dir)
    os.chdir(epic_run_dir)

    # Create links to each folder in the management directory
    sub_dirs = os.listdir(constants.mgt_dir)

    for sub_dir in sub_dirs:
        link = epic_run_dir + os.sep + sub_dir + os.sep  # name of the junction being created
        trgt = constants.mgt_dir + os.sep + sub_dir + os.sep  # path the junction refers to

        # Windows
        if os.name == 'nt':
            subprocess.call('mklink /J "%s" "%s"' % (link, trgt), shell=True)

    return time_stamp, epic_run_dir
Example #3
def copy_EPIC_mgt_files():
    # Copy over mgt directory (containing .ops files) to the EPIC input files directory
    try:
        constants.make_dir_if_missing(constants.mgt_dir + os.sep + constants.MGT_TAG)
        copytree(constants.epic_dir + os.sep + 'Data' + os.sep + constants.MGT_TAG, constants.mgt_dir + os.sep + constants.MGT_TAG)
    except Exception:
        logging.info('Cannot copy over management directory to EPIC input files directory')
Example #4
def copy_EPIC_mgt_files():
    # Copy over mgt directory (containing .ops files) to the EPIC input files directory
    try:
        constants.make_dir_if_missing(constants.mgt_dir + os.sep +
                                      constants.MGT_TAG)
        copytree(
            constants.epic_dir + os.sep + 'Data' + os.sep + constants.MGT_TAG,
            constants.mgt_dir + os.sep + constants.MGT_TAG)
    except Exception:
        logging.info(
            'Cannot copy over management directory to EPIC input files directory'
        )
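One caveat worth flagging: the stdlib `shutil.copytree` refuses to copy into an existing destination before Python 3.8, which is likely why these calls are wrapped in try/except and only logged on failure. If `copytree` here is the stdlib one, a merge-friendly sketch (assuming Python 3.8+; paths hypothetical):

import shutil

src_dir = '/path/to/epic/Data/mgt'   # hypothetical source
dst_dir = '/path/to/mgt_inputs'      # hypothetical destination

# dirs_exist_ok=True (Python 3.8+) merges into an existing directory
shutil.copytree(src_dir, dst_dir, dirs_exist_ok=True)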
Example #5
def store_EPIC_Output(dir_path='', epic_run_dir=''):
    """
    :param time_stamp:
    :param epic_run_dir:
    :return:
    """
    # Create output directory
    out_dir = constants.make_dir_if_missing(constants.epic_dir + os.sep + 'output' + os.sep + dir_path)

    # Loop over all EPIC output files and move them to separate subfolders in the output directory
    for fl_type in constants.EPICOUT_FLS:
        fl_dir = constants.make_dir_if_missing(out_dir + os.sep + fl_type)
        for file_name in glob.iglob(os.path.join(epic_run_dir, '*.'+fl_type)):
            # @TODO: xxxxx as terminator needs to be standardized
            if os.path.basename(file_name)[:-4] != 'xxxxx':
                shutil.move(file_name, fl_dir + os.sep + os.path.basename(file_name))
Example #6
def store_EPIC_Output(dir_path='', epic_run_dir=''):
    """
    :param time_stamp:
    :param epic_run_dir:
    :return:
    """
    # Create output directory
    out_dir = constants.make_dir_if_missing(constants.epic_dir + os.sep +
                                            'output' + os.sep + dir_path)

    # Loop over all EPIC output files and move them to separate subfolders in the output directory
    for fl_type in constants.EPICOUT_FLS:
        fl_dir = constants.make_dir_if_missing(out_dir + os.sep + fl_type)
        for file_name in glob.iglob(os.path.join(epic_run_dir,
                                                 '*.' + fl_type)):
            # @TODO: xxxxx as terminator needs to be standardized
            if os.path.basename(file_name)[:-4] != 'xxxxx':
                shutil.move(file_name,
                            fl_dir + os.sep + os.path.basename(file_name))
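The loop above sorts EPIC outputs into one subfolder per extension and skips the `xxxxx` terminator file. The same pattern condensed with `pathlib` (paths and extensions are hypothetical stand-ins):

import shutil
from pathlib import Path

run_dir = Path('/path/to/epic_run')     # hypothetical run directory
out_dir = Path('/path/to/epic/output')  # hypothetical output directory

for fl_type in ['ACY', 'DGN']:          # hypothetical stand-ins for constants.EPICOUT_FLS
    fl_dir = out_dir / fl_type
    fl_dir.mkdir(parents=True, exist_ok=True)
    for fl in run_dir.glob('*.' + fl_type):
        if fl.stem != 'xxxxx':          # skip the run terminator file
            shutil.move(str(fl), str(fl_dir / fl.name))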
Example #7
def erase_PAD(state, ras, replace):
    # Process: Erase

    pad_state = constants.pad_dir + 'PAD-US_' + state + '\\PADUS1_3_' + state + '.gdb\\PADUS1_3' + state
    pad_out_dir = constants.pad_dir + 'output\\' + state + os.sep
    bound_out_dir = constants.bound_dir + 'output\\' + state + os.sep
    state_dir = constants.out_dir + os.sep + state + os.sep

    constants.make_dir_if_missing(pad_out_dir)
    constants.make_dir_if_missing(bound_out_dir)
    constants.make_dir_if_missing(state_dir)

    select_state = bound_out_dir + state + '.shp'
    erased_pad = pad_out_dir + state + '.shp'
    extract_comb = state_dir + 'ext_' + state + '_' + str(
        constants.START_YEAR)[2:] + '_' + str(constants.END_YEAR)[2:]

    # Select the state boundary
    if arcpy.Exists(select_state) and not (replace):
        pass
    else:
        where = '"STATE_ABBR" = ' + "'%s'" % state.upper()
        try:
            arcpy.Select_analysis(constants.BOUNDARIES, select_state, where)
        except Exception:
            logging.info(arcpy.GetMessages())

    # Erase protected areas (PAD) from the state boundary
    if arcpy.Exists(erased_pad) and not (replace):
        pass
    else:
        try:
            arcpy.Erase_analysis(select_state, pad_state, erased_pad, "")
        except Exception:
            logging.info(arcpy.GetMessages())

    # Extract the raster within the remaining unprotected area
    if arcpy.Exists(extract_comb) and not (replace):
        pass
    else:
        try:
            # Create bounding box from polygon (xmin, ymin, xmax, ymax)
            #desc = arcpy.Describe(erased_pad)
            #rectangle = "%s %s %s %s" % (desc.extent.XMin, desc.extent.YMin, desc.extent.XMax,   desc.extent.YMax)

            #arcpy.Clip_management(ras,rectangle,extract_comb,erased_pad,"#","ClippingGeometry")
            arcpy.gp.ExtractByMask_sa(ras, erased_pad, extract_comb)
        except Exception:
            logging.info(arcpy.GetMessages())

    logging.info('\t Erasing PAD from state ' + state)
    return extract_comb
Example #8
def erase_PAD(state,ras,replace):
    # Process: Erase
    
    pad_state     = constants.pad_dir+'PAD-US_'+state+'\\PADUS1_3_'+state+'.gdb\\PADUS1_3'+state    
    pad_out_dir   = constants.pad_dir+'output\\'+state+os.sep
    bound_out_dir = constants.bound_dir+'output\\'+state+os.sep
    state_dir     = constants.out_dir+os.sep+state+os.sep
    
    constants.make_dir_if_missing(pad_out_dir)
    constants.make_dir_if_missing(bound_out_dir)
    constants.make_dir_if_missing(state_dir)
    
    select_state  = bound_out_dir+state+'.shp'
    erased_pad    = pad_out_dir+state+'.shp'
    extract_comb  = state_dir+'ext_'+state+'_'+str(constants.START_YEAR)[2:]+'_'+str(constants.END_YEAR)[2:]

    # Select the state boundary
    if arcpy.Exists(select_state) and not(replace):
        pass
    else:
        where = '"STATE_ABBR" = ' + "'%s'" %state.upper()
        try:
            arcpy.Select_analysis(constants.BOUNDARIES,select_state,where)
        except Exception:
            logging.info(arcpy.GetMessages())

    # Erase protected areas (PAD) from the state boundary
    if arcpy.Exists(erased_pad) and not(replace):
        pass
    else:
        try:
            arcpy.Erase_analysis(select_state,pad_state,erased_pad, "")
        except Exception:
            logging.info(arcpy.GetMessages())

    # Extract the raster within the remaining unprotected area
    if arcpy.Exists(extract_comb) and not(replace):
        pass
    else:
        try:
            # Create bounding box from polygon (xmin, ymin, xmax, ymax)
            #desc = arcpy.Describe(erased_pad)
            #rectangle = "%s %s %s %s" % (desc.extent.XMin, desc.extent.YMin, desc.extent.XMax,   desc.extent.YMax)
            
            #arcpy.Clip_management(ras,rectangle,extract_comb,erased_pad,"#","ClippingGeometry")
            arcpy.gp.ExtractByMask_sa(ras,erased_pad,extract_comb)
        except Exception:
            logging.info(arcpy.GetMessages())

    logging.info('\t Erasing PAD from state '+state)
    return extract_comb
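Every geoprocessing step in erase_PAD repeats the same guard: skip work when the output already exists unless replace is set, and log arcpy messages on failure. A sketch of that pattern factored into a helper (the name run_if_missing is ours; assumes arcpy and logging are imported as in the examples):

def run_if_missing(output, replace, tool, *args):
    # Run tool(*args) only when the output is absent or replace is requested
    if arcpy.Exists(output) and not replace:
        return
    try:
        tool(*args)
    except Exception:
        logging.info(arcpy.GetMessages())

# Hypothetical usage:
# run_if_missing(select_state, replace,
#                arcpy.Select_analysis, constants.BOUNDARIES, select_state, where)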
Example #9
def merge_ssurgo_rasters(st):
    files_to_delete = [] # Temporary files which will be deleted at the end
    list_sgo_files  = [] # List of SSURGO files to merge
    
    cdl_spatial_ref = arcpy.SpatialReference('NAD 1983 Contiguous USA Albers')
    
    # Iterate over all the states contained in the dictionary state_names
    logging.info(st)
        
    # Output directory to store merged SSURGO files for each state
    out_ssurgo_dir = constants.r_soil_dir+os.sep+constants.SOIL+os.sep+st
    constants.make_dir_if_missing(out_ssurgo_dir)

    # Name and folder of the single raster mosaic produced for the state
    # (defined here so they exist even if no SSURGO folder matches below)
    merged_soil_folder = out_ssurgo_dir
    merged_soil_file   = st+'_'+constants.SOIL

    # For each state, process the SSURGO spatial files
    for dir_name, subdir_list, file_list in os.walk(constants.data_dir):
        if '_'+st+'_' in dir_name and constants.SPATIAL in subdir_list:
            in_ssurgo_dir = dir_name+os.sep+constants.SPATIAL+os.sep

            # The reclassification is done to make the VALUE equal to the MUKEY
            recl_ssurgo_csv = open(out_ssurgo_dir+os.sep+st+'.csv', 'w')
            recl_ssurgo_csv.write('FROM, TO, VALUE\n')
            recl_ssurgo_csv.flush()   

            # Iterate through each of the soil files for a given state
            in_ssurgo_file = dir_name+os.sep+constants.SPATIAL+os.sep+\
                            fnmatch.filter(os.listdir(dir_name+os.sep+constants.SPATIAL+os.sep),\
                            'soilmu_a_*.shp')[0]

            # reproj_file is the name of the reprojected ssurgo file
            reproj_file      = out_ssurgo_dir+os.sep+os.path.basename(in_ssurgo_file[:-4])+'_reproj.shp'

            # out_ssurgo_file is the reprojected and reclassified (to CDL resolution) SSURGO file
            out_ssurgo_file  = out_ssurgo_dir+os.sep+os.path.basename(in_ssurgo_file[:-4])[9:]

            # reclass_ssurgo_file has the MUKEY as the VALUE column
            recl_ssurgo_file = out_ssurgo_dir+os.sep+os.path.basename(in_ssurgo_file[:-4])[9:]+'_recl'
            list_sgo_files.append(recl_ssurgo_file)

            files_to_delete.append(out_ssurgo_file)
            files_to_delete.append(recl_ssurgo_file)
            files_to_delete.append(reproj_file)
            
            if not(arcpy.Exists(recl_ssurgo_file)):
                logging.info('Shapefile '+os.path.basename(in_ssurgo_file)+\
                            ' is being reprojected, reclassified and converted to raster '+\
                            os.path.basename(out_ssurgo_file))
        
                try:
                    arcpy.Project_management(in_ssurgo_file, reproj_file, cdl_spatial_ref)
                    arcpy.FeatureToRaster_conversion(reproj_file, constants.MUKEY, out_ssurgo_file, constants.cdl_res)

                    # Create table for performing reclassification
                    recl_ssurgo_csv = open(out_ssurgo_dir+os.sep+st+'.csv', 'a+')
                    with arcpy.da.SearchCursor(out_ssurgo_file, ['VALUE',constants.MUKEY]) as cursor:
                        for row in cursor:
                            recl_ssurgo_csv.write(str(row[0])+', '+str(row[0])+', '+str(row[1])+'\n')        
                    recl_ssurgo_csv.close()

                    out_reclass = ReclassByTable(out_ssurgo_file,out_ssurgo_dir+os.sep+st+'.csv', "FROM", "TO", "VALUE", "DATA")
                    out_reclass.save(recl_ssurgo_file)
                except Exception:
                    logging.info(arcpy.GetMessages())
                    delete_temp_files(files_to_delete)
            else:
                logging.info('File present: '+recl_ssurgo_file)

    # Create new raster mosaic
    if not(arcpy.Exists(merged_soil_folder+os.sep+merged_soil_file)):
        list_sgo_files = ';'.join(list_sgo_files)  
        try:                  
            arcpy.MosaicToNewRaster_management(list_sgo_files,merged_soil_folder,merged_soil_file, "",\
                                                "32_BIT_SIGNED", "", "1", "LAST","FIRST")
            arcpy.BuildRasterAttributeTable_management(merged_soil_folder+os.sep+merged_soil_file, "Overwrite")
            logging.info('Created mosaicked raster '+merged_soil_folder+os.sep+merged_soil_file)
        except Exception:
            logging.info(arcpy.GetMessages())
            delete_temp_files(files_to_delete)
    else:
        logging.info('File present: '+merged_soil_folder+os.sep+merged_soil_file)
    delete_temp_files(files_to_delete)
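The remap CSV written above gives ReclassByTable one row per raster VALUE, with FROM equal to TO so each VALUE maps one-to-one onto its MUKEY. A standalone sketch of that table format using the csv module (values illustrative):

import csv

rows = [(1, 1, 163055), (2, 2, 163057)]  # illustrative (FROM, TO, MUKEY) triples
with open('remap.csv', 'w', newline='') as fh:
    writer = csv.writer(fh)
    writer.writerow(['FROM', 'TO', 'VALUE'])
    writer.writerows(rows)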
Example #10
def merge_ssurgo_rasters(st):
    files_to_delete = []  # Temporary files which will be deleted at the end
    list_sgo_files = []  # List of SSURGO files to merge

    cdl_spatial_ref = arcpy.SpatialReference('NAD 1983 Contiguous USA Albers')

    # Iterate over all the states contained in the dictionary state_names
    logging.info(st)

    # Output directory to store merged SSURGO files for each state
    out_ssurgo_dir = constants.r_soil_dir + os.sep + constants.SOIL + os.sep + st
    constants.make_dir_if_missing(out_ssurgo_dir)

    # Name and folder of the single raster mosaic produced for the state
    # (defined here so they exist even if no SSURGO folder matches below)
    merged_soil_folder = out_ssurgo_dir
    merged_soil_file = st + '_' + constants.SOIL

    # For each state, process the SSURGO spatial files
    for dir_name, subdir_list, file_list in os.walk(constants.data_dir):
        if '_' + st + '_' in dir_name and constants.SPATIAL in subdir_list:
            in_ssurgo_dir = dir_name + os.sep + constants.SPATIAL + os.sep

            # The reclassification is done to make the VALUE equal to the MUKEY
            recl_ssurgo_csv = open(out_ssurgo_dir + os.sep + st + '.csv', 'w')
            recl_ssurgo_csv.write('FROM, TO, VALUE\n')
            recl_ssurgo_csv.flush()

            # Iterate through each of the soil files for a given state
            in_ssurgo_file = dir_name+os.sep+constants.SPATIAL+os.sep+\
                            fnmatch.filter(os.listdir(dir_name+os.sep+constants.SPATIAL+os.sep),\
                            'soilmu_a_*.shp')[0]

            # reproj_file is the name of the reprojected ssurgo file
            reproj_file = out_ssurgo_dir + os.sep + os.path.basename(
                in_ssurgo_file[:-4]) + '_reproj.shp'

            # out_ssurgo_file is the reprojected and reclassified (to CDL resolution) SSURGO file
            out_ssurgo_file = out_ssurgo_dir + os.sep + os.path.basename(
                in_ssurgo_file[:-4])[9:]

            # reclass_ssurgo_file has the MUKEY as the VALUE column
            recl_ssurgo_file = out_ssurgo_dir + os.sep + os.path.basename(
                in_ssurgo_file[:-4])[9:] + '_recl'
            list_sgo_files.append(recl_ssurgo_file)

            files_to_delete.append(out_ssurgo_file)
            files_to_delete.append(recl_ssurgo_file)
            files_to_delete.append(reproj_file)

            if not (arcpy.Exists(recl_ssurgo_file)):
                logging.info('Shapefile '+os.path.basename(in_ssurgo_file)+\
                            ' is being reprojected, reclassified and converted to raster '+\
                            os.path.basename(out_ssurgo_file))

                try:
                    arcpy.Project_management(in_ssurgo_file, reproj_file,
                                             cdl_spatial_ref)
                    arcpy.FeatureToRaster_conversion(reproj_file,
                                                     constants.MUKEY,
                                                     out_ssurgo_file,
                                                     constants.cdl_res)

                    # Create table for performing reclassification
                    recl_ssurgo_csv = open(
                        out_ssurgo_dir + os.sep + st + '.csv', 'a+')
                    with arcpy.da.SearchCursor(
                            out_ssurgo_file,
                        ['VALUE', constants.MUKEY]) as cursor:
                        for row in cursor:
                            recl_ssurgo_csv.write(
                                str(row[0]) + ', ' + str(row[0]) + ', ' +
                                str(row[1]) + '\n')
                    recl_ssurgo_csv.close()

                    out_reclass = ReclassByTable(
                        out_ssurgo_file, out_ssurgo_dir + os.sep + st + '.csv',
                        "FROM", "TO", "VALUE", "DATA")
                    out_reclass.save(recl_ssurgo_file)
                except Exception:
                    logging.info(arcpy.GetMessages())
                    delete_temp_files(files_to_delete)
            else:
                logging.info('File present: ' + recl_ssurgo_file)

    # Create new raster mosaic
    if not (arcpy.Exists(merged_soil_folder + os.sep + merged_soil_file)):
        list_sgo_files = ';'.join(list_sgo_files)
        try:
            arcpy.MosaicToNewRaster_management(list_sgo_files,merged_soil_folder,merged_soil_file, "",\
                                                "32_BIT_SIGNED", "", "1", "LAST","FIRST")
            arcpy.BuildRasterAttributeTable_management(
                merged_soil_folder + os.sep + merged_soil_file, "Overwrite")
            logging.info('Created mosaicked raster ' + merged_soil_folder +
                         os.sep + merged_soil_file)
        except Exception:
            logging.info(arcpy.GetMessages())
            delete_temp_files(files_to_delete)
    else:
        logging.info('File present: ' + merged_soil_folder + os.sep +
                     merged_soil_file)
    delete_temp_files(files_to_delete)
Example #11
def seimf(state, init_site=0):
    """
    1. Combine soil and landuse data
    2. Invokes other functions to create sites (write_epic_site_fl) and EPICRUN.dat (write_epicrun_fl)
    :param state:
    :return:
    """
    logging.info(state)

    # Reclassify the SEIMF dataset so that VALUES from different states do not overlap

    sgo_dir = constants.epic_dir + os.sep + 'Data' + os.sep + 'ssurgo' + os.sep + state + os.sep
    lu_dir = constants.epic_dir + os.sep + 'Data' + os.sep + 'LU' + os.sep + state + os.sep

    constants.make_dir_if_missing(constants.out_dir)

    # Combine SSURGO and land use data
    out_raster = constants.out_dir + os.sep + 'SEIMF_' + state
    inp_rasters = '"'  # contains the list of rasters which are to be merged together to create the SEIMF geodatabase

    if not (arcpy.Exists(out_raster)):
        inp_rasters += sgo_dir + os.sep + state + '_ssurgo' + '; ' + lu_dir + os.sep + 'open_' + str(
            constants.year) + '_' + state + '"'
        try:
            out_combine = Combine(inp_rasters)
            out_combine.save(out_raster)
            logging.info('Combined rasters to SEIMF raster ' + out_raster)
        except Exception:
            logging.info(arcpy.GetMessages())
    else:
        logging.info('File present: ' + out_raster)

    max_site = increment_raster_VAT(state=state,
                                    ras=out_raster,
                                    incr_val=init_site)

    # Compute centroid of each HSMU using zonal geometry
    zgeom_dbf = constants.out_dir + os.sep + state + '.dbf'
    reproj_ras = constants.out_dir + os.sep + state + '_reproj'

    try:
        # Spatial reference factory codes:
        # http://resources.arcgis.com/en/help/main/10.1/018z/pdf/geographic_coordinate_systems.pdf
        # 4269: GCS_North_American_1983
        cdl_spatial_ref = arcpy.SpatialReference(4269)
        arcpy.ProjectRaster_management(out_raster, reproj_ras, cdl_spatial_ref)
        logging.info('Reprojected: ' + reproj_ras)

        if not (arcpy.Exists(zgeom_dbf)):
            # Zonal geometry is time consuming, so cache the operation
            ZonalGeometryAsTable(reproj_ras, 'VALUE', zgeom_dbf)
            logging.info('Computed zonal geometry ' + zgeom_dbf)
        else:
            logging.info('File present: ' + zgeom_dbf)

        join_flds = '"'
        join_flds += state.upper() + '_SSURGO;OPEN_' + str(
            constants.year) + '_' + state.upper(
            ) + ';XCENTROID;YCENTROID' + '"'

        arcpy.JoinField_management(out_raster, "VALUE", zgeom_dbf, "VALUE",
                                   join_flds)
        logging.info('JoinField_management ')
    except Exception:
        logging.info(arcpy.GetMessages())

    site_dict = write_epic_site_fl(state, out_raster, site_num=init_site)

    write_epicrun_fl(state, site_dict, site_num=init_site)

    return max_site
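Since increment_raster_VAT shifts each state's VALUEs by init_site and seimf returns the new maximum, calls can be chained so site numbers never collide across states. A hypothetical driver:

max_site = 0
for st in ['IA', 'IL', 'IN']:  # hypothetical state list
    # each call offsets its site numbers by the running maximum of earlier states
    max_site = seimf(st, init_site=max_site)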
Example #12
def reclassify_and_combine(state, state_lcc, state_cdl_files, range_of_yrs,
                           replace):
    to_comb_rasters = []
    end_open_ras = constants.epic_dir + os.sep + state + os.sep + 'Open_' + str(
        constants.END_YEAR) + '_' + state  # OPEN_20xx_<state_name>
    start_open_ras = constants.epic_dir + os.sep + state + os.sep + 'Open_' + str(
        constants.START_YEAR) + '_' + state  # OPEN_20xx_<state_name>
    constants.make_dir_if_missing(constants.epic_dir + os.sep + state)

    # Create output directory for each state
    state_dir = constants.out_dir + os.sep + state + os.sep
    constants.make_dir_if_missing(state_dir)

    # Reclass for each year
    idx = 0
    for yr in range_of_yrs:
        recl_raster = constants.shared_dir + constants.RECL + '_' + state + '_' + str(
            yr)

        if arcpy.Exists(recl_raster) and not (replace):
            idx += 1
        else:
            try:
                out_reclass = ReclassByASCIIFile(state_cdl_files[idx],
                                                 constants.REMAP_FILE,
                                                 "NODATA")
                out_reclass.save(recl_raster)
                idx += 1
            except Exception:
                logging.info(arcpy.GetMessages())

        logging.info('\tReclassified...' + recl_raster)
        to_comb_rasters.append(recl_raster)

        # Extract open land acreage in the last year
        if (yr == constants.END_YEAR):
            if arcpy.Exists(end_open_ras) and not (replace):
                pass
            else:
                where = "VALUE = " + str(constants.OPEN)
                try:
                    att_extract = ExtractByAttributes(recl_raster, where)
                    att_extract.save(end_open_ras)

                    create_zonal_state(state, end_open_ras,
                                       constants.START_YEAR)
                except Exception:
                    logging.info(arcpy.GetMessages())
                logging.info('\tExtracted Open Lands...' + end_open_ras)
        elif (yr == constants.START_YEAR
              ):  # Extract open land acreage in the first year
            if arcpy.Exists(start_open_ras) and not (replace):
                pass
            else:
                where = "VALUE = " + str(constants.OPEN)
                try:
                    att_extract = ExtractByAttributes(recl_raster, where)
                    att_extract.save(start_open_ras)

                    create_zonal_state(state, start_open_ras,
                                       constants.END_YEAR)
                except Exception:
                    logging.info(arcpy.GetMessages())
                logging.info('\tExtracted Open Lands...' + start_open_ras)

    to_comb_rasters.append(state_lcc)

    # Combine all input rasters
    comb_raster = constants.shared_dir + os.sep + 'comb_' + state + '_' + str(
        range_of_yrs[0])[2:] + '_' + str(
            range_of_yrs[len(range_of_yrs) - 1])[2:]

    if arcpy.Exists(comb_raster) and not (replace):
        pass
    else:
        try:
            out_combine = Combine(to_comb_rasters)
            out_combine.save(comb_raster)
        except Exception:
            logging.info(arcpy.GetMessages())

    logging.info('\tCombined...' + comb_raster)
    return comb_raster
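Note that idx advances through state_cdl_files in lockstep with range_of_yrs, so the two must be ordered consistently. A hypothetical call (get_cdl_raster is an assumed helper, not part of the source):

range_of_yrs = list(range(constants.START_YEAR, constants.END_YEAR + 1))
# state_cdl_files[i] must be the CDL raster for range_of_yrs[i]
state_cdl_files = [get_cdl_raster(state, yr) for yr in range_of_yrs]  # get_cdl_raster is hypothetical
comb_raster = reclassify_and_combine(state, state_lcc, state_cdl_files,
                                     range_of_yrs, replace=False)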
Example #13
def reclassify_and_combine(state,state_lcc,state_cdl_files,range_of_yrs,replace):
    to_comb_rasters = []
    end_open_ras    = constants.epic_dir+os.sep+state+os.sep+'Open_'+str(constants.END_YEAR)+'_'+state # OPEN_20xx_<state_name>
    start_open_ras  = constants.epic_dir+os.sep+state+os.sep+'Open_'+str(constants.START_YEAR)+'_'+state # OPEN_20xx_<state_name>
    constants.make_dir_if_missing(constants.epic_dir+os.sep+state)

    # Create output directory for each state
    state_dir  = constants.out_dir+os.sep+state+os.sep
    constants.make_dir_if_missing(state_dir)

    # Reclass for each year
    idx = 0
    for yr in range_of_yrs:
        recl_raster = constants.shared_dir+constants.RECL+'_'+state+'_'+str(yr)

        if arcpy.Exists(recl_raster) and not(replace):      
            idx += 1      
        else:
            try:
                out_reclass = ReclassByASCIIFile(state_cdl_files[idx],constants.REMAP_FILE,"NODATA")        
                out_reclass.save(recl_raster)
                idx        += 1
            except Exception:
                logging.info(arcpy.GetMessages())
        
        logging.info('\tReclassified...'+recl_raster)
        to_comb_rasters.append(recl_raster)

        # Extract open land acreage in the last year
        if (yr == constants.END_YEAR): 
            if arcpy.Exists(end_open_ras) and not(replace):
                pass
            else:
                where = "VALUE = "+str(constants.OPEN)
                try:
                    att_extract = ExtractByAttributes(recl_raster,where) 
                    att_extract.save(end_open_ras)     
                    
                    create_zonal_state(state,end_open_ras,constants.START_YEAR)               
                except Exception:
                    logging.info(arcpy.GetMessages())
                logging.info('\tExtracted Open Lands...'+end_open_ras)
        elif(yr == constants.START_YEAR): # Extract open land acreage in the first year
            if arcpy.Exists(start_open_ras) and not(replace):
                pass
            else:
                where = "VALUE = "+str(constants.OPEN)
                try:
                    att_extract = ExtractByAttributes(recl_raster,where) 
                    att_extract.save(start_open_ras)        
                    
                    create_zonal_state(state,start_open_ras,constants.END_YEAR)                        
                except Exception:
                    logging.info(arcpy.GetMessages())
                logging.info('\tExtracted Open Lands...'+start_open_ras)

    to_comb_rasters.append(state_lcc)
    
    # Combine all input rasters
    comb_raster = constants.shared_dir+os.sep+'comb_'+state+'_'+str(range_of_yrs[0])[2:]+'_'+str(range_of_yrs[len(range_of_yrs)-1])[2:]
      
    if arcpy.Exists(comb_raster) and not(replace):
        pass
    else:     
        try:   
            out_combine = Combine(to_comb_rasters)
            out_combine.save(comb_raster)
        except Exception:
            logging.info(arcpy.GetMessages())
        
    logging.info('\tCombined...'+comb_raster)
    return comb_raster
Example #14
def seimf(state, init_site=0):
    """
    1. Combine soil and landuse data
    2. Invokes other functions to create sites (write_epic_site_fl) and EPICRUN.dat (write_epicrun_fl)
    :param state:
    :return:
    """
    logging.info(state)

    # Reclassify the SEIMF dataset so that VALUES from different states do not overlap

    sgo_dir = constants.epic_dir + os.sep + 'Data' + os.sep + 'ssurgo' + os.sep + state + os.sep
    lu_dir  = constants.epic_dir + os.sep + 'Data' + os.sep + 'LU' + os.sep + state + os.sep

    constants.make_dir_if_missing(constants.out_dir)

    # Combine SSURGO and land use data
    out_raster = constants.out_dir + os.sep + 'SEIMF_' + state
    inp_rasters = '"' # contains the list of rasters which are to be merged together to create the SEIMF geodatabase

    if not(arcpy.Exists(out_raster)):
        inp_rasters += sgo_dir + os.sep + state + '_ssurgo' + '; ' + lu_dir + os.sep + 'open_' + str(constants.year) + '_' + state + '"'
        try:
            out_combine = Combine(inp_rasters)
            out_combine.save(out_raster)
            logging.info('Combined rasters to SEIMF raster ' + out_raster)
        except Exception:
            logging.info(arcpy.GetMessages())
    else:
        logging.info('File present: ' + out_raster)

    max_site = increment_raster_VAT(state=state, ras=out_raster, incr_val=init_site)

    # Compute centroid of each HSMU using zonal geometry
    zgeom_dbf  = constants.out_dir + os.sep + state + '.dbf'
    reproj_ras = constants.out_dir + os.sep + state + '_reproj'

    try:
        # Spatial reference factory codes:
        # http://resources.arcgis.com/en/help/main/10.1/018z/pdf/geographic_coordinate_systems.pdf
        # 4269: GCS_North_American_1983
        cdl_spatial_ref = arcpy.SpatialReference(4269)
        arcpy.ProjectRaster_management(out_raster, reproj_ras, cdl_spatial_ref)
        logging.info('Reprojected: ' + reproj_ras)

        if not(arcpy.Exists(zgeom_dbf)):
            # Zonal geometry is time consuming, so cache the operation
            ZonalGeometryAsTable(reproj_ras, 'VALUE', zgeom_dbf)
            logging.info('Computed zonal geometry '+zgeom_dbf)
        else:
            logging.info('File present: ' + zgeom_dbf)

        join_flds  = '"'
        join_flds += state.upper()+'_SSURGO;OPEN_'+str(constants.year)+'_'+state.upper()+';XCENTROID;YCENTROID'+'"'

        arcpy.JoinField_management(out_raster,"VALUE",zgeom_dbf,"VALUE",join_flds)
        logging.info('JoinField_management ')
    except Exception:
        logging.info(arcpy.GetMessages())

    site_dict = write_epic_site_fl(state, out_raster, site_num=init_site)
    
    write_epicrun_fl(state, site_dict, site_num=init_site)

    return max_site
Example #15
def SSURGO_to_csv():
    sgo_data = pd.DataFrame()

    for st in constants.list_st:
        logging.info(st)

        # For each state, process the SSURGO tabular files
        for dir_name, subdir_list, file_list in os.walk(constants.data_dir):
            if('_'+st+'_' in dir_name and constants.TABULAR in subdir_list):
                logging.info(dir_name[-3:]) # County FIPS code

                try:
                    tmp_df = read_ssurgo_tables(dir_name+os.sep+constants.TABULAR)
                except ValueError:
                    logging.info('Empty dataframe from one of SSURGO files')
                    continue

                tmp_df['state']  = st
                tmp_df['county'] = dir_name[-3:]
                tmp_df['FIPS']   = int(us.states.lookup(st).fips+dir_name[-3:])

                sgo_data         = pd.concat([tmp_df,sgo_data],ignore_index =True)

    # Drop columns with all missing values
    sgo_data.dropna(axis=1,how='all',inplace=True)
    # Replace hydgrp values with integers
    sgo_data.replace(constants.hydgrp_vars,inplace=True)    

    # If any null values exist, replace with mean of value in mukey
    df3 = pd.DataFrame()
    logging.info('If any null values exist, replace with mean of value in mukey')
    if(np.any(sgo_data.isnull())):
        df1 = sgo_data.set_index('mukey')
        df2 = sgo_data.groupby('mukey').mean()
        df3 = df1.combine_first(df2)

        # If any null values remain, replace by county mean
        logging.info('If any null values remain, replace by county mean')
        if(np.any(df3.isnull())):     
            df1      = df3.reset_index().set_index('FIPS')
            cnt_mean = sgo_data.groupby(['FIPS']).mean()
            df3      = df1.combine_first(cnt_mean)

        # If any null values remain, replace by state mean
        logging.info('If any null values remain, replace by state mean')
        if(np.any(df3.isnull())):
            df1     = df3.reset_index().set_index('state')
            st_mean = sgo_data.groupby(['state']).mean()
            df3     = df1.combine_first(st_mean)
    else:
        # No nulls to fill; carry the original frame forward
        df3 = sgo_data

    df3.reset_index(inplace=True)
    # Convert niccdcd and hydgrp to integers
    df3['hydgrp']  = df3['hydgrp'].astype(int)
    df3['niccdcd'] = df3['niccdcd'].astype(int)

    # Drop components with non zero initial depth
    #logging.info('Drop faulty components')
    #drop_df = df3.groupby('cokey').filter(lambda x: x['hzdept_r'].min() <= 0)

    logging.info('Select the dominant component')
    dom_df = df3.groupby('mukey').apply(lambda g: g[g['comppct_r']==g['comppct_r'].max()])

    #drop_df.to_csv(constants.out_dir+'drop.csv')
    out_ssurgo_dir = constants.r_soil_dir+os.sep+constants.SOIL+os.sep
    constants.make_dir_if_missing(out_ssurgo_dir)
    df3.to_csv(out_ssurgo_dir+os.sep+constants.all)
    dom_df.to_csv(out_ssurgo_dir+os.sep+constants.dominant)
    logging.info('Done!')
    return dom_df
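The null-filling cascade above leans on DataFrame.combine_first: index the frame on the grouping key, then patch holes from the group means. A toy illustration of the first (per-mukey) step:

import numpy as np
import pandas as pd

df = pd.DataFrame({'mukey': [1, 1, 2, 2],
                   'slope': [0.1, np.nan, 0.3, 0.5]})

# Rows keep their own values; NaNs are filled from the per-mukey mean
filled = df.set_index('mukey').combine_first(df.groupby('mukey').mean())
print(filled)  # the NaN in mukey 1 becomes the group mean 0.1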