Example 1
        def __callback_runImport():
            if self.__input_string.get() != 'Required':

                # Write variables to .var file
                VAR.set_variable("INPUT_FILE", "STRING",
                                 self.__input_string.get())
                VAR.set_variable("WORKSPACE", "STRING",
                                 self.__workspace_string.get())

                # Remove GUI window and destroy it.
                try:
                    master.destroy()
                except:
                    pass
                try:
                    self.root.destroy()
                except:
                    pass

                # RUN APPLICATION
                # Import here so a missing ArcPy install fails inside this try
                # block instead of crashing at module load.
                try:
                    import glacier_utilities.functions.data_pop as data_pop  #@UnresolvedImport
                    print 'STARTING GLIMS ID GENERATION'
                    data_pop.generate_GLIMSIDs(self.__input_string.get(),
                                               self.__workspace_string.get())
                    print 'FINISHED GLIMS ID GENERATION'
                except:
                    tkMessageBox.showwarning(
                        'Warning', 'Could NOT populate RGI ID field.')
            else:
                tkMessageBox.showwarning(
                    'Warning', 'You must select Input and Output files.')
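
The callback above only makes sense inside a Tkinter dialog that owns the two entry
fields. Below is a minimal sketch of that wiring, using the Python 2 module names this
codebase relies on (Tkinter, tkMessageBox); the ImportDialog class name and widget
layout are illustrative assumptions, and the VAR settings object is omitted:

import Tkinter
import tkMessageBox

class ImportDialog(object):
    def __init__(self, master):
        self.root = master
        # Entry fields start on the 'Required' placeholder the callback tests for.
        self.__input_string = Tkinter.StringVar(value='Required')
        self.__workspace_string = Tkinter.StringVar(value='Required')
        Tkinter.Entry(master, textvariable=self.__input_string).pack()
        Tkinter.Entry(master, textvariable=self.__workspace_string).pack()

        def __callback_runImport():
            if self.__input_string.get() != 'Required':
                print 'Would run the import with', self.__input_string.get()
            else:
                tkMessageBox.showwarning(
                    'Warning', 'You must select Input and Output files.')

        Tkinter.Button(master, text='Run', command=__callback_runImport).pack()

root = Tkinter.Tk()
ImportDialog(root)
root.mainloop()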
Example 2
    def __init__(self, input_features, output_location, DEM, variables):
        
        # Create a copy of the input file in the output folder. This will be the
        # actual result file after fields are updated. This is done so no changes
        # are imposed on the original file.
        try:
            output = output_location + '\\' + os.path.basename(input_features)
            input_copy = ARCPY.CopyFeatures_management(input_features, output)
        except:
            print 'Output Glacier file already exists or the output folder is not available.'
            sys.exit()
        
        try: # Start Log file and write it to the output folder
            log_path = os.path.dirname(os.path.abspath(output))
            __Log = LOG.Log(log_path)
        except:
            print 'Log file could not be written to the output folder.'
            sys.exit()
            
        try:  # Get ArcInfo License if it's available
            import arcinfo                          #@UnresolvedImport @UnusedImport
        except:
            __Log.print_line('ArcInfo license NOT available')
            sys.exit()

        try: # Check out Spatial Analyst extension if available.
            if ARCPY.CheckExtension('Spatial') == 'Available':
                ARCPY.CheckOutExtension('Spatial')
                __Log.print_line('Spatial Analyst is available')
        except: 
            __Log.print_line('Spatial Analyst extension not available')
            sys.exit()
        
        try: # Set environment
            workspace = output_location + '\\workspace'
            os.makedirs(workspace) # Create Workspace
            ARCPY.env.workspace = workspace
        except:
            __Log.print_line('WARNING - Workspace folder already exists.')
            sys.exit()
                        
        
        # Read Variables
        centerlines = variables.read_variable('CENTERLINES')
        hypsometry = variables.read_variable('HYPSOMETRY')
        slope = variables.read_variable('SLOPE')
        aspect = variables.read_variable('ASPECT')
        glimsids = variables.read_variable('GLIMSIDS')
        rgiids = variables.read_variable('RGIIDS')    
        rgiversion = variables.read_variable('RGIVERSION')   
        rgiregion = variables.read_variable('RGIREGION')   
        
        # Create output table header information to populate the tables with
        table_output = os.path.dirname(os.path.abspath(output))
        Attribute_header = variables.read_variable('ATTABLE')
        Statistics_header = variables.read_variable('STATABLE')
        max_bin = variables.read_variable('MAXBIN')
        min_bin = variables.read_variable('MINBIN')
        bin_size = variables.read_variable('BINSIZE')
        bin_header = DP.generate_bin_header(max_bin, min_bin, bin_size)
        header = Attribute_header + Statistics_header + bin_header
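
        # DP.generate_bin_header is not shown in this snippet; judging from how
        # max_bin, min_bin, and bin_size feed into the table header, it plausibly
        # emits one column label per elevation bin. A local sketch of that idea
        # (the 'B<elevation>' label format is an assumption, not the project's
        # confirmed scheme):
        def _sketch_generate_bin_header(max_bin, min_bin, bin_size):
            return ['B' + str(b) for b in range(min_bin, max_bin, bin_size)]
        # e.g. _sketch_generate_bin_header(500, 0, 100)
        #      -> ['B0', 'B100', 'B200', 'B300', 'B400']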
        
        # Read other variables
        check_header = variables.read_variable('RGI_SPEC')
        subset_buffer = variables.read_variable('BUFFER')
        scaling = variables.read_variable('SCALING')
        eu_cell_size = variables.read_variable('EU_CELL_SIZE')
        power = variables.read_variable('POWER')
        
        # Other variables
        currently_processing = 1
        total_features = str(int(ARCPY.GetCount_management(input_copy).getOutput(0)))

        # Print run time variables to log file
        __Log.print_line("Input File: " + os.path.basename(input_features))
        __Log.print_line("Input DEM: " + os.path.basename(DEM))
        __Log.print_line('Output Folder: ' + os.path.dirname(os.path.abspath(output)))
        __Log.print_line('Output Glaciers: ' + os.path.basename(output))
        if centerlines == True: __Log.print_line('Output Centerlines: centerlines.shp')
        __Log.print_break()
        __Log.print_line("Runtime Parameters")
        __Log.print_line("     Run Hypsometry: " + str(hypsometry))
        __Log.print_line("     Run Slope: " + str(slope))
        __Log.print_line("     Run Aspect: " + str(aspect))
        __Log.print_line("     Generate GLIMS ID's: " + str(glimsids))
        __Log.print_line("     Generate RGI ID's: " + str(rgiids))
        __Log.print_line("          RGI Version: " + str(rgiversion))
        __Log.print_line("          RGI Region: " + str(rgiregion))
        __Log.print_line("     Maximum Bin Elevation: " + str(max_bin))
        __Log.print_line("     Minimum Bin Elevation: " + str(min_bin))
        __Log.print_line("     Bin Size: " + str(bin_size))
        __Log.print_line("     Subset Buffer: " + str(subset_buffer) + 'x')
        __Log.print_line("     DEM Scaling Factor: " + str(scaling))
        __Log.print_line("     Centerline Euclidean Cell Size: " + str(eu_cell_size))
        __Log.print_line("     Centerline Line Power Factor: " + str(power))
        __Log.print_break() # Break for next section in the log file.
        
        #_______________________________________________________________________
        #*******Input File Cleanup**********************************************  
        
        __Log.print_line('Input Polygon Checks')
        # Check to see if the input file follows RGI table headings.
        formate_error, not_found = DP.check_formate(input_features, check_header)
        if formate_error == False:
            __Log.print_line('    Input header information is consistent with the standard set')
        if formate_error == True:
            __Log.print_line('    ERROR - Input header information is NOT consistent with the standard set')
            __Log.print_line('        Items not found: ' + not_found)
            sys.exit()
        
        # Check geometries. If there are errors, correct them and print the
        # results to the log file
        repair = DP.repair_geometry(input_copy)
        __Log.print_line('    Geometry - ' + repair[0] + ' errors found (Repaired ' + repair[1] + ')')
        
        # Check to see if there are any multi-part polygons in the input file. If
        # so, prompt the user to stop and correct. Print to log file.
        multipart = DP.check_multipart(input_copy, workspace) # Check for multi-part Polygons
        __Log.print_line('    Multi-Part Polygons - ' + multipart + ' found')
        
        # Check to see if the area from the AREA column matches the actual area
        # calculated. If not signal the user to correct. Print results to log.
        area = DP.check_area(input_copy, workspace)
        __Log.print_line('    Area - ' + area[2] + ' difference')
        __Log.print_line('        Original area: ' + area[0] + ' , Final area: ' + area[1], True)
        
        # Check to see if there are any topology errors in the input file. If there 
        # are signal the user to correct before moving forward. Print to log.
        topology = DP.check_topology(input_copy, workspace)
        __Log.print_line('    Topology - ' + topology[0] + ' errors on ' + topology[1] + ' features')
        __Log.print_line('        Rule set - Must Not Overlap (Area)', True)
        
        # Warnings:
        if multipart != str(0): print "WARNING: Multi-part features found."
        # area[2] is a string; cast it before comparing (assumes a plain numeric
        # string), and actually print the warning (the original bare string
        # literal was a no-op).
        if abs(float(area[2])) > 1: print 'WARNING: The AREA difference exceeds the threshold.'
        if topology[0] != str(0): raw_input(str(topology[0]) + " WARNINGS: Topology errors found.")
       
        __Log.print_break() # Break for next section in the log file.
        
        #_______________________________________________________________________
        #*******Prepare Input file*********************************************
        
        if glimsids == True: # Generate GLIMS id's if applicable
            __Log.print_line('Generating GLIMS IDs')
            glims_ids = POP.generate_GLIMSIDs(input_copy, workspace) # Copy to Output
            __Log.print_line('   GLIMS IDs - ' + glims_ids + ' GLIMS IDs Generated')
            
        if rgiids == True: # Generate RGI id's if applicable
            __Log.print_line('Generating RGI IDs')
            rgi_ids = POP.generate_RGIIDs(input_copy, rgiversion, rgiregion) # Copy to Output
            __Log.print_line('   RGI IDs - ' + rgi_ids + ' RGI IDs Generated')
        
        __Log.print_break() # Break for next section in the log file.
        
        #_______________________________________________________________________
        #*******Calculate Statistics********************************************

        # Generate center lines output file to append centerlines
        if centerlines == True:
            output_centerlines = ARCPY.CreateFeatureclass_management(output_location, 'centerlines.shp', 'POLYLINE', '', '', 'ENABLED', input_features)
            ARCPY.AddField_management(output_centerlines, 'GLIMSID', 'TEXT', '', '', '25')
            ARCPY.AddField_management(output_centerlines, 'LENGTH', 'FLOAT')
            ARCPY.AddField_management(output_centerlines, 'SLOPE', 'FLOAT')
            ARCPY.DeleteField_management(output_centerlines, 'Id')

            
        # Create an instance of hypsometry, slope and aspect table if applicable
        if hypsometry == True: hypso_csv = CSV.CSV(table_output, 'Stats_Hypsometry', header) 
        if slope == True: slope_csv = CSV.CSV(table_output, 'Stats_Slope', header) 
        if aspect == True: aspect_csv = CSV.CSV(table_output, 'Stats_Aspect', header) 
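
        # CSV.CSV is used only through its constructor and print_line; a minimal
        # stand-in consistent with that call pattern (the file naming, '.csv'
        # extension, and comma delimiter are assumptions):
        class _SketchCSV(object):
            def __init__(self, folder, name, header):
                self._path = os.path.join(folder, name + '.csv')
                with open(self._path, 'w') as f:  # start fresh with a header row
                    f.write(','.join([str(v) for v in header]) + '\n')
            def print_line(self, values):
                with open(self._path, 'a') as f:  # append one stringified row
                    f.write(','.join([str(v) for v in values]) + '\n')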
        

        if centerlines == True or hypsometry == True or slope == True or aspect == True:
            rows = ARCPY.SearchCursor(input_copy) # Open shapefile to read features
            for row in rows: # For each feature in the shapefile
                
                # Get Attributes information such as GLIMS ID, Lat, Lon, area... etc.
                attribute_info, attribute_error = DC.get_attributes(row, Attribute_header)
                print ''
                print ''
                print 'Currently running: ' + str(currently_processing) + ' of ' + str(total_features)
                print 'Feature ' + str(attribute_info[0]) + ' ' + str(attribute_info[1])
                print '    Glacier Type: '  + str(attribute_info[2])
                print '    Area: ' + str(attribute_info[7]) + ' Sqr.'
                print '    Centroid (DD): ' + str(attribute_info[5]) + ', ' + str(attribute_info[6])
                if attribute_error == True: # If function failed
                    __Log.print_line(str(row.GLIMSID) + ' - ERROR - Could not read attributes')
                                
                                
                # Subset the DEM based on a single buffered glacier outline
                subset, subset_error = DC.subset(row, DEM, workspace, subset_buffer)
                if subset_error == True: # If function failed
                    __Log.print_line(str(row.GLIMSID) + ' - ERROR - Could not subset feature')
                
                
                # Get basic statistics such as minimum elevation, mean... etc.
                if hypsometry == True or slope == True or aspect == True:
                    statistics_info, statistics_error = DC.get_statistics(row, subset, workspace, scaling) 
                    print '    Elevation: ' + str(statistics_info[0]) + ' Min. / ' + str(statistics_info[1]) + ' Max.'
                    print '    Area Weighted Avg. Elev. = ' + str(statistics_info[2])
                    if statistics_error == True: # If function failed
                        __Log.print_line(str(row.GLIMSID) + ' - ERROR - Could not generate basic statistics')
                
                
                if hypsometry == True or slope == True or aspect == True:
                    print '    Running Hypsometry for Bin Mask & Table Statistics'
                    hypsometry_info, hypso_error, bin_mask = DC.get_hypsometry(row, subset, workspace, scaling, max_bin, min_bin, bin_size)
                    if hypsometry == True and hypso_error == False:
                        hypso_csv.print_line(attribute_info + statistics_info + hypsometry_info) # Print hypsometry data.
                    if hypso_error == True:
                        __Log.print_line(str(row.GLIMSID) + ' - ERROR - Could not generate hypsometry information')
                
                
                if centerlines == True or slope == True or aspect == True:
                    print '    Running Center Line'
                    centerline, center_length, center_angle, centerline_error = DC.get_centerline(row, subset, workspace, power, eu_cell_size)
                    if centerline_error == False: 
                        print '    Center Line Length: ' + str(center_length) + ' & Slope Angle: ' + str(center_angle)
                        if centerlines == True:
                            ARCPY.Append_management(centerline, output_centerlines)
                    if centerline_error == True:
                        __Log.print_line(str(row.GLIMSID) + ' - ERROR - Could not generate center line')
                
                
                    if slope == True:
                        print '    Running Slope Table Statistics'
                        slope_info, slope_error = DC.get_slope(centerline, bin_mask, bin_header, workspace, scaling, bin_size)
                        slope_csv.print_line(attribute_info + statistics_info + slope_info)
                        if slope_error == True:
                            __Log.print_line(str(row.GLIMSID) + ' - ERROR - Could not calculate binned slope data')
                            
                
                    if aspect == True:
                        print '    Running Aspect Table Statistics'
                        aspect_info, aspect_error = DC.get_aspect(centerline, bin_mask, bin_header, workspace, scaling)        
                        aspect_csv.print_line(attribute_info + statistics_info + aspect_info)
                        if aspect_error == True:
                            __Log.print_line(str(row.GLIMSID) + ' - ERROR - Could not calculate binned aspect data')
                             
                # Clean Up Workspace
                try: ARCPY.Delete_management(subset)
                except: pass
                try: ARCPY.Delete_management(centerline)
                except: pass

                currently_processing += 1
            del row, rows  # Delete cursors and remove locks
            
        try: # Script Complete. Try and delete workspace 
            ARCPY.Delete_management(workspace)
            __Log.print_break()
            __Log.print_line('Processing Complete')
        except: 
            __Log.print_break()
            __Log.print_line('Workspace Could not be deleted')
            __Log.print_line('Processing Complete')
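
Everything happens inside __init__, so the class behaves as a run-once job object.
Invocation would look roughly like the sketch below; the GlacierStats class name is
hypothetical (the snippet omits the class statement), and StubVariables only mirrors
the read_variable interface used throughout:

class StubVariables(object):
    # Stand-in for the project's variable reader: serves values from a dict.
    def __init__(self, values):
        self._values = values
    def read_variable(self, key):
        return self._values[key]

variables = StubVariables({
    'CENTERLINES': True, 'HYPSOMETRY': True, 'SLOPE': False, 'ASPECT': False,
    'GLIMSIDS': True, 'RGIIDS': False, 'RGIVERSION': '50', 'RGIREGION': '01',
    # ... plus ATTABLE, STATABLE, MAXBIN, MINBIN, BINSIZE, RGI_SPEC, BUFFER,
    # SCALING, EU_CELL_SIZE and POWER, with project-appropriate values ...
})
GlacierStats('glaciers.shp', 'C:\\output', 'dem.tif', variables)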
Example 3
    def __init__(self, variables):

        # Read variables
        input_folder = variables.read_variable('INPUT_FOLDER')
        output_folder = variables.read_variable('OUTPUT_FOLDER')
        check_header = variables.read_variable('RGI_SPEC')
        version = variables.read_variable('RGIVERSION')

        # Setup working environment
        environment = ENV.setup_arcgis(output_folder)
        workspace = environment.workspace
        __Log = environment.log

        # Print run time parameters
        __Log.print_line("Input File: " + input_folder)
        __Log.print_line('Output Folder: ' + output_folder)
        __Log.print_line('RGI Header to Match (Name Only): ')
        __Log.print_line('   ' + str(check_header))
        __Log.print_break()

        tracking_list = [[
            "File Name", "Tot.", "GM", "M-P", "Area km2", "% Diff.",
            "Topology Errors", "Date Error", "Format Error", "Case Error",
            "Sliver Error"
        ]]  # A list to hold tracking information

        # For each feature class within the input folder...
        for shapefile in glob.glob(os.path.join(input_folder, '*.shp')):
            tracking_info = [
            ]  # A list to hold individual tracking information

            __Log.print_line(os.path.basename(shapefile))
            tracking_info.append(os.path.basename(shapefile)[0:12])
            tracking_info.append(str(ARCPY.GetCount_management(shapefile)))

            # Copy feature to workspace (output folder)
            working_shapefile = output_folder + '\\' + os.path.basename(
                shapefile)
            ARCPY.CopyFeatures_management(shapefile, working_shapefile)

            # Check to see if the input file follows RGI table headings.
            formate_error, not_found = DP.check_formate(
                working_shapefile, check_header)
            if formate_error == False:
                __Log.print_line(
                    '    Input header information is consistent with the standard set'
                )
            if formate_error == True:
                __Log.print_line(
                    '    ERROR - Input header information is NOT consistent with the standard set'
                )
                __Log.print_line('        Items not found: ' + not_found)

            # Check geometries. If there are errors, correct them and print the
            # results to the log file
            repair = DP.repair_geometry(working_shapefile)
            __Log.print_line('    Geometry - ' + repair[0] +
                             ' errors found (Repaired ' +
                             str(int(repair[0]) - int(repair[1])) + ')')
            tracking_info.append(str(repair[0]))

            # Check to see if there are any multi-part polygons in the input file. If
            # so, prompt the user to stop and correct. Print to log file.
            multipart = DP.check_multipart(
                working_shapefile, workspace)  # Check for multi-part Polygons
            __Log.print_line('    Multi-Part Polygons - ' + multipart +
                             ' found')
            tracking_info.append(str(multipart))

            # Check to see if the area from the AREA column matches the actual area
            # calculated. If not signal the user to correct. Print results to log.
            area = DP.check_area(working_shapefile, workspace)
            __Log.print_line('    Area - ' + area[2] + ' difference')
            __Log.print_line(
                '        Original area: ' + area[0] + ' , Final area: ' +
                area[1], True)
            tracking_info.append(area[0])
            tracking_info.append(
                str(
                    round(((float(area[0]) / float(area[1])) * 100.0) - 100.0,
                          1)))
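
            # The '% Diff.' figure is ((original / final) * 100) - 100: how far
            # the stored AREA column deviates from the recomputed area, rounded
            # to one decimal place. Worked example: original 102.0 vs final
            # 100.0 gives round(((102.0 / 100.0) * 100.0) - 100.0, 1) == 2.0.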

            # Check to see if there are any topology errors in the input file. If there
            # are signal the user to correct before moving forward. Print to log.
            topology = DP.check_topology(working_shapefile, workspace)
            __Log.print_line('    Topology - ' + topology[0] + ' errors on ' +
                             topology[1] + ' features')
            __Log.print_line('        Rule set - Must Not Overlap (Area)',
                             True)
            tracking_info.append(str(topology[0]))

            # Check that fixed-length columns such as RGIID, GLIMSID, and
            # GLACTYPE are the width the specification expects.
            __Log.print_line('    Field Length Check:')
            RGI_length = CHECK.check_attribute_length(working_shapefile,
                                                      'RGIID')
            GLIMS_length = CHECK.check_attribute_length(
                working_shapefile, 'GLIMSID')
            GLACTYPE_Length = CHECK.check_attribute_length(
                working_shapefile, 'GLACTYPE')
            BGNDATE_Length = CHECK.check_attribute_length(
                working_shapefile, 'BGNDATE')
            ENDDATE_Length = CHECK.check_attribute_length(
                working_shapefile, 'ENDDATE')
            __Log.print_line(
                '        RGID Expected: 14 - Actual Length(s): ' +
                ','.join(RGI_length), True)
            __Log.print_line(
                '        GLID Expected: 14 - Actual Length(s): ' +
                ','.join(GLIMS_length), True)
            __Log.print_line(
                '        GLAC Expected:  4 - Actual Length(s): ' +
                ','.join(GLACTYPE_Length), True)
            __Log.print_line(
                '        BGND Expected:  8 - Actual Length(s): ' +
                ','.join(BGNDATE_Length), True)
            __Log.print_line(
                '        ENDD Expected:  8 - Actual Length(s): ' +
                ','.join(ENDDATE_Length), True)

            # Check that the RGIFLAG and GLACTYPE columns contain the expected values
            RGIFLAG = CHECK.check_attributes(working_shapefile, 'RGIFLAG')
            GLACTYPE = CHECK.check_attributes(working_shapefile, 'GLACTYPE')
            __Log.print_line('    RGIFLAG Entries: ' + ','.join(RGIFLAG))
            __Log.print_line('    GLACTYPE Entries: ' + ','.join(GLACTYPE))

            # Check that no-data date values use 9's rather than 0's
            nodata_bgndate = CHECK.check_nodata_data(working_shapefile,
                                                     'BGNDATE')
            __Log.print_line('    Start Date Format 9 and not 0: ' +
                             str(nodata_bgndate))
            nodata_enddate = CHECK.check_nodata_data(working_shapefile,
                                                     'ENDDATE')
            __Log.print_line('    End Date Format 9 and not 0: ' +
                             str(nodata_enddate))
            tracking_info.append(
                str(
                    sum(nodata_bgndate.values()) +
                    sum(nodata_enddate.values())))
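
            # CHECK.check_nodata_data returns a dict whose values are summed
            # into one tracking count, so it presumably maps each offending
            # no-data value (0s used where the convention calls for 9s, e.g.
            # '00000000' instead of '99999999') to its occurrence count. That
            # contract is inferred from this call site, not shown in the snippet.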

            # Check the date format. Dates should be 'YYYYMMDD'
            format_error = CHECK.check_date_format(working_shapefile,
                                                   'BGNDATE', 'ENDDATE')
            __Log.print_line('    Date Format Errors: ' + str(format_error))
            tracking_info.append(str(format_error))

            # Check for case errors in fields. Should be first letter upper case,
            # lower case everything else.
            case_errors = CHECK.check_is_uppercase(working_shapefile,
                                                   'RGIFLAG')
            __Log.print_line('    RGIFLAG Case Errors: ' + str(case_errors))
            tracking_info.append(str(case_errors))

            # Check the number of glaciers that do not meet the threshold
            threshold_error = CHECK.check_area(working_shapefile, 0.001,
                                               'AREA')
            __Log.print_line('    Threshold (0.001 km2) Errors: ' +
                             str(threshold_error))
            tracking_info.append(str(threshold_error))

            # Regenerate basic stats.
            POP.auto_generate_RGIIDs(working_shapefile, version)
            __Log.print_line('    Recalculated RGI IDs')

            POP.generate_GLIMSIDs(working_shapefile, workspace)
            __Log.print_line('    Recalculated GLIMS IDs')

            POP.generate_centroid(working_shapefile)
            __Log.print_line('    Recalculated CENLAT and CENLON')

            __Log.print_break()  # Break for next section in the log file.
            tracking_list.append(tracking_info)

        # Print Tracking Info Lists
        __Log.print_line('Summary')
        __Log.print_line('-' * 80, True)
        __Log.print_line(
            '<table align="center" width="700em" border="1" cellpadding="5em">',
            True)
        for tracking in tracking_list:
            __Log.print_line(
                '<tr><td>' + ('</td><td>'.join(tracking)) + '</td></tr>', True)
        __Log.print_line('</table>', True)

        # Script Complete. Try and delete workspace
        removed = environment.remove_workspace()
        if removed == True:
            __Log.print_break()
            __Log.print_line('Processing Complete')
        else:
            __Log.print_break()
            __Log.print_line('Workspace Could not be deleted')
            __Log.print_line('Processing Complete')
Example 4
 or intended use. The creators and distributors of the application shall not
 be held liable for improper or incorrect use of the utility described and/
 or contained herein.
****************************************************************************"""
import sys, os
sys.path.append(os.path.dirname(os.path.dirname(__file__)))

import glacier_utilities.functions.data_pop as DP
import arcpy as ARCPY                                        #@UnresolvedImport

# Read parameter values from ArcGIS tool input
# 1 - The file RGI ID values will be added to. File must have a 'glimsid' field.
# 2 - A workspace to re-project data to. 

try: glims_file = ARCPY.GetParameterAsText(0)
except: ARCPY.AddError('GLIMS Input File Error')

try: glims_workspace = ARCPY.GetParameterAsText(1)
except: ARCPY.AddError('GLIMS Workspace Error')

# Run the Generate GLIMS ID function
try:
    DP.generate_GLIMSIDs(glims_file, glims_workspace)
except:
    ARCPY.AddError('Errors generated during function execution')

# Driver - Currently Does nothing
def driver():
    pass
if __name__ == '__main__':
    driver()
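
GetParameterAsText only yields values when the script runs as an ArcGIS script tool;
from a plain Python prompt both parameters come back as empty strings. The empty
driver() stub could grow a command-line fallback along these lines (a sketch, not
part of the original tool):

def driver():
    # Fall back to positional command-line arguments when the script is run
    # outside an ArcGIS toolbox: <glims_file> <workspace>.
    if len(sys.argv) >= 3:
        DP.generate_GLIMSIDs(sys.argv[1], sys.argv[2])

if __name__ == '__main__':
    driver()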